From aa5b95e28d430f4a28ae6b2bc558977bdc6322ac Mon Sep 17 00:00:00 2001 From: Anton Karpets Date: Tue, 27 Feb 2024 14:41:25 +0200 Subject: [PATCH 001/172] =?UTF-8?q?=F0=9F=8F=A5Source=20Jira:=20add=20new?= =?UTF-8?q?=20fields=20(#35656)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../source-jira/acceptance-test-config.yml | 1 + .../integration_tests/expected_records.jsonl | 30 +- .../connectors/source-jira/metadata.yaml | 2 +- .../connectors/source-jira/pyproject.toml | 2 +- .../source_jira/schemas/board_issues.json | 4 + .../source_jira/schemas/filter_sharing.json | 3 + .../source_jira/schemas/filters.json | 7 + .../source_jira/schemas/issues.json | 682 +++++++++++++++++- .../schemas/permission_schemes.json | 3 + .../source_jira/schemas/sprint_issues.json | 44 ++ .../schemas/users_groups_detailed.json | 3 + .../source_jira/schemas/workflows.json | 3 + .../unit_tests/test_date_time_transformer.py | 17 +- docs/integrations/sources/jira.md | 117 +-- 14 files changed, 838 insertions(+), 80 deletions(-) diff --git a/airbyte-integrations/connectors/source-jira/acceptance-test-config.yml b/airbyte-integrations/connectors/source-jira/acceptance-test-config.yml index 7790ca76c93a..bcce37ac8c3f 100644 --- a/airbyte-integrations/connectors/source-jira/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-jira/acceptance-test-config.yml @@ -32,6 +32,7 @@ acceptance_tests: - name: fields/updated bypass_reason: "Unstable data" timeout_seconds: 2400 + fail_on_extra_columns: false incremental: tests: - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-jira/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-jira/integration_tests/expected_records.jsonl index 62e84cc83433..dbe962a57d5d 100644 --- a/airbyte-integrations/connectors/source-jira/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-jira/integration_tests/expected_records.jsonl @@ -6,23 +6,23 @@ {"stream": "boards", "data": {"id": 1, "self": "https://airbyteio.atlassian.net/rest/agile/1.0/board/1", "name": "IT board", "type": "scrum", "location": {"projectId": 10000, "displayName": "integration-tests (IT)", "projectName": "integration-tests", "projectKey": "IT", "projectTypeKey": "software", "avatarURI": "https://airbyteio.atlassian.net/rest/api/2/universal_avatar/view/type/project/avatar/10424?size=small", "name": "integration-tests (IT)"}, "projectId": "10000", "projectKey": "IT"}, "emitted_at": 1697453213161} {"stream": "boards", "data": {"id": 17, "self": "https://airbyteio.atlassian.net/rest/agile/1.0/board/17", "name": "TESTKEY13 board", "type": "scrum", "location": {"projectId": 10016, "displayName": "Test project 13 (TESTKEY13)", "projectName": "Test project 13", "projectKey": "TESTKEY13", "projectTypeKey": "software", "avatarURI": "https://airbyteio.atlassian.net/rest/api/2/universal_avatar/view/type/project/avatar/10425?size=small", "name": "Test project 13 (TESTKEY13)"}, "projectId": "10016", "projectKey": "TESTKEY13"}, "emitted_at": 1697453213162} {"stream": "boards", "data": {"id": 58, "self": "https://airbyteio.atlassian.net/rest/agile/1.0/board/58", "name": "TTMP2 board", "type": "simple", "location": {"projectId": 10064, "displayName": "Test Team Managed Project 2 (TTMP2)", "projectName": "Test Team Managed Project 2", "projectKey": "TTMP2", "projectTypeKey": "software", "avatarURI": 
"https://airbyteio.atlassian.net/rest/api/2/universal_avatar/view/type/project/avatar/10412?size=small", "name": "Test Team Managed Project 2 (TTMP2)"}, "projectId": "10064", "projectKey": "TTMP2"}, "emitted_at": 1697453213464} -{"stream": "board_issues", "data": {"expand": "operations,versionedRepresentations,editmeta,changelog,renderedFields", "id": "10012", "self": "https://airbyteio.atlassian.net/rest/agile/1.0/issue/10012", "key": "IT-6", "fields": {"updated": "2023-10-12T13:30:02.307000-07:00", "created": "2021-03-11T06:14:18.085-0800"}, "boardId": 1, "created": "2021-03-11T06:14:18.085000-08:00", "updated": "2023-10-12T13:30:02.307000-07:00"}, "emitted_at": 1697453214841} -{"stream": "board_issues", "data": {"expand": "operations,versionedRepresentations,editmeta,changelog,renderedFields", "id": "10019", "self": "https://airbyteio.atlassian.net/rest/agile/1.0/issue/10019", "key": "IT-9", "fields": {"updated": "2023-04-05T04:57:18.118000-07:00", "created": "2021-03-11T06:14:24.791-0800"}, "boardId": 1, "created": "2021-03-11T06:14:24.791000-08:00", "updated": "2023-04-05T04:57:18.118000-07:00"}, "emitted_at": 1697453214846} -{"stream": "board_issues", "data": {"expand": "operations,versionedRepresentations,editmeta,changelog,renderedFields", "id": "10000", "self": "https://airbyteio.atlassian.net/rest/agile/1.0/issue/10000", "key": "IT-1", "fields": {"updated": "2022-05-17T04:26:28.885000-07:00", "created": "2020-12-07T06:12:17.863-0800"}, "boardId": 1, "created": "2020-12-07T06:12:17.863000-08:00", "updated": "2022-05-17T04:26:28.885000-07:00"}, "emitted_at": 1697453214847} +{"stream": "board_issues", "data": {"expand": "operations,versionedRepresentations,editmeta,changelog,renderedFields", "id": "10012", "self": "https://airbyteio.atlassian.net/rest/agile/1.0/issue/10012", "key": "IT-6", "fields": {"updated": "2023-10-12T13:30:02.307000-07:00", "created": "2021-03-11T06:14:18.085000-08:00"}, "boardId": 1, "created": "2021-03-11T06:14:18.085000-08:00", "updated": "2023-10-12T13:30:02.307000-07:00"}, "emitted_at": 1697453214841} +{"stream": "board_issues", "data": {"expand": "operations,versionedRepresentations,editmeta,changelog,renderedFields", "id": "10019", "self": "https://airbyteio.atlassian.net/rest/agile/1.0/issue/10019", "key": "IT-9", "fields": {"updated": "2023-04-05T04:57:18.118000-07:00", "created": "2021-03-11T06:14:24.791000-08:00"}, "boardId": 1, "created": "2021-03-11T06:14:24.791000-08:00", "updated": "2023-04-05T04:57:18.118000-07:00"}, "emitted_at": 1697453214846} +{"stream": "board_issues", "data": {"expand": "operations,versionedRepresentations,editmeta,changelog,renderedFields", "id": "10000", "self": "https://airbyteio.atlassian.net/rest/agile/1.0/issue/10000", "key": "IT-1", "fields": {"updated": "2022-05-17T04:26:28.885000-07:00", "created": "2020-12-07T06:12:17.863000-08:00"}, "boardId": 1, "created": "2020-12-07T06:12:17.863000-08:00", "updated": "2022-05-17T04:26:28.885000-07:00"}, "emitted_at": 1697453214847} {"stream": "dashboards", "data": {"id": "10000", "isFavourite": false, "name": "Default dashboard", "popularity": 0, "self": "https://airbyteio.atlassian.net/rest/api/3/dashboard/10000", "sharePermissions": [{"id": 10000, "type": "global"}], "editPermissions": [], "view": "/jira/dashboards/10000", "isWritable": true, "systemDashboard": true}, "emitted_at": 1697453217135} {"stream": "dashboards", "data": {"description": "A dashboard to help auditors identify sample of issues to check.", "id": "10002", "isFavourite": true, "name": "Test dashboard 1", 
"owner": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "displayName": "integration test", "active": true, "accountId": "5fc9e78d2730d800760becc4", "avatarUrls": {"16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}}, "popularity": 1, "rank": 0, "self": "https://airbyteio.atlassian.net/rest/api/3/dashboard/10002", "sharePermissions": [], "editPermissions": [], "view": "/jira/dashboards/10002", "isWritable": true, "systemDashboard": false}, "emitted_at": 1697453217136} {"stream": "dashboards", "data": {"description": "A dashboard to help auditors identify sample of issues to check.", "id": "10011", "isFavourite": true, "name": "Test dashboard 10", "owner": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "displayName": "integration test", "active": true, "accountId": "5fc9e78d2730d800760becc4", "avatarUrls": {"16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}}, "popularity": 1, "rank": 9, "self": "https://airbyteio.atlassian.net/rest/api/3/dashboard/10011", "sharePermissions": [], "editPermissions": [], "view": "/jira/dashboards/10011", "isWritable": true, "systemDashboard": false}, "emitted_at": 1697453217137} -{"stream": "filters", "data": {"expand": "description,owner,jql,viewUrl,searchUrl,favourite,favouritedCount,sharePermissions,editPermissions,isWritable,subscriptions", "self": "https://airbyteio.atlassian.net/rest/api/3/filter/10003", "id": "10003", "name": "Filter for EX board", "owner": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": 
"https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true}, "jql": "project = EX ORDER BY Rank ASC", "viewUrl": "https://airbyteio.atlassian.net/issues/?filter=10003", "searchUrl": "https://airbyteio.atlassian.net/rest/api/3/search?jql=project+%3D+EX+ORDER+BY+Rank+ASC", "favourite": false, "favouritedCount": 0, "sharePermissions": [{"id": 10004, "type": "project", "project": {"self": "https://airbyteio.atlassian.net/rest/api/3/project/10003", "id": "10003", "key": "EX", "assigneeType": "PROJECT_LEAD", "name": "Example", "roles": {}, "avatarUrls": {"48x48": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10406", "24x24": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10406?size=small", "16x16": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10406?size=xsmall", "32x32": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10406?size=medium"}, "projectTypeKey": "software", "simplified": false, "style": "classic", "properties": {}}}], "isWritable": true, "subscriptions": []}, "emitted_at": 1697453218286} -{"stream": "filters", "data": {"expand": "description,owner,jql,viewUrl,searchUrl,favourite,favouritedCount,sharePermissions,editPermissions,isWritable,subscriptions", "self": "https://airbyteio.atlassian.net/rest/api/3/filter/10000", "id": "10000", "name": "Filter for IT board", "owner": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true}, "jql": "project = IT ORDER BY Rank ASC", "viewUrl": "https://airbyteio.atlassian.net/issues/?filter=10000", "searchUrl": "https://airbyteio.atlassian.net/rest/api/3/search?jql=project+%3D+IT+ORDER+BY+Rank+ASC", "favourite": false, "favouritedCount": 0, "sharePermissions": [{"id": 10058, "type": "group", "group": {"name": "Test group 2", "groupId": "5ddb26f1-2d31-414a-ac34-b2d6de38805d", "self": "https://airbyteio.atlassian.net/rest/api/3/group?groupId=5ddb26f1-2d31-414a-ac34-b2d6de38805d"}}, {"id": 10059, "type": "group", "group": {"name": "Test group 0", "groupId": "ee8d15d1-6462-406a-b0a6-8065b7e4cdd7", "self": "https://airbyteio.atlassian.net/rest/api/3/group?groupId=ee8d15d1-6462-406a-b0a6-8065b7e4cdd7"}}, {"id": 10057, "type": "project", "project": {"self": "https://airbyteio.atlassian.net/rest/api/3/project/10000", "id": "10000", "key": "IT", "assigneeType": "PROJECT_LEAD", "name": "integration-tests", "roles": {}, "avatarUrls": {"48x48": 
"https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10424", "24x24": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10424?size=small", "16x16": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10424?size=xsmall", "32x32": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10424?size=medium"}, "projectCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/projectCategory/10004", "id": "10004", "name": "Test category 2", "description": "Test Project Category 2"}, "projectTypeKey": "software", "simplified": false, "style": "classic", "properties": {}}}], "isWritable": true, "subscriptions": []}, "emitted_at": 1697453218287} -{"stream": "filters", "data": {"expand": "description,owner,jql,viewUrl,searchUrl,favourite,favouritedCount,sharePermissions,editPermissions,isWritable,subscriptions", "self": "https://airbyteio.atlassian.net/rest/api/3/filter/10001", "id": "10001", "name": "Filter for P2 board", "owner": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true}, "jql": "project = P2 ORDER BY Rank ASC", "viewUrl": "https://airbyteio.atlassian.net/issues/?filter=10001", "searchUrl": "https://airbyteio.atlassian.net/rest/api/3/search?jql=project+%3D+P2+ORDER+BY+Rank+ASC", "favourite": false, "favouritedCount": 0, "sharePermissions": [{"id": 10063, "type": "group", "group": {"name": "Test group 0", "groupId": "ee8d15d1-6462-406a-b0a6-8065b7e4cdd7", "self": "https://airbyteio.atlassian.net/rest/api/3/group?groupId=ee8d15d1-6462-406a-b0a6-8065b7e4cdd7"}}, {"id": 10064, "type": "group", "group": {"name": "Test group 1", "groupId": "bda1faf1-1a1a-42d1-82e4-a428c8b8f67c", "self": "https://airbyteio.atlassian.net/rest/api/3/group?groupId=bda1faf1-1a1a-42d1-82e4-a428c8b8f67c"}}, {"id": 10062, "type": "project", "project": {"self": "https://airbyteio.atlassian.net/rest/api/3/project/10001", "id": "10001", "key": "P2", "assigneeType": "PROJECT_LEAD", "name": "project-2", "roles": {}, "avatarUrls": {"48x48": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10411", "24x24": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10411?size=small", "16x16": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10411?size=xsmall", "32x32": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10411?size=medium"}, "projectTypeKey": "software", "simplified": false, "style": "classic", "properties": {}}}], "isWritable": true, "subscriptions": []}, "emitted_at": 1697453218288} 
+{"stream": "filters", "data": {"expand": "description,owner,jql,viewUrl,searchUrl,favourite,favouritedCount,sharePermissions,editPermissions,isWritable,approximateLastUsed,subscriptions", "self": "https://airbyteio.atlassian.net/rest/api/3/filter/10003", "id": "10003", "name": "Filter for EX board", "owner": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true}, "jql": "project = EX ORDER BY Rank ASC", "viewUrl": "https://airbyteio.atlassian.net/issues/?filter=10003", "searchUrl": "https://airbyteio.atlassian.net/rest/api/3/search?jql=project%20%3D%20EX%20ORDER%20BY%20Rank%20ASC", "favourite": false, "favouritedCount": 0, "sharePermissions": [{"id": 10004, "type": "project", "project": {"self": "https://airbyteio.atlassian.net/rest/api/3/project/10003", "id": "10003", "key": "EX", "assigneeType": "PROJECT_LEAD", "name": "Example", "roles": {}, "avatarUrls": {"48x48": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10406", "24x24": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10406?size=small", "16x16": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10406?size=xsmall", "32x32": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10406?size=medium"}, "projectTypeKey": "software", "simplified": false, "style": "classic", "properties": {}}}], "isWritable": true, "approximateLastUsed": null, "subscriptions": []}, "emitted_at": 1709025224852} +{"stream": "filters", "data": {"expand": "description,owner,jql,viewUrl,searchUrl,favourite,favouritedCount,sharePermissions,editPermissions,isWritable,approximateLastUsed,subscriptions", "self": "https://airbyteio.atlassian.net/rest/api/3/filter/10000", "id": "10000", "name": "Filter for IT board", "owner": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": 
"https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true}, "jql": "project = IT ORDER BY Rank ASC", "viewUrl": "https://airbyteio.atlassian.net/issues/?filter=10000", "searchUrl": "https://airbyteio.atlassian.net/rest/api/3/search?jql=project%20%3D%20IT%20ORDER%20BY%20Rank%20ASC", "favourite": false, "favouritedCount": 0, "sharePermissions": [{"id": 10058, "type": "group", "group": {"name": "Test group 2", "groupId": "5ddb26f1-2d31-414a-ac34-b2d6de38805d", "self": "https://airbyteio.atlassian.net/rest/api/3/group?groupId=5ddb26f1-2d31-414a-ac34-b2d6de38805d"}}, {"id": 10059, "type": "group", "group": {"name": "Test group 0", "groupId": "ee8d15d1-6462-406a-b0a6-8065b7e4cdd7", "self": "https://airbyteio.atlassian.net/rest/api/3/group?groupId=ee8d15d1-6462-406a-b0a6-8065b7e4cdd7"}}, {"id": 10057, "type": "project", "project": {"self": "https://airbyteio.atlassian.net/rest/api/3/project/10000", "id": "10000", "key": "IT", "assigneeType": "PROJECT_LEAD", "name": "integration-tests", "roles": {}, "avatarUrls": {"48x48": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10424", "24x24": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10424?size=small", "16x16": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10424?size=xsmall", "32x32": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10424?size=medium"}, "projectCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/projectCategory/10004", "id": "10004", "name": "Test category 2", "description": "Test Project Category 2"}, "projectTypeKey": "software", "simplified": false, "style": "classic", "properties": {}}}], "isWritable": true, "approximateLastUsed": null, "subscriptions": []}, "emitted_at": 1709025224854} +{"stream": "filters", "data": {"expand": "description,owner,jql,viewUrl,searchUrl,favourite,favouritedCount,sharePermissions,editPermissions,isWritable,approximateLastUsed,subscriptions", "self": "https://airbyteio.atlassian.net/rest/api/3/filter/10001", "id": "10001", "name": "Filter for P2 board", "owner": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true}, "jql": "project = P2 ORDER BY Rank ASC", "viewUrl": "https://airbyteio.atlassian.net/issues/?filter=10001", "searchUrl": "https://airbyteio.atlassian.net/rest/api/3/search?jql=project%20%3D%20P2%20ORDER%20BY%20Rank%20ASC", "favourite": false, "favouritedCount": 0, "sharePermissions": [{"id": 10063, "type": 
"group", "group": {"name": "Test group 0", "groupId": "ee8d15d1-6462-406a-b0a6-8065b7e4cdd7", "self": "https://airbyteio.atlassian.net/rest/api/3/group?groupId=ee8d15d1-6462-406a-b0a6-8065b7e4cdd7"}}, {"id": 10064, "type": "group", "group": {"name": "Test group 1", "groupId": "bda1faf1-1a1a-42d1-82e4-a428c8b8f67c", "self": "https://airbyteio.atlassian.net/rest/api/3/group?groupId=bda1faf1-1a1a-42d1-82e4-a428c8b8f67c"}}, {"id": 10062, "type": "project", "project": {"self": "https://airbyteio.atlassian.net/rest/api/3/project/10001", "id": "10001", "key": "P2", "assigneeType": "PROJECT_LEAD", "name": "project-2", "roles": {}, "avatarUrls": {"48x48": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10411", "24x24": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10411?size=small", "16x16": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10411?size=xsmall", "32x32": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10411?size=medium"}, "projectTypeKey": "software", "simplified": false, "style": "classic", "properties": {}}}], "isWritable": true, "approximateLastUsed": null, "subscriptions": []}, "emitted_at": 1709025224856} {"stream": "filter_sharing", "data": {"id": 10004, "type": "project", "project": {"self": "https://airbyteio.atlassian.net/rest/api/3/project/10003", "id": "10003", "key": "EX", "assigneeType": "PROJECT_LEAD", "name": "Example", "roles": {}, "avatarUrls": {"48x48": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10406", "24x24": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10406?size=small", "16x16": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10406?size=xsmall", "32x32": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10406?size=medium"}, "projectTypeKey": "software", "simplified": false, "style": "classic", "properties": {}}, "filterId": "10003"}, "emitted_at": 1697453219638} {"stream": "filter_sharing", "data": {"id": 10058, "type": "group", "group": {"name": "Test group 2", "groupId": "5ddb26f1-2d31-414a-ac34-b2d6de38805d", "self": "https://airbyteio.atlassian.net/rest/api/3/group?groupId=5ddb26f1-2d31-414a-ac34-b2d6de38805d"}, "filterId": "10000"}, "emitted_at": 1697453219940} {"stream": "filter_sharing", "data": {"id": 10059, "type": "group", "group": {"name": "Test group 0", "groupId": "ee8d15d1-6462-406a-b0a6-8065b7e4cdd7", "self": "https://airbyteio.atlassian.net/rest/api/3/group?groupId=ee8d15d1-6462-406a-b0a6-8065b7e4cdd7"}, "filterId": "10000"}, "emitted_at": 1697453219940} {"stream": "groups", "data": {"name": "Test group 17", "groupId": "022bc924-ac57-442d-80c9-df042b73ad87"}, "emitted_at": 1697453247031} {"stream": "groups", "data": {"name": "administrators", "groupId": "0ca6e087-7a61-4986-a269-98fe268854a1"}, "emitted_at": 1697453247032} {"stream": "groups", "data": {"name": "jira-servicemanagement-customers-airbyteio", "groupId": "125680d3-7e85-41ad-a662-892b6590272e"}, "emitted_at": 1697453247033} -{"stream": "issues", "data": {"expand": "customfield_10030.properties,operations,versionedRepresentations,editmeta,changelog,customfield_10029.properties,customfield_10010.requestTypePractice,transitions,renderedFields,customfield_10229.properties", "id": "10080", "self": "https://airbyteio.atlassian.net/rest/api/3/issue/10080", "key": "IT-24", 
"renderedFields": {"statuscategorychangedate": "11/Mar/21 6:17 AM", "timespent": "5 hours, 48 minutes", "aggregatetimespent": "5 hours, 48 minutes", "created": "11/Mar/21 6:17 AM", "customfield_10017": "", "timeestimate": "0 minutes", "updated": "05/Apr/23 4:58 AM", "description": "

Test description 74

", "timetracking": {"remainingEstimate": "0 minutes", "timeSpent": "5 hours, 48 minutes", "remainingEstimateSeconds": 0, "timeSpentSeconds": 20880}, "attachment": [{"self": "https://airbyteio.atlassian.net/rest/api/3/attachment/10123", "id": "10123", "filename": "demo.xlsx", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "14/Apr/21 2:11 PM", "size": "7 kB", "content": "https://airbyteio.atlassian.net/rest/api/3/attachment/content/10123"}], "aggregatetimeestimate": "0 minutes", "environment": "", "comment": {"comments": [], "self": "https://airbyteio.atlassian.net/rest/api/3/issue/10080/comment", "maxResults": 0, "total": 0, "startAt": 0}, "worklog": {"startAt": 0, "maxResults": 20, "total": 3, "worklogs": [{"self": "https://airbyteio.atlassian.net/rest/api/3/issue/10080/worklog/11708", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "updateAuthor": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": 
"https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "15/Apr/21 11:39 AM", "updated": "15/Apr/21 11:39 AM", "started": "14/Apr/21 6:48 PM", "timeSpent": "2 hours, 21 minutes", "id": "11708", "issueId": "10080"}, {"self": "https://airbyteio.atlassian.net/rest/api/3/issue/10080/worklog/11709", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "updateAuthor": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "15/Apr/21 11:39 AM", "updated": "15/Apr/21 11:39 AM", "started": "14/Apr/21 6:48 PM", "timeSpent": "37 minutes", "id": "11709", "issueId": "10080"}, {"self": "https://airbyteio.atlassian.net/rest/api/3/issue/10080/worklog/11710", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": 
"https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "updateAuthor": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "15/Apr/21 11:39 AM", "updated": "15/Apr/21 11:39 AM", "started": "14/Apr/21 6:48 PM", "timeSpent": "2 hours, 50 minutes", "id": "11710", "issueId": "10080"}]}}, "transitions": [{"id": "11", "name": "To Do", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10000", "description": "", "iconUrl": "https://airbyteio.atlassian.net/", "name": "To Do", "id": "10000", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/2", "id": 2, "key": "new", "colorName": "blue-gray", "name": "To Do"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "21", "name": "In Progress", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/3", "description": "This issue is being actively worked on at the moment by the assignee.", "iconUrl": "https://airbyteio.atlassian.net/images/icons/statuses/inprogress.png", "name": "In Progress", "id": "3", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/4", "id": 4, "key": "indeterminate", "colorName": "yellow", "name": "In Progress"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "31", "name": "Done", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10001", "description": "", "iconUrl": "https://airbyteio.atlassian.net/", "name": "Done", "id": "10001", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/3", "id": 3, "key": "done", "colorName": "green", "name": "Done"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "41", "name": "Approved", "to": {"self": 
"https://airbyteio.atlassian.net/rest/api/3/status/10005", "description": "This was auto-generated by Jira Service Management during workflow import", "iconUrl": "https://airbyteio.atlassian.net/images/icons/status_generic.gif", "name": "Approved", "id": "10005", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/3", "id": 3, "key": "done", "colorName": "green", "name": "Done"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "51", "name": "In review", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10004", "description": "This was auto-generated by Jira Service Management during workflow import", "iconUrl": "https://airbyteio.atlassian.net/images/icons/status_generic.gif", "name": "In review", "id": "10004", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/4", "id": 4, "key": "indeterminate", "colorName": "yellow", "name": "In Progress"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "61", "name": "Reopened", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/4", "description": "This issue was once resolved, but the resolution was deemed incorrect. From here issues are either marked assigned or resolved.", "iconUrl": "https://airbyteio.atlassian.net/images/icons/statuses/reopened.png", "name": "Reopened", "id": "4", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/2", "id": 2, "key": "new", "colorName": "blue-gray", "name": "To Do"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "71", "name": "Declined", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10002", "description": "This was auto-generated by Jira Service Management during workflow import", "iconUrl": "https://airbyteio.atlassian.net/images/icons/statuses/generic.png", "name": "Declined", "id": "10002", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/3", "id": 3, "key": "done", "colorName": "green", "name": "Done"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "81", "name": "Open", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/1", "description": "The issue is open and ready for the assignee to start work on it.", "iconUrl": "https://airbyteio.atlassian.net/images/icons/statuses/open.png", "name": "Open", "id": "1", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/2", "id": 2, "key": "new", "colorName": "blue-gray", "name": "To Do"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "91", "name": "Pending", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10003", "description": "This was auto-generated by Jira Service Management during workflow import", "iconUrl": "https://airbyteio.atlassian.net/images/icons/status_generic.gif", "name": "Pending", "id": "10003", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/4", "id": 4, "key": "indeterminate", "colorName": "yellow", "name": "In Progress"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, 
{"id": "101", "name": "Closed", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/6", "description": "The issue is considered finished, the resolution is correct. Issues which are closed can be reopened.", "iconUrl": "https://airbyteio.atlassian.net/images/icons/statuses/closed.png", "name": "Closed", "id": "6", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/3", "id": 3, "key": "done", "colorName": "green", "name": "Done"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}], "changelog": {"startAt": 0, "maxResults": 8, "total": 8, "histories": [{"id": "15179", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "2023-04-05T04:58:35.333-0700", "items": [{"field": "Sprint", "fieldtype": "custom", "fieldId": "customfield_10020", "from": "", "fromString": "", "to": "10", "toString": "IT Sprint 9"}]}, {"id": "14989", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "2021-04-15T11:39:47.917-0700", "items": [{"field": "timeestimate", "fieldtype": "jira", "fieldId": "timeestimate", "from": "0", "fromString": "0", "to": "0", "toString": "0"}, {"field": "timespent", "fieldtype": "jira", "fieldId": "timespent", "from": "10680", "fromString": "10680", "to": "20880", "toString": "20880"}, {"field": "WorklogId", "fieldtype": "jira", "from": null, "fromString": null, "to": "11710", "toString": "11710"}]}, {"id": "14988", "author": {"self": 
"https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "2021-04-15T11:39:47.314-0700", "items": [{"field": "timeestimate", "fieldtype": "jira", "fieldId": "timeestimate", "from": "0", "fromString": "0", "to": "0", "toString": "0"}, {"field": "timespent", "fieldtype": "jira", "fieldId": "timespent", "from": "8460", "fromString": "8460", "to": "10680", "toString": "10680"}, {"field": "WorklogId", "fieldtype": "jira", "from": null, "fromString": null, "to": "11709", "toString": "11709"}]}, {"id": "14987", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "2021-04-15T11:39:46.691-0700", "items": [{"field": "timeestimate", "fieldtype": "jira", "fieldId": "timeestimate", "from": null, "fromString": null, "to": "0", "toString": "0"}, {"field": "timespent", "fieldtype": "jira", "fieldId": "timespent", "from": null, "fromString": null, "to": "8460", "toString": "8460"}, {"field": "WorklogId", "fieldtype": "jira", "from": null, "fromString": null, "to": "11708", "toString": "11708"}]}, {"id": "14800", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": 
"https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "2021-04-15T07:18:07.884-0700", "items": [{"field": "RemoteIssueLink", "fieldtype": "jira", "from": null, "fromString": null, "to": "10046", "toString": "This issue links to \"TSTSUP-111 (My Acme Tracker)\""}]}, {"id": "14718", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "2021-04-15T00:08:54.455-0700", "items": [{"field": "Link", "fieldtype": "jira", "from": null, "fromString": null, "to": "IT-22", "toString": "This issue is duplicated by IT-22"}]}, {"id": "14716", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "2021-04-15T00:08:48.880-0700", "items": [{"field": "Link", "fieldtype": "jira", "from": null, "fromString": null, "to": "IT-23", "toString": "This issue is duplicated by IT-23"}]}, {"id": "14596", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": 
"integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "2021-04-14T14:11:01.899-0700", "items": [{"field": "Attachment", "fieldtype": "jira", "fieldId": "attachment", "from": null, "fromString": null, "to": "10123", "toString": "demo.xlsx"}]}]}, "fields": {"statuscategorychangedate": "2021-03-11T06:17:33.483-0800", "issuetype": {"self": "https://airbyteio.atlassian.net/rest/api/3/issuetype/10004", "id": "10004", "description": "A problem or error.", "iconUrl": "https://airbyteio.atlassian.net/rest/api/2/universal_avatar/view/type/issuetype/avatar/10303?size=medium", "name": "Bug", "subtask": false, "avatarId": 10303, "hierarchyLevel": 0}, "timespent": 20880, "project": {"self": "https://airbyteio.atlassian.net/rest/api/3/project/10000", "id": "10000", "key": "IT", "name": "integration-tests", "projectTypeKey": "software", "simplified": false, "avatarUrls": {"48x48": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10424", "24x24": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10424?size=small", "16x16": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10424?size=xsmall", "32x32": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10424?size=medium"}, "projectCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/projectCategory/10004", "id": "10004", "description": "Test Project Category 2", "name": "Test category 2"}}, "fixVersions": [], "aggregatetimespent": 20880, "workratio": -1, "watches": {"self": "https://airbyteio.atlassian.net/rest/api/3/issue/IT-24/watchers", "watchCount": 1, "isWatching": true}, "issuerestriction": {"issuerestrictions": {}, "shouldDisplay": false}, "created": "2021-03-11T06:17:33.169000-08:00", "customfield_10020": [{"id": 10, "name": "IT Sprint 9", "state": "future", "boardId": 1, "startDate": "2022-09-06T11:25:59.072Z", "endDate": "2022-09-20T11:25:00.000Z"}], "priority": {"self": "https://airbyteio.atlassian.net/rest/api/3/priority/3", "iconUrl": "https://airbyteio.atlassian.net/images/icons/priorities/medium.svg", "name": "Medium", "id": "3"}, "labels": [], "customfield_10018": {"hasEpicLinkFieldDependency": false, "showField": false, "nonEditableReason": {"reason": "PLUGIN_LICENSE_ERROR", "message": "The Parent Link is only available to Jira Premium users."}}, "customfield_10019": "0|i000hr:", "customfield_10217": [], "timeestimate": 0, "versions": [], "issuelinks": [{"id": "10244", "self": "https://airbyteio.atlassian.net/rest/api/3/issueLink/10244", "type": {"id": "10002", "name": "Duplicate", "inward": "is duplicated by", "outward": "duplicates", "self": 
"https://airbyteio.atlassian.net/rest/api/3/issueLinkType/10002"}, "inwardIssue": {"id": "10069", "key": "IT-22", "self": "https://airbyteio.atlassian.net/rest/api/3/issue/10069", "fields": {"summary": "Test 63", "status": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10000", "description": "", "iconUrl": "https://airbyteio.atlassian.net/", "name": "To Do", "id": "10000", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/2", "id": 2, "key": "new", "colorName": "blue-gray", "name": "To Do"}}, "priority": {"self": "https://airbyteio.atlassian.net/rest/api/3/priority/3", "iconUrl": "https://airbyteio.atlassian.net/images/icons/priorities/medium.svg", "name": "Medium", "id": "3"}, "issuetype": {"self": "https://airbyteio.atlassian.net/rest/api/3/issuetype/10000", "id": "10000", "description": "A big user story that needs to be broken down. Created by Jira Software - do not edit or delete.", "iconUrl": "https://airbyteio.atlassian.net/images/icons/issuetypes/epic.svg", "name": "Epic", "subtask": false, "hierarchyLevel": 1}}}}, {"id": "10243", "self": "https://airbyteio.atlassian.net/rest/api/3/issueLink/10243", "type": {"id": "10002", "name": "Duplicate", "inward": "is duplicated by", "outward": "duplicates", "self": "https://airbyteio.atlassian.net/rest/api/3/issueLinkType/10002"}, "inwardIssue": {"id": "10075", "key": "IT-23", "self": "https://airbyteio.atlassian.net/rest/api/3/issue/10075", "fields": {"summary": "Test 69", "status": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10000", "description": "", "iconUrl": "https://airbyteio.atlassian.net/", "name": "To Do", "id": "10000", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/2", "id": 2, "key": "new", "colorName": "blue-gray", "name": "To Do"}}, "priority": {"self": "https://airbyteio.atlassian.net/rest/api/3/priority/3", "iconUrl": "https://airbyteio.atlassian.net/images/icons/priorities/medium.svg", "name": "Medium", "id": "3"}, "issuetype": {"self": "https://airbyteio.atlassian.net/rest/api/3/issuetype/10004", "id": "10004", "description": "A problem or error.", "iconUrl": "https://airbyteio.atlassian.net/rest/api/2/universal_avatar/view/type/issuetype/avatar/10303?size=medium", "name": "Bug", "subtask": false, "avatarId": 10303, "hierarchyLevel": 0}}}}], "updated": "2023-04-05T04:58:35.329000-07:00", "status": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10000", "description": "", "iconUrl": "https://airbyteio.atlassian.net/", "name": "To Do", "id": "10000", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/2", "id": 2, "key": "new", "colorName": "blue-gray", "name": "To Do"}}, "components": [], "description": {"type": "doc", "version": 1, "content": [{"type": "paragraph", "content": [{"type": "text", "text": "Test description 74"}]}]}, "timetracking": {"remainingEstimate": "0m", "timeSpent": "5h 48m", "remainingEstimateSeconds": 0, "timeSpentSeconds": 20880}, "attachment": [{"self": "https://airbyteio.atlassian.net/rest/api/3/attachment/10123", "id": "10123", "filename": "demo.xlsx", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": 
"https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "2021-04-14T14:11:01.652-0700", "size": 7360, "content": "https://airbyteio.atlassian.net/rest/api/3/attachment/content/10123"}], "aggregatetimeestimate": 0, "summary": "Test 74", "creator": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "subtasks": [], "reporter": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "aggregateprogress": {"progress": 20880, "total": 20880, "percent": 100}, "progress": {"progress": 20880, "total": 20880, "percent": 100}, "votes": {"self": "https://airbyteio.atlassian.net/rest/api/3/issue/IT-24/votes", "votes": 1, "hasVoted": true}, "comment": {"comments": [], "self": "https://airbyteio.atlassian.net/rest/api/3/issue/10080/comment", "maxResults": 0, "total": 0, "startAt": 0}, "worklog": {"startAt": 0, "maxResults": 20, "total": 3, "worklogs": [{"self": "https://airbyteio.atlassian.net/rest/api/3/issue/10080/worklog/11708", "author": {"self": 
"https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "updateAuthor": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "comment": {"type": "doc", "version": 1, "content": [{"type": "paragraph", "content": [{"text": "I did some work here. 
0", "type": "text"}]}]}, "created": "2021-04-15T11:39:46.574-0700", "updated": "2021-04-15T11:39:46.574-0700", "started": "2021-04-14T18:48:52.747-0700", "timeSpent": "2h 21m", "timeSpentSeconds": 8460, "id": "11708", "issueId": "10080"}, {"self": "https://airbyteio.atlassian.net/rest/api/3/issue/10080/worklog/11709", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "updateAuthor": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "comment": {"type": "doc", "version": 1, "content": [{"type": "paragraph", "content": [{"text": "I did some work here. 
1", "type": "text"}]}]}, "created": "2021-04-15T11:39:47.215-0700", "updated": "2021-04-15T11:39:47.215-0700", "started": "2021-04-14T18:48:52.747-0700", "timeSpent": "37m", "timeSpentSeconds": 2220, "id": "11709", "issueId": "10080"}, {"self": "https://airbyteio.atlassian.net/rest/api/3/issue/10080/worklog/11710", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "updateAuthor": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "comment": {"type": "doc", "version": 1, "content": [{"type": "paragraph", "content": [{"text": "I did some work here. 
2", "type": "text"}]}]}, "created": "2021-04-15T11:39:47.834-0700", "updated": "2021-04-15T11:39:47.834-0700", "started": "2021-04-14T18:48:52.747-0700", "timeSpent": "2h 50m", "timeSpentSeconds": 10200, "id": "11710", "issueId": "10080"}]}}, "projectId": "10000", "projectKey": "IT", "created": "2021-03-11T06:17:33.169000-08:00", "updated": "2023-04-05T04:58:35.329000-07:00"}, "emitted_at": 1706087956389} -{"stream": "issues", "data": {"expand": "customfield_10030.properties,operations,versionedRepresentations,editmeta,changelog,customfield_10029.properties,customfield_10010.requestTypePractice,transitions,renderedFields,customfield_10229.properties", "id": "10626", "self": "https://airbyteio.atlassian.net/rest/api/3/issue/10626", "key": "IT-26", "renderedFields": {"statuscategorychangedate": "17/May/22 4:28 AM", "timespent": "1 day", "aggregatetimespent": "1 day", "created": "17/May/22 4:28 AM", "customfield_10017": "dark_yellow", "timeestimate": "1 week, 1 day", "aggregatetimeoriginalestimate": "2 weeks, 4 days, 5 hours", "updated": "12/Oct/23 1:43 PM", "timeoriginalestimate": "2 weeks, 4 days, 5 hours", "description": "
Implement OAUth
", "customfield_10011": "Test 2", "customfield_10013": "ghx-label-2", "timetracking": {"originalEstimate": "2 weeks, 4 days, 5 hours", "remainingEstimate": "1 week, 1 day", "timeSpent": "1 day", "originalEstimateSeconds": 421200, "remainingEstimateSeconds": 172800, "timeSpentSeconds": 28800}, "attachment": [], "aggregatetimeestimate": "1 week, 1 day", "environment": "", "comment": {"comments": [], "self": "https://airbyteio.atlassian.net/rest/api/3/issue/10626/comment", "maxResults": 0, "total": 0, "startAt": 0}, "worklog": {"startAt": 0, "maxResults": 20, "total": 1, "worklogs": [{"self": "https://airbyteio.atlassian.net/rest/api/3/issue/10626/worklog/11820", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=557058%3A295406f3-a1fc-4733-b906-dd15d021bd79", "accountId": "557058:295406f3-a1fc-4733-b906-dd15d021bd79", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png", "24x24": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png", "16x16": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png", "32x32": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png"}, "displayName": "Tempo Timesheets", "active": true, "timeZone": "America/Los_Angeles", "accountType": "app"}, "updateAuthor": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=557058%3A295406f3-a1fc-4733-b906-dd15d021bd79", "accountId": "557058:295406f3-a1fc-4733-b906-dd15d021bd79", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png", "24x24": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png", "16x16": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png", "32x32": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png"}, "displayName": "Tempo Timesheets", "active": true, "timeZone": "America/Los_Angeles", "accountType": "app"}, "created": "05/Apr/23 5:08 AM", "updated": "05/Apr/23 5:08 AM", "started": "05/Apr/23 1:00 AM", "timeSpent": "1 day", "id": "11820", "issueId": "10626"}]}}, "transitions": [{"id": "11", "name": "To Do", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10000", "description": "", "iconUrl": "https://airbyteio.atlassian.net/", "name": "To Do", "id": "10000", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/2", "id": 2, "key": "new", "colorName": "blue-gray", "name": "To Do"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "21", "name": "In Progress", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/3", "description": "This issue is being actively worked on 
at the moment by the assignee.", "iconUrl": "https://airbyteio.atlassian.net/images/icons/statuses/inprogress.png", "name": "In Progress", "id": "3", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/4", "id": 4, "key": "indeterminate", "colorName": "yellow", "name": "In Progress"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "31", "name": "Done", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10001", "description": "", "iconUrl": "https://airbyteio.atlassian.net/", "name": "Done", "id": "10001", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/3", "id": 3, "key": "done", "colorName": "green", "name": "Done"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "41", "name": "Approved", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10005", "description": "This was auto-generated by Jira Service Management during workflow import", "iconUrl": "https://airbyteio.atlassian.net/images/icons/status_generic.gif", "name": "Approved", "id": "10005", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/3", "id": 3, "key": "done", "colorName": "green", "name": "Done"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "51", "name": "In review", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10004", "description": "This was auto-generated by Jira Service Management during workflow import", "iconUrl": "https://airbyteio.atlassian.net/images/icons/status_generic.gif", "name": "In review", "id": "10004", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/4", "id": 4, "key": "indeterminate", "colorName": "yellow", "name": "In Progress"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "61", "name": "Reopened", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/4", "description": "This issue was once resolved, but the resolution was deemed incorrect. 
From here issues are either marked assigned or resolved.", "iconUrl": "https://airbyteio.atlassian.net/images/icons/statuses/reopened.png", "name": "Reopened", "id": "4", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/2", "id": 2, "key": "new", "colorName": "blue-gray", "name": "To Do"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "71", "name": "Declined", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10002", "description": "This was auto-generated by Jira Service Management during workflow import", "iconUrl": "https://airbyteio.atlassian.net/images/icons/statuses/generic.png", "name": "Declined", "id": "10002", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/3", "id": 3, "key": "done", "colorName": "green", "name": "Done"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "81", "name": "Open", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/1", "description": "The issue is open and ready for the assignee to start work on it.", "iconUrl": "https://airbyteio.atlassian.net/images/icons/statuses/open.png", "name": "Open", "id": "1", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/2", "id": 2, "key": "new", "colorName": "blue-gray", "name": "To Do"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "91", "name": "Pending", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10003", "description": "This was auto-generated by Jira Service Management during workflow import", "iconUrl": "https://airbyteio.atlassian.net/images/icons/status_generic.gif", "name": "Pending", "id": "10003", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/4", "id": 4, "key": "indeterminate", "colorName": "yellow", "name": "In Progress"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "101", "name": "Closed", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/6", "description": "The issue is considered finished, the resolution is correct. 
Issues which are closed can be reopened.", "iconUrl": "https://airbyteio.atlassian.net/images/icons/statuses/closed.png", "name": "Closed", "id": "6", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/3", "id": 3, "key": "done", "colorName": "green", "name": "Done"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}], "changelog": {"startAt": 0, "maxResults": 4, "total": 4, "histories": [{"id": "15198", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "2023-10-12T13:43:15.036-0700", "items": [{"field": "timeestimate", "fieldtype": "jira", "fieldId": "timeestimate", "from": null, "fromString": null, "to": "172800", "toString": "172800"}]}, {"id": "15197", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "2023-10-12T13:43:05.182-0700", "items": [{"field": "timeoriginalestimate", "fieldtype": "jira", "fieldId": "timeoriginalestimate", "from": null, "fromString": null, "to": "421200", "toString": "421200"}]}, {"id": "15186", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=557058%3A295406f3-a1fc-4733-b906-dd15d021bd79", "accountId": "557058:295406f3-a1fc-4733-b906-dd15d021bd79", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png", "24x24": 
"https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png", "16x16": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png", "32x32": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png"}, "displayName": "Tempo Timesheets", "active": true, "timeZone": "America/Los_Angeles", "accountType": "app"}, "created": "2023-04-05T05:08:50.115-0700", "items": [{"field": "timespent", "fieldtype": "jira", "fieldId": "timespent", "from": null, "fromString": null, "to": "28800", "toString": "28800"}, {"field": "WorklogId", "fieldtype": "jira", "from": null, "fromString": null, "to": "11820", "toString": "11820"}]}, {"id": "15128", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "2022-05-17T04:28:19.837-0700", "items": [{"field": "Link", "fieldtype": "jira", "from": null, "fromString": null, "to": "IT-25", "toString": "This issue clones IT-25"}]}]}, "fields": {"statuscategorychangedate": "2022-05-17T04:28:19.775-0700", "issuetype": {"self": "https://airbyteio.atlassian.net/rest/api/3/issuetype/10000", "id": "10000", "description": "A big user story that needs to be broken down. 
Created by Jira Software - do not edit or delete.", "iconUrl": "https://airbyteio.atlassian.net/images/icons/issuetypes/epic.svg", "name": "Epic", "subtask": false, "hierarchyLevel": 1}, "timespent": 28800, "project": {"self": "https://airbyteio.atlassian.net/rest/api/3/project/10000", "id": "10000", "key": "IT", "name": "integration-tests", "projectTypeKey": "software", "simplified": false, "avatarUrls": {"48x48": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10424", "24x24": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10424?size=small", "16x16": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10424?size=xsmall", "32x32": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10424?size=medium"}, "projectCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/projectCategory/10004", "id": "10004", "description": "Test Project Category 2", "name": "Test category 2"}}, "fixVersions": [], "aggregatetimespent": 28800, "workratio": 6, "issuerestriction": {"issuerestrictions": {}, "shouldDisplay": false}, "watches": {"self": "https://airbyteio.atlassian.net/rest/api/3/issue/IT-26/watchers", "watchCount": 1, "isWatching": true}, "created": "2022-05-17T04:28:19.523000-07:00", "priority": {"self": "https://airbyteio.atlassian.net/rest/api/3/priority/4", "iconUrl": "https://airbyteio.atlassian.net/images/icons/priorities/low.svg", "name": "Low", "id": "4"}, "labels": [], "customfield_10017": "dark_yellow", "customfield_10018": {"hasEpicLinkFieldDependency": false, "showField": false, "nonEditableReason": {"reason": "PLUGIN_LICENSE_ERROR", "message": "The Parent Link is only available to Jira Premium users."}}, "customfield_10217": [], "customfield_10019": "0|i00773:", "timeestimate": 172800, "aggregatetimeoriginalestimate": 421200, "versions": [], "issuelinks": [{"id": "10263", "self": "https://airbyteio.atlassian.net/rest/api/3/issueLink/10263", "type": {"id": "10001", "name": "Cloners", "inward": "is cloned by", "outward": "clones", "self": "https://airbyteio.atlassian.net/rest/api/3/issueLinkType/10001"}, "outwardIssue": {"id": "10625", "key": "IT-25", "self": "https://airbyteio.atlassian.net/rest/api/3/issue/10625", "fields": {"summary": "Aggregate issues", "status": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10000", "description": "", "iconUrl": "https://airbyteio.atlassian.net/", "name": "To Do", "id": "10000", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/2", "id": 2, "key": "new", "colorName": "blue-gray", "name": "To Do"}}, "priority": {"self": "https://airbyteio.atlassian.net/rest/api/3/priority/4", "iconUrl": "https://airbyteio.atlassian.net/images/icons/priorities/low.svg", "name": "Low", "id": "4"}, "issuetype": {"self": "https://airbyteio.atlassian.net/rest/api/3/issuetype/10000", "id": "10000", "description": "A big user story that needs to be broken down. 
Created by Jira Software - do not edit or delete.", "iconUrl": "https://airbyteio.atlassian.net/images/icons/issuetypes/epic.svg", "name": "Epic", "subtask": false, "hierarchyLevel": 1}}}}], "assignee": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "updated": "2023-10-12T13:43:15.025000-07:00", "status": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10000", "description": "", "iconUrl": "https://airbyteio.atlassian.net/", "name": "To Do", "id": "10000", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/2", "id": 2, "key": "new", "colorName": "blue-gray", "name": "To Do"}}, "components": [{"self": "https://airbyteio.atlassian.net/rest/api/3/component/10049", "id": "10049", "name": "Component 3", "description": "This is a Jira component"}], "timeoriginalestimate": 421200, "description": {"version": 1, "type": "doc", "content": [{"type": "paragraph", "content": [{"type": "text", "text": "Implement OAUth"}]}]}, "customfield_10011": "Test 2", "customfield_10012": {"self": "https://airbyteio.atlassian.net/rest/api/3/customFieldOption/10016", "value": "To Do", "id": "10016"}, "customfield_10013": "ghx-label-2", "timetracking": {"originalEstimate": "2w 4d 5h", "remainingEstimate": "1w 1d", "timeSpent": "1d", "originalEstimateSeconds": 421200, "remainingEstimateSeconds": 172800, "timeSpentSeconds": 28800}, "attachment": [], "aggregatetimeestimate": 172800, "summary": "CLONE - Aggregate issues", "creator": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "subtasks": [], "reporter": {"self": 
"https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "aggregateprogress": {"progress": 28800, "total": 201600, "percent": 14}, "progress": {"progress": 28800, "total": 201600, "percent": 14}, "votes": {"self": "https://airbyteio.atlassian.net/rest/api/3/issue/IT-26/votes", "votes": 0, "hasVoted": false}, "comment": {"comments": [], "self": "https://airbyteio.atlassian.net/rest/api/3/issue/10626/comment", "maxResults": 0, "total": 0, "startAt": 0}, "worklog": {"startAt": 0, "maxResults": 20, "total": 1, "worklogs": [{"self": "https://airbyteio.atlassian.net/rest/api/3/issue/10626/worklog/11820", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=557058%3A295406f3-a1fc-4733-b906-dd15d021bd79", "accountId": "557058:295406f3-a1fc-4733-b906-dd15d021bd79", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png", "24x24": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png", "16x16": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png", "32x32": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png"}, "displayName": "Tempo Timesheets", "active": true, "timeZone": "America/Los_Angeles", "accountType": "app"}, "updateAuthor": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=557058%3A295406f3-a1fc-4733-b906-dd15d021bd79", "accountId": "557058:295406f3-a1fc-4733-b906-dd15d021bd79", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png", "24x24": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png", "16x16": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png", "32x32": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png"}, "displayName": "Tempo 
Timesheets", "active": true, "timeZone": "America/Los_Angeles", "accountType": "app"}, "comment": {"version": 1, "type": "doc", "content": [{"type": "paragraph", "content": [{"type": "text", "text": "time-tracking"}]}]}, "created": "2023-04-05T05:08:50.033-0700", "updated": "2023-04-05T05:08:50.033-0700", "started": "2023-04-05T01:00:00.000-0700", "timeSpent": "1d", "timeSpentSeconds": 28800, "id": "11820", "issueId": "10626"}]}}, "projectId": "10000", "projectKey": "IT", "created": "2022-05-17T04:28:19.523000-07:00", "updated": "2023-10-12T13:43:15.025000-07:00"}, "emitted_at": 1706087956632} +{"stream": "issues", "data": {"expand": "customfield_10030.properties,operations,versionedRepresentations,editmeta,changelog,customfield_10029.properties,customfield_10010.requestTypePractice,transitions,renderedFields,customfield_10229.properties", "id": "10080", "self": "https://airbyteio.atlassian.net/rest/api/3/issue/10080", "key": "IT-24", "renderedFields": {"statuscategorychangedate": "11/Mar/21 6:17 AM", "timespent": "5 hours, 48 minutes", "aggregatetimespent": "5 hours, 48 minutes", "created": "11/Mar/21 6:17 AM", "customfield_10017": "", "timeestimate": "0 minutes", "updated": "05/Apr/23 4:58 AM", "description": "
Test description 74
", "timetracking": {"remainingEstimate": "0 minutes", "timeSpent": "5 hours, 48 minutes", "remainingEstimateSeconds": 0, "timeSpentSeconds": 20880}, "attachment": [{"self": "https://airbyteio.atlassian.net/rest/api/3/attachment/10123", "id": "10123", "filename": "demo.xlsx", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "14/Apr/21 2:11 PM", "size": "7 kB", "content": "https://airbyteio.atlassian.net/rest/api/3/attachment/content/10123"}], "aggregatetimeestimate": "0 minutes", "environment": "", "comment": {"comments": [], "self": "https://airbyteio.atlassian.net/rest/api/3/issue/10080/comment", "maxResults": 0, "total": 0, "startAt": 0}, "worklog": {"startAt": 0, "maxResults": 20, "total": 3, "worklogs": [{"self": "https://airbyteio.atlassian.net/rest/api/3/issue/10080/worklog/11708", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "updateAuthor": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": 
"https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "15/Apr/21 11:39 AM", "updated": "15/Apr/21 11:39 AM", "started": "14/Apr/21 6:48 PM", "timeSpent": "2 hours, 21 minutes", "id": "11708", "issueId": "10080"}, {"self": "https://airbyteio.atlassian.net/rest/api/3/issue/10080/worklog/11709", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "updateAuthor": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "15/Apr/21 11:39 AM", "updated": "15/Apr/21 11:39 AM", "started": "14/Apr/21 6:48 PM", "timeSpent": "37 minutes", "id": "11709", "issueId": "10080"}, {"self": "https://airbyteio.atlassian.net/rest/api/3/issue/10080/worklog/11710", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": 
"https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "updateAuthor": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "15/Apr/21 11:39 AM", "updated": "15/Apr/21 11:39 AM", "started": "14/Apr/21 6:48 PM", "timeSpent": "2 hours, 50 minutes", "id": "11710", "issueId": "10080"}]}}, "transitions": [{"id": "11", "name": "To Do", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10000", "description": "", "iconUrl": "https://airbyteio.atlassian.net/", "name": "To Do", "id": "10000", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/2", "id": 2, "key": "new", "colorName": "blue-gray", "name": "To Do"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "21", "name": "In Progress", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/3", "description": "This issue is being actively worked on at the moment by the assignee.", "iconUrl": "https://airbyteio.atlassian.net/images/icons/statuses/inprogress.png", "name": "In Progress", "id": "3", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/4", "id": 4, "key": "indeterminate", "colorName": "yellow", "name": "In Progress"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "31", "name": "Done", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10001", "description": "", "iconUrl": "https://airbyteio.atlassian.net/", "name": "Done", "id": "10001", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/3", "id": 3, "key": "done", "colorName": "green", "name": "Done"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "41", "name": "Approved", "to": {"self": 
"https://airbyteio.atlassian.net/rest/api/3/status/10005", "description": "This was auto-generated by Jira Service Management during workflow import", "iconUrl": "https://airbyteio.atlassian.net/images/icons/status_generic.gif", "name": "Approved", "id": "10005", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/3", "id": 3, "key": "done", "colorName": "green", "name": "Done"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "51", "name": "In review", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10004", "description": "This was auto-generated by Jira Service Management during workflow import", "iconUrl": "https://airbyteio.atlassian.net/images/icons/status_generic.gif", "name": "In review", "id": "10004", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/4", "id": 4, "key": "indeterminate", "colorName": "yellow", "name": "In Progress"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "61", "name": "Reopened", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/4", "description": "This issue was once resolved, but the resolution was deemed incorrect. From here issues are either marked assigned or resolved.", "iconUrl": "https://airbyteio.atlassian.net/images/icons/statuses/reopened.png", "name": "Reopened", "id": "4", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/2", "id": 2, "key": "new", "colorName": "blue-gray", "name": "To Do"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "71", "name": "Declined", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10002", "description": "This was auto-generated by Jira Service Management during workflow import", "iconUrl": "https://airbyteio.atlassian.net/images/icons/statuses/generic.png", "name": "Declined", "id": "10002", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/3", "id": 3, "key": "done", "colorName": "green", "name": "Done"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "81", "name": "Open", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/1", "description": "The issue is open and ready for the assignee to start work on it.", "iconUrl": "https://airbyteio.atlassian.net/images/icons/statuses/open.png", "name": "Open", "id": "1", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/2", "id": 2, "key": "new", "colorName": "blue-gray", "name": "To Do"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "91", "name": "Pending", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10003", "description": "This was auto-generated by Jira Service Management during workflow import", "iconUrl": "https://airbyteio.atlassian.net/images/icons/status_generic.gif", "name": "Pending", "id": "10003", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/4", "id": 4, "key": "indeterminate", "colorName": "yellow", "name": "In Progress"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, 
{"id": "101", "name": "Closed", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/6", "description": "The issue is considered finished, the resolution is correct. Issues which are closed can be reopened.", "iconUrl": "https://airbyteio.atlassian.net/images/icons/statuses/closed.png", "name": "Closed", "id": "6", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/3", "id": 3, "key": "done", "colorName": "green", "name": "Done"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}], "changelog": {"startAt": 0, "maxResults": 8, "total": 8, "histories": [{"id": "15179", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "2023-04-05T04:58:35.333-0700", "items": [{"field": "Sprint", "fieldtype": "custom", "fieldId": "customfield_10020", "from": "", "fromString": "", "to": "10", "toString": "IT Sprint 9"}]}, {"id": "14989", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "2021-04-15T11:39:47.917-0700", "items": [{"field": "timeestimate", "fieldtype": "jira", "fieldId": "timeestimate", "from": "0", "fromString": "0", "to": "0", "toString": "0"}, {"field": "timespent", "fieldtype": "jira", "fieldId": "timespent", "from": "10680", "fromString": "10680", "to": "20880", "toString": "20880"}, {"field": "WorklogId", "fieldtype": "jira", "from": null, "fromString": null, "to": "11710", "toString": "11710"}]}, {"id": "14988", "author": {"self": 
"https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "2021-04-15T11:39:47.314-0700", "items": [{"field": "timeestimate", "fieldtype": "jira", "fieldId": "timeestimate", "from": "0", "fromString": "0", "to": "0", "toString": "0"}, {"field": "timespent", "fieldtype": "jira", "fieldId": "timespent", "from": "8460", "fromString": "8460", "to": "10680", "toString": "10680"}, {"field": "WorklogId", "fieldtype": "jira", "from": null, "fromString": null, "to": "11709", "toString": "11709"}]}, {"id": "14987", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "2021-04-15T11:39:46.691-0700", "items": [{"field": "timeestimate", "fieldtype": "jira", "fieldId": "timeestimate", "from": null, "fromString": null, "to": "0", "toString": "0"}, {"field": "timespent", "fieldtype": "jira", "fieldId": "timespent", "from": null, "fromString": null, "to": "8460", "toString": "8460"}, {"field": "WorklogId", "fieldtype": "jira", "from": null, "fromString": null, "to": "11708", "toString": "11708"}]}, {"id": "14800", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": 
"https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "2021-04-15T07:18:07.884-0700", "items": [{"field": "RemoteIssueLink", "fieldtype": "jira", "from": null, "fromString": null, "to": "10046", "toString": "This issue links to \"TSTSUP-111 (My Acme Tracker)\""}]}, {"id": "14718", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "2021-04-15T00:08:54.455-0700", "items": [{"field": "Link", "fieldtype": "jira", "from": null, "fromString": null, "to": "IT-22", "toString": "This issue is duplicated by IT-22"}]}, {"id": "14716", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "2021-04-15T00:08:48.880-0700", "items": [{"field": "Link", "fieldtype": "jira", "from": null, "fromString": null, "to": "IT-23", "toString": "This issue is duplicated by IT-23"}]}, {"id": "14596", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": 
"integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "2021-04-14T14:11:01.899-0700", "items": [{"field": "Attachment", "fieldtype": "jira", "fieldId": "attachment", "from": null, "fromString": null, "to": "10123", "toString": "demo.xlsx"}]}]}, "fields": {"statuscategorychangedate": "2021-03-11T06:17:33.483000-08:00", "issuetype": {"self": "https://airbyteio.atlassian.net/rest/api/3/issuetype/10004", "id": "10004", "description": "A problem or error.", "iconUrl": "https://airbyteio.atlassian.net/rest/api/2/universal_avatar/view/type/issuetype/avatar/10303?size=medium", "name": "Bug", "subtask": false, "avatarId": 10303, "hierarchyLevel": 0}, "timespent": 20880, "project": {"self": "https://airbyteio.atlassian.net/rest/api/3/project/10000", "id": "10000", "key": "IT", "name": "integration-tests", "projectTypeKey": "software", "simplified": false, "avatarUrls": {"48x48": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10424", "24x24": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10424?size=small", "16x16": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10424?size=xsmall", "32x32": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10424?size=medium"}, "projectCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/projectCategory/10004", "id": "10004", "description": "Test Project Category 2", "name": "Test category 2"}}, "fixVersions": [], "aggregatetimespent": 20880, "workratio": -1, "issuerestriction": {"issuerestrictions": {}, "shouldDisplay": false}, "watches": {"self": "https://airbyteio.atlassian.net/rest/api/3/issue/IT-24/watchers", "watchCount": 1, "isWatching": true}, "created": "2021-03-11T06:17:33.169000-08:00", "customfield_10020": [{"id": 10, "name": "IT Sprint 9", "state": "future", "boardId": 1, "startDate": "2022-09-06T11:25:59.072Z", "endDate": "2022-09-20T11:25:00.000Z"}], "priority": {"self": "https://airbyteio.atlassian.net/rest/api/3/priority/3", "iconUrl": "https://airbyteio.atlassian.net/images/icons/priorities/medium.svg", "name": "Medium", "id": "3"}, "labels": [], "customfield_10018": {"hasEpicLinkFieldDependency": false, "showField": false, "nonEditableReason": {"reason": "PLUGIN_LICENSE_ERROR", "message": "The Parent Link is only available to Jira Premium users."}}, "customfield_10217": [], "customfield_10019": "0|i000hr:", "timeestimate": 0, "versions": [], "issuelinks": [{"id": "10244", "self": "https://airbyteio.atlassian.net/rest/api/3/issueLink/10244", "type": {"id": "10002", "name": "Duplicate", "inward": "is duplicated by", "outward": "duplicates", "self": 
"https://airbyteio.atlassian.net/rest/api/3/issueLinkType/10002"}, "inwardIssue": {"id": "10069", "key": "IT-22", "self": "https://airbyteio.atlassian.net/rest/api/3/issue/10069", "fields": {"summary": "Test 63", "status": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10000", "description": "", "iconUrl": "https://airbyteio.atlassian.net/", "name": "To Do", "id": "10000", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/2", "id": 2, "key": "new", "colorName": "blue-gray", "name": "To Do"}}, "priority": {"self": "https://airbyteio.atlassian.net/rest/api/3/priority/3", "iconUrl": "https://airbyteio.atlassian.net/images/icons/priorities/medium.svg", "name": "Medium", "id": "3"}, "issuetype": {"self": "https://airbyteio.atlassian.net/rest/api/3/issuetype/10000", "id": "10000", "description": "A big user story that needs to be broken down. Created by Jira Software - do not edit or delete.", "iconUrl": "https://airbyteio.atlassian.net/images/icons/issuetypes/epic.svg", "name": "Epic", "subtask": false, "hierarchyLevel": 1}}}}, {"id": "10243", "self": "https://airbyteio.atlassian.net/rest/api/3/issueLink/10243", "type": {"id": "10002", "name": "Duplicate", "inward": "is duplicated by", "outward": "duplicates", "self": "https://airbyteio.atlassian.net/rest/api/3/issueLinkType/10002"}, "inwardIssue": {"id": "10075", "key": "IT-23", "self": "https://airbyteio.atlassian.net/rest/api/3/issue/10075", "fields": {"summary": "Test 69", "status": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10000", "description": "", "iconUrl": "https://airbyteio.atlassian.net/", "name": "To Do", "id": "10000", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/2", "id": 2, "key": "new", "colorName": "blue-gray", "name": "To Do"}}, "priority": {"self": "https://airbyteio.atlassian.net/rest/api/3/priority/3", "iconUrl": "https://airbyteio.atlassian.net/images/icons/priorities/medium.svg", "name": "Medium", "id": "3"}, "issuetype": {"self": "https://airbyteio.atlassian.net/rest/api/3/issuetype/10004", "id": "10004", "description": "A problem or error.", "iconUrl": "https://airbyteio.atlassian.net/rest/api/2/universal_avatar/view/type/issuetype/avatar/10303?size=medium", "name": "Bug", "subtask": false, "avatarId": 10303, "hierarchyLevel": 0}}}}], "updated": "2023-04-05T04:58:35.329000-07:00", "status": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10000", "description": "", "iconUrl": "https://airbyteio.atlassian.net/", "name": "To Do", "id": "10000", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/2", "id": 2, "key": "new", "colorName": "blue-gray", "name": "To Do"}}, "components": [], "description": {"type": "doc", "version": 1, "content": [{"type": "paragraph", "content": [{"type": "text", "text": "Test description 74"}]}]}, "timetracking": {"remainingEstimate": "0m", "timeSpent": "5h 48m", "remainingEstimateSeconds": 0, "timeSpentSeconds": 20880}, "attachment": [{"self": "https://airbyteio.atlassian.net/rest/api/3/attachment/10123", "id": "10123", "filename": "demo.xlsx", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": 
"https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "2021-04-14T14:11:01.652-0700", "size": 7360, "content": "https://airbyteio.atlassian.net/rest/api/3/attachment/content/10123"}], "aggregatetimeestimate": 0, "summary": "Test 74", "creator": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "subtasks": [], "reporter": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "aggregateprogress": {"progress": 20880, "total": 20880, "percent": 100}, "progress": {"progress": 20880, "total": 20880, "percent": 100}, "comment": {"comments": [], "self": "https://airbyteio.atlassian.net/rest/api/3/issue/10080/comment", "maxResults": 0, "total": 0, "startAt": 0}, "votes": {"self": "https://airbyteio.atlassian.net/rest/api/3/issue/IT-24/votes", "votes": 1, "hasVoted": true}, "worklog": {"startAt": 0, "maxResults": 20, "total": 3, "worklogs": [{"self": "https://airbyteio.atlassian.net/rest/api/3/issue/10080/worklog/11708", "author": {"self": 
"https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "updateAuthor": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "comment": {"type": "doc", "version": 1, "content": [{"type": "paragraph", "content": [{"text": "I did some work here. 
0", "type": "text"}]}]}, "created": "2021-04-15T11:39:46.574000-07:00", "updated": "2021-04-15T11:39:46.574000-07:00", "started": "2021-04-14T18:48:52.747000-07:00", "timeSpent": "2h 21m", "timeSpentSeconds": 8460, "id": "11708", "issueId": "10080"}, {"self": "https://airbyteio.atlassian.net/rest/api/3/issue/10080/worklog/11709", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "updateAuthor": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "comment": {"type": "doc", "version": 1, "content": [{"type": "paragraph", "content": [{"text": "I did some work here. 
1", "type": "text"}]}]}, "created": "2021-04-15T11:39:47.215000-07:00", "updated": "2021-04-15T11:39:47.215000-07:00", "started": "2021-04-14T18:48:52.747000-07:00", "timeSpent": "37m", "timeSpentSeconds": 2220, "id": "11709", "issueId": "10080"}, {"self": "https://airbyteio.atlassian.net/rest/api/3/issue/10080/worklog/11710", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "updateAuthor": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "comment": {"type": "doc", "version": 1, "content": [{"type": "paragraph", "content": [{"text": "I did some work here. 
2", "type": "text"}]}]}, "created": "2021-04-15T11:39:47.834000-07:00", "updated": "2021-04-15T11:39:47.834000-07:00", "started": "2021-04-14T18:48:52.747000-07:00", "timeSpent": "2h 50m", "timeSpentSeconds": 10200, "id": "11710", "issueId": "10080"}]}}, "projectId": "10000", "projectKey": "IT", "created": "2021-03-11T06:17:33.169000-08:00", "updated": "2023-04-05T04:58:35.329000-07:00"}, "emitted_at": 1709025497894} +{"stream": "issues", "data": {"expand": "customfield_10030.properties,operations,versionedRepresentations,editmeta,changelog,customfield_10029.properties,customfield_10010.requestTypePractice,transitions,renderedFields,customfield_10229.properties", "id": "10626", "self": "https://airbyteio.atlassian.net/rest/api/3/issue/10626", "key": "IT-26", "renderedFields": {"statuscategorychangedate": "17/May/22 4:28 AM", "timespent": "1 day", "aggregatetimespent": "1 day", "lastViewed": "02/Feb/24 7:46 AM", "created": "17/May/22 4:28 AM", "customfield_10017": "dark_yellow", "aggregatetimeoriginalestimate": "2 weeks, 4 days, 5 hours", "timeestimate": "1 week, 1 day", "updated": "12/Oct/23 1:43 PM", "timeoriginalestimate": "2 weeks, 4 days, 5 hours", "description": "

Implement OAUth

", "customfield_10011": "Test 2", "customfield_10013": "ghx-label-2", "timetracking": {"originalEstimate": "2 weeks, 4 days, 5 hours", "remainingEstimate": "1 week, 1 day", "timeSpent": "1 day", "originalEstimateSeconds": 421200, "remainingEstimateSeconds": 172800, "timeSpentSeconds": 28800}, "attachment": [], "aggregatetimeestimate": "1 week, 1 day", "environment": "", "comment": {"comments": [], "self": "https://airbyteio.atlassian.net/rest/api/3/issue/10626/comment", "maxResults": 0, "total": 0, "startAt": 0}, "worklog": {"startAt": 0, "maxResults": 20, "total": 1, "worklogs": [{"self": "https://airbyteio.atlassian.net/rest/api/3/issue/10626/worklog/11820", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=557058%3A295406f3-a1fc-4733-b906-dd15d021bd79", "accountId": "557058:295406f3-a1fc-4733-b906-dd15d021bd79", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png", "24x24": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png", "16x16": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png", "32x32": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png"}, "displayName": "Tempo Timesheets", "active": true, "timeZone": "America/Los_Angeles", "accountType": "app"}, "updateAuthor": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=557058%3A295406f3-a1fc-4733-b906-dd15d021bd79", "accountId": "557058:295406f3-a1fc-4733-b906-dd15d021bd79", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png", "24x24": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png", "16x16": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png", "32x32": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png"}, "displayName": "Tempo Timesheets", "active": true, "timeZone": "America/Los_Angeles", "accountType": "app"}, "created": "05/Apr/23 5:08 AM", "updated": "05/Apr/23 5:08 AM", "started": "05/Apr/23 1:00 AM", "timeSpent": "1 day", "id": "11820", "issueId": "10626"}]}}, "transitions": [{"id": "11", "name": "To Do", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10000", "description": "", "iconUrl": "https://airbyteio.atlassian.net/", "name": "To Do", "id": "10000", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/2", "id": 2, "key": "new", "colorName": "blue-gray", "name": "To Do"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "21", "name": "In Progress", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/3", "description": "This issue is being actively worked on 
at the moment by the assignee.", "iconUrl": "https://airbyteio.atlassian.net/images/icons/statuses/inprogress.png", "name": "In Progress", "id": "3", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/4", "id": 4, "key": "indeterminate", "colorName": "yellow", "name": "In Progress"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "31", "name": "Done", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10001", "description": "", "iconUrl": "https://airbyteio.atlassian.net/", "name": "Done", "id": "10001", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/3", "id": 3, "key": "done", "colorName": "green", "name": "Done"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "41", "name": "Approved", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10005", "description": "This was auto-generated by Jira Service Management during workflow import", "iconUrl": "https://airbyteio.atlassian.net/images/icons/status_generic.gif", "name": "Approved", "id": "10005", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/3", "id": 3, "key": "done", "colorName": "green", "name": "Done"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "51", "name": "In review", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10004", "description": "This was auto-generated by Jira Service Management during workflow import", "iconUrl": "https://airbyteio.atlassian.net/images/icons/status_generic.gif", "name": "In review", "id": "10004", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/4", "id": 4, "key": "indeterminate", "colorName": "yellow", "name": "In Progress"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "61", "name": "Reopened", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/4", "description": "This issue was once resolved, but the resolution was deemed incorrect. 
From here issues are either marked assigned or resolved.", "iconUrl": "https://airbyteio.atlassian.net/images/icons/statuses/reopened.png", "name": "Reopened", "id": "4", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/2", "id": 2, "key": "new", "colorName": "blue-gray", "name": "To Do"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "71", "name": "Declined", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10002", "description": "This was auto-generated by Jira Service Management during workflow import", "iconUrl": "https://airbyteio.atlassian.net/images/icons/statuses/generic.png", "name": "Declined", "id": "10002", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/3", "id": 3, "key": "done", "colorName": "green", "name": "Done"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "81", "name": "Open", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/1", "description": "The issue is open and ready for the assignee to start work on it.", "iconUrl": "https://airbyteio.atlassian.net/images/icons/statuses/open.png", "name": "Open", "id": "1", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/2", "id": 2, "key": "new", "colorName": "blue-gray", "name": "To Do"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "91", "name": "Pending", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10003", "description": "This was auto-generated by Jira Service Management during workflow import", "iconUrl": "https://airbyteio.atlassian.net/images/icons/status_generic.gif", "name": "Pending", "id": "10003", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/4", "id": 4, "key": "indeterminate", "colorName": "yellow", "name": "In Progress"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}, {"id": "101", "name": "Closed", "to": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/6", "description": "The issue is considered finished, the resolution is correct. 
Issues which are closed can be reopened.", "iconUrl": "https://airbyteio.atlassian.net/images/icons/statuses/closed.png", "name": "Closed", "id": "6", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/3", "id": 3, "key": "done", "colorName": "green", "name": "Done"}}, "hasScreen": false, "isGlobal": true, "isInitial": false, "isAvailable": true, "isConditional": false, "isLooped": false}], "changelog": {"startAt": 0, "maxResults": 4, "total": 4, "histories": [{"id": "15198", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "2023-10-12T13:43:15.036-0700", "items": [{"field": "timeestimate", "fieldtype": "jira", "fieldId": "timeestimate", "from": null, "fromString": null, "to": "172800", "toString": "172800"}]}, {"id": "15197", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "2023-10-12T13:43:05.182-0700", "items": [{"field": "timeoriginalestimate", "fieldtype": "jira", "fieldId": "timeoriginalestimate", "from": null, "fromString": null, "to": "421200", "toString": "421200"}]}, {"id": "15186", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=557058%3A295406f3-a1fc-4733-b906-dd15d021bd79", "accountId": "557058:295406f3-a1fc-4733-b906-dd15d021bd79", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png", "24x24": 
"https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png", "16x16": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png", "32x32": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png"}, "displayName": "Tempo Timesheets", "active": true, "timeZone": "America/Los_Angeles", "accountType": "app"}, "created": "2023-04-05T05:08:50.115-0700", "items": [{"field": "timespent", "fieldtype": "jira", "fieldId": "timespent", "from": null, "fromString": null, "to": "28800", "toString": "28800"}, {"field": "WorklogId", "fieldtype": "jira", "from": null, "fromString": null, "to": "11820", "toString": "11820"}]}, {"id": "15128", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "2022-05-17T04:28:19.837-0700", "items": [{"field": "Link", "fieldtype": "jira", "from": null, "fromString": null, "to": "IT-25", "toString": "This issue clones IT-25"}]}]}, "fields": {"statuscategorychangedate": "2022-05-17T04:28:19.775000-07:00", "issuetype": {"self": "https://airbyteio.atlassian.net/rest/api/3/issuetype/10000", "id": "10000", "description": "A big user story that needs to be broken down. 
Created by Jira Software - do not edit or delete.", "iconUrl": "https://airbyteio.atlassian.net/images/icons/issuetypes/epic.svg", "name": "Epic", "subtask": false, "hierarchyLevel": 1}, "timespent": 28800, "project": {"self": "https://airbyteio.atlassian.net/rest/api/3/project/10000", "id": "10000", "key": "IT", "name": "integration-tests", "projectTypeKey": "software", "simplified": false, "avatarUrls": {"48x48": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10424", "24x24": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10424?size=small", "16x16": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10424?size=xsmall", "32x32": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10424?size=medium"}, "projectCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/projectCategory/10004", "id": "10004", "description": "Test Project Category 2", "name": "Test category 2"}}, "fixVersions": [], "aggregatetimespent": 28800, "workratio": 6, "issuerestriction": {"issuerestrictions": {}, "shouldDisplay": false}, "watches": {"self": "https://airbyteio.atlassian.net/rest/api/3/issue/IT-26/watchers", "watchCount": 1, "isWatching": true}, "lastViewed": "2024-02-02T07:46:21.824000-08:00", "created": "2022-05-17T04:28:19.523000-07:00", "priority": {"self": "https://airbyteio.atlassian.net/rest/api/3/priority/4", "iconUrl": "https://airbyteio.atlassian.net/images/icons/priorities/low.svg", "name": "Low", "id": "4"}, "labels": [], "customfield_10017": "dark_yellow", "customfield_10018": {"hasEpicLinkFieldDependency": false, "showField": false, "nonEditableReason": {"reason": "PLUGIN_LICENSE_ERROR", "message": "The Parent Link is only available to Jira Premium users."}}, "customfield_10217": [], "customfield_10019": "0|i00773:", "aggregatetimeoriginalestimate": 421200, "timeestimate": 172800, "versions": [], "issuelinks": [{"id": "10263", "self": "https://airbyteio.atlassian.net/rest/api/3/issueLink/10263", "type": {"id": "10001", "name": "Cloners", "inward": "is cloned by", "outward": "clones", "self": "https://airbyteio.atlassian.net/rest/api/3/issueLinkType/10001"}, "outwardIssue": {"id": "10625", "key": "IT-25", "self": "https://airbyteio.atlassian.net/rest/api/3/issue/10625", "fields": {"summary": "Aggregate issues", "status": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10000", "description": "", "iconUrl": "https://airbyteio.atlassian.net/", "name": "To Do", "id": "10000", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/2", "id": 2, "key": "new", "colorName": "blue-gray", "name": "To Do"}}, "priority": {"self": "https://airbyteio.atlassian.net/rest/api/3/priority/4", "iconUrl": "https://airbyteio.atlassian.net/images/icons/priorities/low.svg", "name": "Low", "id": "4"}, "issuetype": {"self": "https://airbyteio.atlassian.net/rest/api/3/issuetype/10000", "id": "10000", "description": "A big user story that needs to be broken down. 
Created by Jira Software - do not edit or delete.", "iconUrl": "https://airbyteio.atlassian.net/images/icons/issuetypes/epic.svg", "name": "Epic", "subtask": false, "hierarchyLevel": 1}}}}], "assignee": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "updated": "2023-10-12T13:43:15.025000-07:00", "status": {"self": "https://airbyteio.atlassian.net/rest/api/3/status/10000", "description": "", "iconUrl": "https://airbyteio.atlassian.net/", "name": "To Do", "id": "10000", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/statuscategory/2", "id": 2, "key": "new", "colorName": "blue-gray", "name": "To Do"}}, "components": [{"self": "https://airbyteio.atlassian.net/rest/api/3/component/10049", "id": "10049", "name": "Component 3", "description": "This is a Jira component"}], "timeoriginalestimate": 421200, "description": {"version": 1, "type": "doc", "content": [{"type": "paragraph", "content": [{"type": "text", "text": "Implement OAUth"}]}]}, "customfield_10011": "Test 2", "customfield_10012": {"self": "https://airbyteio.atlassian.net/rest/api/3/customFieldOption/10016", "value": "To Do", "id": "10016"}, "customfield_10013": "ghx-label-2", "timetracking": {"originalEstimate": "2w 4d 5h", "remainingEstimate": "1w 1d", "timeSpent": "1d", "originalEstimateSeconds": 421200, "remainingEstimateSeconds": 172800, "timeSpentSeconds": 28800}, "attachment": [], "aggregatetimeestimate": 172800, "summary": "CLONE - Aggregate issues", "creator": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "subtasks": [], "reporter": {"self": 
"https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "aggregateprogress": {"progress": 28800, "total": 201600, "percent": 14}, "progress": {"progress": 28800, "total": 201600, "percent": 14}, "votes": {"self": "https://airbyteio.atlassian.net/rest/api/3/issue/IT-26/votes", "votes": 0, "hasVoted": false}, "comment": {"comments": [], "self": "https://airbyteio.atlassian.net/rest/api/3/issue/10626/comment", "maxResults": 0, "total": 0, "startAt": 0}, "worklog": {"startAt": 0, "maxResults": 20, "total": 1, "worklogs": [{"self": "https://airbyteio.atlassian.net/rest/api/3/issue/10626/worklog/11820", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=557058%3A295406f3-a1fc-4733-b906-dd15d021bd79", "accountId": "557058:295406f3-a1fc-4733-b906-dd15d021bd79", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png", "24x24": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png", "16x16": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png", "32x32": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png"}, "displayName": "Tempo Timesheets", "active": true, "timeZone": "America/Los_Angeles", "accountType": "app"}, "updateAuthor": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=557058%3A295406f3-a1fc-4733-b906-dd15d021bd79", "accountId": "557058:295406f3-a1fc-4733-b906-dd15d021bd79", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png", "24x24": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png", "16x16": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png", "32x32": "https://secure.gravatar.com/avatar/182fc208a1a2e6cc41393ab6c9363d9c?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FTT-6.png"}, "displayName": "Tempo 
Timesheets", "active": true, "timeZone": "America/Los_Angeles", "accountType": "app"}, "comment": {"version": 1, "type": "doc", "content": [{"type": "paragraph", "content": [{"type": "text", "text": "time-tracking"}]}]}, "created": "2023-04-05T05:08:50.033000-07:00", "updated": "2023-04-05T05:08:50.033000-07:00", "started": "2023-04-05T01:00:00-07:00", "timeSpent": "1d", "timeSpentSeconds": 28800, "id": "11820", "issueId": "10626"}]}}, "projectId": "10000", "projectKey": "IT", "created": "2022-05-17T04:28:19.523000-07:00", "updated": "2023-10-12T13:43:15.025000-07:00"}, "emitted_at": 1709025497920} {"stream": "issue_comments", "data": {"self": "https://airbyteio.atlassian.net/rest/api/3/issue/10625/comment/10755", "id": "10755", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "body": {"version": 1, "type": "doc", "content": [{"type": "paragraph", "content": [{"type": "text", "text": "Closed"}]}]}, "updateAuthor": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "2022-05-17T04:06:55.076000-07:00", "updated": "2022-05-17T04:06:55.076000-07:00", "jsdPublic": true, "issueId": "IT-25"}, "emitted_at": 1697453253441} {"stream": "issue_comments", "data": {"self": "https://airbyteio.atlassian.net/rest/api/3/issue/10075/comment/10521", "id": "10521", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": 
"https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "body": {"type": "doc", "version": 1, "content": [{"type": "paragraph", "content": [{"text": "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque eget venenatis elit. Duis eu justo eget augue iaculis fermentum. Sed semper quam laoreet nisi egestas at posuere augue semper.", "type": "text"}]}]}, "updateAuthor": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "2021-04-14T14:32:43.099000-07:00", "updated": "2021-04-14T14:32:43.099000-07:00", "jsdPublic": true, "issueId": "IT-23"}, "emitted_at": 1697453254086} {"stream": "issue_comments", "data": {"self": "https://airbyteio.atlassian.net/rest/api/3/issue/10075/comment/10639", "id": "10639", "author": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "body": {"type": "doc", 
"version": 1, "content": [{"type": "paragraph", "content": [{"text": "Linked related issue!", "type": "text"}]}]}, "updateAuthor": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "timeZone": "America/Los_Angeles", "accountType": "atlassian"}, "created": "2021-04-15T00:08:48.998000-07:00", "updated": "2021-04-15T00:08:48.998000-07:00", "jsdPublic": true, "issueId": "IT-23"}, "emitted_at": 1697453254087} @@ -92,9 +92,9 @@ {"stream": "permissions", "data": {"key": "ADD_COMMENTS", "name": "Add Comments", "type": "PROJECT", "description": "Ability to comment on issues."}, "emitted_at": 1697453358813} {"stream": "permissions", "data": {"key": "ADMINISTER", "name": "Administer Jira", "type": "GLOBAL", "description": "Create and administer projects, issue types, fields, workflows, and schemes for all projects. Users with this permission can perform most administration tasks, except: managing users, importing data, and editing system email settings."}, "emitted_at": 1697453358814} {"stream": "permissions", "data": {"key": "ADMINISTER_PROJECTS", "name": "Administer Projects", "type": "PROJECT", "description": "Ability to administer a project in Jira."}, "emitted_at": 1697453358814} -{"stream": "permission_schemes", "data": {"expand": "permissions,user,group,projectRole,field,all", "id": 10056, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056", "name": "CAW software permission scheme", "description": "The permission scheme for Jira Software Free. 
In Free, any registered user can access and administer this project.", "permissions": [{"id": 14200, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14200", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "ADD_COMMENTS"}, {"id": 14201, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14201", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "ADMINISTER_PROJECTS"}, {"id": 14202, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14202", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "ASSIGNABLE_USER"}, {"id": 14203, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14203", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "ASSIGN_ISSUES"}, {"id": 14204, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14204", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "BROWSE_PROJECTS"}, {"id": 14205, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14205", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "CLOSE_ISSUES"}, {"id": 14206, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14206", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "CREATE_ATTACHMENTS"}, {"id": 14207, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14207", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "CREATE_ISSUES"}, {"id": 14208, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14208", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_ALL_ATTACHMENTS"}, {"id": 14209, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14209", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_ALL_COMMENTS"}, {"id": 14210, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14210", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_ALL_WORKLOGS"}, {"id": 14211, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14211", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_ISSUES"}, {"id": 14212, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14212", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_OWN_ATTACHMENTS"}, {"id": 14213, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14213", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_OWN_COMMENTS"}, {"id": 14214, "self": 
"https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14214", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_OWN_WORKLOGS"}, {"id": 14215, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14215", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "EDIT_ALL_COMMENTS"}, {"id": 14216, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14216", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "EDIT_ALL_WORKLOGS"}, {"id": 14217, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14217", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "EDIT_ISSUES"}, {"id": 14218, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14218", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "EDIT_OWN_COMMENTS"}, {"id": 14219, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14219", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "EDIT_OWN_WORKLOGS"}, {"id": 14220, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14220", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "LINK_ISSUES"}, {"id": 14221, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14221", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "MANAGE_SPRINTS_PERMISSION"}, {"id": 14222, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14222", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "MANAGE_WATCHERS"}, {"id": 14223, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14223", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "MODIFY_REPORTER"}, {"id": 14224, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14224", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "MOVE_ISSUES"}, {"id": 14225, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14225", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "RESOLVE_ISSUES"}, {"id": 14226, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14226", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "SCHEDULE_ISSUES"}, {"id": 14227, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14227", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "TRANSITION_ISSUES"}, {"id": 14228, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14228", "holder": {"type": "projectRole", "parameter": "10003", "value": 
"10003", "expand": "projectRole"}, "permission": "VIEW_DEV_TOOLS"}, {"id": 14229, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14229", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "VIEW_READONLY_WORKFLOW"}, {"id": 14230, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14230", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "VIEW_VOTERS_AND_WATCHERS"}, {"id": 14231, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14231", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "WORK_ON_ISSUES"}, {"id": 14232, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14232", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "io.tempo.jira__log-work-for-others"}, {"id": 14233, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14233", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "io.tempo.jira__set-billable-hours"}, {"id": 14234, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14234", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "io.tempo.jira__view-all-worklogs"}, {"id": 14235, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14235", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "io.tempo.jira__view-issue-hours"}, {"id": 14236, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14236", "holder": {"type": "applicationRole"}, "permission": "ADMINISTER_PROJECTS"}, {"id": 14237, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14237", "holder": {"type": "applicationRole"}, "permission": "BROWSE_PROJECTS"}, {"id": 14238, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14238", "holder": {"type": "applicationRole"}, "permission": "MANAGE_SPRINTS_PERMISSION"}, {"id": 14239, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14239", "holder": {"type": "applicationRole"}, "permission": "VIEW_DEV_TOOLS"}, {"id": 14240, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14240", "holder": {"type": "applicationRole"}, "permission": "VIEW_READONLY_WORKFLOW"}, {"id": 14241, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14241", "holder": {"type": "applicationRole"}, "permission": "ASSIGNABLE_USER"}, {"id": 14242, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14242", "holder": {"type": "applicationRole"}, "permission": "CLOSE_ISSUES"}, {"id": 14243, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14243", "holder": {"type": "applicationRole"}, "permission": "CREATE_ISSUES"}, {"id": 14244, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14244", "holder": {"type": "applicationRole"}, "permission": "DELETE_ISSUES"}, {"id": 14245, "self": 
"https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14245", "holder": {"type": "applicationRole"}, "permission": "EDIT_ISSUES"}, {"id": 14246, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14246", "holder": {"type": "applicationRole"}, "permission": "LINK_ISSUES"}, {"id": 14247, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14247", "holder": {"type": "applicationRole"}, "permission": "MODIFY_REPORTER"}, {"id": 14248, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14248", "holder": {"type": "applicationRole"}, "permission": "MOVE_ISSUES"}, {"id": 14249, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14249", "holder": {"type": "applicationRole"}, "permission": "RESOLVE_ISSUES"}, {"id": 14250, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14250", "holder": {"type": "applicationRole"}, "permission": "SCHEDULE_ISSUES"}, {"id": 14251, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14251", "holder": {"type": "applicationRole"}, "permission": "TRANSITION_ISSUES"}, {"id": 14252, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14252", "holder": {"type": "applicationRole"}, "permission": "MANAGE_WATCHERS"}, {"id": 14253, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14253", "holder": {"type": "applicationRole"}, "permission": "VIEW_VOTERS_AND_WATCHERS"}, {"id": 14254, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14254", "holder": {"type": "applicationRole"}, "permission": "ADD_COMMENTS"}, {"id": 14255, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14255", "holder": {"type": "applicationRole"}, "permission": "DELETE_ALL_COMMENTS"}, {"id": 14256, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14256", "holder": {"type": "applicationRole"}, "permission": "DELETE_OWN_COMMENTS"}, {"id": 14257, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14257", "holder": {"type": "applicationRole"}, "permission": "EDIT_ALL_COMMENTS"}, {"id": 14258, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14258", "holder": {"type": "applicationRole"}, "permission": "EDIT_OWN_COMMENTS"}, {"id": 14259, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14259", "holder": {"type": "applicationRole"}, "permission": "CREATE_ATTACHMENTS"}, {"id": 14260, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14260", "holder": {"type": "applicationRole"}, "permission": "DELETE_ALL_ATTACHMENTS"}, {"id": 14261, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14261", "holder": {"type": "applicationRole"}, "permission": "DELETE_OWN_ATTACHMENTS"}, {"id": 14262, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14262", "holder": {"type": "applicationRole"}, "permission": "DELETE_ALL_WORKLOGS"}, {"id": 14263, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14263", "holder": {"type": "applicationRole"}, "permission": "DELETE_OWN_WORKLOGS"}, {"id": 14264, "self": 
"https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14264", "holder": {"type": "applicationRole"}, "permission": "EDIT_ALL_WORKLOGS"}, {"id": 14265, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14265", "holder": {"type": "applicationRole"}, "permission": "EDIT_OWN_WORKLOGS"}, {"id": 14266, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14266", "holder": {"type": "applicationRole"}, "permission": "WORK_ON_ISSUES"}, {"id": 14267, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14267", "holder": {"type": "applicationRole"}, "permission": "ASSIGN_ISSUES"}, {"id": 14404, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14404", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "SET_ISSUE_SECURITY"}, {"id": 14481, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14481", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "VIEW_AGGREGATED_DATA"}, {"id": 14542, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14542", "holder": {"type": "applicationRole"}, "permission": "VIEW_AGGREGATED_DATA"}, {"id": 14714, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14714", "holder": {"type": "applicationRole"}, "permission": "VIEW_PROJECTS"}, {"id": 14715, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14715", "holder": {"type": "applicationRole"}, "permission": "VIEW_ISSUES"}, {"id": 14836, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14836", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "VIEW_PROJECTS"}, {"id": 14837, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14837", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "VIEW_ISSUES"}, {"id": 15117, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/15117", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "SERVICEDESK_AGENT"}]}, "emitted_at": 1697453360118} -{"stream": "permission_schemes", "data": {"expand": "permissions,user,group,projectRole,field,all", "id": 10055, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055", "name": "CLK software permission scheme", "description": "The permission scheme for Jira Software Free. 
In Free, any registered user can access and administer this project.", "permissions": [{"id": 14132, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14132", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "ADD_COMMENTS"}, {"id": 14133, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14133", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "ADMINISTER_PROJECTS"}, {"id": 14134, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14134", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "ASSIGNABLE_USER"}, {"id": 14135, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14135", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "ASSIGN_ISSUES"}, {"id": 14136, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14136", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "BROWSE_PROJECTS"}, {"id": 14137, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14137", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "CLOSE_ISSUES"}, {"id": 14138, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14138", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "CREATE_ATTACHMENTS"}, {"id": 14139, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14139", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "CREATE_ISSUES"}, {"id": 14140, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14140", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_ALL_ATTACHMENTS"}, {"id": 14141, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14141", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_ALL_COMMENTS"}, {"id": 14142, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14142", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_ALL_WORKLOGS"}, {"id": 14143, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14143", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_ISSUES"}, {"id": 14144, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14144", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_OWN_ATTACHMENTS"}, {"id": 14145, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14145", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_OWN_COMMENTS"}, {"id": 14146, "self": 
"https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14146", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_OWN_WORKLOGS"}, {"id": 14147, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14147", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "EDIT_ALL_COMMENTS"}, {"id": 14148, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14148", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "EDIT_ALL_WORKLOGS"}, {"id": 14149, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14149", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "EDIT_ISSUES"}, {"id": 14150, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14150", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "EDIT_OWN_COMMENTS"}, {"id": 14151, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14151", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "EDIT_OWN_WORKLOGS"}, {"id": 14152, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14152", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "LINK_ISSUES"}, {"id": 14153, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14153", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "MANAGE_SPRINTS_PERMISSION"}, {"id": 14154, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14154", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "MANAGE_WATCHERS"}, {"id": 14155, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14155", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "MODIFY_REPORTER"}, {"id": 14156, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14156", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "MOVE_ISSUES"}, {"id": 14157, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14157", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "RESOLVE_ISSUES"}, {"id": 14158, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14158", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "SCHEDULE_ISSUES"}, {"id": 14159, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14159", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "TRANSITION_ISSUES"}, {"id": 14160, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14160", "holder": {"type": "projectRole", "parameter": "10003", "value": 
"10003", "expand": "projectRole"}, "permission": "VIEW_DEV_TOOLS"}, {"id": 14161, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14161", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "VIEW_READONLY_WORKFLOW"}, {"id": 14162, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14162", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "VIEW_VOTERS_AND_WATCHERS"}, {"id": 14163, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14163", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "WORK_ON_ISSUES"}, {"id": 14164, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14164", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "io.tempo.jira__log-work-for-others"}, {"id": 14165, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14165", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "io.tempo.jira__set-billable-hours"}, {"id": 14166, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14166", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "io.tempo.jira__view-all-worklogs"}, {"id": 14167, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14167", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "io.tempo.jira__view-issue-hours"}, {"id": 14168, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14168", "holder": {"type": "applicationRole"}, "permission": "ADMINISTER_PROJECTS"}, {"id": 14169, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14169", "holder": {"type": "applicationRole"}, "permission": "BROWSE_PROJECTS"}, {"id": 14170, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14170", "holder": {"type": "applicationRole"}, "permission": "MANAGE_SPRINTS_PERMISSION"}, {"id": 14171, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14171", "holder": {"type": "applicationRole"}, "permission": "VIEW_DEV_TOOLS"}, {"id": 14172, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14172", "holder": {"type": "applicationRole"}, "permission": "VIEW_READONLY_WORKFLOW"}, {"id": 14173, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14173", "holder": {"type": "applicationRole"}, "permission": "ASSIGNABLE_USER"}, {"id": 14174, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14174", "holder": {"type": "applicationRole"}, "permission": "CLOSE_ISSUES"}, {"id": 14175, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14175", "holder": {"type": "applicationRole"}, "permission": "CREATE_ISSUES"}, {"id": 14176, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14176", "holder": {"type": "applicationRole"}, "permission": "DELETE_ISSUES"}, {"id": 14177, "self": 
"https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14177", "holder": {"type": "applicationRole"}, "permission": "EDIT_ISSUES"}, {"id": 14178, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14178", "holder": {"type": "applicationRole"}, "permission": "LINK_ISSUES"}, {"id": 14179, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14179", "holder": {"type": "applicationRole"}, "permission": "MODIFY_REPORTER"}, {"id": 14180, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14180", "holder": {"type": "applicationRole"}, "permission": "MOVE_ISSUES"}, {"id": 14181, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14181", "holder": {"type": "applicationRole"}, "permission": "RESOLVE_ISSUES"}, {"id": 14182, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14182", "holder": {"type": "applicationRole"}, "permission": "SCHEDULE_ISSUES"}, {"id": 14183, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14183", "holder": {"type": "applicationRole"}, "permission": "TRANSITION_ISSUES"}, {"id": 14184, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14184", "holder": {"type": "applicationRole"}, "permission": "MANAGE_WATCHERS"}, {"id": 14185, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14185", "holder": {"type": "applicationRole"}, "permission": "VIEW_VOTERS_AND_WATCHERS"}, {"id": 14186, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14186", "holder": {"type": "applicationRole"}, "permission": "ADD_COMMENTS"}, {"id": 14187, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14187", "holder": {"type": "applicationRole"}, "permission": "DELETE_ALL_COMMENTS"}, {"id": 14188, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14188", "holder": {"type": "applicationRole"}, "permission": "DELETE_OWN_COMMENTS"}, {"id": 14189, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14189", "holder": {"type": "applicationRole"}, "permission": "EDIT_ALL_COMMENTS"}, {"id": 14190, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14190", "holder": {"type": "applicationRole"}, "permission": "EDIT_OWN_COMMENTS"}, {"id": 14191, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14191", "holder": {"type": "applicationRole"}, "permission": "CREATE_ATTACHMENTS"}, {"id": 14192, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14192", "holder": {"type": "applicationRole"}, "permission": "DELETE_ALL_ATTACHMENTS"}, {"id": 14193, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14193", "holder": {"type": "applicationRole"}, "permission": "DELETE_OWN_ATTACHMENTS"}, {"id": 14194, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14194", "holder": {"type": "applicationRole"}, "permission": "DELETE_ALL_WORKLOGS"}, {"id": 14195, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14195", "holder": {"type": "applicationRole"}, "permission": "DELETE_OWN_WORKLOGS"}, {"id": 14196, "self": 
"https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14196", "holder": {"type": "applicationRole"}, "permission": "EDIT_ALL_WORKLOGS"}, {"id": 14197, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14197", "holder": {"type": "applicationRole"}, "permission": "EDIT_OWN_WORKLOGS"}, {"id": 14198, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14198", "holder": {"type": "applicationRole"}, "permission": "WORK_ON_ISSUES"}, {"id": 14199, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14199", "holder": {"type": "applicationRole"}, "permission": "ASSIGN_ISSUES"}, {"id": 14405, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14405", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "SET_ISSUE_SECURITY"}, {"id": 14482, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14482", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "VIEW_AGGREGATED_DATA"}, {"id": 14543, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14543", "holder": {"type": "applicationRole"}, "permission": "VIEW_AGGREGATED_DATA"}, {"id": 14712, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14712", "holder": {"type": "applicationRole"}, "permission": "VIEW_PROJECTS"}, {"id": 14713, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14713", "holder": {"type": "applicationRole"}, "permission": "VIEW_ISSUES"}, {"id": 14834, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14834", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "VIEW_PROJECTS"}, {"id": 14835, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14835", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "VIEW_ISSUES"}, {"id": 15118, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/15118", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "SERVICEDESK_AGENT"}]}, "emitted_at": 1697453360125} -{"stream": "permission_schemes", "data": {"expand": "permissions,user,group,projectRole,field,all", "id": 0, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0", "name": "Default Permission Scheme", "description": "This is the default Permission Scheme. 
Any new projects that are created will be assigned this scheme.", "permissions": [{"id": 10004, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10004", "holder": {"type": "projectRole", "parameter": "10002", "value": "10002", "expand": "projectRole"}, "permission": "ADMINISTER_PROJECTS"}, {"id": 10005, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10005", "holder": {"type": "applicationRole"}, "permission": "BROWSE_PROJECTS"}, {"id": 10006, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10006", "holder": {"type": "applicationRole"}, "permission": "CREATE_ISSUES"}, {"id": 10007, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10007", "holder": {"type": "applicationRole"}, "permission": "ADD_COMMENTS"}, {"id": 10008, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10008", "holder": {"type": "applicationRole"}, "permission": "CREATE_ATTACHMENTS"}, {"id": 10009, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10009", "holder": {"type": "applicationRole"}, "permission": "ASSIGN_ISSUES"}, {"id": 10010, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10010", "holder": {"type": "applicationRole"}, "permission": "ASSIGNABLE_USER"}, {"id": 10011, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10011", "holder": {"type": "applicationRole"}, "permission": "RESOLVE_ISSUES"}, {"id": 10012, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10012", "holder": {"type": "applicationRole"}, "permission": "LINK_ISSUES"}, {"id": 10013, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10013", "holder": {"type": "applicationRole"}, "permission": "EDIT_ISSUES"}, {"id": 10014, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10014", "holder": {"type": "projectRole", "parameter": "10002", "value": "10002", "expand": "projectRole"}, "permission": "DELETE_ISSUES"}, {"id": 10015, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10015", "holder": {"type": "applicationRole"}, "permission": "CLOSE_ISSUES"}, {"id": 10016, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10016", "holder": {"type": "applicationRole"}, "permission": "MOVE_ISSUES"}, {"id": 10017, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10017", "holder": {"type": "applicationRole"}, "permission": "SCHEDULE_ISSUES"}, {"id": 10018, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10018", "holder": {"type": "projectRole", "parameter": "10002", "value": "10002", "expand": "projectRole"}, "permission": "MODIFY_REPORTER"}, {"id": 10019, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10019", "holder": {"type": "applicationRole"}, "permission": "WORK_ON_ISSUES"}, {"id": 10020, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10020", "holder": {"type": "projectRole", "parameter": "10002", "value": "10002", "expand": "projectRole"}, "permission": "DELETE_ALL_WORKLOGS"}, {"id": 10021, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10021", "holder": {"type": "applicationRole"}, "permission": "DELETE_OWN_WORKLOGS"}, {"id": 10022, "self": 
"https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10022", "holder": {"type": "projectRole", "parameter": "10002", "value": "10002", "expand": "projectRole"}, "permission": "EDIT_ALL_WORKLOGS"}, {"id": 10023, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10023", "holder": {"type": "applicationRole"}, "permission": "EDIT_OWN_WORKLOGS"}, {"id": 10024, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10024", "holder": {"type": "applicationRole"}, "permission": "VIEW_VOTERS_AND_WATCHERS"}, {"id": 10025, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10025", "holder": {"type": "projectRole", "parameter": "10002", "value": "10002", "expand": "projectRole"}, "permission": "MANAGE_WATCHERS"}, {"id": 10026, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10026", "holder": {"type": "projectRole", "parameter": "10002", "value": "10002", "expand": "projectRole"}, "permission": "EDIT_ALL_COMMENTS"}, {"id": 10027, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10027", "holder": {"type": "applicationRole"}, "permission": "EDIT_OWN_COMMENTS"}, {"id": 10028, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10028", "holder": {"type": "projectRole", "parameter": "10002", "value": "10002", "expand": "projectRole"}, "permission": "DELETE_ALL_COMMENTS"}, {"id": 10029, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10029", "holder": {"type": "applicationRole"}, "permission": "DELETE_OWN_COMMENTS"}, {"id": 10030, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10030", "holder": {"type": "projectRole", "parameter": "10002", "value": "10002", "expand": "projectRole"}, "permission": "DELETE_ALL_ATTACHMENTS"}, {"id": 10031, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10031", "holder": {"type": "applicationRole"}, "permission": "DELETE_OWN_ATTACHMENTS"}, {"id": 10033, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10033", "holder": {"type": "applicationRole"}, "permission": "VIEW_DEV_TOOLS"}, {"id": 10200, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10200", "holder": {"type": "applicationRole"}, "permission": "VIEW_READONLY_WORKFLOW"}, {"id": 10300, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10300", "holder": {"type": "applicationRole"}, "permission": "TRANSITION_ISSUES"}, {"id": 10301, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10301", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "ADD_COMMENTS"}, {"id": 10302, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10302", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "ADMINISTER_PROJECTS"}, {"id": 10303, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10303", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "ASSIGNABLE_USER"}, {"id": 10304, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10304", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, 
"permission": "ASSIGN_ISSUES"}, {"id": 10305, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10305", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "BROWSE_PROJECTS"}, {"id": 10306, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10306", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "CLOSE_ISSUES"}, {"id": 10307, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10307", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "CREATE_ATTACHMENTS"}, {"id": 10308, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10308", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "CREATE_ISSUES"}, {"id": 10309, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10309", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_ALL_ATTACHMENTS"}, {"id": 10310, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10310", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_ALL_COMMENTS"}, {"id": 10311, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10311", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_ALL_WORKLOGS"}, {"id": 10312, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10312", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_ISSUES"}, {"id": 10313, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10313", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_OWN_ATTACHMENTS"}, {"id": 10314, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10314", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_OWN_COMMENTS"}, {"id": 10315, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10315", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_OWN_WORKLOGS"}, {"id": 10316, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10316", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "EDIT_ALL_COMMENTS"}, {"id": 10317, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10317", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "EDIT_ALL_WORKLOGS"}, {"id": 10318, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10318", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "EDIT_ISSUES"}, {"id": 10319, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10319", "holder": {"type": "projectRole", "parameter": "10003", 
"value": "10003", "expand": "projectRole"}, "permission": "EDIT_OWN_COMMENTS"}, {"id": 10320, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10320", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "EDIT_OWN_WORKLOGS"}, {"id": 10321, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10321", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "LINK_ISSUES"}, {"id": 10322, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10322", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "MANAGE_SPRINTS_PERMISSION"}, {"id": 10323, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10323", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "MANAGE_WATCHERS"}, {"id": 10324, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10324", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "MODIFY_REPORTER"}, {"id": 10325, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10325", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "MOVE_ISSUES"}, {"id": 10326, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10326", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "RESOLVE_ISSUES"}, {"id": 10327, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10327", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "SCHEDULE_ISSUES"}, {"id": 10328, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10328", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "SET_ISSUE_SECURITY"}, {"id": 10329, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10329", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "TRANSITION_ISSUES"}, {"id": 10330, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10330", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "VIEW_DEV_TOOLS"}, {"id": 10331, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10331", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "VIEW_READONLY_WORKFLOW"}, {"id": 10332, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10332", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "VIEW_VOTERS_AND_WATCHERS"}, {"id": 10333, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10333", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "WORK_ON_ISSUES"}, {"id": 10464, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10464", "holder": 
{"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "io.tempo.jira__log-work-for-others"}, {"id": 10465, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10465", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "io.tempo.jira__set-billable-hours"}, {"id": 10466, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10466", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "io.tempo.jira__view-all-worklogs"}, {"id": 10467, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10467", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "io.tempo.jira__view-issue-hours"}, {"id": 14538, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/14538", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "VIEW_AGGREGATED_DATA"}, {"id": 14599, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/14599", "holder": {"type": "applicationRole"}, "permission": "VIEW_AGGREGATED_DATA"}, {"id": 14600, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/14600", "holder": {"type": "applicationRole"}, "permission": "VIEW_PROJECTS"}, {"id": 14601, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/14601", "holder": {"type": "applicationRole"}, "permission": "VIEW_ISSUES"}, {"id": 14722, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/14722", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "VIEW_PROJECTS"}, {"id": 14723, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/14723", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "VIEW_ISSUES"}, {"id": 15119, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/15119", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "SERVICEDESK_AGENT"}]}, "emitted_at": 1697453360131} +{"stream": "permission_schemes", "data": {"expand": "permissions,user,group,projectRole,field,all", "id": 10056, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056", "name": "CAW software permission scheme", "description": "The permission scheme for Jira Software Free. 
In Free, any registered user can access and administer this project.", "permissions": [{"id": 14200, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14200", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "ADD_COMMENTS"}, {"id": 14201, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14201", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "ADMINISTER_PROJECTS"}, {"id": 14202, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14202", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "ASSIGNABLE_USER"}, {"id": 14203, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14203", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "ASSIGN_ISSUES"}, {"id": 14204, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14204", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "BROWSE_PROJECTS"}, {"id": 14205, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14205", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "CLOSE_ISSUES"}, {"id": 14206, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14206", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "CREATE_ATTACHMENTS"}, {"id": 14207, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14207", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "CREATE_ISSUES"}, {"id": 14208, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14208", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_ALL_ATTACHMENTS"}, {"id": 14209, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14209", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_ALL_COMMENTS"}, {"id": 14210, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14210", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_ALL_WORKLOGS"}, {"id": 14211, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14211", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_ISSUES"}, {"id": 14212, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14212", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_OWN_ATTACHMENTS"}, {"id": 14213, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14213", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_OWN_COMMENTS"}, {"id": 14214, "self": 
"https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14214", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_OWN_WORKLOGS"}, {"id": 14215, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14215", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "EDIT_ALL_COMMENTS"}, {"id": 14216, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14216", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "EDIT_ALL_WORKLOGS"}, {"id": 14217, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14217", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "EDIT_ISSUES"}, {"id": 14218, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14218", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "EDIT_OWN_COMMENTS"}, {"id": 14219, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14219", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "EDIT_OWN_WORKLOGS"}, {"id": 14220, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14220", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "LINK_ISSUES"}, {"id": 14221, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14221", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "MANAGE_SPRINTS_PERMISSION"}, {"id": 14222, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14222", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "MANAGE_WATCHERS"}, {"id": 14223, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14223", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "MODIFY_REPORTER"}, {"id": 14224, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14224", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "MOVE_ISSUES"}, {"id": 14225, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14225", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "RESOLVE_ISSUES"}, {"id": 14226, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14226", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "SCHEDULE_ISSUES"}, {"id": 14227, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14227", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "TRANSITION_ISSUES"}, {"id": 14228, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14228", "holder": {"type": "projectRole", "parameter": "10003", "value": 
"10003", "expand": "projectRole"}, "permission": "VIEW_DEV_TOOLS"}, {"id": 14229, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14229", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "VIEW_READONLY_WORKFLOW"}, {"id": 14230, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14230", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "VIEW_VOTERS_AND_WATCHERS"}, {"id": 14231, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14231", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "WORK_ON_ISSUES"}, {"id": 14232, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14232", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "io.tempo.jira__log-work-for-others"}, {"id": 14233, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14233", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "io.tempo.jira__set-billable-hours"}, {"id": 14234, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14234", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "io.tempo.jira__view-all-worklogs"}, {"id": 14235, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14235", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "io.tempo.jira__view-issue-hours"}, {"id": 14236, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14236", "holder": {"type": "applicationRole"}, "permission": "ADMINISTER_PROJECTS"}, {"id": 14237, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14237", "holder": {"type": "applicationRole"}, "permission": "BROWSE_PROJECTS"}, {"id": 14238, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14238", "holder": {"type": "applicationRole"}, "permission": "MANAGE_SPRINTS_PERMISSION"}, {"id": 14239, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14239", "holder": {"type": "applicationRole"}, "permission": "VIEW_DEV_TOOLS"}, {"id": 14240, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14240", "holder": {"type": "applicationRole"}, "permission": "VIEW_READONLY_WORKFLOW"}, {"id": 14241, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14241", "holder": {"type": "applicationRole"}, "permission": "ASSIGNABLE_USER"}, {"id": 14242, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14242", "holder": {"type": "applicationRole"}, "permission": "CLOSE_ISSUES"}, {"id": 14243, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14243", "holder": {"type": "applicationRole"}, "permission": "CREATE_ISSUES"}, {"id": 14244, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14244", "holder": {"type": "applicationRole"}, "permission": "DELETE_ISSUES"}, {"id": 14245, "self": 
"https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14245", "holder": {"type": "applicationRole"}, "permission": "EDIT_ISSUES"}, {"id": 14246, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14246", "holder": {"type": "applicationRole"}, "permission": "LINK_ISSUES"}, {"id": 14247, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14247", "holder": {"type": "applicationRole"}, "permission": "MODIFY_REPORTER"}, {"id": 14248, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14248", "holder": {"type": "applicationRole"}, "permission": "MOVE_ISSUES"}, {"id": 14249, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14249", "holder": {"type": "applicationRole"}, "permission": "RESOLVE_ISSUES"}, {"id": 14250, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14250", "holder": {"type": "applicationRole"}, "permission": "SCHEDULE_ISSUES"}, {"id": 14251, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14251", "holder": {"type": "applicationRole"}, "permission": "TRANSITION_ISSUES"}, {"id": 14252, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14252", "holder": {"type": "applicationRole"}, "permission": "MANAGE_WATCHERS"}, {"id": 14253, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14253", "holder": {"type": "applicationRole"}, "permission": "VIEW_VOTERS_AND_WATCHERS"}, {"id": 14254, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14254", "holder": {"type": "applicationRole"}, "permission": "ADD_COMMENTS"}, {"id": 14255, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14255", "holder": {"type": "applicationRole"}, "permission": "DELETE_ALL_COMMENTS"}, {"id": 14256, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14256", "holder": {"type": "applicationRole"}, "permission": "DELETE_OWN_COMMENTS"}, {"id": 14257, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14257", "holder": {"type": "applicationRole"}, "permission": "EDIT_ALL_COMMENTS"}, {"id": 14258, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14258", "holder": {"type": "applicationRole"}, "permission": "EDIT_OWN_COMMENTS"}, {"id": 14259, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14259", "holder": {"type": "applicationRole"}, "permission": "CREATE_ATTACHMENTS"}, {"id": 14260, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14260", "holder": {"type": "applicationRole"}, "permission": "DELETE_ALL_ATTACHMENTS"}, {"id": 14261, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14261", "holder": {"type": "applicationRole"}, "permission": "DELETE_OWN_ATTACHMENTS"}, {"id": 14262, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14262", "holder": {"type": "applicationRole"}, "permission": "DELETE_ALL_WORKLOGS"}, {"id": 14263, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14263", "holder": {"type": "applicationRole"}, "permission": "DELETE_OWN_WORKLOGS"}, {"id": 14264, "self": 
"https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14264", "holder": {"type": "applicationRole"}, "permission": "EDIT_ALL_WORKLOGS"}, {"id": 14265, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14265", "holder": {"type": "applicationRole"}, "permission": "EDIT_OWN_WORKLOGS"}, {"id": 14266, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14266", "holder": {"type": "applicationRole"}, "permission": "WORK_ON_ISSUES"}, {"id": 14267, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14267", "holder": {"type": "applicationRole"}, "permission": "ASSIGN_ISSUES"}, {"id": 14404, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14404", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "SET_ISSUE_SECURITY"}, {"id": 14481, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14481", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "VIEW_AGGREGATED_DATA"}, {"id": 14542, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/14542", "holder": {"type": "applicationRole"}, "permission": "VIEW_AGGREGATED_DATA"}, {"id": 15117, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10056/permission/15117", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "SERVICEDESK_AGENT"}]}, "emitted_at": 1709025744854} +{"stream": "permission_schemes", "data": {"expand": "permissions,user,group,projectRole,field,all", "id": 10055, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055", "name": "CLK software permission scheme", "description": "The permission scheme for Jira Software Free. 
In Free, any registered user can access and administer this project.", "permissions": [{"id": 14132, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14132", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "ADD_COMMENTS"}, {"id": 14133, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14133", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "ADMINISTER_PROJECTS"}, {"id": 14134, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14134", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "ASSIGNABLE_USER"}, {"id": 14135, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14135", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "ASSIGN_ISSUES"}, {"id": 14136, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14136", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "BROWSE_PROJECTS"}, {"id": 14137, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14137", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "CLOSE_ISSUES"}, {"id": 14138, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14138", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "CREATE_ATTACHMENTS"}, {"id": 14139, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14139", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "CREATE_ISSUES"}, {"id": 14140, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14140", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_ALL_ATTACHMENTS"}, {"id": 14141, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14141", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_ALL_COMMENTS"}, {"id": 14142, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14142", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_ALL_WORKLOGS"}, {"id": 14143, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14143", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_ISSUES"}, {"id": 14144, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14144", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_OWN_ATTACHMENTS"}, {"id": 14145, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14145", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_OWN_COMMENTS"}, {"id": 14146, "self": 
"https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14146", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_OWN_WORKLOGS"}, {"id": 14147, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14147", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "EDIT_ALL_COMMENTS"}, {"id": 14148, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14148", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "EDIT_ALL_WORKLOGS"}, {"id": 14149, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14149", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "EDIT_ISSUES"}, {"id": 14150, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14150", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "EDIT_OWN_COMMENTS"}, {"id": 14151, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14151", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "EDIT_OWN_WORKLOGS"}, {"id": 14152, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14152", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "LINK_ISSUES"}, {"id": 14153, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14153", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "MANAGE_SPRINTS_PERMISSION"}, {"id": 14154, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14154", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "MANAGE_WATCHERS"}, {"id": 14155, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14155", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "MODIFY_REPORTER"}, {"id": 14156, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14156", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "MOVE_ISSUES"}, {"id": 14157, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14157", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "RESOLVE_ISSUES"}, {"id": 14158, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14158", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "SCHEDULE_ISSUES"}, {"id": 14159, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14159", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "TRANSITION_ISSUES"}, {"id": 14160, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14160", "holder": {"type": "projectRole", "parameter": "10003", "value": 
"10003", "expand": "projectRole"}, "permission": "VIEW_DEV_TOOLS"}, {"id": 14161, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14161", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "VIEW_READONLY_WORKFLOW"}, {"id": 14162, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14162", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "VIEW_VOTERS_AND_WATCHERS"}, {"id": 14163, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14163", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "WORK_ON_ISSUES"}, {"id": 14164, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14164", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "io.tempo.jira__log-work-for-others"}, {"id": 14165, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14165", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "io.tempo.jira__set-billable-hours"}, {"id": 14166, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14166", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "io.tempo.jira__view-all-worklogs"}, {"id": 14167, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14167", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "io.tempo.jira__view-issue-hours"}, {"id": 14168, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14168", "holder": {"type": "applicationRole"}, "permission": "ADMINISTER_PROJECTS"}, {"id": 14169, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14169", "holder": {"type": "applicationRole"}, "permission": "BROWSE_PROJECTS"}, {"id": 14170, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14170", "holder": {"type": "applicationRole"}, "permission": "MANAGE_SPRINTS_PERMISSION"}, {"id": 14171, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14171", "holder": {"type": "applicationRole"}, "permission": "VIEW_DEV_TOOLS"}, {"id": 14172, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14172", "holder": {"type": "applicationRole"}, "permission": "VIEW_READONLY_WORKFLOW"}, {"id": 14173, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14173", "holder": {"type": "applicationRole"}, "permission": "ASSIGNABLE_USER"}, {"id": 14174, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14174", "holder": {"type": "applicationRole"}, "permission": "CLOSE_ISSUES"}, {"id": 14175, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14175", "holder": {"type": "applicationRole"}, "permission": "CREATE_ISSUES"}, {"id": 14176, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14176", "holder": {"type": "applicationRole"}, "permission": "DELETE_ISSUES"}, {"id": 14177, "self": 
"https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14177", "holder": {"type": "applicationRole"}, "permission": "EDIT_ISSUES"}, {"id": 14178, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14178", "holder": {"type": "applicationRole"}, "permission": "LINK_ISSUES"}, {"id": 14179, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14179", "holder": {"type": "applicationRole"}, "permission": "MODIFY_REPORTER"}, {"id": 14180, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14180", "holder": {"type": "applicationRole"}, "permission": "MOVE_ISSUES"}, {"id": 14181, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14181", "holder": {"type": "applicationRole"}, "permission": "RESOLVE_ISSUES"}, {"id": 14182, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14182", "holder": {"type": "applicationRole"}, "permission": "SCHEDULE_ISSUES"}, {"id": 14183, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14183", "holder": {"type": "applicationRole"}, "permission": "TRANSITION_ISSUES"}, {"id": 14184, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14184", "holder": {"type": "applicationRole"}, "permission": "MANAGE_WATCHERS"}, {"id": 14185, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14185", "holder": {"type": "applicationRole"}, "permission": "VIEW_VOTERS_AND_WATCHERS"}, {"id": 14186, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14186", "holder": {"type": "applicationRole"}, "permission": "ADD_COMMENTS"}, {"id": 14187, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14187", "holder": {"type": "applicationRole"}, "permission": "DELETE_ALL_COMMENTS"}, {"id": 14188, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14188", "holder": {"type": "applicationRole"}, "permission": "DELETE_OWN_COMMENTS"}, {"id": 14189, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14189", "holder": {"type": "applicationRole"}, "permission": "EDIT_ALL_COMMENTS"}, {"id": 14190, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14190", "holder": {"type": "applicationRole"}, "permission": "EDIT_OWN_COMMENTS"}, {"id": 14191, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14191", "holder": {"type": "applicationRole"}, "permission": "CREATE_ATTACHMENTS"}, {"id": 14192, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14192", "holder": {"type": "applicationRole"}, "permission": "DELETE_ALL_ATTACHMENTS"}, {"id": 14193, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14193", "holder": {"type": "applicationRole"}, "permission": "DELETE_OWN_ATTACHMENTS"}, {"id": 14194, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14194", "holder": {"type": "applicationRole"}, "permission": "DELETE_ALL_WORKLOGS"}, {"id": 14195, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14195", "holder": {"type": "applicationRole"}, "permission": "DELETE_OWN_WORKLOGS"}, {"id": 14196, "self": 
"https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14196", "holder": {"type": "applicationRole"}, "permission": "EDIT_ALL_WORKLOGS"}, {"id": 14197, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14197", "holder": {"type": "applicationRole"}, "permission": "EDIT_OWN_WORKLOGS"}, {"id": 14198, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14198", "holder": {"type": "applicationRole"}, "permission": "WORK_ON_ISSUES"}, {"id": 14199, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14199", "holder": {"type": "applicationRole"}, "permission": "ASSIGN_ISSUES"}, {"id": 14405, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14405", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "SET_ISSUE_SECURITY"}, {"id": 14482, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14482", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "VIEW_AGGREGATED_DATA"}, {"id": 14543, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/14543", "holder": {"type": "applicationRole"}, "permission": "VIEW_AGGREGATED_DATA"}, {"id": 15118, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/10055/permission/15118", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "SERVICEDESK_AGENT"}]}, "emitted_at": 1709025744861} +{"stream": "permission_schemes", "data": {"expand": "permissions,user,group,projectRole,field,all", "id": 0, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0", "name": "Default Permission Scheme", "description": "This is the default Permission Scheme. 
Any new projects that are created will be assigned this scheme.", "permissions": [{"id": 10004, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10004", "holder": {"type": "projectRole", "parameter": "10002", "value": "10002", "expand": "projectRole"}, "permission": "ADMINISTER_PROJECTS"}, {"id": 10005, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10005", "holder": {"type": "applicationRole"}, "permission": "BROWSE_PROJECTS"}, {"id": 10006, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10006", "holder": {"type": "applicationRole"}, "permission": "CREATE_ISSUES"}, {"id": 10007, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10007", "holder": {"type": "applicationRole"}, "permission": "ADD_COMMENTS"}, {"id": 10008, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10008", "holder": {"type": "applicationRole"}, "permission": "CREATE_ATTACHMENTS"}, {"id": 10009, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10009", "holder": {"type": "applicationRole"}, "permission": "ASSIGN_ISSUES"}, {"id": 10010, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10010", "holder": {"type": "applicationRole"}, "permission": "ASSIGNABLE_USER"}, {"id": 10011, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10011", "holder": {"type": "applicationRole"}, "permission": "RESOLVE_ISSUES"}, {"id": 10012, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10012", "holder": {"type": "applicationRole"}, "permission": "LINK_ISSUES"}, {"id": 10013, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10013", "holder": {"type": "applicationRole"}, "permission": "EDIT_ISSUES"}, {"id": 10014, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10014", "holder": {"type": "projectRole", "parameter": "10002", "value": "10002", "expand": "projectRole"}, "permission": "DELETE_ISSUES"}, {"id": 10015, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10015", "holder": {"type": "applicationRole"}, "permission": "CLOSE_ISSUES"}, {"id": 10016, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10016", "holder": {"type": "applicationRole"}, "permission": "MOVE_ISSUES"}, {"id": 10017, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10017", "holder": {"type": "applicationRole"}, "permission": "SCHEDULE_ISSUES"}, {"id": 10018, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10018", "holder": {"type": "projectRole", "parameter": "10002", "value": "10002", "expand": "projectRole"}, "permission": "MODIFY_REPORTER"}, {"id": 10019, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10019", "holder": {"type": "applicationRole"}, "permission": "WORK_ON_ISSUES"}, {"id": 10020, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10020", "holder": {"type": "projectRole", "parameter": "10002", "value": "10002", "expand": "projectRole"}, "permission": "DELETE_ALL_WORKLOGS"}, {"id": 10021, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10021", "holder": {"type": "applicationRole"}, "permission": "DELETE_OWN_WORKLOGS"}, {"id": 10022, "self": 
"https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10022", "holder": {"type": "projectRole", "parameter": "10002", "value": "10002", "expand": "projectRole"}, "permission": "EDIT_ALL_WORKLOGS"}, {"id": 10023, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10023", "holder": {"type": "applicationRole"}, "permission": "EDIT_OWN_WORKLOGS"}, {"id": 10024, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10024", "holder": {"type": "applicationRole"}, "permission": "VIEW_VOTERS_AND_WATCHERS"}, {"id": 10025, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10025", "holder": {"type": "projectRole", "parameter": "10002", "value": "10002", "expand": "projectRole"}, "permission": "MANAGE_WATCHERS"}, {"id": 10026, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10026", "holder": {"type": "projectRole", "parameter": "10002", "value": "10002", "expand": "projectRole"}, "permission": "EDIT_ALL_COMMENTS"}, {"id": 10027, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10027", "holder": {"type": "applicationRole"}, "permission": "EDIT_OWN_COMMENTS"}, {"id": 10028, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10028", "holder": {"type": "projectRole", "parameter": "10002", "value": "10002", "expand": "projectRole"}, "permission": "DELETE_ALL_COMMENTS"}, {"id": 10029, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10029", "holder": {"type": "applicationRole"}, "permission": "DELETE_OWN_COMMENTS"}, {"id": 10030, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10030", "holder": {"type": "projectRole", "parameter": "10002", "value": "10002", "expand": "projectRole"}, "permission": "DELETE_ALL_ATTACHMENTS"}, {"id": 10031, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10031", "holder": {"type": "applicationRole"}, "permission": "DELETE_OWN_ATTACHMENTS"}, {"id": 10033, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10033", "holder": {"type": "applicationRole"}, "permission": "VIEW_DEV_TOOLS"}, {"id": 10200, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10200", "holder": {"type": "applicationRole"}, "permission": "VIEW_READONLY_WORKFLOW"}, {"id": 10300, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10300", "holder": {"type": "applicationRole"}, "permission": "TRANSITION_ISSUES"}, {"id": 10301, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10301", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "ADD_COMMENTS"}, {"id": 10302, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10302", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "ADMINISTER_PROJECTS"}, {"id": 10303, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10303", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "ASSIGNABLE_USER"}, {"id": 10304, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10304", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, 
"permission": "ASSIGN_ISSUES"}, {"id": 10305, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10305", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "BROWSE_PROJECTS"}, {"id": 10306, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10306", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "CLOSE_ISSUES"}, {"id": 10307, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10307", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "CREATE_ATTACHMENTS"}, {"id": 10308, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10308", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "CREATE_ISSUES"}, {"id": 10309, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10309", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_ALL_ATTACHMENTS"}, {"id": 10310, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10310", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_ALL_COMMENTS"}, {"id": 10311, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10311", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_ALL_WORKLOGS"}, {"id": 10312, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10312", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_ISSUES"}, {"id": 10313, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10313", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_OWN_ATTACHMENTS"}, {"id": 10314, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10314", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_OWN_COMMENTS"}, {"id": 10315, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10315", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "DELETE_OWN_WORKLOGS"}, {"id": 10316, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10316", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "EDIT_ALL_COMMENTS"}, {"id": 10317, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10317", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "EDIT_ALL_WORKLOGS"}, {"id": 10318, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10318", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "EDIT_ISSUES"}, {"id": 10319, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10319", "holder": {"type": "projectRole", "parameter": "10003", 
"value": "10003", "expand": "projectRole"}, "permission": "EDIT_OWN_COMMENTS"}, {"id": 10320, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10320", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "EDIT_OWN_WORKLOGS"}, {"id": 10321, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10321", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "LINK_ISSUES"}, {"id": 10322, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10322", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "MANAGE_SPRINTS_PERMISSION"}, {"id": 10323, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10323", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "MANAGE_WATCHERS"}, {"id": 10324, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10324", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "MODIFY_REPORTER"}, {"id": 10325, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10325", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "MOVE_ISSUES"}, {"id": 10326, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10326", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "RESOLVE_ISSUES"}, {"id": 10327, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10327", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "SCHEDULE_ISSUES"}, {"id": 10328, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10328", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "SET_ISSUE_SECURITY"}, {"id": 10329, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10329", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "TRANSITION_ISSUES"}, {"id": 10330, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10330", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "VIEW_DEV_TOOLS"}, {"id": 10331, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10331", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "VIEW_READONLY_WORKFLOW"}, {"id": 10332, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10332", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "VIEW_VOTERS_AND_WATCHERS"}, {"id": 10333, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10333", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "WORK_ON_ISSUES"}, {"id": 10464, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10464", "holder": 
{"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "io.tempo.jira__log-work-for-others"}, {"id": 10465, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10465", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "io.tempo.jira__set-billable-hours"}, {"id": 10466, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10466", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "io.tempo.jira__view-all-worklogs"}, {"id": 10467, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/10467", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "io.tempo.jira__view-issue-hours"}, {"id": 14538, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/14538", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "VIEW_AGGREGATED_DATA"}, {"id": 14599, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/14599", "holder": {"type": "applicationRole"}, "permission": "VIEW_AGGREGATED_DATA"}, {"id": 15119, "self": "https://airbyteio.atlassian.net/rest/api/3/permissionscheme/0/permission/15119", "holder": {"type": "projectRole", "parameter": "10003", "value": "10003", "expand": "projectRole"}, "permission": "SERVICEDESK_AGENT"}]}, "emitted_at": 1709025744865} {"stream": "projects", "data": {"expand": "description,lead,issueTypes,url,projectKeys,permissions,insight", "self": "https://airbyteio.atlassian.net/rest/api/3/project/10000", "id": "10000", "key": "IT", "description": "Test", "lead": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "accountType": "atlassian", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true}, "name": "integration-tests", "avatarUrls": {"48x48": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10424", "24x24": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10424?size=small", "16x16": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10424?size=xsmall", "32x32": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10424?size=medium"}, "projectCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/projectCategory/10004", "id": "10004", "name": "Test category 2", "description": "Test Project Category 2"}, "projectTypeKey": "software", "simplified": false, "style": "classic", "isPrivate": 
false, "properties": {}}, "emitted_at": 1697453360572} {"stream": "projects", "data": {"expand": "description,lead,issueTypes,url,projectKeys,permissions,insight", "self": "https://airbyteio.atlassian.net/rest/api/3/project/10016", "id": "10016", "key": "TESTKEY13", "description": "Test project 13 description", "lead": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "accountType": "atlassian", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true}, "name": "Test project 13", "avatarUrls": {"48x48": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10425", "24x24": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10425?size=small", "16x16": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10425?size=xsmall", "32x32": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10425?size=medium"}, "projectCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/projectCategory/10000", "id": "10000", "name": "Category 1", "description": "Category 1"}, "projectTypeKey": "software", "simplified": false, "style": "classic", "isPrivate": false, "properties": {}}, "emitted_at": 1697453360573} {"stream": "projects", "data": {"expand": "description,lead,issueTypes,url,projectKeys,permissions,insight", "self": "https://airbyteio.atlassian.net/rest/api/3/project/10064", "id": "10064", "key": "TTMP2", "description": "", "lead": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "accountType": "atlassian", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true}, "name": "Test Team Managed Project 2", "avatarUrls": {"48x48": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10412", "24x24": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10412?size=small", "16x16": 
"https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10412?size=xsmall", "32x32": "https://airbyteio.atlassian.net/rest/api/3/universal_avatar/view/type/project/avatar/10412?size=medium"}, "projectCategory": {"self": "https://airbyteio.atlassian.net/rest/api/3/projectCategory/10000", "id": "10000", "name": "Category 1", "description": "Category 1"}, "projectTypeKey": "software", "simplified": true, "style": "next-gen", "isPrivate": false, "properties": {}, "entityId": "6fc48839-dfa5-487d-ad8f-8b540f1748d7", "uuid": "6fc48839-dfa5-487d-ad8f-8b540f1748d7"}, "emitted_at": 1697453360578} @@ -122,7 +122,7 @@ {"stream": "screens", "data": {"id": 1, "name": "Default Screen", "description": "Allows to update all system fields."}, "emitted_at": 1697453371057} {"stream": "screens", "data": {"id": 2, "name": "Workflow Screen", "description": "This screen is used in the workflow and enables you to assign issues"}, "emitted_at": 1697453371058} {"stream": "screens", "data": {"id": 3, "name": "Resolve Issue Screen", "description": "Allows to set resolution, change fix versions and assign an issue."}, "emitted_at": 1697453371058} -{"stream": "screen_tabs", "data": {"id": 10000, "name": "Field Tab", "screenId": 1}, "emitted_at": 1697453372487} +{"stream": "screen_tabs", "data": {"id": 10000, "name": "General", "screenId": 1}, "emitted_at": 1697453372487} {"stream": "screen_tabs", "data": {"id": 10148, "name": "Tab1", "screenId": 1}, "emitted_at": 1697453372488} {"stream": "screen_tabs", "data": {"id": 10149, "name": "Tab2", "screenId": 1}, "emitted_at": 1697453372489} {"stream": "screen_tab_fields", "data": {"id": "summary", "name": "Summary", "screenId": 1, "tabId": 10000}, "emitted_at": 1697453418442} @@ -134,9 +134,9 @@ {"stream": "sprints", "data": {"id": 2, "self": "https://airbyteio.atlassian.net/rest/agile/1.0/sprint/2", "state": "active", "name": "IT Sprint 1", "startDate": "2022-05-17T11:25:59.072000+00:00", "endDate": "2022-05-31T11:25:00+00:00", "createdDate": "2022-05-17T11:24:12.933000+00:00", "originBoardId": 1, "goal": "Deliver results", "boardId": 1}, "emitted_at": 1697453469489} {"stream": "sprints", "data": {"id": 3, "self": "https://airbyteio.atlassian.net/rest/agile/1.0/sprint/3", "state": "future", "name": "IT Sprint 2", "startDate": "2022-05-31T11:25:59.072000+00:00", "endDate": "2022-06-14T11:25:00+00:00", "createdDate": "2023-04-05T11:57:09.557000+00:00", "originBoardId": 1, "boardId": 1}, "emitted_at": 1697453469490} {"stream": "sprints", "data": {"id": 4, "self": "https://airbyteio.atlassian.net/rest/agile/1.0/sprint/4", "state": "future", "name": "IT Sprint 3", "startDate": "2022-06-14T11:25:59.072000+00:00", "endDate": "2022-06-28T11:25:00+00:00", "createdDate": "2023-04-05T11:57:30.379000+00:00", "originBoardId": 1, "boardId": 1}, "emitted_at": 1697453469490} -{"stream": "sprint_issues", "data": {"expand": "operations,versionedRepresentations,editmeta,changelog,renderedFields", "id": "2-10012", "self": "https://airbyteio.atlassian.net/rest/agile/1.0/issue/10012", "key": "IT-6", "fields": {"customfield_10016": null, "updated": "2023-10-12T13:30:02.307000-07:00", "created": "2021-03-11T06:14:18.085-0800", "status": {"self": "https://airbyteio.atlassian.net/rest/api/2/status/10000", "description": "", "iconUrl": "https://airbyteio.atlassian.net/", "name": "To Do", "id": "10000", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/2/statuscategory/2", "id": 2, "key": "new", "colorName": "blue-gray", "name": "To Do"}}}, "issueId": 
"10012", "sprintId": 2, "created": "2021-03-11T06:14:18.085000-08:00", "updated": "2023-10-12T13:30:02.307000-07:00"}, "emitted_at": 1697453471411} -{"stream": "sprint_issues", "data": {"expand": "operations,versionedRepresentations,editmeta,changelog,renderedFields", "id": "2-10019", "self": "https://airbyteio.atlassian.net/rest/agile/1.0/issue/10019", "key": "IT-9", "fields": {"customfield_10016": null, "updated": "2023-04-05T04:57:18.118000-07:00", "created": "2021-03-11T06:14:24.791-0800", "status": {"self": "https://airbyteio.atlassian.net/rest/api/2/status/10000", "description": "", "iconUrl": "https://airbyteio.atlassian.net/", "name": "To Do", "id": "10000", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/2/statuscategory/2", "id": 2, "key": "new", "colorName": "blue-gray", "name": "To Do"}}}, "issueId": "10019", "sprintId": 2, "created": "2021-03-11T06:14:24.791000-08:00", "updated": "2023-04-05T04:57:18.118000-07:00"}, "emitted_at": 1697453471413} -{"stream": "sprint_issues", "data": {"expand": "operations,versionedRepresentations,editmeta,changelog,renderedFields", "id": "2-10000", "self": "https://airbyteio.atlassian.net/rest/agile/1.0/issue/10000", "key": "IT-1", "fields": {"customfield_10016": null, "updated": "2022-05-17T04:26:28.885000-07:00", "created": "2020-12-07T06:12:17.863-0800", "status": {"self": "https://airbyteio.atlassian.net/rest/api/2/status/10001", "description": "", "iconUrl": "https://airbyteio.atlassian.net/", "name": "Done", "id": "10001", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/2/statuscategory/3", "id": 3, "key": "done", "colorName": "green", "name": "Done"}}, "customfield_10026": null}, "issueId": "10000", "sprintId": 2, "created": "2020-12-07T06:12:17.863000-08:00", "updated": "2022-05-17T04:26:28.885000-07:00"}, "emitted_at": 1697453471414} +{"stream": "sprint_issues", "data": {"expand": "operations,versionedRepresentations,editmeta,changelog,renderedFields", "id": "2-10012", "self": "https://airbyteio.atlassian.net/rest/agile/1.0/issue/10012", "key": "IT-6", "fields": {"customfield_10016": null, "updated": "2023-10-12T13:30:02.307000-07:00", "created": "2021-03-11T06:14:18.085000-08:00", "status": {"self": "https://airbyteio.atlassian.net/rest/api/2/status/10000", "description": "", "iconUrl": "https://airbyteio.atlassian.net/", "name": "To Do", "id": "10000", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/2/statuscategory/2", "id": 2, "key": "new", "colorName": "blue-gray", "name": "To Do"}}}, "issueId": "10012", "sprintId": 2, "created": "2021-03-11T06:14:18.085000-08:00", "updated": "2023-10-12T13:30:02.307000-07:00"}, "emitted_at": 1709025995081} +{"stream": "sprint_issues", "data": {"expand": "operations,versionedRepresentations,editmeta,changelog,renderedFields", "id": "2-10019", "self": "https://airbyteio.atlassian.net/rest/agile/1.0/issue/10019", "key": "IT-9", "fields": {"customfield_10016": null, "updated": "2023-04-05T04:57:18.118000-07:00", "created": "2021-03-11T06:14:24.791000-08:00", "status": {"self": "https://airbyteio.atlassian.net/rest/api/2/status/10000", "description": "", "iconUrl": "https://airbyteio.atlassian.net/", "name": "To Do", "id": "10000", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/2/statuscategory/2", "id": 2, "key": "new", "colorName": "blue-gray", "name": "To Do"}}}, "issueId": "10019", "sprintId": 2, "created": "2021-03-11T06:14:24.791000-08:00", "updated": "2023-04-05T04:57:18.118000-07:00"}, "emitted_at": 
1709025995086} +{"stream": "sprint_issues", "data": {"expand": "operations,versionedRepresentations,editmeta,changelog,renderedFields", "id": "2-10000", "self": "https://airbyteio.atlassian.net/rest/agile/1.0/issue/10000", "key": "IT-1", "fields": {"customfield_10016": null, "updated": "2022-05-17T04:26:28.885000-07:00", "created": "2020-12-07T06:12:17.863000-08:00", "status": {"self": "https://airbyteio.atlassian.net/rest/api/2/status/10001", "description": "", "iconUrl": "https://airbyteio.atlassian.net/", "name": "Done", "id": "10001", "statusCategory": {"self": "https://airbyteio.atlassian.net/rest/api/2/statuscategory/3", "id": 3, "key": "done", "colorName": "green", "name": "Done"}}, "customfield_10026": null}, "issueId": "10000", "sprintId": 2, "created": "2020-12-07T06:12:17.863000-08:00", "updated": "2022-05-17T04:26:28.885000-07:00"}, "emitted_at": 1709025995087} {"stream": "time_tracking", "data": {"key": "JIRA", "name": "JIRA provided time tracking"}, "emitted_at": 1697453477445} {"stream": "time_tracking", "data": {"key": "is.origo.jira.tempo-plugin__timetracking-provider", "name": "Tempo Timesheets"}, "emitted_at": 1697453477446} {"stream": "users", "data": {"self": "https://airbyteio.atlassian.net/rest/api/3/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4", "accountType": "atlassian", "emailAddress": "integration-test@airbyte.io", "avatarUrls": {"48x48": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "24x24": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "16x16": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png", "32x32": "https://secure.gravatar.com/avatar/0a7841feac7218131ce7b427283c24ef?d=https%3A%2F%2Favatar-management--avatars.us-west-2.prod.public.atl-paas.net%2Finitials%2FIT-5.png"}, "displayName": "integration test", "active": true, "locale": "en_US"}, "emitted_at": 1697453477834} diff --git a/airbyte-integrations/connectors/source-jira/metadata.yaml b/airbyte-integrations/connectors/source-jira/metadata.yaml index 654ccb7759c9..2c97a3658351 100644 --- a/airbyte-integrations/connectors/source-jira/metadata.yaml +++ b/airbyte-integrations/connectors/source-jira/metadata.yaml @@ -10,7 +10,7 @@ data: connectorSubtype: api connectorType: source definitionId: 68e63de2-bb83-4c7e-93fa-a8a9051e3993 - dockerImageTag: 1.0.2 + dockerImageTag: 1.1.0 dockerRepository: airbyte/source-jira documentationUrl: https://docs.airbyte.com/integrations/sources/jira githubIssueLabel: source-jira diff --git a/airbyte-integrations/connectors/source-jira/pyproject.toml b/airbyte-integrations/connectors/source-jira/pyproject.toml index d025905d1811..a8b5cd007aca 100644 --- a/airbyte-integrations/connectors/source-jira/pyproject.toml +++ b/airbyte-integrations/connectors/source-jira/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "1.0.2" +version = "1.1.0" name = "source-jira" description = "Source implementation for Jira." 
authors = [ "Airbyte ",] diff --git a/airbyte-integrations/connectors/source-jira/source_jira/schemas/board_issues.json b/airbyte-integrations/connectors/source-jira/source_jira/schemas/board_issues.json index 7005d03e3d5e..9ad5cbd716fc 100644 --- a/airbyte-integrations/connectors/source-jira/source_jira/schemas/board_issues.json +++ b/airbyte-integrations/connectors/source-jira/source_jira/schemas/board_issues.json @@ -47,6 +47,10 @@ "type": "object" } }, + "created": { + "type": ["null", "string"], + "format": "date-time" + }, "updated": { "type": ["null", "string"], "format": "date-time" diff --git a/airbyte-integrations/connectors/source-jira/source_jira/schemas/filter_sharing.json b/airbyte-integrations/connectors/source-jira/source_jira/schemas/filter_sharing.json index e3c19d2e8876..9669174b65a5 100644 --- a/airbyte-integrations/connectors/source-jira/source_jira/schemas/filter_sharing.json +++ b/airbyte-integrations/connectors/source-jira/source_jira/schemas/filter_sharing.json @@ -2169,6 +2169,9 @@ "description": "The group that the filter is shared with. For a request, specify the `name` property for the group.", "type": "object", "properties": { + "groupId": { + "type": ["null", "string"] + }, "name": { "type": "string", "description": "The name of group." diff --git a/airbyte-integrations/connectors/source-jira/source_jira/schemas/filters.json b/airbyte-integrations/connectors/source-jira/source_jira/schemas/filters.json index 989078b38a16..713095601e17 100644 --- a/airbyte-integrations/connectors/source-jira/source_jira/schemas/filters.json +++ b/airbyte-integrations/connectors/source-jira/source_jira/schemas/filters.json @@ -2454,6 +2454,9 @@ "description": "The group that the filter is shared with. For a request, specify the `name` property for the group.", "type": "object", "properties": { + "groupId": { + "type": ["null", "string"] + }, "name": { "type": "string", "description": "The name of group." @@ -2472,6 +2475,10 @@ "isWritable": { "type": "boolean" }, + "approximateLastUsed": { + "type": ["null", "string"], + "format": "date-time" + }, "subscriptions": { "type": "array", "description": "The users that are subscribed to the filter.", diff --git a/airbyte-integrations/connectors/source-jira/source_jira/schemas/issues.json b/airbyte-integrations/connectors/source-jira/source_jira/schemas/issues.json index 1af2ea566e43..865b5ed8e446 100644 --- a/airbyte-integrations/connectors/source-jira/source_jira/schemas/issues.json +++ b/airbyte-integrations/connectors/source-jira/source_jira/schemas/issues.json @@ -509,6 +509,9 @@ "items": { "type": ["null", "object"], "properties": { + "fieldId": { + "type": ["null", "string"] + }, "field": { "type": ["null", "string"] }, @@ -526,6 +529,12 @@ }, "toString": { "type": ["null", "string"] + }, + "tmpFromAccountId": { + "type": ["null", "string"] + }, + "tmpToAccountId": { + "type": ["null", "string"] } } } @@ -550,12 +559,681 @@ "created": { "type": ["string", "null"], "format": "date-time", - "description": "This field is not shown in schema / swagger, but exists in records and we use it as cursor fiekd. Updated may be absent. Added to solve the #4341" + "description": "This field is not shown in schema / swagger, but exists in records and we use it as cursor field. Updated may be absent. Added to solve the #4341" }, "updated": { "type": ["string", "null"], "format": "date-time", - "description": "This field is not shown in schema / swagger, but exists in records and we use it as cursor fiekd. Updated may be absent. 
Added to solve the #4341" + "description": "This field is not shown in schema / swagger, but exists in records and we use it as cursor field. Updated may be absent. Added to solve the #4341" + }, + "aggregateprogress": { + "type": ["null", "object"], + "properties": { + "percent": { + "type": ["null", "integer"] + }, + "progress": { + "type": ["null", "integer"] + }, + "total": { + "type": ["null", "integer"] + } + } + }, + "assignee": { + "type": ["null", "object"], + "properties": { + "accountId": { + "type": ["null", "string"] + }, + "accountType": { + "type": ["null", "string"] + }, + "active": { + "type": ["null", "boolean"] + }, + "avatarUrls": { + "type": ["null", "object"], + "properties": { + "16x16": { + "type": ["null", "string"] + }, + "24x24": { + "type": ["null", "string"] + }, + "32x32": { + "type": ["null", "string"] + }, + "48x48": { + "type": ["null", "string"] + } + } + }, + "displayName": { + "type": ["null", "string"] + }, + "emailAddress": { + "type": ["null", "string"] + }, + "self": { + "type": ["null", "string"] + }, + "timeZone": { + "type": ["null", "string"] + } + } + }, + "aggregatetimeestimate": { + "type": ["null", "integer"] + }, + "aggregatetimeoriginalestimate": { + "type": ["null", "integer"] + }, + "aggregatetimespent": { + "type": ["null", "integer"] + }, + "attachment": { + "type": ["null", "array"] + }, + "comment": { + "type": ["null", "object"], + "properties": { + "comments": { + "type": ["null", "array"] + }, + "maxResults": { + "type": ["null", "integer"] + }, + "self": { + "type": ["null", "string"] + }, + "startAt": { + "type": ["null", "integer"] + }, + "total": { + "type": ["null", "integer"] + } + } + }, + "components": { + "type": ["null", "array"] + }, + "creator": { + "type": ["null", "object"], + "properties": { + "accountId": { + "type": ["null", "string"] + }, + "accountType": { + "type": ["null", "string"] + }, + "active": { + "type": ["null", "boolean"] + }, + "avatarUrls": { + "type": ["null", "object"], + "properties": { + "16x16": { + "type": ["null", "string"] + }, + "24x24": { + "type": ["null", "string"] + }, + "32x32": { + "type": ["null", "string"] + }, + "48x48": { + "type": ["null", "string"] + } + } + }, + "displayName": { + "type": ["null", "string"] + }, + "emailAddress": { + "type": ["null", "string"] + }, + "self": { + "type": ["null", "string"] + }, + "timeZone": { + "type": ["null", "string"] + } + } + }, + "description": { + "type": ["null", "object"], + "properties": { + "content": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "content": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "text": { + "type": ["null", "string"] + }, + "type": { + "type": ["null", "string"] + } + } + } + }, + "type": { + "type": ["null", "string"] + } + } + } + }, + "type": { + "type": ["null", "string"] + }, + "version": { + "type": ["null", "integer"] + } + } + }, + "fixVersions": { + "type": ["null", "array"] + }, + "issuelinks": { + "type": ["null", "array"] + }, + "issuerestriction": { + "type": ["null", "object"], + "properties": { + "issuerestrictions": { + "type": ["null", "object"] + }, + "shouldDisplay": { + "type": ["null", "boolean"] + } + } + }, + "issuetype": { + "type": ["null", "object"], + "properties": { + "avatarId": { + "type": ["null", "integer"] + }, + "description": { + "type": ["null", "string"] + }, + "entityId": { + "type": ["null", "string"] + }, + "hierarchyLevel": { + "type": ["null", "integer"] + }, + "iconUrl": { + 
"type": ["null", "string"] + }, + "id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "self": { + "type": ["null", "string"] + }, + "subtask": { + "type": ["null", "boolean"] + } + } + }, + "labels": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + }, + "lastViewed": { + "type": ["null", "string"], + "format": "date-time" + }, + "priority": { + "type": ["null", "object"], + "properties": { + "iconUrl": { + "type": ["null", "string"] + }, + "id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "self": { + "type": ["null", "string"] + } + } + }, + "progress": { + "type": ["null", "object"], + "properties": { + "percent": { + "type": ["null", "integer"] + }, + "progress": { + "type": ["null", "integer"] + }, + "total": { + "type": ["null", "integer"] + } + } + }, + "project": { + "type": ["null", "object"], + "properties": { + "avatarUrls": { + "type": ["null", "object"], + "properties": { + "16x16": { + "type": ["null", "string"] + }, + "24x24": { + "type": ["null", "string"] + }, + "32x32": { + "type": ["null", "string"] + }, + "48x48": { + "type": ["null", "string"] + } + } + }, + "id": { + "type": ["null", "string"] + }, + "key": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "projectCategory": { + "type": ["null", "object"], + "properties": { + "description": { + "type": ["null", "string"] + }, + "id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "self": { + "type": ["null", "string"] + } + } + }, + "projectTypeKey": { + "type": ["null", "string"] + }, + "self": { + "type": ["null", "string"] + }, + "simplified": { + "type": ["null", "boolean"] + } + } + }, + "reporter": { + "type": ["null", "object"], + "properties": { + "accountId": { + "type": ["null", "string"] + }, + "accountType": { + "type": ["null", "string"] + }, + "active": { + "type": ["null", "boolean"] + }, + "avatarUrls": { + "type": ["null", "object"], + "properties": { + "16x16": { + "type": ["null", "string"] + }, + "24x24": { + "type": ["null", "string"] + }, + "32x32": { + "type": ["null", "string"] + }, + "48x48": { + "type": ["null", "string"] + } + } + }, + "displayName": { + "type": ["null", "string"] + }, + "emailAddress": { + "type": ["null", "string"] + }, + "self": { + "type": ["null", "string"] + }, + "timeZone": { + "type": ["null", "string"] + } + } + }, + "resolution": { + "type": ["null", "object"], + "properties": { + "description": { + "type": ["null", "string"] + }, + "id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "self": { + "type": ["null", "string"] + } + } + }, + "resolutiondate": { + "type": ["null", "string"], + "format": "date-time" + }, + "status": { + "type": ["null", "object"], + "properties": { + "description": { + "type": ["null", "string"] + }, + "iconUrl": { + "type": ["null", "string"] + }, + "id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "self": { + "type": ["null", "string"] + }, + "statusCategory": { + "type": ["null", "object"], + "properties": { + "self": { + "type": ["null", "string"] + }, + "id": { + "type": ["null", "integer"] + }, + "key": { + "type": ["null", "string"] + }, + "colorName": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + } + } + } + } + }, + "statuscategorychangedate": { + "type": ["null", "string"], + "format": "date-time" + }, + "subtasks": { + "type": ["null", "array"] 
+ }, + "summary": { + "type": ["null", "string"] + }, + "timeestimate": { + "type": ["null", "integer"] + }, + "timeoriginalestimate": { + "type": ["null", "integer"] + }, + "timespent": { + "type": ["null", "integer"] + }, + "timetracking": { + "type": ["null", "object"], + "properties": { + "remainingEstimate": { + "type": ["null", "string"] + }, + "remainingEstimateSeconds": { + "type": ["null", "integer"] + }, + "timeSpent": { + "type": ["null", "string"] + }, + "timeSpentSeconds": { + "type": ["null", "integer"] + }, + "originalEstimate": { + "type": ["null", "string"] + }, + "originalEstimateSeconds": { + "type": ["null", "integer"] + } + } + }, + "versions": { + "type": ["null", "array"] + }, + "votes": { + "type": ["null", "object"], + "properties": { + "hasVoted": { + "type": ["null", "boolean"] + }, + "self": { + "type": ["null", "string"] + }, + "votes": { + "type": ["null", "integer"] + } + } + }, + "watches": { + "type": ["null", "object"], + "properties": { + "isWatching": { + "type": ["null", "boolean"] + }, + "self": { + "type": ["null", "string"] + }, + "watchCount": { + "type": ["null", "integer"] + } + } + }, + "worklog": { + "type": ["null", "object"], + "properties": { + "maxResults": { + "type": ["null", "integer"] + }, + "startAt": { + "type": ["null", "integer"] + }, + "total": { + "type": ["null", "integer"] + }, + "worklogs": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "author": { + "type": ["null", "object"], + "properties": { + "accountId": { + "type": ["null", "string"] + }, + "accountType": { + "type": ["null", "string"] + }, + "active": { + "type": ["null", "boolean"] + }, + "avatarUrls": { + "type": ["null", "object"], + "properties": { + "16x16": { + "type": ["null", "string"] + }, + "24x24": { + "type": ["null", "string"] + }, + "32x32": { + "type": ["null", "string"] + }, + "48x48": { + "type": ["null", "string"] + } + } + }, + "displayName": { + "type": ["null", "string"] + }, + "emailAddress": { + "type": ["null", "string"] + }, + "self": { + "type": ["null", "string"] + }, + "timeZone": { + "type": ["null", "string"] + } + } + }, + "comment": { + "type": ["null", "object"], + "properties": { + "version": { + "type": ["null", "integer"] + }, + "type": { + "type": ["null", "string"] + }, + "content": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "type": { + "type": ["null", "string"] + }, + "content": { + "type": "array", + "items": { + "type": ["null", "object"], + "properties": { + "type": { + "type": ["null", "string"] + }, + "text": { + "type": ["null", "string"] + } + } + } + } + } + } + } + } + }, + "created": { + "type": ["null", "string"], + "format": "date-time" + }, + "started": { + "type": ["null", "string"], + "format": "date-time" + }, + "updated": { + "type": ["null", "string"], + "format": "date-time" + }, + "id": { + "type": ["null", "string"] + }, + "issueId": { + "type": ["null", "string"] + }, + "self": { + "type": ["null", "string"] + }, + "timeSpent": { + "type": ["null", "string"] + }, + "timeSpentSeconds": { + "type": ["null", "integer"] + }, + "updateAuthor": { + "type": ["null", "object"], + "properties": { + "accountId": { + "type": ["null", "string"] + }, + "accountType": { + "type": ["null", "string"] + }, + "active": { + "type": ["null", "boolean"] + }, + "avatarUrls": { + "type": ["null", "object"], + "properties": { + "16x16": { + "type": ["null", "string"] + }, + "24x24": { + "type": ["null", "string"] + }, + "32x32": { + 
"type": ["null", "string"] + }, + "48x48": { + "type": ["null", "string"] + } + } + }, + "displayName": { + "type": ["null", "string"] + }, + "emailAddress": { + "type": ["null", "string"] + }, + "self": { + "type": ["null", "string"] + }, + "timeZone": { + "type": ["null", "string"] + } + } + } + } + } + } + } + }, + "workratio": { + "type": ["null", "integer"] } }, "additionalProperties": true diff --git a/airbyte-integrations/connectors/source-jira/source_jira/schemas/permission_schemes.json b/airbyte-integrations/connectors/source-jira/source_jira/schemas/permission_schemes.json index 4cb128d0cc15..30d6b2927ad4 100644 --- a/airbyte-integrations/connectors/source-jira/source_jira/schemas/permission_schemes.json +++ b/airbyte-integrations/connectors/source-jira/source_jira/schemas/permission_schemes.json @@ -156,6 +156,9 @@ "type": "string", "description": "Expand options that include additional permission holder details in the response.", "readOnly": true + }, + "value": { + "type": ["null", "string"] } } }, diff --git a/airbyte-integrations/connectors/source-jira/source_jira/schemas/sprint_issues.json b/airbyte-integrations/connectors/source-jira/source_jira/schemas/sprint_issues.json index 3dbe298f2b9e..a26c830fde5a 100644 --- a/airbyte-integrations/connectors/source-jira/source_jira/schemas/sprint_issues.json +++ b/airbyte-integrations/connectors/source-jira/source_jira/schemas/sprint_issues.json @@ -47,12 +47,56 @@ "type": "object" } }, + "created": { + "type": ["null", "string"], + "format": "date-time" + }, "updated": { "type": ["null", "string"], "format": "date-time" }, "timetracking": { "type": ["null", "object"] + }, + "status": { + "type": ["null", "object"], + "properties": { + "description": { + "type": ["null", "string"] + }, + "iconUrl": { + "type": ["null", "string"] + }, + "id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "self": { + "type": ["null", "string"] + }, + "statusCategory": { + "type": ["null", "object"], + "properties": { + "colorName": { + "type": ["null", "string"] + }, + "id": { + "type": ["null", "integer"] + }, + "key": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "self": { + "type": ["null", "string"] + } + } + } + } } } }, diff --git a/airbyte-integrations/connectors/source-jira/source_jira/schemas/users_groups_detailed.json b/airbyte-integrations/connectors/source-jira/source_jira/schemas/users_groups_detailed.json index 520765178691..d8f790028ae6 100644 --- a/airbyte-integrations/connectors/source-jira/source_jira/schemas/users_groups_detailed.json +++ b/airbyte-integrations/connectors/source-jira/source_jira/schemas/users_groups_detailed.json @@ -90,6 +90,9 @@ "items": { "type": "object", "properties": { + "groupId": { + "type": ["null", "string"] + }, "name": { "type": "string", "description": "The name of group." diff --git a/airbyte-integrations/connectors/source-jira/source_jira/schemas/workflows.json b/airbyte-integrations/connectors/source-jira/source_jira/schemas/workflows.json index 3e6990ac18b6..5643d0595d6e 100644 --- a/airbyte-integrations/connectors/source-jira/source_jira/schemas/workflows.json +++ b/airbyte-integrations/connectors/source-jira/source_jira/schemas/workflows.json @@ -5,6 +5,9 @@ "id": { "type": "object", "properties": { + "entityId": { + "type": ["null", "string"] + }, "name": { "type": "string", "description": "The name of the workflow." 
diff --git a/airbyte-integrations/connectors/source-jira/unit_tests/test_date_time_transformer.py b/airbyte-integrations/connectors/source-jira/unit_tests/test_date_time_transformer.py index 263443b0c2f7..b0bbe6f80b1c 100644 --- a/airbyte-integrations/connectors/source-jira/unit_tests/test_date_time_transformer.py +++ b/airbyte-integrations/connectors/source-jira/unit_tests/test_date_time_transformer.py @@ -8,7 +8,7 @@ @pytest.mark.parametrize( - "origin_item,subschema,expected", + "origin_item,sub_schema,expected", [ ("2023-05-08T03:04:45.139-0700", {"type": "string", "format": "date-time"}, "2023-05-08T03:04:45.139000-07:00"), ("2022-10-31T09:00:00.594Z", {"type": "string", "format": "date-time"}, "2022-10-31T09:00:00.594000+00:00"), @@ -17,9 +17,20 @@ (1234, {"type": "integer"}, 1234), ], ) -def test_converting_date_to_date_time(origin_item, subschema, expected, config): +def test_converting_date_to_date_time(origin_item, sub_schema, expected, config): authenticator = SourceJira().get_authenticator(config=config) args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} stream = ApplicationRoles(**args) - actual = stream.transformer.default_convert(origin_item, subschema) + actual = stream.transformer.default_convert(origin_item, sub_schema) assert actual == expected + + +def test_converting_date_with_incorrect_format_returning_original_value(config, caplog): + sub_schema = {"type": "string", "format": "date-time"} + incorrectly_formatted_date = "incorrectly_formatted_date" + authenticator = SourceJira().get_authenticator(config=config) + args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} + stream = ApplicationRoles(**args) + actual = stream.transformer.default_convert(incorrectly_formatted_date, sub_schema) + assert actual == incorrectly_formatted_date + assert f"{incorrectly_formatted_date}: doesn't match expected format." in caplog.text diff --git a/docs/integrations/sources/jira.md b/docs/integrations/sources/jira.md index 4eaf8278a348..e742a19206e3 100644 --- a/docs/integrations/sources/jira.md +++ b/docs/integrations/sources/jira.md @@ -122,61 +122,62 @@ The Jira connector should not run into Jira API limitations under normal usage. ## CHANGELOG -| Version | Date | Pull Request | Subject | -|:--------|:-----------|:-----------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------| -| 1.0.2 | 2024-02-12 | [35160](https://github.com/airbytehq/airbyte/pull/35160) | Manage dependencies with Poetry. 
| -| 1.0.1 | 2024-01-24 | [34470](https://github.com/airbytehq/airbyte/pull/34470) | Add state checkpoint interval for all streams | -| 1.0.0 | 2024-01-01 | [33715](https://github.com/airbytehq/airbyte/pull/33715) | Save state for stream `Board Issues` per `board` | -| 0.14.1 | 2023-12-19 | [33625](https://github.com/airbytehq/airbyte/pull/33625) | Skip 404 error | -| 0.14.0 | 2023-12-15 | [33532](https://github.com/airbytehq/airbyte/pull/33532) | Add lookback window | -| 0.13.0 | 2023-12-12 | [33353](https://github.com/airbytehq/airbyte/pull/33353) | Fix check command to check access for all available streams | -| 0.12.0 | 2023-12-01 | [33011](https://github.com/airbytehq/airbyte/pull/33011) | Fix BoardIssues stream; increase number of retries for backoff policy to 10 | -| 0.11.0 | 2023-11-29 | [32927](https://github.com/airbytehq/airbyte/pull/32927) | Fix incremental syncs for stream Issues | -| 0.10.2 | 2023-10-26 | [31896](https://github.com/airbytehq/airbyte/pull/31896) | Provide better guidance when configuring the connector with an invalid domain | -| 0.10.1 | 2023-10-23 | [31702](https://github.com/airbytehq/airbyte/pull/31702) | Base image migration: remove Dockerfile and use the python-connector-base image | -| 0.10.0 | 2023-10-13 | [\#31385](https://github.com/airbytehq/airbyte/pull/31385) | Fixed `aggregatetimeoriginalestimate, timeoriginalestimate` field types for the `Issues` stream schema | -| 0.9.0 | 2023-09-26 | [\#30688](https://github.com/airbytehq/airbyte/pull/30688) | Added `createdDate` field to sprints schema, Removed `Expand Issues stream` from spec | -| 0.8.0 | 2023-09-26 | [\#30755](https://github.com/airbytehq/airbyte/pull/30755) | Add new streams: `Issue custom field options`, `IssueTypes`, `Project Roles` | -| 0.7.2 | 2023-09-19 | [\#30675](https://github.com/airbytehq/airbyte/pull/30675) | Ensure invalid URL does not trigger Sentry alert | -| 0.7.1 | 2023-09-19 | [\#30585](https://github.com/airbytehq/airbyte/pull/30585) | Add skip for 404 error in issue properties steam | -| 0.7.0 | 2023-09-17 | [\#30532](https://github.com/airbytehq/airbyte/pull/30532) | Add foreign key to stream record where it misseing | -| 0.6.3 | 2023-09-19 | [\#30515](https://github.com/airbytehq/airbyte/pull/30515) | Add transform for invalid date-time format, add 404 handling for check | -| 0.6.2 | 2023-09-19 | [\#30578](https://github.com/airbytehq/airbyte/pull/30578) | Fetch deleted and archived Projects | -| 0.6.1 | 2023-09-17 | [\#30550](https://github.com/airbytehq/airbyte/pull/30550) | Update `Issues` expand settings | -| 0.6.0 | 2023-09-17 | [\#30507](https://github.com/airbytehq/airbyte/pull/30507) | Add new stream `IssueTransitions` | -| 0.5.0 | 2023-09-14 | [\#29960](https://github.com/airbytehq/airbyte/pull/29960) | Add `boardId` to `sprints` stream | -| 0.3.14 | 2023-09-11 | [\#30297](https://github.com/airbytehq/airbyte/pull/30297) | Remove `requests` and `pendulum` from setup dependencies | -| 0.3.13 | 2023-09-01 | [\#30108](https://github.com/airbytehq/airbyte/pull/30108) | Skip 404 error for stream `IssueWatchers` | -| 0.3.12 | 2023-06-01 | [\#26652](https://github.com/airbytehq/airbyte/pull/26652) | Expand on `leads` for `projects` stream | -| 0.3.11 | 2023-06-01 | [\#26906](https://github.com/airbytehq/airbyte/pull/26906) | Handle project permissions error | -| 0.3.10 | 2023-05-26 | [\#26652](https://github.com/airbytehq/airbyte/pull/26652) | Fixed bug when `board` doesn't support `sprints` | -| 0.3.9 | 2023-05-16 | 
[\#26114](https://github.com/airbytehq/airbyte/pull/26114) | Update fields info in docs and spec, update to latest airbyte-cdk | -| 0.3.8 | 2023-05-04 | [\#25798](https://github.com/airbytehq/airbyte/pull/25798) | Add sprint info to `sprint_issues` and `sprints` streams for team-managed projects | -| 0.3.7 | 2023-04-18 | [\#25275](https://github.com/airbytehq/airbyte/pull/25275) | Add missing types to issues json schema | -| 0.3.6 | 2023-04-10 | [\#24636](https://github.com/airbytehq/airbyte/pull/24636) | Removed Connector Domain Pattern from Spec | -| 0.3.5 | 2023-04-05 | [\#24890](https://github.com/airbytehq/airbyte/pull/24890) | Fix streams "IssuePropertyKeys", "ScreenTabFields" | -| 0.3.4 | 2023-02-14 | [\#23006](https://github.com/airbytehq/airbyte/pull/23006) | Remove caching for `Issues` stream | -| 0.3.3 | 2023-01-04 | [\#20739](https://github.com/airbytehq/airbyte/pull/20739) | fix: check_connection fails if no projects are defined | -| 0.3.2 | 2022-12-23 | [\#20859](https://github.com/airbytehq/airbyte/pull/20859) | Fixed pagination for streams `issue_remote_links`, `sprints` | -| 0.3.1 | 2022-12-14 | [\#20128](https://github.com/airbytehq/airbyte/pull/20128) | Improved code to become beta | -| 0.3.0 | 2022-11-03 | [\#18901](https://github.com/airbytehq/airbyte/pull/18901) | Adds UserGroupsDetailed schema, fix Incremental normalization, add Incremental support for IssueComments, IssueWorklogs | -| 0.2.23 | 2022-10-28 | [\#18505](https://github.com/airbytehq/airbyte/pull/18505) | Correcting `max_results` bug introduced in connector stream | -| 0.2.22 | 2022-10-03 | [\#16944](https://github.com/airbytehq/airbyte/pull/16944) | Adds support for `max_results` to `users` stream | -| 0.2.21 | 2022-07-28 | [\#15135](https://github.com/airbytehq/airbyte/pull/15135) | Adds components to `fields` object on `issues` stream | -| 0.2.20 | 2022-05-25 | [\#13202](https://github.com/airbytehq/airbyte/pull/13202) | Adds resolutiondate to `fields` object on `issues` stream | -| 0.2.19 | 2022-05-04 | [\#10835](https://github.com/airbytehq/airbyte/pull/10835) | Change description for array fields | -| 0.2.18 | 2021-12-23 | [\#7378](https://github.com/airbytehq/airbyte/pull/7378) | Adds experimental endpoint Pull Request | -| 0.2.17 | 2021-12-23 | [\#9079](https://github.com/airbytehq/airbyte/pull/9079) | Update schema for `filters` stream + fix fetching `filters` stream | -| 0.2.16 | 2021-12-21 | [\#8999](https://github.com/airbytehq/airbyte/pull/8999) | Update connector fields title/description | -| 0.2.15 | 2021-11-01 | [\#7398](https://github.com/airbytehq/airbyte/pull/7398) | Add option to render fields in HTML format and fix sprint_issue ids | -| 0.2.14 | 2021-10-27 | [\#7408](https://github.com/airbytehq/airbyte/pull/7408) | Fix normalization step error. Fix schemas. Fix `acceptance-test-config.yml`. Fix `streams.py`. | -| 0.2.13 | 2021-10-20 | [\#7222](https://github.com/airbytehq/airbyte/pull/7222) | Source Jira: Make recently added configs optional for backwards compatibility | -| 0.2.12 | 2021-10-19 | [\#6621](https://github.com/airbytehq/airbyte/pull/6621) | Add Board, Epic, and Sprint streams | -| 0.2.11 | 2021-09-02 | [\#6523](https://github.com/airbytehq/airbyte/pull/6523) | Add cache and more streams \(boards and sprints\) | -| 0.2.9 | 2021-07-28 | [\#5426](https://github.com/airbytehq/airbyte/pull/5426) | Changed cursor field from fields.created to fields.updated for Issues stream. Made Issues worklogs stream full refresh. 
| -| 0.2.8 | 2021-07-28 | [\#4947](https://github.com/airbytehq/airbyte/pull/4947) | Source Jira: fixing schemas accordinately to response. | -| 0.2.7 | 2021-07-19 | [\#4817](https://github.com/airbytehq/airbyte/pull/4817) | Fixed `labels` schema properties issue. | -| 0.2.6 | 2021-06-15 | [\#4113](https://github.com/airbytehq/airbyte/pull/4113) | Fixed `user` stream with the correct endpoint and query param. | -| 0.2.5 | 2021-06-09 | [\#3973](https://github.com/airbytehq/airbyte/pull/3973) | Added `AIRBYTE_ENTRYPOINT` in base Docker image for Kubernetes support. | -| 0.2.4 | | | Implementing base_read acceptance test dived by stream groups. | -| 0.2.3 | | | Implementing incremental sync. Migrated to airbyte-cdk. Adding all available entities in Jira Cloud. | +| Version | Date | Pull Request | Subject | +|:--------|:-----------|:-----------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 1.1.0 | 2024-02-27 | [35656](https://github.com/airbytehq/airbyte/pull/35656) | Add new fields to streams `board_issues`, `filter_sharing`, `filters`, `issues`, `permission_schemes`, `sprint_issues`, `users_groups_detailed`, and `workflows` | +| 1.0.2 | 2024-02-12 | [35160](https://github.com/airbytehq/airbyte/pull/35160) | Manage dependencies with Poetry. | +| 1.0.1 | 2024-01-24 | [34470](https://github.com/airbytehq/airbyte/pull/34470) | Add state checkpoint interval for all streams | +| 1.0.0 | 2024-01-01 | [33715](https://github.com/airbytehq/airbyte/pull/33715) | Save state for stream `Board Issues` per `board` | +| 0.14.1 | 2023-12-19 | [33625](https://github.com/airbytehq/airbyte/pull/33625) | Skip 404 error | +| 0.14.0 | 2023-12-15 | [33532](https://github.com/airbytehq/airbyte/pull/33532) | Add lookback window | +| 0.13.0 | 2023-12-12 | [33353](https://github.com/airbytehq/airbyte/pull/33353) | Fix check command to check access for all available streams | +| 0.12.0 | 2023-12-01 | [33011](https://github.com/airbytehq/airbyte/pull/33011) | Fix BoardIssues stream; increase number of retries for backoff policy to 10 | +| 0.11.0 | 2023-11-29 | [32927](https://github.com/airbytehq/airbyte/pull/32927) | Fix incremental syncs for stream Issues | +| 0.10.2 | 2023-10-26 | [31896](https://github.com/airbytehq/airbyte/pull/31896) | Provide better guidance when configuring the connector with an invalid domain | +| 0.10.1 | 2023-10-23 | [31702](https://github.com/airbytehq/airbyte/pull/31702) | Base image migration: remove Dockerfile and use the python-connector-base image | +| 0.10.0 | 2023-10-13 | [\#31385](https://github.com/airbytehq/airbyte/pull/31385) | Fixed `aggregatetimeoriginalestimate, timeoriginalestimate` field types for the `Issues` stream schema | +| 0.9.0 | 2023-09-26 | [\#30688](https://github.com/airbytehq/airbyte/pull/30688) | Added `createdDate` field to sprints schema, Removed `Expand Issues stream` from spec | +| 0.8.0 | 2023-09-26 | [\#30755](https://github.com/airbytehq/airbyte/pull/30755) | Add new streams: `Issue custom field options`, `IssueTypes`, `Project Roles` | +| 0.7.2 | 2023-09-19 | [\#30675](https://github.com/airbytehq/airbyte/pull/30675) | Ensure invalid URL does not trigger Sentry alert | +| 0.7.1 | 2023-09-19 | [\#30585](https://github.com/airbytehq/airbyte/pull/30585) | Add skip for 404 error in issue properties stream | +| 0.7.0 | 2023-09-17 | [\#30532](https://github.com/airbytehq/airbyte/pull/30532) | Add 
foreign key to stream record where it is missing | +| 0.6.3 | 2023-09-19 | [\#30515](https://github.com/airbytehq/airbyte/pull/30515) | Add transform for invalid date-time format, add 404 handling for check | +| 0.6.2 | 2023-09-19 | [\#30578](https://github.com/airbytehq/airbyte/pull/30578) | Fetch deleted and archived Projects | +| 0.6.1 | 2023-09-17 | [\#30550](https://github.com/airbytehq/airbyte/pull/30550) | Update `Issues` expand settings | +| 0.6.0 | 2023-09-17 | [\#30507](https://github.com/airbytehq/airbyte/pull/30507) | Add new stream `IssueTransitions` | +| 0.5.0 | 2023-09-14 | [\#29960](https://github.com/airbytehq/airbyte/pull/29960) | Add `boardId` to `sprints` stream | +| 0.3.14 | 2023-09-11 | [\#30297](https://github.com/airbytehq/airbyte/pull/30297) | Remove `requests` and `pendulum` from setup dependencies | +| 0.3.13 | 2023-09-01 | [\#30108](https://github.com/airbytehq/airbyte/pull/30108) | Skip 404 error for stream `IssueWatchers` | +| 0.3.12 | 2023-06-01 | [\#26652](https://github.com/airbytehq/airbyte/pull/26652) | Expand on `leads` for `projects` stream | +| 0.3.11 | 2023-06-01 | [\#26906](https://github.com/airbytehq/airbyte/pull/26906) | Handle project permissions error | +| 0.3.10 | 2023-05-26 | [\#26652](https://github.com/airbytehq/airbyte/pull/26652) | Fixed bug when `board` doesn't support `sprints` | +| 0.3.9 | 2023-05-16 | [\#26114](https://github.com/airbytehq/airbyte/pull/26114) | Update fields info in docs and spec, update to latest airbyte-cdk | +| 0.3.8 | 2023-05-04 | [\#25798](https://github.com/airbytehq/airbyte/pull/25798) | Add sprint info to `sprint_issues` and `sprints` streams for team-managed projects | +| 0.3.7 | 2023-04-18 | [\#25275](https://github.com/airbytehq/airbyte/pull/25275) | Add missing types to issues json schema | +| 0.3.6 | 2023-04-10 | [\#24636](https://github.com/airbytehq/airbyte/pull/24636) | Removed Connector Domain Pattern from Spec | +| 0.3.5 | 2023-04-05 | [\#24890](https://github.com/airbytehq/airbyte/pull/24890) | Fix streams "IssuePropertyKeys", "ScreenTabFields" | +| 0.3.4 | 2023-02-14 | [\#23006](https://github.com/airbytehq/airbyte/pull/23006) | Remove caching for `Issues` stream | +| 0.3.3 | 2023-01-04 | [\#20739](https://github.com/airbytehq/airbyte/pull/20739) | fix: check_connection fails if no projects are defined | +| 0.3.2 | 2022-12-23 | [\#20859](https://github.com/airbytehq/airbyte/pull/20859) | Fixed pagination for streams `issue_remote_links`, `sprints` | +| 0.3.1 | 2022-12-14 | [\#20128](https://github.com/airbytehq/airbyte/pull/20128) | Improved code to become beta | +| 0.3.0 | 2022-11-03 | [\#18901](https://github.com/airbytehq/airbyte/pull/18901) | Adds UserGroupsDetailed schema, fix Incremental normalization, add Incremental support for IssueComments, IssueWorklogs | +| 0.2.23 | 2022-10-28 | [\#18505](https://github.com/airbytehq/airbyte/pull/18505) | Correcting `max_results` bug introduced in connector stream | +| 0.2.22 | 2022-10-03 | [\#16944](https://github.com/airbytehq/airbyte/pull/16944) | Adds support for `max_results` to `users` stream | +| 0.2.21 | 2022-07-28 | [\#15135](https://github.com/airbytehq/airbyte/pull/15135) | Adds components to `fields` object on `issues` stream | +| 0.2.20 | 2022-05-25 | [\#13202](https://github.com/airbytehq/airbyte/pull/13202) | Adds resolutiondate to `fields` object on `issues` stream | +| 0.2.19 | 2022-05-04 | [\#10835](https://github.com/airbytehq/airbyte/pull/10835) | Change description for array fields | +| 0.2.18 | 2021-12-23 | 
[\#7378](https://github.com/airbytehq/airbyte/pull/7378) | Adds experimental endpoint Pull Request | +| 0.2.17 | 2021-12-23 | [\#9079](https://github.com/airbytehq/airbyte/pull/9079) | Update schema for `filters` stream + fix fetching `filters` stream | +| 0.2.16 | 2021-12-21 | [\#8999](https://github.com/airbytehq/airbyte/pull/8999) | Update connector fields title/description | +| 0.2.15 | 2021-11-01 | [\#7398](https://github.com/airbytehq/airbyte/pull/7398) | Add option to render fields in HTML format and fix sprint_issue ids | +| 0.2.14 | 2021-10-27 | [\#7408](https://github.com/airbytehq/airbyte/pull/7408) | Fix normalization step error. Fix schemas. Fix `acceptance-test-config.yml`. Fix `streams.py`. | +| 0.2.13 | 2021-10-20 | [\#7222](https://github.com/airbytehq/airbyte/pull/7222) | Source Jira: Make recently added configs optional for backwards compatibility | +| 0.2.12 | 2021-10-19 | [\#6621](https://github.com/airbytehq/airbyte/pull/6621) | Add Board, Epic, and Sprint streams | +| 0.2.11 | 2021-09-02 | [\#6523](https://github.com/airbytehq/airbyte/pull/6523) | Add cache and more streams \(boards and sprints\) | +| 0.2.9 | 2021-07-28 | [\#5426](https://github.com/airbytehq/airbyte/pull/5426) | Changed cursor field from fields.created to fields.updated for Issues stream. Made Issues worklogs stream full refresh. | +| 0.2.8 | 2021-07-28 | [\#4947](https://github.com/airbytehq/airbyte/pull/4947) | Source Jira: fixing schemas according to the response. | +| 0.2.7 | 2021-07-19 | [\#4817](https://github.com/airbytehq/airbyte/pull/4817) | Fixed `labels` schema properties issue. | +| 0.2.6 | 2021-06-15 | [\#4113](https://github.com/airbytehq/airbyte/pull/4113) | Fixed `user` stream with the correct endpoint and query param. | +| 0.2.5 | 2021-06-09 | [\#3973](https://github.com/airbytehq/airbyte/pull/3973) | Added `AIRBYTE_ENTRYPOINT` in base Docker image for Kubernetes support. | +| 0.2.4 | | | Implementing base_read acceptance test divided by stream groups. | +| 0.2.3 | | | Implementing incremental sync. Migrated to airbyte-cdk. Adding all available entities in Jira Cloud. 
| From 4369cee477eb7aa0ffeca65f7e67276cb052cf88 Mon Sep 17 00:00:00 2001 From: Daryna Ishchenko <80129833+darynaishchenko@users.noreply.github.com> Date: Tue, 27 Feb 2024 14:46:06 +0200 Subject: [PATCH 002/172] :bug: Source Hubspot: make start date not required (#34597) --- .../source-hubspot/acceptance-test-config.yml | 59 +++++++++++++++++-- .../connectors/source-hubspot/metadata.yaml | 2 +- .../connectors/source-hubspot/pyproject.toml | 2 +- .../source-hubspot/source_hubspot/source.py | 9 ++- .../source-hubspot/source_hubspot/spec.yaml | 4 +- .../source-hubspot/unit_tests/test_source.py | 2 +- docs/integrations/sources/hubspot.md | 5 +- 7 files changed, 70 insertions(+), 13 deletions(-) diff --git a/airbyte-integrations/connectors/source-hubspot/acceptance-test-config.yml b/airbyte-integrations/connectors/source-hubspot/acceptance-test-config.yml index 4b052480a688..d0146f24d744 100644 --- a/airbyte-integrations/connectors/source-hubspot/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-hubspot/acceptance-test-config.yml @@ -60,6 +60,21 @@ acceptance_tests: - name: cars_web_analytics bypass_reason: Unable to populate ignored_fields: + engagements_tasks: + - name: hs_num_associated_* + bypass_reason: Floating value + - name: hs_object_source_* + bypass_reason: Floating value + - name: properties_hs_num_associated_* + bypass_reason: Floating value + - name: properties_hs_object_source_* + bypass_reason: Floating value + - name: properties_hs_object_source + bypass_reason: Floating value + - name: properties_hs_time_* + bypass_reason: Hubspot time depend on current time + - name: properties/hs_time_* + bypass_reason: Hubspot time depend on current time contact_lists: - name: ilsFilterBranch bypass_reason: Floating fields order @@ -185,11 +200,6 @@ acceptance_tests: bypass_reason: Hubspot time depend on current time - name: updatedAt bypass_reason: field changes too often - engagements_tasks: - - name: properties_hs_time_* - bypass_reason: Hubspot time depend on current time - - name: properties/hs_time_* - bypass_reason: Hubspot time depend on current time full_refresh: tests: - config_path: secrets/config.json @@ -228,6 +238,40 @@ acceptance_tests: bypass_reason: Hubspot time depend on current time - name: properties/hs_time_* bypass_reason: Hubspot time depend on current time + - config_path: secrets/config_oauth_no_start_date.json + configured_catalog_path: sample_files/full_refresh_oauth_catalog.json + ignored_fields: + contact_lists: + - name: ilsFilterBranch + bypass_reason: Floating fields order + companies: + - name: properties_hs_time_* + bypass_reason: Hubspot time depend on current time + - name: properties/hs_time_* + bypass_reason: Hubspot time depend on current time + contacts: + - name: properties_hs_time_* + bypass_reason: Hubspot time depend on current time + - name: properties/hs_time_* + bypass_reason: Hubspot time depend on current time + - name: properties/hs_v2_cumulative_time_* + bypass_reason: Hubspot time depend on current time + - name: properties/hs_v2_latest_time_* + bypass_reason: Hubspot time depend on current time + - name: properties_hs_v2_cumulative_time_* + bypass_reason: Hubspot time depend on current time + - name: properties_hs_v2_latest_time_* + bypass_reason: Hubspot time depend on current time + deals: + - name: properties_hs_time_* + bypass_reason: Hubspot time depend on current time + - name: properties/hs_time_* + bypass_reason: Hubspot time depend on current time + tickets: + - name: properties_hs_time_* + bypass_reason: 
Hubspot time depend on current time + - name: properties/hs_time_* + bypass_reason: Hubspot time depend on current time incremental: tests: - config_path: secrets/config_oauth.json @@ -235,3 +279,8 @@ acceptance_tests: future_state: future_state_path: integration_tests/abnormal_state.json timeout_seconds: 7200 + - config_path: secrets/config_oauth_no_start_date.json + configured_catalog_path: sample_files/incremental_catalog.json + future_state: + future_state_path: integration_tests/abnormal_state.json + timeout_seconds: 7200 diff --git a/airbyte-integrations/connectors/source-hubspot/metadata.yaml b/airbyte-integrations/connectors/source-hubspot/metadata.yaml index 38f03af7a246..daa438f5ee77 100644 --- a/airbyte-integrations/connectors/source-hubspot/metadata.yaml +++ b/airbyte-integrations/connectors/source-hubspot/metadata.yaml @@ -10,7 +10,7 @@ data: connectorSubtype: api connectorType: source definitionId: 36c891d9-4bd9-43ac-bad2-10e12756272c - dockerImageTag: 3.2.0 + dockerImageTag: 3.3.0 dockerRepository: airbyte/source-hubspot documentationUrl: https://docs.airbyte.com/integrations/sources/hubspot githubIssueLabel: source-hubspot diff --git a/airbyte-integrations/connectors/source-hubspot/pyproject.toml b/airbyte-integrations/connectors/source-hubspot/pyproject.toml index 21bb9a12a9f4..b36d715999e6 100644 --- a/airbyte-integrations/connectors/source-hubspot/pyproject.toml +++ b/airbyte-integrations/connectors/source-hubspot/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "3.2.0" +version = "3.3.0" name = "source-hubspot" description = "Source implementation for HubSpot." authors = [ "Airbyte ",] diff --git a/airbyte-integrations/connectors/source-hubspot/source_hubspot/source.py b/airbyte-integrations/connectors/source-hubspot/source_hubspot/source.py index dd643ff170b2..baa562164e87 100644 --- a/airbyte-integrations/connectors/source-hubspot/source_hubspot/source.py +++ b/airbyte-integrations/connectors/source-hubspot/source_hubspot/source.py @@ -64,6 +64,13 @@ Workflows, ) +""" +https://github.com/airbytehq/oncall/issues/3800 +we use start date 2006-06-01, the date Hubspot was created, to retrieve all data if no start date was provided + +""" +DEFAULT_START_DATE = "2006-06-01T00:00:00Z" + class SourceHubspot(AbstractSource): logger = AirbyteLogger() @@ -105,7 +112,7 @@ def get_api(config: Mapping[str, Any]) -> API: return API(credentials=credentials) def get_common_params(self, config) -> Mapping[str, Any]: - start_date = config["start_date"] + start_date = config.get("start_date", DEFAULT_START_DATE) credentials = config["credentials"] api = self.get_api(config=config) return dict(api=api, start_date=start_date, credentials=credentials) diff --git a/airbyte-integrations/connectors/source-hubspot/source_hubspot/spec.yaml b/airbyte-integrations/connectors/source-hubspot/source_hubspot/spec.yaml index 3510c2b97d3b..6f105f4d05a5 100644 --- a/airbyte-integrations/connectors/source-hubspot/source_hubspot/spec.yaml +++ b/airbyte-integrations/connectors/source-hubspot/source_hubspot/spec.yaml @@ -4,7 +4,6 @@ connectionSpecification: title: HubSpot Source Spec type: object required: - - start_date - credentials additionalProperties: true properties: @@ -14,7 +13,8 @@ connectionSpecification: pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$ description: >- UTC date and time in the format 2017-01-25T00:00:00Z. Any data before - this date will not be replicated. 
+ this date will not be replicated. If not set, "2006-06-01T00:00:00Z" (Hubspot creation date) will be used as the start date. + It's recommended to provide a start date relevant to your data to optimize synchronization. examples: - "2017-01-25T00:00:00Z" format: date-time diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/test_source.py b/airbyte-integrations/connectors/source-hubspot/unit_tests/test_source.py index de5e3c40a44a..7ee8b639bbbd 100644 --- a/airbyte-integrations/connectors/source-hubspot/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/test_source.py @@ -50,7 +50,7 @@ def test_check_connection_empty_config(config): def test_check_connection_invalid_config(config): - config.pop("start_date") + config.pop("credentials") with pytest.raises(KeyError): SourceHubspot().check_connection(logger, config=config) diff --git a/docs/integrations/sources/hubspot.md b/docs/integrations/sources/hubspot.md index 37cdc49c57c9..6c3cc01f2598 100644 --- a/docs/integrations/sources/hubspot.md +++ b/docs/integrations/sources/hubspot.md @@ -102,7 +102,7 @@ Next, you need to configure the appropriate scopes for the following streams. Pl ::: - **Not Recommended:**To authenticate using a Private App, select **Private App** and enter the Access Token for your HubSpot account. 5. For **Start date**, use the provided datepicker or enter the date programmatically in the following format: - `yyyy-mm-ddThh:mm:ssZ`. The data added on and after this date will be replicated. + `yyyy-mm-ddThh:mm:ssZ`. The data added on and after this date will be replicated. If not set, "2006-06-01T00:00:00Z" (Hubspot creation date) will be used as the start date. It's recommended to provide a start date relevant to your data to optimize synchronization. 6. Click **Set up source** and wait for the tests to complete. @@ -116,7 +116,7 @@ Next, you need to configure the appropriate scopes for the following streams. Pl - **Recommended:** To authenticate using a Private App, select **Private App** and enter the Access Token for your HubSpot account. - **Not Recommended:**To authenticate using OAuth, select **OAuth** and enter your Client ID, Client Secret, and Refresh Token. 5. For **Start date**, use the provided datepicker or enter the date programmatically in the following format: - `yyyy-mm-ddThh:mm:ssZ`. The data added on and after this date will be replicated. + `yyyy-mm-ddThh:mm:ssZ`. The data added on and after this date will be replicated. If not set, "2006-06-01T00:00:00Z" (Hubspot creation date) will be used as the start date. It's recommended to provide a start date relevant to your data to optimize synchronization. 6. Click **Set up source** and wait for the tests to complete. 
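The start-date change above reduces to a single fallback in get_common_params. A minimal standalone sketch of that behavior, assuming only what the source.py hunk shows: DEFAULT_START_DATE is copied from the patch, while the helper name resolve_start_date is hypothetical and stands in for the method on SourceHubspot.

# Hedged sketch of the optional start_date fallback introduced in this patch.
from typing import Any, Mapping

DEFAULT_START_DATE = "2006-06-01T00:00:00Z"  # mirrors the constant added in source.py


def resolve_start_date(config: Mapping[str, Any]) -> str:
    # "start_date" is no longer in the spec's required list; when it is absent,
    # fall back to Hubspot's creation date so the sync covers all available data.
    return config.get("start_date", DEFAULT_START_DATE)


assert resolve_start_date({}) == DEFAULT_START_DATE
assert resolve_start_date({"start_date": "2017-01-25T00:00:00Z"}) == "2017-01-25T00:00:00Z"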
@@ -322,6 +322,7 @@ The connector is restricted by normal HubSpot [rate limitations](https://legacyd | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 3.3.0 | 2024-02-16 | [34597](https://github.com/airbytehq/airbyte/pull/34597) | Make start date not required, sync all data from default value if it's not provided | | 3.2.0 | 2024-02-15 | [35328](https://github.com/airbytehq/airbyte/pull/35328) | Add mailingIlsListsIncluded and mailingIlsListsExcluded fields to Marketing emails stream schema | | 3.1.1 | 2024-02-12 | [35165](https://github.com/airbytehq/airbyte/pull/35165) | Manage dependencies with Poetry. | | 3.1.0 | 2024-02-05 | [34829](https://github.com/airbytehq/airbyte/pull/34829) | Add `Contacts Form Submissions` stream | From a05b81ed5ee18d150af7e35a6da368dcf86ffa8b Mon Sep 17 00:00:00 2001 From: Anton Karpets Date: Tue, 27 Feb 2024 16:52:41 +0200 Subject: [PATCH 003/172] =?UTF-8?q?=F0=9F=8F=A5Source=20Zendesk=20Support:?= =?UTF-8?q?=20update=20expected=20records=20(#35658)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../integration_tests/expected_records.jsonl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/airbyte-integrations/connectors/source-zendesk-support/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-zendesk-support/integration_tests/expected_records.jsonl index e08e4169e436..5cb0a349cd6c 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-zendesk-support/integration_tests/expected_records.jsonl @@ -47,8 +47,8 @@ {"stream": "ticket_metric_events", "data": {"id": 4992797383183, "ticket_id": 121, "metric": "agent_work_time", "instance_id": 0, "type": "measure", "time": "2022-06-17T14:49:20Z"}, "emitted_at": 1697714863384} {"stream": "ticket_metric_events", "data": {"id": 4992797383311, "ticket_id": 121, "metric": "pausable_update_time", "instance_id": 0, "type": "measure", "time": "2022-06-17T14:49:20Z"}, "emitted_at": 1697714863386} {"stream": "ticket_metric_events", "data": {"id": 4992797383439, "ticket_id": 121, "metric": "reply_time", "instance_id": 0, "type": "measure", "time": "2022-06-17T14:49:20Z"}, "emitted_at": 1697714863386} -{"stream": "ticket_skips", "data": {"id": 7290033348623, "ticket_id": 121, "user_id": 360786799676, "reason": "I have no idea.", "created_at": "2023-06-27T08:24:02Z", "updated_at": "2023-06-27T08:24:02Z", "ticket": {"url": "https://d3v-airbyte.zendesk.com/api/v2/tickets/121.json", "id": 121, "external_id": null, "via": {"channel": "voice", "source": {"rel": "voicemail", "from": {"formatted_phone": "+1 (689) 689-8023", "phone": "+16896898023", "name": "Caller +1 (689) 689-8023"}, "to": {"formatted_phone": "+1 (205) 953-1462", "phone": "+12059531462", "name": "Airbyte", "brand_id": 360000358316}}}, "created_at": "2022-06-17T14:49:20Z", "updated_at": "2022-06-17T16:01:42Z", "type": null, "subject": "Voicemail from: Caller +1 (689) 689-8023", "raw_subject": "Voicemail from: Caller +1 (689) 689-8023", "description": "Call from: +1 (689) 689-8023\\nTime of call: June 17, 2022 at 2:48:27 PM", "priority": null, "status": "new", "recipient": null, "requester_id": 4992781783439, 
"submitter_id": 4992781783439, "assignee_id": null, "organization_id": null, "group_id": null, "collaborator_ids": [], "follower_ids": [], "email_cc_ids": [], "forum_topic_id": null, "problem_id": null, "has_incidents": false, "is_public": false, "due_at": null, "tags": [], "custom_fields": [], "satisfaction_rating": {"score": "offered"}, "sharing_agreement_ids": [], "custom_status_id": 4044356, "fields": [], "followup_ids": [], "ticket_form_id": 360000084116, "deleted_ticket_form_id": null, "brand_id": 360000358316, "allow_channelback": false, "allow_attachments": true, "from_messaging_channel": false}}, "emitted_at": 1697714864517} -{"stream": "ticket_skips", "data": {"id": 7290088475023, "ticket_id": 125, "user_id": 360786799676, "reason": "Another test skip.", "created_at": "2023-06-27T08:30:01Z", "updated_at": "2023-06-27T08:30:01Z", "ticket": {"url": "https://d3v-airbyte.zendesk.com/api/v2/tickets/125.json", "id": 125, "external_id": null, "via": {"channel": "web", "source": {"from": {}, "to": {}, "rel": null}}, "created_at": "2022-07-18T10:16:53Z", "updated_at": "2022-07-18T10:36:02Z", "type": "question", "subject": "Ticket Test 2", "raw_subject": "Ticket Test 2", "description": "238473846", "priority": "urgent", "status": "open", "recipient": null, "requester_id": 360786799676, "submitter_id": 360786799676, "assignee_id": 361089721035, "organization_id": 360033549136, "group_id": 5059439464079, "collaborator_ids": [360786799676], "follower_ids": [360786799676], "email_cc_ids": [], "forum_topic_id": null, "problem_id": null, "has_incidents": false, "is_public": false, "due_at": null, "tags": [], "custom_fields": [], "satisfaction_rating": {"score": "unoffered"}, "sharing_agreement_ids": [], "custom_status_id": 4044376, "fields": [], "followup_ids": [], "ticket_form_id": 360000084116, "deleted_ticket_form_id": null, "brand_id": 360000358316, "allow_channelback": false, "allow_attachments": true, "from_messaging_channel": false}}, "emitted_at": 1697714864519} +{"stream": "ticket_skips", "data": {"id": 7290033348623, "ticket_id": 121, "user_id": 360786799676, "reason": "I have no idea.", "created_at": "2023-06-27T08:24:02Z", "updated_at": "2023-06-27T08:24:02Z", "ticket": {"url": "https://d3v-airbyte.zendesk.com/api/v2/tickets/121.json", "id": 121, "external_id": null, "via": {"channel": "voice", "source": {"rel": "voicemail", "from": {"formatted_phone": "+1 (689) 689-8023", "phone": "+16896898023", "name": "Caller +1 (689) 689-8023"}, "to": {"formatted_phone": "+1 (205) 953-1462", "phone": "+12059531462", "name": "Airbyte", "brand_id": 360000358316}}}, "created_at": "2022-06-17T14:49:20Z", "updated_at": "2022-06-17T16:01:42Z", "generated_timestamp": 1655481702, "type": null, "subject": "Voicemail from: Caller +1 (689) 689-8023", "raw_subject": "Voicemail from: Caller +1 (689) 689-8023", "description": "Call from: +1 (689) 689-8023\\nTime of call: June 17, 2022 at 2:48:27 PM", "priority": null, "status": "new", "recipient": null, "requester_id": 4992781783439, "submitter_id": 4992781783439, "assignee_id": null, "organization_id": null, "group_id": null, "collaborator_ids": [], "follower_ids": [], "email_cc_ids": [], "forum_topic_id": null, "problem_id": null, "has_incidents": false, "is_public": false, "due_at": null, "tags": [], "custom_fields": [], "satisfaction_rating": {"score": "offered"}, "sharing_agreement_ids": [], "custom_status_id": 4044356, "fields": [], "followup_ids": [], "ticket_form_id": 360000084116, "deleted_ticket_form_id": null, "brand_id": 360000358316, 
"allow_channelback": false, "allow_attachments": true, "from_messaging_channel": false}}, "emitted_at": 1709038262604} +{"stream": "ticket_skips", "data": {"id": 7290088475023, "ticket_id": 125, "user_id": 360786799676, "reason": "Another test skip.", "created_at": "2023-06-27T08:30:01Z", "updated_at": "2023-06-27T08:30:01Z", "ticket": {"url": "https://d3v-airbyte.zendesk.com/api/v2/tickets/125.json", "id": 125, "external_id": null, "via": {"channel": "web", "source": {"from": {}, "to": {}, "rel": null}}, "created_at": "2022-07-18T10:16:53Z", "updated_at": "2022-07-18T10:36:02Z", "generated_timestamp": 1658140562, "type": "question", "subject": "Ticket Test 2", "raw_subject": "Ticket Test 2", "description": "238473846", "priority": "urgent", "status": "open", "recipient": null, "requester_id": 360786799676, "submitter_id": 360786799676, "assignee_id": 361089721035, "organization_id": 360033549136, "group_id": 5059439464079, "collaborator_ids": [360786799676], "follower_ids": [360786799676], "email_cc_ids": [], "forum_topic_id": null, "problem_id": null, "has_incidents": false, "is_public": false, "due_at": null, "tags": [], "custom_fields": [], "satisfaction_rating": {"score": "unoffered"}, "sharing_agreement_ids": [], "custom_status_id": 4044376, "fields": [], "followup_ids": [], "ticket_form_id": 360000084116, "deleted_ticket_form_id": null, "brand_id": 360000358316, "allow_channelback": false, "allow_attachments": true, "from_messaging_channel": false}}, "emitted_at": 1709038262605} {"stream": "tickets", "data": {"url": "https://d3v-airbyte.zendesk.com/api/v2/tickets/121.json", "id": 121, "external_id": null, "via": {"channel": "voice", "source": {"rel": "voicemail", "from": {"formatted_phone": "+1 (689) 689-8023", "phone": "+16896898023", "name": "Caller +1 (689) 689-8023"}, "to": {"formatted_phone": "+1 (205) 953-1462", "phone": "+12059531462", "name": "Airbyte", "brand_id": 360000358316}}}, "created_at": "2022-06-17T14:49:20Z", "updated_at": "2022-06-17T16:01:42Z", "type": null, "subject": "Voicemail from: Caller +1 (689) 689-8023", "raw_subject": "Voicemail from: Caller +1 (689) 689-8023", "description": "Call from: +1 (689) 689-8023\\nTime of call: June 17, 2022 at 2:48:27 PM", "priority": null, "status": "new", "recipient": null, "requester_id": 4992781783439, "submitter_id": 4992781783439, "assignee_id": null, "organization_id": null, "group_id": null, "collaborator_ids": [], "follower_ids": [], "email_cc_ids": [], "forum_topic_id": null, "problem_id": null, "has_incidents": false, "is_public": false, "due_at": null, "tags": [], "custom_fields": [], "satisfaction_rating": {"score": "offered"}, "sharing_agreement_ids": [], "custom_status_id": 4044356, "fields": [], "followup_ids": [], "ticket_form_id": 360000084116, "brand_id": 360000358316, "allow_channelback": false, "allow_attachments": true, "from_messaging_channel": false, "generated_timestamp": 1655481702}, "emitted_at": 1697714865818} {"stream": "tickets", "data": {"url": "https://d3v-airbyte.zendesk.com/api/v2/tickets/122.json", "id": 122, "external_id": null, "via": {"channel": "voice", "source": {"rel": "voicemail", "from": {"formatted_phone": "+1 (912) 420-0314", "phone": "+19124200314", "name": "Caller +1 (912) 420-0314"}, "to": {"formatted_phone": "+1 (205) 953-1462", "phone": "+12059531462", "name": "Airbyte", "brand_id": 360000358316}}}, "created_at": "2022-06-17T19:52:39Z", "updated_at": "2022-06-17T21:01:41Z", "type": null, "subject": "Voicemail from: Caller +1 (912) 420-0314", "raw_subject": "Voicemail from: 
Caller +1 (912) 420-0314", "description": "Call from: +1 (912) 420-0314\\nTime of call: June 17, 2022 at 7:52:02 PM", "priority": null, "status": "new", "recipient": null, "requester_id": 4993467856015, "submitter_id": 4993467856015, "assignee_id": null, "organization_id": null, "group_id": null, "collaborator_ids": [], "follower_ids": [], "email_cc_ids": [], "forum_topic_id": null, "problem_id": null, "has_incidents": false, "is_public": false, "due_at": null, "tags": [], "custom_fields": [], "satisfaction_rating": {"score": "offered"}, "sharing_agreement_ids": [], "custom_status_id": 4044356, "fields": [], "followup_ids": [], "ticket_form_id": 360000084116, "brand_id": 360000358316, "allow_channelback": false, "allow_attachments": true, "from_messaging_channel": false, "generated_timestamp": 1655499701}, "emitted_at": 1697714865822} {"stream": "tickets", "data": {"url": "https://d3v-airbyte.zendesk.com/api/v2/tickets/125.json", "id": 125, "external_id": null, "via": {"channel": "web", "source": {"from": {}, "to": {}, "rel": null}}, "created_at": "2022-07-18T10:16:53Z", "updated_at": "2022-07-18T10:36:02Z", "type": "question", "subject": "Ticket Test 2", "raw_subject": "Ticket Test 2", "description": "238473846", "priority": "urgent", "status": "open", "recipient": null, "requester_id": 360786799676, "submitter_id": 360786799676, "assignee_id": 361089721035, "organization_id": 360033549136, "group_id": 5059439464079, "collaborator_ids": [360786799676], "follower_ids": [360786799676], "email_cc_ids": [], "forum_topic_id": null, "problem_id": null, "has_incidents": false, "is_public": false, "due_at": null, "tags": [], "custom_fields": [], "satisfaction_rating": {"score": "unoffered"}, "sharing_agreement_ids": [], "custom_status_id": 4044376, "fields": [], "followup_ids": [], "ticket_form_id": 360000084116, "brand_id": 360000358316, "allow_channelback": false, "allow_attachments": true, "from_messaging_channel": false, "generated_timestamp": 1658140562}, "emitted_at": 1697714865824} From 5a8f43a38cb725a348e1c5d01cb39d4f0c52f0ba Mon Sep 17 00:00:00 2001 From: Anton Karpets Date: Tue, 27 Feb 2024 18:11:39 +0200 Subject: [PATCH 004/172] =?UTF-8?q?=F0=9F=8F=A5Source=20Snapchat=20Marketi?= =?UTF-8?q?ng:=20add=20new=20fields=20(#35660)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../acceptance-test-config.yml | 1 - .../integration_tests/expected_records.jsonl | 28 +++--- .../source-snapchat-marketing/metadata.yaml | 2 +- .../source-snapchat-marketing/pyproject.toml | 2 +- .../schemas/ads.json | 6 ++ .../schemas/adsquads.json | 90 +++++++++++++++++++ .../schemas/creatives.json | 8 ++ .../schemas/media.json | 6 ++ .../sources/snapchat-marketing.md | 43 ++++----- 9 files changed, 148 insertions(+), 38 deletions(-) diff --git a/airbyte-integrations/connectors/source-snapchat-marketing/acceptance-test-config.yml b/airbyte-integrations/connectors/source-snapchat-marketing/acceptance-test-config.yml index c8675862e84e..dc72aecef099 100644 --- a/airbyte-integrations/connectors/source-snapchat-marketing/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-snapchat-marketing/acceptance-test-config.yml @@ -32,7 +32,6 @@ acceptance_tests: bypass_reason: "The data changes from sync to sync" - name: "campaigns_stats_lifetime" bypass_reason: "The data changes from sync to sync" - fail_on_extra_columns: false incremental: tests: - config_path: "secrets/config.json" diff --git 
a/airbyte-integrations/connectors/source-snapchat-marketing/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-snapchat-marketing/integration_tests/expected_records.jsonl index b270c11318a9..b01406215203 100644 --- a/airbyte-integrations/connectors/source-snapchat-marketing/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-snapchat-marketing/integration_tests/expected_records.jsonl @@ -2,9 +2,9 @@ {"stream": "ads", "data": {"id": "96e5549f-4065-490e-93dd-ffb9f7973b77", "updated_at": "2022-07-01T17:15:14.970Z", "created_at": "2022-07-01T17:02:02.439Z", "name": "Extract yor data anywhere - Ad", "ad_squad_id": "ac6548b1-419e-4137-8320-e4e536766f72", "creative_id": "32224b92-fe08-4fb4-9c13-85f4f794d810", "status": "ACTIVE", "type": "REMOTE_WEBPAGE", "render_type": "STATIC", "review_status": "REJECTED", "review_status_reasons": ["Your ad contains low quality imagery, video, or sound that does not appear to be intentional. Some examples include audio or text that is cut off, imagery that is distorted, blurred, flashing brightly, rotated, or a broken video file. Please edit, resubmit for review, and our team will take another look. Questions? Contact us via our Business Help Center at https://businesshelp.snapchat.com", "Your Brand Name does not reflect the Paying Advertiser. Paying Advertiser examples include your Organization Name, Product Name, Parent Company, Artist Name or Co-Branded Partnerships. Please update your Brand Name to be reflective of the content you are advertising, resubmit for review, and our team will take another look."], "delivery_status": ["INVALID_NOT_APPROVED_REVIEW_STATUS", "INVALID_EFFECTIVE_INVALID"]}, "emitted_at": 1674640110702} {"stream": "ads", "data": {"id": "417d0269-80fb-496a-b5f3-ec0bac665144", "updated_at": "2021-07-22T12:54:00.462Z", "created_at": "2021-07-22T10:40:00.657Z", "name": "Snowflake - Ad", "ad_squad_id": "c67feaff-0ed8-4c05-b6a9-84c6de0e147f", "creative_id": "94aa10ab-97ca-4dc0-900d-4202212d6e2c", "status": "PAUSED", "type": "AD_TO_PLACE", "render_type": "STATIC", "review_status": "REJECTED", "review_status_reasons": ["Your ad encourages Snapchatters to take an action that is not possible with your selected ad type and attachment. Examples include: \n(i) An incorrect prompt to Snapchatters ('Watch' when the attachment is an app download), (ii) No attached content (asking Snapchatters to 'Play Now' with no game attached) or (iii) An action not applicable to the selected ad type ('Swipe Down'). 
\n\nPlease update your creative, resubmit for review, and our team will take another look."], "delivery_status": ["INVALID_NOT_ACTIVE", "INVALID_NOT_EFFECTIVE_ACTIVE", "INVALID_NOT_APPROVED_REVIEW_STATUS", "INVALID_EFFECTIVE_INVALID"]}, "emitted_at": 1674640110987} {"stream": "ads", "data": {"id": "8831ae74-bd1e-4ea1-9628-76e549041bea", "updated_at": "2022-07-01T17:24:43.416Z", "created_at": "2022-07-01T17:14:30.568Z", "name": "Open-source data integration - Ad", "ad_squad_id": "87f00f80-8ae6-44ac-ab63-2c976d43de64", "creative_id": "88ca89d3-b8a9-47df-984d-27c9e256279b", "status": "ACTIVE", "type": "REMOTE_WEBPAGE", "render_type": "STATIC", "review_status": "APPROVED", "delivery_status": ["INVALID_EFFECTIVE_INVALID"]}, "emitted_at": 1674640110988} -{"stream": "adsquads", "data": {"id": "ac6548b1-419e-4137-8320-e4e536766f72", "updated_at": "2022-07-01T17:02:02.325Z", "created_at": "2022-07-01T17:02:01.459Z", "name": "Extract yor data anywhere - Ad Set", "status": "ACTIVE", "campaign_id": "3369834f-5bc0-47bd-a4e9-ee48d424d90c", "type": "SNAP_ADS", "targeting": {"regulated_content": false, "geos": [{"country_code": "us"}]}, "targeting_reach_status": "VALID", "placement": "UNSUPPORTED", "billing_event": "IMPRESSION", "auto_bid": true, "target_bid": false, "bid_strategy": "AUTO_BID", "daily_budget_micro": 5000000, "start_time": "2022-07-01T17:02:01.308Z", "optimization_goal": "SWIPES", "delivery_constraint": "DAILY_BUDGET", "pacing_type": "STANDARD", "child_ad_type": "REMOTE_WEBPAGE", "forced_view_setting": "NONE", "creation_state": "PUBLISHED", "delivery_status": ["INVALID_EFFECTIVE_INVALID", "INVALID_AD_SQUAD_HAS_NO_ACTIVE_ADS"], "skadnetwork_properties": {"status": "NEVER_ENROLLED"}}, "emitted_at": 1674640111416} -{"stream": "adsquads", "data": {"id": "87f00f80-8ae6-44ac-ab63-2c976d43de64", "updated_at": "2022-07-01T17:24:43.438Z", "created_at": "2022-07-01T17:14:29.123Z", "name": "Open-source data integration - Ad Set", "status": "ACTIVE", "campaign_id": "d180d36e-1212-479b-84a5-8b0663ceaf82", "type": "SNAP_ADS", "targeting": {"regulated_content": false, "demographics": [{"min_age": "21", "languages": ["en"]}], "interests": [{"category_id": ["SLC_229", "SLC_39"], "operation": "INCLUDE"}], "geos": [{"country_code": "us"}], "enable_targeting_expansion": false, "auto_expansion_options": {"interest_expansion_option": {"enabled": false}}}, "targeting_reach_status": "VALID", "placement": "UNSUPPORTED", "billing_event": "IMPRESSION", "auto_bid": true, "target_bid": false, "bid_strategy": "AUTO_BID", "lifetime_budget_micro": 15000000, "start_time": "2022-07-01T17:09:13.000Z", "end_time": "2022-07-02T17:09:13.000Z", "optimization_goal": "SWIPES", "delivery_constraint": "LIFETIME_BUDGET", "pacing_type": "STANDARD", "child_ad_type": "REMOTE_WEBPAGE", "forced_view_setting": "NONE", "creation_state": "PUBLISHED", "delivery_status": ["INVALID_OVER_BUDGET_AD_SQUAD_FINALIZED_LIFETIME_SPEND", "INVALID_END_TIME", "INVALID_EFFECTIVE_INVALID", "INVALID_START_TIME_AFTER_END_TIME"], "skadnetwork_properties": {"status": "WITHDRAWN", "ecid_enrollment_status": "DETACHED"}, "delivery_properties_version": 1656695977818}, "emitted_at": 1674640111722} -{"stream": "adsquads", "data": {"id": "c67feaff-0ed8-4c05-b6a9-84c6de0e147f", "updated_at": "2021-07-22T12:53:31.636Z", "created_at": "2021-07-22T10:39:58.989Z", "name": "Snowflake - Ad Set", "status": "PAUSED", "campaign_id": "519b708a-c8ca-47fb-8264-d92a3b773949", "type": "SNAP_ADS", "targeting": {"regulated_content": false, "geos": [{"country_code": "us", "operation": 
"INCLUDE"}], "locations": [{"circles": [{"latitude": 40.74111000005462, "longitude": -74.00765749991542, "radius": 5.0, "unit": "MILES", "name": "Tesla, 860 Washington St, New York, NY 10014, United States"}], "operation": "INCLUDE"}]}, "targeting_reach_status": "VALID", "placement": "UNSUPPORTED", "billing_event": "IMPRESSION", "auto_bid": true, "target_bid": false, "bid_strategy": "AUTO_BID", "daily_budget_micro": 50000000, "start_time": "2021-07-22T10:39:57.697Z", "optimization_goal": "SWIPES", "event_sources": {"PLACE": ["bb766df2-9994-11e8-bfe6-3fd872610c42"]}, "delivery_constraint": "DAILY_BUDGET", "pacing_type": "STANDARD", "child_ad_type": "AD_TO_PLACE", "forced_view_setting": "NONE", "creation_state": "PUBLISHED", "delivery_status": ["INVALID_NOT_ACTIVE", "INVALID_NOT_EFFECTIVE_ACTIVE", "INVALID_AD_SQUAD_HAS_NO_ACTIVE_ADS", "INVALID_EFFECTIVE_INVALID"], "skadnetwork_properties": {"status": "NEVER_ENROLLED"}}, "emitted_at": 1674640111722} +{"stream": "adsquads", "data": {"id": "ac6548b1-419e-4137-8320-e4e536766f72", "updated_at": "2022-07-01T17:02:02.325Z", "created_at": "2022-07-01T17:02:01.459Z", "name": "Extract yor data anywhere - Ad Set", "status": "ACTIVE", "campaign_id": "3369834f-5bc0-47bd-a4e9-ee48d424d90c", "type": "SNAP_ADS", "targeting": {"regulated_content": false, "geos": [{"country_code": "us"}]}, "targeting_reach_status": "VALID", "placement": "UNSUPPORTED", "billing_event": "IMPRESSION", "auto_bid": true, "target_bid": false, "bid_strategy": "AUTO_BID", "daily_budget_micro": 5000000, "start_time": "2022-07-01T17:02:01.308Z", "optimization_goal": "SWIPES", "delivery_constraint": "DAILY_BUDGET", "pacing_type": "STANDARD", "child_ad_type": "REMOTE_WEBPAGE", "forced_view_setting": "NONE", "creation_state": "PUBLISHED", "delivery_status": ["INVALID_EFFECTIVE_INVALID", "INVALID_AD_SQUAD_HAS_NO_ACTIVE_ADS"], "skadnetwork_properties": {"status": "NEVER_ENROLLED"}}, "emitted_at": 1709042257429} +{"stream": "adsquads", "data": {"id": "87f00f80-8ae6-44ac-ab63-2c976d43de64", "updated_at": "2022-07-01T17:24:43.438Z", "created_at": "2022-07-01T17:14:29.123Z", "name": "Open-source data integration - Ad Set", "status": "ACTIVE", "campaign_id": "d180d36e-1212-479b-84a5-8b0663ceaf82", "type": "SNAP_ADS", "targeting": {"regulated_content": false, "demographics": [{"min_age": "21", "languages": ["en"]}], "interests": [{"category_id": ["SLC_229", "SLC_39"], "operation": "INCLUDE"}], "geos": [{"country_code": "us"}], "enable_targeting_expansion": false, "auto_expansion_options": {"interest_expansion_option": {"enabled": false}}}, "targeting_reach_status": "VALID", "placement": "UNSUPPORTED", "billing_event": "IMPRESSION", "auto_bid": true, "target_bid": false, "bid_strategy": "AUTO_BID", "lifetime_budget_micro": 15000000, "start_time": "2022-07-01T17:09:13.000Z", "end_time": "2022-07-02T17:09:13.000Z", "optimization_goal": "SWIPES", "delivery_constraint": "LIFETIME_BUDGET", "pacing_type": "STANDARD", "child_ad_type": "REMOTE_WEBPAGE", "forced_view_setting": "NONE", "creation_state": "PUBLISHED", "delivery_status": ["INVALID_END_TIME", "INVALID_OVER_BUDGET_AD_SQUAD_FINALIZED_LIFETIME_SPEND", "INVALID_START_TIME_AFTER_END_TIME", "INVALID_EFFECTIVE_INVALID"], "skadnetwork_properties": {"status": "WITHDRAWN", "ecid_enrollment_status": "DETACHED", "enable_skoverlay": false}, "delivery_properties_version": 1656695977818}, "emitted_at": 1709042257833} +{"stream": "adsquads", "data": {"id": "c67feaff-0ed8-4c05-b6a9-84c6de0e147f", "updated_at": "2021-07-22T12:53:31.636Z", "created_at": 
"2021-07-22T10:39:58.989Z", "name": "Snowflake - Ad Set", "status": "PAUSED", "campaign_id": "519b708a-c8ca-47fb-8264-d92a3b773949", "type": "SNAP_ADS", "targeting": {"regulated_content": false, "geos": [{"country_code": "us", "operation": "INCLUDE"}], "locations": [{"circles": [{"latitude": 40.74111000005462, "longitude": -74.00765749991542, "radius": 5.0, "unit": "MILES", "name": "Tesla, 860 Washington St, New York, NY 10014, United States"}], "operation": "INCLUDE"}]}, "targeting_reach_status": "VALID", "placement": "UNSUPPORTED", "billing_event": "IMPRESSION", "auto_bid": true, "target_bid": false, "bid_strategy": "AUTO_BID", "daily_budget_micro": 50000000, "start_time": "2021-07-22T10:39:57.697Z", "optimization_goal": "SWIPES", "event_sources": {"PLACE": ["bb766df2-9994-11e8-bfe6-3fd872610c42"]}, "delivery_constraint": "DAILY_BUDGET", "pacing_type": "STANDARD", "child_ad_type": "AD_TO_PLACE", "forced_view_setting": "NONE", "creation_state": "PUBLISHED", "delivery_status": ["INVALID_AD_SQUAD_HAS_NO_ACTIVE_ADS", "INVALID_NOT_ACTIVE", "INVALID_EFFECTIVE_INVALID", "INVALID_NOT_EFFECTIVE_ACTIVE"], "skadnetwork_properties": {"status": "NEVER_ENROLLED"}}, "emitted_at": 1709042257834} {"stream": "campaigns", "data": {"id": "3369834f-5bc0-47bd-a4e9-ee48d424d90c", "updated_at": "2022-07-01T17:02:00.987Z", "created_at": "2022-07-01T17:02:00.987Z", "name": "Extract yor data anywhere - Campaign", "ad_account_id": "04214c00-3aa5-4123-b5c8-363c32c40e42", "status": "ACTIVE", "objective": "WEB_VIEW", "start_time": "2022-07-01T17:02:00.309Z", "buy_model": "AUCTION", "delivery_status": ["INVALID_CAMPAIGN_HAS_NO_ACTIVE_AD_SQUAD"], "creation_state": "PUBLISHED"}, "emitted_at": 1683708503134} {"stream": "campaigns", "data": {"id": "13abdd04-e4e5-48a4-9d83-faca2c54c69d", "updated_at": "2021-07-22T10:39:14.535Z", "created_at": "2021-07-22T10:31:54.777Z", "name": "Snowflake Integration - Campaign", "ad_account_id": "e4cd371b-8de8-4011-a8d2-860fe77c09e1", "status": "PAUSED", "objective": "BRAND_AWARENESS", "start_time": "2021-07-22T10:31:29.707Z", "buy_model": "AUCTION", "delivery_status": ["INVALID_NOT_EFFECTIVE_ACTIVE", "INVALID_NOT_ACTIVE", "INVALID_CAMPAIGN_HAS_NO_ACTIVE_AD_SQUAD"], "creation_state": "PUBLISHED"}, "emitted_at": 1683708503509} {"stream": "campaigns", "data": {"id": "34f8da04-4842-42b9-b020-4868f5cf36ba", "updated_at": "2021-07-22T10:33:59.114Z", "created_at": "2021-07-22T10:29:42.068Z", "name": "Snowflake Integration - Campaign", "ad_account_id": "e4cd371b-8de8-4011-a8d2-860fe77c09e1", "status": "PAUSED", "objective": "BRAND_AWARENESS", "start_time": "2021-07-22T10:29:16.993Z", "buy_model": "AUCTION", "delivery_status": ["INVALID_NOT_EFFECTIVE_ACTIVE", "INVALID_NOT_ACTIVE", "INVALID_CAMPAIGN_HAS_NO_ACTIVE_AD_SQUAD"], "creation_state": "PUBLISHED"}, "emitted_at": 1683708503509} @@ -17,17 +17,17 @@ {"stream": "creatives", "data": {"id": "1a65cb1c-a13e-413e-8302-44c8b344adf7", "updated_at": "2021-07-22T10:34:06.202Z", "created_at": "2021-07-22T10:31:55.392Z", "name": "Snowflake Integration - Creative", "ad_account_id": "e4cd371b-8de8-4011-a8d2-860fe77c09e1", "type": "AD_TO_PLACE", "packaging_status": "SUCCESS", "review_status": "PENDING_REVIEW", "shareable": true, "forced_view_eligibility": "NONE", "headline": "Snowflake Integration", "brand_name": "Google", "call_to_action": "SEE_PLACE", "render_type": "STATIC", "top_snap_media_id": "16d70457-09b9-4095-8e25-e9e3b625a905", "top_snap_crop_position": "MIDDLE", "ad_product": "SNAP_AD", "ad_to_place_properties": {"place_id": 
"f998f638-9992-11e8-b2d0-73fce447d849"}}, "emitted_at": 1674640113102} {"stream": "creatives", "data": {"id": "88ca89d3-b8a9-47df-984d-27c9e256279b", "updated_at": "2022-07-01T17:24:43.298Z", "created_at": "2022-07-01T17:14:28.949Z", "name": "Open-source data integration - Creative", "ad_account_id": "e4cd371b-8de8-4011-a8d2-860fe77c09e1", "type": "WEB_VIEW", "packaging_status": "SUCCESS", "review_status": "APPROVED", "review_status_details": "", "shareable": true, "forced_view_eligibility": "NONE", "headline": "Open-source data integration", "brand_name": "airbyte", "call_to_action": "MORE", "render_type": "STATIC", "top_snap_media_id": "56bbbea0-f602-4a81-b8ed-ce8bf5ec91d4", "top_snap_crop_position": "MIDDLE", "web_view_properties": {"url": "https://airbyte.io/", "allow_snap_javascript_sdk": false, "use_immersive_mode": false, "deep_link_urls": [], "block_preload": true}, "ad_product": "SNAP_AD"}, "emitted_at": 1674640113102} {"stream": "creatives", "data": {"id": "94aa10ab-97ca-4dc0-900d-4202212d6e2c", "updated_at": "2021-07-22T10:57:16.071Z", "created_at": "2021-07-22T10:39:56.815Z", "name": "Snowflake - Creative", "ad_account_id": "e4cd371b-8de8-4011-a8d2-860fe77c09e1", "type": "AD_TO_PLACE", "packaging_status": "SUCCESS", "review_status": "DISAPPROVED", "review_status_details": "Your ad encourages Snapchatters to take an action that is not possible with your selected ad type and attachment. Examples include: \n(i) An incorrect prompt to Snapchatters ('Watch' when the attachment is an app download), (ii) No attached content (asking Snapchatters to 'Play Now' with no game attached) or (iii) An action not applicable to the selected ad type ('Swipe Down'). \n\nPlease update your creative, resubmit for review, and our team will take another look.", "shareable": true, "forced_view_eligibility": "NONE", "headline": "Snowflake", "brand_name": "Tesla", "call_to_action": "SEE_PLACE", "render_type": "STATIC", "top_snap_media_id": "19aa1d00-c92c-468d-8941-905b0deac1a6", "top_snap_crop_position": "MIDDLE", "ad_product": "SNAP_AD", "ad_to_place_properties": {"place_id": "bb766df2-9994-11e8-bfe6-3fd872610c42"}}, "emitted_at": 1674640113102} -{"stream": "media", "data": {"id": "0a67c108-8f77-47e3-b233-a19fd31886b6", "updated_at": "2022-07-01T17:02:00.994Z", "created_at": "2022-07-01T17:01:36.082Z", "name": "blob.jpeg", "ad_account_id": "04214c00-3aa5-4123-b5c8-363c32c40e42", "type": "IMAGE", "media_status": "READY", "file_name": "a42a0f47-eb2d-4a63-b9b0-3ee727122013.jpeg", "download_link": "https://storage.googleapis.com/ad-manager-creatives-production-europe/0a67c108-8f77-47e3-b233-a19fd31886b6/a42a0f47-eb2d-4a63-b9b0-3ee727122013.jpeg", "image_metadata": {"height_px": 1920, "width_px": 1080, "image_format": "JPEG"}, "file_size_in_bytes": 232558, "is_demo_media": false, "hash": "fmJHDw==", "visibility": "VISIBLE"}, "emitted_at": 1674640113565} -{"stream": "media", "data": {"id": "43bc06da-0a96-4ea9-849e-753683c12f8b", "updated_at": "2021-08-03T07:35:20.085Z", "created_at": "2021-08-03T07:35:15.879Z", "name": "blob.jpeg", "ad_account_id": "04214c00-3aa5-4123-b5c8-363c32c40e42", "type": "IMAGE", "media_status": "READY", "file_name": "2c953b49-d746-4fd0-a672-16e2dee42e68.jpeg", "download_link": "https://storage.googleapis.com/ad-manager-creatives-production-europe/43bc06da-0a96-4ea9-849e-753683c12f8b/2c953b49-d746-4fd0-a672-16e2dee42e68.jpeg", "image_metadata": {"height_px": 1920, "width_px": 1080, "image_format": "JPEG"}, "file_size_in_bytes": 442393, "is_demo_media": false, "hash": "S0A/mg==", 
"visibility": "VISIBLE"}, "emitted_at": 1674640113565} -{"stream": "media", "data": {"id": "5b8037f6-ebb7-4863-b316-321df888c07c", "updated_at": "2021-07-07T07:40:09.531Z", "created_at": "2021-07-07T07:31:03.707Z", "name": "blob.jpeg", "ad_account_id": "04214c00-3aa5-4123-b5c8-363c32c40e42", "type": "IMAGE", "media_status": "READY", "file_name": "80d8c4db-1f33-4b97-b088-0b744361e603.jpeg", "download_link": "https://storage.googleapis.com/ad-manager-creatives-production-europe/5b8037f6-ebb7-4863-b316-321df888c07c/80d8c4db-1f33-4b97-b088-0b744361e603.jpeg", "image_metadata": {"height_px": 1920, "width_px": 1080, "image_format": "JPEG"}, "file_size_in_bytes": 219345, "is_demo_media": false, "hash": "jxa+Zg==", "visibility": "VISIBLE"}, "emitted_at": 1674640113566} -{"stream": "media", "data": {"id": "606e5b8e-458e-4983-876d-115765202546", "updated_at": "2021-08-03T08:02:42.122Z", "created_at": "2021-08-03T08:02:39.007Z", "name": "blob.jpeg", "ad_account_id": "04214c00-3aa5-4123-b5c8-363c32c40e42", "type": "IMAGE", "media_status": "READY", "file_name": "05d02e69-e405-41cc-9f2e-a4dfca618ae5.jpeg", "download_link": "https://storage.googleapis.com/ad-manager-creatives-production-europe/606e5b8e-458e-4983-876d-115765202546/05d02e69-e405-41cc-9f2e-a4dfca618ae5.jpeg", "image_metadata": {"height_px": 1920, "width_px": 1080, "image_format": "JPEG"}, "file_size_in_bytes": 231452, "is_demo_media": false, "hash": "fsZLAg==", "visibility": "VISIBLE"}, "emitted_at": 1674640113566} -{"stream": "media", "data": {"id": "aeedca16-d6a8-4c76-aa5e-fa0cfcdc647a", "updated_at": "2021-06-09T13:12:56.350Z", "created_at": "2021-06-09T13:12:52.369Z", "name": "blob.jpeg", "ad_account_id": "04214c00-3aa5-4123-b5c8-363c32c40e42", "type": "IMAGE", "media_status": "READY", "file_name": "4b2d034c-51df-4e09-899a-cee8946e2f76.jpeg", "download_link": "https://storage.googleapis.com/ad-manager-creatives-production-europe/aeedca16-d6a8-4c76-aa5e-fa0cfcdc647a/4b2d034c-51df-4e09-899a-cee8946e2f76.jpeg", "image_metadata": {"height_px": 1920, "width_px": 1080, "image_format": "JPEG"}, "file_size_in_bytes": 153526, "is_demo_media": false, "hash": "p+xelw==", "visibility": "VISIBLE"}, "emitted_at": 1674640113566} -{"stream": "media", "data": {"id": "e6be1b9e-6007-408a-b66b-b64a45effeb3", "updated_at": "2021-06-11T08:04:42.202Z", "created_at": "2021-06-11T08:04:11.986Z", "name": "blob.jpeg", "ad_account_id": "04214c00-3aa5-4123-b5c8-363c32c40e42", "type": "IMAGE", "media_status": "READY", "file_name": "18750465-4fd5-4071-90e9-44713a96d22b.jpeg", "download_link": "https://storage.googleapis.com/ad-manager-creatives-production-europe/e6be1b9e-6007-408a-b66b-b64a45effeb3/18750465-4fd5-4071-90e9-44713a96d22b.jpeg", "image_metadata": {"height_px": 1920, "width_px": 1080, "image_format": "JPEG"}, "file_size_in_bytes": 427072, "is_demo_media": false, "hash": "+9vx3g==", "visibility": "VISIBLE"}, "emitted_at": 1674640113566} -{"stream": "media", "data": {"id": "16d70457-09b9-4095-8e25-e9e3b625a905", "updated_at": "2021-07-22T10:32:05.719Z", "created_at": "2021-07-22T10:29:36.145Z", "ad_account_id": "e4cd371b-8de8-4011-a8d2-860fe77c09e1", "type": "VIDEO", "media_status": "READY", "file_name": "file.mp4", "download_link": "https://storage.googleapis.com/creativesuite-prod-media-public/b47418c3-cb02-48bb-bd41-94efbdd71283/file.mp4", "duration_in_seconds": 100.2, "video_metadata": {"width_px": 1080, "height_px": 1920, "rotation": null, "integrated_loudness": null, "true_peak": null}, "file_size_in_bytes": 5908636, "is_demo_media": false, "hash": 
"5mLt+w==", "visibility": "VISIBLE"}, "emitted_at": 1674640113879} -{"stream": "media", "data": {"id": "19aa1d00-c92c-468d-8941-905b0deac1a6", "updated_at": "2021-07-22T10:42:03.830Z", "created_at": "2021-07-22T10:39:50.894Z", "ad_account_id": "e4cd371b-8de8-4011-a8d2-860fe77c09e1", "type": "VIDEO", "media_status": "READY", "file_name": "file.mp4", "download_link": "https://storage.googleapis.com/creativesuite-prod-media-public/2c8a2bcc-6273-48c1-b8bb-784cf1e33a0b/file.mp4", "duration_in_seconds": 100.2, "video_metadata": {"width_px": 1080, "height_px": 1920, "rotation": null, "integrated_loudness": null, "true_peak": null}, "file_size_in_bytes": 5908636, "is_demo_media": false, "hash": "5mLt+w==", "visibility": "VISIBLE"}, "emitted_at": 1674640113879} -{"stream": "media", "data": {"id": "1d78d00f-1189-45c7-b866-a24abbe1cc26", "updated_at": "2021-07-22T10:47:05.780Z", "created_at": "2021-07-22T10:45:25.935Z", "ad_account_id": "e4cd371b-8de8-4011-a8d2-860fe77c09e1", "type": "VIDEO", "media_status": "READY", "file_name": "file.mp4", "download_link": "https://storage.googleapis.com/creativesuite-prod-media-public/9a0e1076-0023-45b4-8a78-2a7df5b9e5f9/file.mp4", "duration_in_seconds": 100.2, "video_metadata": {"width_px": 1080, "height_px": 1920, "rotation": null, "integrated_loudness": null, "true_peak": null}, "file_size_in_bytes": 5908636, "is_demo_media": false, "hash": "5mLt+w==", "visibility": "VISIBLE"}, "emitted_at": 1674640113879} -{"stream": "media", "data": {"id": "56bbbea0-f602-4a81-b8ed-ce8bf5ec91d4", "updated_at": "2022-07-01T17:14:28.577Z", "created_at": "2022-07-01T17:09:02.386Z", "name": "blob.jpeg", "ad_account_id": "e4cd371b-8de8-4011-a8d2-860fe77c09e1", "type": "IMAGE", "media_status": "READY", "file_name": "693ed113-4bb4-4987-89ca-c779d0c9431b.jpeg", "download_link": "https://storage.googleapis.com/ad-manager-creatives-production-europe/56bbbea0-f602-4a81-b8ed-ce8bf5ec91d4/693ed113-4bb4-4987-89ca-c779d0c9431b.jpeg", "image_metadata": {"height_px": 1920, "width_px": 1080, "image_format": "JPEG"}, "file_size_in_bytes": 351893, "is_demo_media": false, "hash": "P0mNqg==", "visibility": "VISIBLE"}, "emitted_at": 1674640113880} -{"stream": "organizations", "data": {"id": "7f064d90-52a1-42db-b25b-7539e663e926", "updated_at": "2023-06-07T16:39:22.334Z", "created_at": "2020-12-15T11:13:03.910Z", "name": "Daxtarity Inc.", "country": "US", "postal_code": "94121", "locality": "San Francisco", "contact_name": "Team Airbyte", "contact_email": "integration-test@airbyte.io", "contact_phone": "+14156236785", "address_line_1": "350 29th avenue", "administrative_district_level_1": "US-CA", "accepted_term_version": "8", "contact_phone_optin": true, "configuration_settings": {"notifications_enabled": true}, "type": "ENTERPRISE", "state": "ACTIVE", "roles": ["member", "business_admin", "admin"], "my_display_name": "Team Airbyte", "my_invited_email": "integration-test@airbyte.io", "my_member_id": "b9b2ab5f-e886-470c-92ae-0725d79a9146", "createdByCaller": true}, "emitted_at": 1686262790831} +{"stream": "media", "data": {"id": "0a67c108-8f77-47e3-b233-a19fd31886b6", "updated_at": "2024-01-09T22:32:03.236Z", "created_at": "2022-07-01T17:01:36.082Z", "name": "blob.jpeg", "ad_account_id": "04214c00-3aa5-4123-b5c8-363c32c40e42", "type": "IMAGE", "media_status": "READY", "file_name": "a42a0f47-eb2d-4a63-b9b0-3ee727122013.jpeg", "download_link": "https://storage.googleapis.com/ad-manager-creatives-production-europe/0a67c108-8f77-47e3-b233-a19fd31886b6/a42a0f47-eb2d-4a63-b9b0-3ee727122013.jpeg", 
"image_metadata": {"height_px": 1920, "width_px": 1080, "image_format": "JPEG"}, "file_size_in_bytes": 232558, "is_demo_media": false, "hash": "fmJHDw==", "visibility": "VISIBLE", "media_usages": ["TOP_SNAP"]}, "emitted_at": 1709042796992} +{"stream": "media", "data": {"id": "43bc06da-0a96-4ea9-849e-753683c12f8b", "updated_at": "2024-01-10T18:46:04.542Z", "created_at": "2021-08-03T07:35:15.879Z", "name": "blob.jpeg", "ad_account_id": "04214c00-3aa5-4123-b5c8-363c32c40e42", "type": "IMAGE", "media_status": "READY", "file_name": "2c953b49-d746-4fd0-a672-16e2dee42e68.jpeg", "download_link": "https://storage.googleapis.com/ad-manager-creatives-production-europe/43bc06da-0a96-4ea9-849e-753683c12f8b/2c953b49-d746-4fd0-a672-16e2dee42e68.jpeg", "image_metadata": {"height_px": 1920, "width_px": 1080, "image_format": "JPEG"}, "file_size_in_bytes": 442393, "is_demo_media": false, "hash": "S0A/mg==", "visibility": "VISIBLE", "media_usages": ["TOP_SNAP"]}, "emitted_at": 1709042796993} +{"stream": "media", "data": {"id": "5b8037f6-ebb7-4863-b316-321df888c07c", "updated_at": "2024-01-10T19:41:48.892Z", "created_at": "2021-07-07T07:31:03.707Z", "name": "blob.jpeg", "ad_account_id": "04214c00-3aa5-4123-b5c8-363c32c40e42", "type": "IMAGE", "media_status": "READY", "file_name": "80d8c4db-1f33-4b97-b088-0b744361e603.jpeg", "download_link": "https://storage.googleapis.com/ad-manager-creatives-production-europe/5b8037f6-ebb7-4863-b316-321df888c07c/80d8c4db-1f33-4b97-b088-0b744361e603.jpeg", "image_metadata": {"height_px": 1920, "width_px": 1080, "image_format": "JPEG"}, "file_size_in_bytes": 219345, "is_demo_media": false, "hash": "jxa+Zg==", "visibility": "VISIBLE", "media_usages": ["TOP_SNAP"]}, "emitted_at": 1709042796994} +{"stream": "media", "data": {"id": "606e5b8e-458e-4983-876d-115765202546", "updated_at": "2024-01-10T21:03:33.518Z", "created_at": "2021-08-03T08:02:39.007Z", "name": "blob.jpeg", "ad_account_id": "04214c00-3aa5-4123-b5c8-363c32c40e42", "type": "IMAGE", "media_status": "READY", "file_name": "05d02e69-e405-41cc-9f2e-a4dfca618ae5.jpeg", "download_link": "https://storage.googleapis.com/ad-manager-creatives-production-europe/606e5b8e-458e-4983-876d-115765202546/05d02e69-e405-41cc-9f2e-a4dfca618ae5.jpeg", "image_metadata": {"height_px": 1920, "width_px": 1080, "image_format": "JPEG"}, "file_size_in_bytes": 231452, "is_demo_media": false, "hash": "fsZLAg==", "visibility": "VISIBLE", "media_usages": ["TOP_SNAP"]}, "emitted_at": 1709042796994} +{"stream": "media", "data": {"id": "aeedca16-d6a8-4c76-aa5e-fa0cfcdc647a", "updated_at": "2024-01-10T22:51:21.901Z", "created_at": "2021-06-09T13:12:52.369Z", "name": "blob.jpeg", "ad_account_id": "04214c00-3aa5-4123-b5c8-363c32c40e42", "type": "IMAGE", "media_status": "READY", "file_name": "4b2d034c-51df-4e09-899a-cee8946e2f76.jpeg", "download_link": "https://storage.googleapis.com/ad-manager-creatives-production-europe/aeedca16-d6a8-4c76-aa5e-fa0cfcdc647a/4b2d034c-51df-4e09-899a-cee8946e2f76.jpeg", "image_metadata": {"height_px": 1920, "width_px": 1080, "image_format": "JPEG"}, "file_size_in_bytes": 153526, "is_demo_media": false, "hash": "p+xelw==", "visibility": "VISIBLE", "media_usages": ["TOP_SNAP"]}, "emitted_at": 1709042796994} +{"stream": "media", "data": {"id": "e6be1b9e-6007-408a-b66b-b64a45effeb3", "updated_at": "2024-01-11T01:07:43.028Z", "created_at": "2021-06-11T08:04:11.986Z", "name": "blob.jpeg", "ad_account_id": "04214c00-3aa5-4123-b5c8-363c32c40e42", "type": "IMAGE", "media_status": "READY", "file_name": 
"18750465-4fd5-4071-90e9-44713a96d22b.jpeg", "download_link": "https://storage.googleapis.com/ad-manager-creatives-production-europe/e6be1b9e-6007-408a-b66b-b64a45effeb3/18750465-4fd5-4071-90e9-44713a96d22b.jpeg", "image_metadata": {"height_px": 1920, "width_px": 1080, "image_format": "JPEG"}, "file_size_in_bytes": 427072, "is_demo_media": false, "hash": "+9vx3g==", "visibility": "VISIBLE", "media_usages": ["TOP_SNAP"]}, "emitted_at": 1709042796995} +{"stream": "media", "data": {"id": "16d70457-09b9-4095-8e25-e9e3b625a905", "updated_at": "2024-01-09T23:27:41.735Z", "created_at": "2021-07-22T10:29:36.145Z", "ad_account_id": "e4cd371b-8de8-4011-a8d2-860fe77c09e1", "type": "VIDEO", "media_status": "READY", "file_name": "file.mp4", "download_link": "https://storage.googleapis.com/creativesuite-prod-media-public/b47418c3-cb02-48bb-bd41-94efbdd71283/file.mp4", "duration_in_seconds": 100.2, "video_metadata": {"width_px": 1080, "height_px": 1920, "rotation": null, "integrated_loudness": null, "true_peak": null}, "file_size_in_bytes": 5908636, "is_demo_media": false, "hash": "5mLt+w==", "visibility": "VISIBLE", "media_usages": ["TOP_SNAP"]}, "emitted_at": 1709042797398} +{"stream": "media", "data": {"id": "19aa1d00-c92c-468d-8941-905b0deac1a6", "updated_at": "2024-01-09T23:54:52.341Z", "created_at": "2021-07-22T10:39:50.894Z", "ad_account_id": "e4cd371b-8de8-4011-a8d2-860fe77c09e1", "type": "VIDEO", "media_status": "READY", "file_name": "file.mp4", "download_link": "https://storage.googleapis.com/creativesuite-prod-media-public/2c8a2bcc-6273-48c1-b8bb-784cf1e33a0b/file.mp4", "duration_in_seconds": 100.2, "video_metadata": {"width_px": 1080, "height_px": 1920, "rotation": null, "integrated_loudness": null, "true_peak": null}, "file_size_in_bytes": 5908636, "is_demo_media": false, "hash": "5mLt+w==", "visibility": "VISIBLE", "media_usages": ["TOP_SNAP"]}, "emitted_at": 1709042797399} +{"stream": "media", "data": {"id": "1d78d00f-1189-45c7-b866-a24abbe1cc26", "updated_at": "2024-01-09T23:54:59.948Z", "created_at": "2021-07-22T10:45:25.935Z", "ad_account_id": "e4cd371b-8de8-4011-a8d2-860fe77c09e1", "type": "VIDEO", "media_status": "READY", "file_name": "file.mp4", "download_link": "https://storage.googleapis.com/creativesuite-prod-media-public/9a0e1076-0023-45b4-8a78-2a7df5b9e5f9/file.mp4", "duration_in_seconds": 100.2, "video_metadata": {"width_px": 1080, "height_px": 1920, "rotation": null, "integrated_loudness": null, "true_peak": null}, "file_size_in_bytes": 5908636, "is_demo_media": false, "hash": "5mLt+w==", "visibility": "VISIBLE", "media_usages": ["TOP_SNAP"]}, "emitted_at": 1709042797400} +{"stream": "media", "data": {"id": "56bbbea0-f602-4a81-b8ed-ce8bf5ec91d4", "updated_at": "2024-01-10T19:54:49.803Z", "created_at": "2022-07-01T17:09:02.386Z", "name": "blob.jpeg", "ad_account_id": "e4cd371b-8de8-4011-a8d2-860fe77c09e1", "type": "IMAGE", "media_status": "READY", "file_name": "693ed113-4bb4-4987-89ca-c779d0c9431b.jpeg", "download_link": "https://storage.googleapis.com/ad-manager-creatives-production-europe/56bbbea0-f602-4a81-b8ed-ce8bf5ec91d4/693ed113-4bb4-4987-89ca-c779d0c9431b.jpeg", "image_metadata": {"height_px": 1920, "width_px": 1080, "image_format": "JPEG"}, "file_size_in_bytes": 351893, "is_demo_media": false, "hash": "P0mNqg==", "visibility": "VISIBLE", "media_usages": ["TOP_SNAP"]}, "emitted_at": 1709042797400} +{"stream": "organizations", "data": {"id": "7f064d90-52a1-42db-b25b-7539e663e926", "updated_at": "2023-06-07T16:39:22.334Z", "created_at": "2020-12-15T11:13:03.910Z", "name": 
"Daxtarity Inc.", "country": "US", "postal_code": "94121", "locality": "San Francisco", "contact_name": "Team Airbyte", "contact_email": "integration-test@airbyte.io", "contact_phone": "+14156236785", "address_line_1": "350 29th avenue", "administrative_district_level_1": "US-CA", "accepted_term_version": "8", "contact_phone_optin": true, "configuration_settings": {"notifications_enabled": true}, "type": "ENTERPRISE", "state": "ACTIVE", "roles": ["admin", "member", "business_admin"], "my_display_name": "Airbyte Integration Test account", "my_invited_email": "integration-test+snapchatdevaccount@airbyte.io", "my_member_id": "add6e319-95d5-4e37-8c7e-5fa4bd2f4aa7", "createdByCaller": false}, "emitted_at": 1709043273579} {"stream": "segments", "data": {"id": "4629391772795692", "updated_at": "2021-07-22T21:50:36.818Z", "created_at": "2021-07-22T21:50:36.669Z", "name": "Created from Postman First Account 1", "ad_account_id": "04214c00-3aa5-4123-b5c8-363c32c40e42", "organization_id": "7f064d90-52a1-42db-b25b-7539e663e926", "description": "Test segment from Postman First Account 1", "status": "ACTIVE", "targetable_status": "READY", "upload_status": "NO_UPLOAD", "source_type": "FIRST_PARTY", "retention_in_days": 180, "approximate_number_users": 0, "visible_to": ["AdAccountEntity_04214c00-3aa5-4123-b5c8-363c32c40e42"]}, "emitted_at": 1674640114897} {"stream": "segments", "data": {"id": "5760336100911495", "updated_at": "2021-07-29T13:01:26.090Z", "created_at": "2021-07-29T13:01:25.961Z", "name": "postman_test_1", "ad_account_id": "04214c00-3aa5-4123-b5c8-363c32c40e42", "organization_id": "7f064d90-52a1-42db-b25b-7539e663e926", "description": "postman_test_1", "status": "ACTIVE", "targetable_status": "READY", "upload_status": "NO_UPLOAD", "source_type": "FIRST_PARTY", "retention_in_days": 180, "approximate_number_users": 0, "visible_to": ["AdAccountEntity_04214c00-3aa5-4123-b5c8-363c32c40e42"]}, "emitted_at": 1674640114898} {"stream": "segments", "data": {"id": "5790170873686082", "updated_at": "2021-07-22T21:38:32.729Z", "created_at": "2021-07-22T21:38:32.597Z", "name": "Audience_Match_Email_Example", "ad_account_id": "04214c00-3aa5-4123-b5c8-363c32c40e42", "organization_id": "7f064d90-52a1-42db-b25b-7539e663e926", "description": "Custom Email Test Audience", "status": "ACTIVE", "targetable_status": "READY", "upload_status": "COMPLETE", "source_type": "FIRST_PARTY", "retention_in_days": 9999, "approximate_number_users": 0, "visible_to": ["AdAccountEntity_04214c00-3aa5-4123-b5c8-363c32c40e42"]}, "emitted_at": 1674640114898} diff --git a/airbyte-integrations/connectors/source-snapchat-marketing/metadata.yaml b/airbyte-integrations/connectors/source-snapchat-marketing/metadata.yaml index 7c4cb1d20351..a024f27d9973 100644 --- a/airbyte-integrations/connectors/source-snapchat-marketing/metadata.yaml +++ b/airbyte-integrations/connectors/source-snapchat-marketing/metadata.yaml @@ -8,7 +8,7 @@ data: connectorSubtype: api connectorType: source definitionId: 200330b2-ea62-4d11-ac6d-cfe3e3f8ab2b - dockerImageTag: 0.3.2 + dockerImageTag: 0.4.0 dockerRepository: airbyte/source-snapchat-marketing githubIssueLabel: source-snapchat-marketing icon: snapchat.svg diff --git a/airbyte-integrations/connectors/source-snapchat-marketing/pyproject.toml b/airbyte-integrations/connectors/source-snapchat-marketing/pyproject.toml index 4b3af63198c9..3efcb8143a35 100644 --- a/airbyte-integrations/connectors/source-snapchat-marketing/pyproject.toml +++ b/airbyte-integrations/connectors/source-snapchat-marketing/pyproject.toml 
@@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "0.3.2" +version = "0.4.0" name = "source-snapchat-marketing" description = "Source implementation for Snapchat Marketing." authors = [ "Airbyte ",] diff --git a/airbyte-integrations/connectors/source-snapchat-marketing/source_snapchat_marketing/schemas/ads.json b/airbyte-integrations/connectors/source-snapchat-marketing/source_snapchat_marketing/schemas/ads.json index 8d71bcbea453..f4e1333621c6 100644 --- a/airbyte-integrations/connectors/source-snapchat-marketing/source_snapchat_marketing/schemas/ads.json +++ b/airbyte-integrations/connectors/source-snapchat-marketing/source_snapchat_marketing/schemas/ads.json @@ -31,6 +31,12 @@ "review_status": { "type": ["null", "string"] }, + "review_status_reasons": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + }, "delivery_status": { "type": ["null", "array"], "items": { diff --git a/airbyte-integrations/connectors/source-snapchat-marketing/source_snapchat_marketing/schemas/adsquads.json b/airbyte-integrations/connectors/source-snapchat-marketing/source_snapchat_marketing/schemas/adsquads.json index 956b9699f6e9..9fae20d4268c 100644 --- a/airbyte-integrations/connectors/source-snapchat-marketing/source_snapchat_marketing/schemas/adsquads.json +++ b/airbyte-integrations/connectors/source-snapchat-marketing/source_snapchat_marketing/schemas/adsquads.json @@ -35,6 +35,76 @@ "properties": { "country_code": { "type": ["null", "string"] + }, + "operation": { + "type": ["null", "string"] + } + } + } + }, + "locations": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "circles": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "latitude": { + "type": ["null", "number"] + }, + "longitude": { + "type": ["null", "number"] + }, + "name": { + "type": ["null", "string"] + }, + "radius": { + "type": ["null", "number"] + }, + "unit": { + "type": ["null", "string"] + } + } + } + }, + "operation": { + "type": ["null", "string"] + } + } + } + }, + "auto_expansion_options": { + "type": ["null", "object"], + "properties": { + "interest_expansion_option": { + "type": ["null", "object"], + "properties": { + "enabled": { + "type": ["null", "boolean"] + } + } + } + } + }, + "enable_targeting_expansion": { + "type": ["null", "boolean"] + }, + "interests": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "category_id": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + }, + "operation": { + "type": ["null", "string"] } } } @@ -77,6 +147,9 @@ "delivery_constraint": { "type": ["null", "string"] }, + "delivery_properties_version": { + "type": ["null", "integer"] + }, "pacing_type": { "type": ["null", "string"] }, @@ -95,9 +168,26 @@ "type": ["null", "string"] } }, + "event_sources": { + "type": ["null", "object"], + "properties": { + "PLACE": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + } + } + }, "skadnetwork_properties": { "type": ["null", "object"], "properties": { + "ecid_enrollment_status": { + "type": ["null", "string"] + }, + "enable_skoverlay": { + "type": ["null", "boolean"] + }, "status": { "type": ["null", "string"] } diff --git a/airbyte-integrations/connectors/source-snapchat-marketing/source_snapchat_marketing/schemas/creatives.json 
b/airbyte-integrations/connectors/source-snapchat-marketing/source_snapchat_marketing/schemas/creatives.json index f8d17493f2e0..cc3cc84535be 100644 --- a/airbyte-integrations/connectors/source-snapchat-marketing/source_snapchat_marketing/schemas/creatives.json +++ b/airbyte-integrations/connectors/source-snapchat-marketing/source_snapchat_marketing/schemas/creatives.json @@ -74,6 +74,14 @@ }, "ad_product": { "type": ["null", "string"] + }, + "ad_to_place_properties": { + "type": ["null", "object"], + "properties": { + "place_id": { + "type": ["null", "string"] + } + } } } } diff --git a/airbyte-integrations/connectors/source-snapchat-marketing/source_snapchat_marketing/schemas/media.json b/airbyte-integrations/connectors/source-snapchat-marketing/source_snapchat_marketing/schemas/media.json index 207aff729768..bc056c1207ae 100644 --- a/airbyte-integrations/connectors/source-snapchat-marketing/source_snapchat_marketing/schemas/media.json +++ b/airbyte-integrations/connectors/source-snapchat-marketing/source_snapchat_marketing/schemas/media.json @@ -19,6 +19,12 @@ "media_status": { "type": ["null", "string"] }, + "media_usages": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + }, "file_name": { "type": ["null", "string"] }, diff --git a/docs/integrations/sources/snapchat-marketing.md b/docs/integrations/sources/snapchat-marketing.md index a11dd398f260..f6c6d8e0b39d 100644 --- a/docs/integrations/sources/snapchat-marketing.md +++ b/docs/integrations/sources/snapchat-marketing.md @@ -111,24 +111,25 @@ Snapchat Marketing API has limitations to 1000 items per page. ## Changelog -| Version | Date | Pull Request | Subject | -|:--------|:-----------|:---------------------------------------------------------|:--------------------------------------------------------------| -| 0.3.2 | 2024-02-12 | [35171](https://github.com/airbytehq/airbyte/pull/35171) | Manage dependencies with Poetry. 
| -| 0.3.0 | 2023-05-22 | [26358](https://github.com/airbytehq/airbyte/pull/26358) | Remove deprecated authSpecification in favour of advancedAuth | -| 0.2.0 | 2023-05-10 | [25948](https://github.com/airbytehq/airbyte/pull/25948) | Introduce new field in the `Campaigns` stream schema | -| 0.1.16 | 2023-04-20 | [20897](https://github.com/airbytehq/airbyte/pull/20897) | Add missing fields to Basic Stats schema | -| 0.1.15 | 2023-03-02 | [22869](https://github.com/airbytehq/airbyte/pull/22869) | Specified date formatting in specification | -| 0.1.14 | 2023-02-10 | [22808](https://github.com/airbytehq/airbyte/pull/22808) | Enable default `AvailabilityStrategy` | -| 0.1.13 | 2023-01-27 | [22023](https://github.com/airbytehq/airbyte/pull/22023) | Set `AvailabilityStrategy` for streams explicitly to `None` | -| 0.1.12 | 2023-01-11 | [21267](https://github.com/airbytehq/airbyte/pull/21267) | Fix parse empty error response | -| 0.1.11 | 2022-12-23 | [20865](https://github.com/airbytehq/airbyte/pull/20865) | Handle 403 permission error | -| 0.1.10 | 2022-12-15 | [20537](https://github.com/airbytehq/airbyte/pull/20537) | Run on CDK 0.15.0 | -| 0.1.9 | 2022-12-14 | [20498](https://github.com/airbytehq/airbyte/pull/20498) | Fix output state when no records are read | -| 0.1.8 | 2022-10-05 | [17596](https://github.com/airbytehq/airbyte/pull/17596) | Retry 429 and 5xx errors when refreshing access token | -| 0.1.6 | 2022-07-21 | [14924](https://github.com/airbytehq/airbyte/pull/14924) | Remove `additionalProperties` field from specs | -| 0.1.5 | 2022-07-13 | [14577](https://github.com/airbytehq/airbyte/pull/14577) | Added stats streams hourly, daily, lifetime | -| 0.1.4 | 2021-12-07 | [8429](https://github.com/airbytehq/airbyte/pull/8429) | Update titles and descriptions | -| 0.1.3 | 2021-11-10 | [7811](https://github.com/airbytehq/airbyte/pull/7811) | Add oauth2.0, fix stream_state | -| 0.1.2 | 2021-11-08 | [7499](https://github.com/airbytehq/airbyte/pull/7499) | Remove base-python dependencies | -| 0.1.1 | 2021-07-29 | [5072](https://github.com/airbytehq/airbyte/pull/5072) | Fix bug with incorrect stream\_state value | -| 0.1.0 | 2021-07-26 | [4843](https://github.com/airbytehq/airbyte/pull/4843) | Initial release supporting the Snapchat Marketing API | +| Version | Date | Pull Request | Subject | +|:--------|:-----------|:---------------------------------------------------------|:-------------------------------------------------------------------------------| +| 0.4.0 | 2024-02-27 | [35660](https://github.com/airbytehq/airbyte/pull/35660) | Add new fields to streams `ads`, `adsquads`, `creatives`, and `media` | +| 0.3.2 | 2024-02-12 | [35171](https://github.com/airbytehq/airbyte/pull/35171) | Manage dependencies with Poetry. 
| +| 0.3.0 | 2023-05-22 | [26358](https://github.com/airbytehq/airbyte/pull/26358) | Remove deprecated authSpecification in favour of advancedAuth | +| 0.2.0 | 2023-05-10 | [25948](https://github.com/airbytehq/airbyte/pull/25948) | Introduce new field in the `Campaigns` stream schema | +| 0.1.16 | 2023-04-20 | [20897](https://github.com/airbytehq/airbyte/pull/20897) | Add missing fields to Basic Stats schema | +| 0.1.15 | 2023-03-02 | [22869](https://github.com/airbytehq/airbyte/pull/22869) | Specified date formatting in specification | +| 0.1.14 | 2023-02-10 | [22808](https://github.com/airbytehq/airbyte/pull/22808) | Enable default `AvailabilityStrategy` | +| 0.1.13 | 2023-01-27 | [22023](https://github.com/airbytehq/airbyte/pull/22023) | Set `AvailabilityStrategy` for streams explicitly to `None` | +| 0.1.12 | 2023-01-11 | [21267](https://github.com/airbytehq/airbyte/pull/21267) | Fix parse empty error response | +| 0.1.11 | 2022-12-23 | [20865](https://github.com/airbytehq/airbyte/pull/20865) | Handle 403 permission error | +| 0.1.10 | 2022-12-15 | [20537](https://github.com/airbytehq/airbyte/pull/20537) | Run on CDK 0.15.0 | +| 0.1.9 | 2022-12-14 | [20498](https://github.com/airbytehq/airbyte/pull/20498) | Fix output state when no records are read | +| 0.1.8 | 2022-10-05 | [17596](https://github.com/airbytehq/airbyte/pull/17596) | Retry 429 and 5xx errors when refreshing access token | +| 0.1.6 | 2022-07-21 | [14924](https://github.com/airbytehq/airbyte/pull/14924) | Remove `additionalProperties` field from specs | +| 0.1.5 | 2022-07-13 | [14577](https://github.com/airbytehq/airbyte/pull/14577) | Added stats streams hourly, daily, lifetime | +| 0.1.4 | 2021-12-07 | [8429](https://github.com/airbytehq/airbyte/pull/8429) | Update titles and descriptions | +| 0.1.3 | 2021-11-10 | [7811](https://github.com/airbytehq/airbyte/pull/7811) | Add oauth2.0, fix stream_state | +| 0.1.2 | 2021-11-08 | [7499](https://github.com/airbytehq/airbyte/pull/7499) | Remove base-python dependencies | +| 0.1.1 | 2021-07-29 | [5072](https://github.com/airbytehq/airbyte/pull/5072) | Fix bug with incorrect stream\_state value | +| 0.1.0 | 2021-07-26 | [4843](https://github.com/airbytehq/airbyte/pull/4843) | Initial release supporting the Snapchat Marketing API | From 7baf1540a7a31498e7b7c97a350aa36a7442fb6f Mon Sep 17 00:00:00 2001 From: Maxime Carbonneau-Leclerc <3360483+maxi297@users.noreply.github.com> Date: Tue, 27 Feb 2024 13:53:51 -0500 Subject: [PATCH 005/172] =?UTF-8?q?[ISSUE=20#35110]=20match=20CATs=20recor?= =?UTF-8?q?ds=20only=20one=20primary=20key=20when=20primary=20k=E2=80=A6?= =?UTF-8?q?=20(#35556)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../connector-acceptance-test/CHANGELOG.md | 3 + .../tests/test_core.py | 62 ++++++- .../utils/asserts.py | 5 +- .../connector-acceptance-test/poetry.lock | 5 +- .../connector-acceptance-test/pyproject.toml | 2 +- .../unit_tests/test_asserts.py | 51 ----- .../unit_tests/test_core.py | 174 +++++++++--------- 7 files changed, 156 insertions(+), 146 deletions(-) diff --git a/airbyte-integrations/bases/connector-acceptance-test/CHANGELOG.md b/airbyte-integrations/bases/connector-acceptance-test/CHANGELOG.md index 5166e0fc1dc0..e7d36c2769c2 100644 --- a/airbyte-integrations/bases/connector-acceptance-test/CHANGELOG.md +++ b/airbyte-integrations/bases/connector-acceptance-test/CHANGELOG.md @@ -1,5 +1,8 @@ # Changelog +## 3.6.0 +Relaxing CATs validation when a stream has a primary key defined. 
+ ## 3.5.0 Add `validate_stream_statuses` to TestBasicRead.test_read:: Validate all statuses for all streams in the catalogs were emitted in correct order. diff --git a/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/tests/test_core.py b/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/tests/test_core.py index 29e1fc4e39fd..ce2df96dc858 100644 --- a/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/tests/test_core.py +++ b/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/tests/test_core.py @@ -12,7 +12,7 @@ from os.path import splitext from pathlib import Path from threading import Thread -from typing import Any, Dict, List, Mapping, MutableMapping, Optional, Set, Tuple +from typing import Any, Dict, Iterable, List, Mapping, MutableMapping, Optional, Set, Tuple from xmlrpc.client import Boolean import connector_acceptance_test.utils.docs as docs_utils @@ -21,6 +21,7 @@ import pytest import requests from airbyte_protocol.models import ( + AirbyteMessage, AirbyteRecordMessage, AirbyteStream, AirbyteStreamStatus, @@ -838,14 +839,23 @@ def primary_keys_for_records(streams, records): for stream in streams_with_primary_key: stream_records = [r for r in records if r.stream == stream.stream.name] for stream_record in stream_records: - pk_values = {} - for pk_path in stream.stream.source_defined_primary_key: - pk_value = reduce(lambda data, key: data.get(key) if isinstance(data, dict) else None, pk_path, stream_record.data) - pk_values[tuple(pk_path)] = pk_value - + pk_values = _extract_primary_key_value(stream_record.data, stream.stream.source_defined_primary_key) yield pk_values, stream_record +def _extract_pk_values(records: Iterable[Mapping[str, Any]], primary_key: List[List[str]]) -> Iterable[dict[Tuple[str], Any]]: + for record in records: + yield _extract_primary_key_value(record, primary_key) + + +def _extract_primary_key_value(record: Mapping[str, Any], primary_key: List[List[str]]) -> dict[Tuple[str], Any]: + pk_values = {} + for pk_path in primary_key: + pk_value: Any = reduce(lambda data, key: data.get(key) if isinstance(data, dict) else None, pk_path, record) + pk_values[tuple(pk_path)] = pk_value + return pk_values + + @pytest.mark.default_timeout(TEN_MINUTES) class TestBasicRead(BaseTest): @staticmethod @@ -953,6 +963,7 @@ def _validate_expected_records( flags, ignored_fields: Optional[Mapping[str, List[IgnoredFieldsConfiguration]]], detailed_logger: Logger, + configured_catalog: ConfiguredAirbyteCatalog, ): """ We expect some records from stream to match expected_records, partially or fully, in exact or any order. 
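The helpers added above reduce each configured primary-key path into the record, so flat, nested, and composite keys all collapse to a tuple-keyed mapping that the record comparison can match between expected and actual records. Here is a minimal standalone sketch of that extraction, mirroring `_extract_primary_key_value` (renamed without the leading underscore so the snippet runs on its own):

```python
from functools import reduce


def extract_primary_key_value(record: dict, primary_key: list) -> dict:
    # Walk each key path into the (possibly nested) record; a missing or
    # non-dict intermediate value yields None for that path.
    pk_values = {}
    for pk_path in primary_key:
        pk_value = reduce(lambda data, key: data.get(key) if isinstance(data, dict) else None, pk_path, record)
        pk_values[tuple(pk_path)] = pk_value
    return pk_values


# A composite, nested key: one flat path plus one two-level path.
record = {"primary_key_1": "a", "primary_key_2_1": {"primary_key_2_2": "b"}, "extra": 1}
assert extract_primary_key_value(record, [["primary_key_1"], ["primary_key_2_1", "primary_key_2_2"]]) == {
    ("primary_key_1",): "a",
    ("primary_key_2_1", "primary_key_2_2"): "b",
}
```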
@@ -972,6 +983,7 @@ def _validate_expected_records( extra_records=flags.extra_records, ignored_fields=ignored_field_names, detailed_logger=detailed_logger, + configured_catalog=configured_catalog, ) @pytest.fixture(name="should_validate_schema") @@ -1081,6 +1093,7 @@ async def test_read( flags=expect_records_config, ignored_fields=ignored_fields, detailed_logger=detailed_logger, + configured_catalog=configured_catalog, ) if should_validate_stream_statuses: @@ -1142,8 +1155,45 @@ def compare_records( extra_records: bool, ignored_fields: List[str], detailed_logger: Logger, + configured_catalog: ConfiguredAirbyteCatalog, ): """Compare records using combination of restrictions""" + configured_streams = [stream for stream in configured_catalog.streams if stream.stream.name == stream_name] + if len(configured_streams) != 1: + raise ValueError(f"Expected exactly one stream matching name {stream_name} but got {len(configured_streams)}") + + configured_stream = configured_streams[0] + if configured_stream.stream.source_defined_primary_key: + # as part of the migration for relaxing CATs, we are starting only with the streams that define primary keys + expected_primary_keys = list(_extract_pk_values(expected, configured_stream.stream.source_defined_primary_key)) + actual_primary_keys = list(_extract_pk_values(actual, configured_stream.stream.source_defined_primary_key)) + if exact_order: + assert ( + actual_primary_keys[: len(expected_primary_keys)] == expected_primary_keys + ), f"Expected to see those primary keys in order in the actual response for stream {stream_name}." + else: + expected_but_not_found = set(map(make_hashable, expected_primary_keys)).difference( + set(map(make_hashable, actual_primary_keys)) + ) + assert ( + not expected_but_not_found + ), f"Expected to see those primary keys in the actual response for stream {stream_name} but they were not found." + else: + TestBasicRead.legacy_compare_records( + stream_name, actual, expected, extra_fields, exact_order, extra_records, ignored_fields, detailed_logger + ) + + @staticmethod + def legacy_compare_records( + stream_name: str, + actual: List[Mapping[str, Any]], + expected: List[Mapping[str, Any]], + extra_fields: bool, + exact_order: bool, + extra_records: bool, + ignored_fields: List[str], + detailed_logger: Logger, + ): if exact_order: if ignored_fields: for item in actual: diff --git a/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/utils/asserts.py b/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/utils/asserts.py index 8362aac10053..36a3e01c1158 100644 --- a/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/utils/asserts.py +++ b/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/utils/asserts.py @@ -88,7 +88,10 @@ def verify_records_schema( stream_validators = {} for stream in catalog.streams: schema_to_validate_against = stream.stream.json_schema - validator = NoAdditionalPropertiesValidator if fail_on_extra_columns else Draft7ValidatorWithStrictInteger + # We will be disabling strict `NoAdditionalPropertiesValidator` until we have a better plan for schema validation.
The consequence + # is that we will lack visibility on new fields that are not added on the root level (root level is validated by Datadog) + # validator = NoAdditionalPropertiesValidator if fail_on_extra_columns else Draft7ValidatorWithStrictInteger + validator = Draft7ValidatorWithStrictInteger stream_validators[stream.stream.name] = validator(schema_to_validate_against, format_checker=CustomFormatChecker()) stream_errors = defaultdict(dict) for record in records: diff --git a/airbyte-integrations/bases/connector-acceptance-test/poetry.lock b/airbyte-integrations/bases/connector-acceptance-test/poetry.lock index fff2f1561b59..508501085564 100644 --- a/airbyte-integrations/bases/connector-acceptance-test/poetry.lock +++ b/airbyte-integrations/bases/connector-acceptance-test/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] name = "airbyte-protocol-models" @@ -1226,6 +1226,7 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -1687,4 +1688,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "1c468b66c56cfccd5e5bff7d9c69f01c729d828132a8a56a7089447f5da0f534" +content-hash = "9d53af4fe5cca16b6ce5a61f3f7d286b561af9920f77163e00e4e59eacc9e4f6" diff --git a/airbyte-integrations/bases/connector-acceptance-test/pyproject.toml b/airbyte-integrations/bases/connector-acceptance-test/pyproject.toml index ed80ea830fd8..41522609d0cb 100644 --- a/airbyte-integrations/bases/connector-acceptance-test/pyproject.toml +++ b/airbyte-integrations/bases/connector-acceptance-test/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "connector-acceptance-test" -version = "3.5.0" +version = "3.6.0" description = "Contains acceptance tests for connectors." 
authors = ["Airbyte "] license = "MIT" diff --git a/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_asserts.py b/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_asserts.py index d0732458ec4d..0b78b93148c6 100644 --- a/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_asserts.py +++ b/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_asserts.py @@ -87,57 +87,6 @@ def test_verify_records_schema(configured_catalog: ConfiguredAirbyteCatalog): ] -@pytest.mark.parametrize( - "json_schema, record, should_fail", - [ - ({"type": "object", "properties": {"a": {"type": "string"}}}, {"a": "str", "b": "extra_string"}, True), - ( - {"type": "object", "properties": {"a": {"type": "string"}, "some_obj": {"type": ["null", "object"]}}}, - {"a": "str", "some_obj": {"b": "extra_string"}}, - False, - ), - ( - { - "type": "object", - "properties": {"a": {"type": "string"}, "some_obj": {"type": ["null", "object"], "properties": {"a": {"type": "string"}}}}, - }, - {"a": "str", "some_obj": {"a": "str", "b": "extra_string"}}, - True, - ), - ( - {"type": "object", "properties": {"a": {"type": "string"}, "b": {"type": "array", "items": {"type": "object"}}}}, - {"a": "str", "b": [{"a": "extra_string"}]}, - False, - ), - ( - { - "type": "object", - "properties": { - "a": {"type": "string"}, - "b": {"type": "array", "items": {"type": "object", "properties": {"a": {"type": "string"}}}}, - }, - }, - {"a": "str", "b": [{"a": "string", "b": "extra_string"}]}, - True, - ), - ], - ids=[ - "simple_schema_and_record_with_extra_property", - "schema_with_object_without_properties_and_record_with_object_with_property", - "schema_with_object_with_properties_and_record_with_object_with_extra_property", - "schema_with_array_of_objects_without_properties_and_record_with_array_of_objects_with_property", - "schema_with_array_of_objects_with_properties_and_record_with_array_of_objects_with_extra_property", - ], -) -def test_verify_records_schema_with_fail_on_extra_columns(configured_catalog: ConfiguredAirbyteCatalog, json_schema, record, should_fail): - """Test that fail_on_extra_columns works correctly with nested objects, array of objects""" - configured_catalog.streams[0].stream.json_schema = json_schema - records = [AirbyteRecordMessage(stream="my_stream", data=record, emitted_at=0)] - streams_with_errors = verify_records_schema(records, configured_catalog, fail_on_extra_columns=True) - errors = [error.message for error in streams_with_errors["my_stream"].values()] - assert errors if should_fail else not errors - - @pytest.mark.parametrize( "record, configured_catalog, valid", [ diff --git a/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_core.py b/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_core.py index 3e5dbda69f3e..fd7f8b020a0b 100644 --- a/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_core.py +++ b/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_core.py @@ -592,60 +592,69 @@ def test_configured_catalog_fixture(mocker, test_strictness_level, configured_ca assert configured_catalog == test_core.build_configured_catalog_from_custom_catalog.return_value +_DEFAULT_RECORD_CONFIG = ExpectedRecordsConfig(path="foobar") + + @pytest.mark.parametrize( - "schema, ignored_fields, expect_records_config, record, expected_records_by_stream, expectation", + "schema, ignored_fields, expect_records_config, record, expected_records_by_stream, primary_key, expectation", [ - 
({"type": "object"}, {}, ExpectedRecordsConfig(path="foobar"), {"aa": 23}, {}, does_not_raise()), - ({"type": "object"}, {}, ExpectedRecordsConfig(path="foobar"), {}, {}, does_not_raise()), + ({"type": "object"}, {}, _DEFAULT_RECORD_CONFIG, {"aa": 23}, {}, None, does_not_raise()), + ({"type": "object"}, {}, _DEFAULT_RECORD_CONFIG, {}, {}, None, does_not_raise()), ( {"type": "object", "properties": {"created": {"type": "string"}}}, {}, - ExpectedRecordsConfig(path="foobar"), + _DEFAULT_RECORD_CONFIG, {"aa": 23}, {}, + None, pytest.raises(AssertionError, match="should have some fields mentioned by json schema"), ), ( {"type": "object", "properties": {"created": {"type": "string"}}}, {}, - ExpectedRecordsConfig(path="foobar"), + _DEFAULT_RECORD_CONFIG, {"created": "23"}, {}, + None, does_not_raise(), ), ( {"type": "object", "properties": {"created": {"type": "string"}}}, {}, - ExpectedRecordsConfig(path="foobar"), + _DEFAULT_RECORD_CONFIG, {"root": {"created": "23"}}, {}, + None, pytest.raises(AssertionError, match="should have some fields mentioned by json schema"), ), # Recharge shop stream case ( {"type": "object", "properties": {"shop": {"type": ["null", "object"]}, "store": {"type": ["null", "object"]}}}, {}, - ExpectedRecordsConfig(path="foobar"), + _DEFAULT_RECORD_CONFIG, {"shop": {"a": "23"}, "store": {"b": "23"}}, {}, + None, does_not_raise(), ), # Fail when expected and actual records are not equal ( {"type": "object"}, {}, - ExpectedRecordsConfig(path="foobar"), + _DEFAULT_RECORD_CONFIG, {"constant_field": "must equal", "fast_changing_field": [{"field": 2}]}, {"test_stream": [{"constant_field": "must equal", "fast_changing_field": [{"field": 1}]}]}, + None, pytest.raises(Failed, match="Stream test_stream: All expected records must be produced"), ), # Expected and Actual records are not equal but we ignore fast changing field ( {"type": "object"}, {"test_stream": [IgnoredFieldsConfiguration(name="fast_changing_field/*/field", bypass_reason="test")]}, - ExpectedRecordsConfig(path="foobar"), + _DEFAULT_RECORD_CONFIG, {"constant_field": "must equal", "fast_changing_field": [{"field": 2}]}, {"test_stream": [{"constant_field": "must equal", "fast_changing_field": [{"field": 1}]}]}, + None, does_not_raise(), ), # Fail when expected and actual records are not equal and exact_order=True @@ -655,6 +664,7 @@ def test_configured_catalog_fixture(mocker, test_strictness_level, configured_ca ExpectedRecordsConfig(extra_fields=False, exact_order=True, extra_records=True, path="foobar"), {"constant_field": "must equal", "fast_changing_field": [{"field": 2}]}, {"test_stream": [{"constant_field": "must equal", "fast_changing_field": [{"field": 1}]}]}, + None, pytest.raises(AssertionError, match="Stream test_stream: Mismatch of record order or values"), ), # Expected and Actual records are not equal but we ignore fast changing field (for case when exact_order=True) @@ -664,15 +674,81 @@ def test_configured_catalog_fixture(mocker, test_strictness_level, configured_ca ExpectedRecordsConfig(extra_fields=False, exact_order=True, extra_records=True, path="foobar"), {"constant_field": "must equal", "fast_changing_field": [{"field": 1}]}, {"test_stream": [{"constant_field": "must equal", "fast_changing_field": [{"field": 2}]}]}, + None, + does_not_raise(), + ), + # Match by primary key + ( + {"type": "object"}, + {}, + _DEFAULT_RECORD_CONFIG, + {"primary_key": "a primary_key"}, + {"test_stream": [{"primary_key": "a primary_key"}]}, + [["primary_key"]], + does_not_raise(), + ), + # Match by primary key when 
actual has added fields + ( + {"type": "object"}, + {}, + _DEFAULT_RECORD_CONFIG, + {"primary_key": "a primary_key", "a field that should be ignored": "ignored value"}, + {"test_stream": [{"primary_key": "a primary_key"}]}, + [["primary_key"]], + does_not_raise(), + ), + # Match by primary key when non primary key field values differ + ( + {"type": "object"}, + {}, + _DEFAULT_RECORD_CONFIG, + {"primary_key": "a primary_key", "matching key": "value 1"}, + {"test_stream": [{"primary_key": "a primary_key", "non matching key": "value 2"}]}, + [["primary_key"]], + does_not_raise(), + ), + # Match nested primary key + ( + {"type": "object"}, + {}, + _DEFAULT_RECORD_CONFIG, + {"top_level_field": {"child_field": "a primary_key"}, "matching key": "value 1"}, + {"test_stream": [{"top_level_field": {"child_field": "a primary_key"}, "matching key": "value 1"}]}, + [["top_level_field", "child_field"]], + does_not_raise(), + ), + # Match composite primary key + ( + {"type": "object"}, + {}, + _DEFAULT_RECORD_CONFIG, + {"primary_key_1": "a primary_key_1", "primary_key_2": "a primary_key_2"}, + {"test_stream": [{"primary_key_1": "a primary_key_1", "primary_key_2": "a primary_key_2"}]}, + [["primary_key_1"], ["primary_key_2"]], + does_not_raise(), + ), + # Match composite and nested primary key + ( + {"type": "object"}, + {}, + _DEFAULT_RECORD_CONFIG, + {"primary_key_1": "a primary_key_1", "primary_key_2_1": {"primary_key_2_2": "primary_key_2"}}, + {"test_stream": [{"primary_key_1": "a primary_key_1", "primary_key_2_1": {"primary_key_2_2": "primary_key_2"}}]}, + [["primary_key_1"], ["primary_key_2_1", "primary_key_2_2"]], does_not_raise(), ), ], ) -async def test_read(mocker, schema, ignored_fields, expect_records_config, record, expected_records_by_stream, expectation): +async def test_read(mocker, schema, ignored_fields, expect_records_config, record, expected_records_by_stream, primary_key, expectation): configured_catalog = ConfiguredAirbyteCatalog( streams=[ ConfiguredAirbyteStream( - stream=AirbyteStream.parse_obj({"name": "test_stream", "json_schema": schema, "supported_sync_modes": ["full_refresh"]}), + stream=AirbyteStream.parse_obj({ + "name": "test_stream", + "json_schema": schema, + "supported_sync_modes": ["full_refresh"], + "source_defined_primary_key": primary_key + }), sync_mode="full_refresh", destination_sync_mode="overwrite", ) @@ -702,78 +778,6 @@ async def test_read(mocker, schema, ignored_fields, expect_records_config, recor ) -@pytest.mark.parametrize( - "config_fail_on_extra_columns, record_has_unexpected_column, expectation_should_fail", - [ - (True, True, True), - (True, False, False), - (False, False, False), - (False, True, False), - ], -) -@pytest.mark.parametrize("additional_properties", [True, False, None]) -async def test_fail_on_extra_columns( - mocker, config_fail_on_extra_columns, record_has_unexpected_column, expectation_should_fail, additional_properties -): - schema = {"type": "object", "properties": {"field_1": {"type": ["string"]}, "field_2": {"type": ["string"]}}} - if additional_properties: - schema["additionalProperties"] = additional_properties - - record = {"field_1": "value", "field_2": "value"} - if record_has_unexpected_column: - record["surprise_field"] = "value" - - configured_catalog = ConfiguredAirbyteCatalog( - streams=[ - ConfiguredAirbyteStream( - stream=AirbyteStream.parse_obj({"name": "test_stream", "json_schema": schema, "supported_sync_modes": ["full_refresh"]}), - sync_mode="full_refresh", - destination_sync_mode="overwrite", - ) - ] - ) - 
docker_runner_mock = mocker.MagicMock( - call_read=mocker.AsyncMock( - return_value=[AirbyteMessage(type=Type.RECORD, record=AirbyteRecordMessage(stream="test_stream", data=record, emitted_at=111))] - ) - ) - - t = test_core.TestBasicRead() - if expectation_should_fail: - with pytest.raises(Failed, match="test_stream"): - await t.test_read( - connector_config=None, - configured_catalog=configured_catalog, - expect_records_config=ExpectedRecordsConfig(path="foobar"), - should_validate_schema=True, - should_validate_data_points=False, - should_validate_stream_statuses=False, - should_fail_on_extra_columns=config_fail_on_extra_columns, - empty_streams=set(), - expected_records_by_stream={}, - docker_runner=docker_runner_mock, - ignored_fields=None, - detailed_logger=MagicMock(), - certified_file_based_connector=False, - ) - else: - t.test_read( - connector_config=None, - configured_catalog=configured_catalog, - expect_records_config=ExpectedRecordsConfig(path="foobar"), - should_validate_schema=True, - should_validate_data_points=False, - should_validate_stream_statuses=False, - should_fail_on_extra_columns=config_fail_on_extra_columns, - empty_streams=set(), - expected_records_by_stream={}, - docker_runner=docker_runner_mock, - ignored_fields=None, - detailed_logger=MagicMock(), - certified_file_based_connector=False, - ) - - @pytest.mark.parametrize( "output, expect_trace_message_on_failure, should_fail", [ @@ -1459,7 +1463,7 @@ async def test_read_validate_async_output_stream_statuses(mocker): await t.test_read( connector_config=None, configured_catalog=configured_catalog, - expect_records_config=ExpectedRecordsConfig(path="foobar"), + expect_records_config=_DEFAULT_RECORD_CONFIG, should_validate_schema=False, should_validate_data_points=False, should_validate_stream_statuses=True, @@ -1559,7 +1563,7 @@ async def test_read_validate_stream_statuses_exceptions(mocker, output): await t.test_read( connector_config=None, configured_catalog=configured_catalog, - expect_records_config=ExpectedRecordsConfig(path="foobar"), + expect_records_config=_DEFAULT_RECORD_CONFIG, should_validate_schema=False, should_validate_data_points=False, should_validate_stream_statuses=True, From fdfd048225de6e2256c6d79f18626a5be45c25ce Mon Sep 17 00:00:00 2001 From: Marius Posta Date: Tue, 27 Feb 2024 11:01:38 -0800 Subject: [PATCH 006/172] python CDK: fix gradle task dependency (#35609) --- airbyte-cdk/python/README.md | 3 +- .../models/declarative_component_schema.py | 1076 +++++++++-------- airbyte-cdk/python/build.gradle | 2 +- 3 files changed, 555 insertions(+), 526 deletions(-) diff --git a/airbyte-cdk/python/README.md b/airbyte-cdk/python/README.md index b8998a9d8d83..5b0bb5840a50 100644 --- a/airbyte-cdk/python/README.md +++ b/airbyte-cdk/python/README.md @@ -65,7 +65,8 @@ pip install -e ".[dev]" # [dev] installs development-only dependencies If the iteration you are working on includes changes to the models, you might want to regenerate them. In order to do that, you can run: ```bash -./gradlew :airbyte-cdk:python:build +cd airbyte-cdk/python +./gradlew build ``` This will generate the files based on the schemas, add the license information and format the code. 
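As a quick illustration (a sketch, not part of the patch): the regenerated classes are ordinary pydantic models, so a declarative manifest component can be validated directly with `parse_obj`. The `BearerAuthenticator` shape below is mirrored from the generated file further down; the usage snippet itself is illustrative only.

```python
from typing import Any, Dict, Literal, Optional

from pydantic import BaseModel, Field

# Shape mirrored from the generated declarative_component_schema.py below.
class BearerAuthenticator(BaseModel):
    type: Literal["BearerAuthenticator"]
    api_token: str = Field(..., description="Token to inject as request header.")
    parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters")

# Each manifest component is validated against its Literal `type` tag:
auth = BearerAuthenticator.parse_obj(
    {"type": "BearerAuthenticator", "api_token": "{{ config['api_key'] }}"}
)
assert auth.api_token == "{{ config['api_key'] }}"
```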
If you want to only do the former and rely on diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/models/declarative_component_schema.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/models/declarative_component_schema.py index f70d3aef7523..cd79f70ce273 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/models/declarative_component_schema.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/models/declarative_component_schema.py @@ -11,145 +11,145 @@ class AuthFlowType(Enum): - oauth2_0 = "oauth2.0" - oauth1_0 = "oauth1.0" + oauth2_0 = 'oauth2.0' + oauth1_0 = 'oauth1.0' class BasicHttpAuthenticator(BaseModel): - type: Literal["BasicHttpAuthenticator"] + type: Literal['BasicHttpAuthenticator'] username: str = Field( ..., - description="The username that will be combined with the password, base64 encoded and used to make requests. Fill it in the user inputs.", + description='The username that will be combined with the password, base64 encoded and used to make requests. Fill it in the user inputs.', examples=["{{ config['username'] }}", "{{ config['api_key'] }}"], - title="Username", + title='Username', ) password: Optional[str] = Field( - "", - description="The password that will be combined with the username, base64 encoded and used to make requests. Fill it in the user inputs.", - examples=["{{ config['password'] }}", ""], - title="Password", + '', + description='The password that will be combined with the username, base64 encoded and used to make requests. Fill it in the user inputs.', + examples=["{{ config['password'] }}", ''], + title='Password', ) - parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters") + parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') class BearerAuthenticator(BaseModel): - type: Literal["BearerAuthenticator"] + type: Literal['BearerAuthenticator'] api_token: str = Field( ..., - description="Token to inject as request header for authenticating with the API.", + description='Token to inject as request header for authenticating with the API.', examples=["{{ config['api_key'] }}", "{{ config['token'] }}"], - title="Bearer Token", + title='Bearer Token', ) - parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters") + parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') class CheckStream(BaseModel): - type: Literal["CheckStream"] + type: Literal['CheckStream'] stream_names: List[str] = Field( ..., - description="Names of the streams to try reading from when running a check operation.", - examples=[["users"], ["users", "contacts"]], - title="Stream Names", + description='Names of the streams to try reading from when running a check operation.', + examples=[['users'], ['users', 'contacts']], + title='Stream Names', ) class ConstantBackoffStrategy(BaseModel): - type: Literal["ConstantBackoffStrategy"] + type: Literal['ConstantBackoffStrategy'] backoff_time_in_seconds: Union[float, str] = Field( ..., - description="Backoff time in seconds.", + description='Backoff time in seconds.', examples=[30, 30.5, "{{ config['backoff_time'] }}"], - title="Backoff Time", + title='Backoff Time', ) - parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters") + parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') class CustomAuthenticator(BaseModel): class Config: extra = Extra.allow - type: Literal["CustomAuthenticator"] + type: Literal['CustomAuthenticator'] class_name: str = Field( ..., - description="Fully-qualified name of the class that will 
be implementing the custom authentication strategy. Has to be a sub class of DeclarativeAuthenticator. The format is `source_..`.", - examples=["source_railz.components.ShortLivedTokenAuthenticator"], - title="Class Name", + description='Fully-qualified name of the class that will be implementing the custom authentication strategy. Has to be a sub class of DeclarativeAuthenticator. The format is `source_..`.', + examples=['source_railz.components.ShortLivedTokenAuthenticator'], + title='Class Name', ) - parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters") + parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') class CustomBackoffStrategy(BaseModel): class Config: extra = Extra.allow - type: Literal["CustomBackoffStrategy"] + type: Literal['CustomBackoffStrategy'] class_name: str = Field( ..., - description="Fully-qualified name of the class that will be implementing the custom backoff strategy. The format is `source_..`.", - examples=["source_railz.components.MyCustomBackoffStrategy"], - title="Class Name", + description='Fully-qualified name of the class that will be implementing the custom backoff strategy. The format is `source_..`.', + examples=['source_railz.components.MyCustomBackoffStrategy'], + title='Class Name', ) - parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters") + parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') class CustomErrorHandler(BaseModel): class Config: extra = Extra.allow - type: Literal["CustomErrorHandler"] + type: Literal['CustomErrorHandler'] class_name: str = Field( ..., - description="Fully-qualified name of the class that will be implementing the custom error handler. The format is `source_..`.", - examples=["source_railz.components.MyCustomErrorHandler"], - title="Class Name", + description='Fully-qualified name of the class that will be implementing the custom error handler. The format is `source_..`.', + examples=['source_railz.components.MyCustomErrorHandler'], + title='Class Name', ) - parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters") + parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') class CustomIncrementalSync(BaseModel): class Config: extra = Extra.allow - type: Literal["CustomIncrementalSync"] + type: Literal['CustomIncrementalSync'] class_name: str = Field( ..., - description="Fully-qualified name of the class that will be implementing the custom incremental sync. The format is `source_..`.", - examples=["source_railz.components.MyCustomIncrementalSync"], - title="Class Name", + description='Fully-qualified name of the class that will be implementing the custom incremental sync. The format is `source_..`.', + examples=['source_railz.components.MyCustomIncrementalSync'], + title='Class Name', ) cursor_field: str = Field( ..., - description="The location of the value on a record that will be used as a bookmark during sync.", + description='The location of the value on a record that will be used as a bookmark during sync.', ) - parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters") + parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') class CustomPaginationStrategy(BaseModel): class Config: extra = Extra.allow - type: Literal["CustomPaginationStrategy"] + type: Literal['CustomPaginationStrategy'] class_name: str = Field( ..., - description="Fully-qualified name of the class that will be implementing the custom pagination strategy. 
The format is `source_..`.",
-        examples=["source_railz.components.MyCustomPaginationStrategy"],
-        title="Class Name",
+        description='Fully-qualified name of the class that will be implementing the custom pagination strategy. The format is `source_..`.',
+        examples=['source_railz.components.MyCustomPaginationStrategy'],
+        title='Class Name',
     )
-    parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters")
+    parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters')


 class CustomRecordExtractor(BaseModel):
     class Config:
         extra = Extra.allow

-    type: Literal["CustomRecordExtractor"]
+    type: Literal['CustomRecordExtractor']
     class_name: str = Field(
         ...,
-        description="Fully-qualified name of the class that will be implementing the custom record extraction strategy. The format is `source_..`.",
-        examples=["source_railz.components.MyCustomRecordExtractor"],
-        title="Class Name",
+        description='Fully-qualified name of the class that will be implementing the custom record extraction strategy. The format is `source_..`.',
+        examples=['source_railz.components.MyCustomRecordExtractor'],
+        title='Class Name',
     )
-    parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters")
+    parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters')


 class CustomRecordFilter(BaseModel):
@@ -159,8 +159,8 @@ class Config:
     type: Literal['CustomRecordFilter']
     class_name: str = Field(
         ...,
-        description='Fully-qualified name of the class that will be implementing the custom record filtering. The format is `source_..`.',
-        examples=['source_railz.components.MyCustomRecordFilter'],
+        description='Fully-qualified name of the class that will be implementing the custom record filter strategy. The format is `source_..`.',
+        examples=['source_railz.components.MyCustomRecordFilter'],
         title='Class Name',
     )
     parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters')
@@ -170,465 +170,471 @@ class CustomRequester(BaseModel):
     class Config:
         extra = Extra.allow

-    type: Literal["CustomRequester"]
+    type: Literal['CustomRequester']
     class_name: str = Field(
         ...,
-        description="Fully-qualified name of the class that will be implementing the custom requester strategy. The format is `source_..`.",
-        examples=["source_railz.components.MyCustomRecordExtractor"],
-        title="Class Name",
+        description='Fully-qualified name of the class that will be implementing the custom requester strategy. The format is `source_..`.',
+        examples=['source_railz.components.MyCustomRequester'],
+        title='Class Name',
     )
-    parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters")
+    parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters')


 class CustomRetriever(BaseModel):
     class Config:
         extra = Extra.allow

-    type: Literal["CustomRetriever"]
+    type: Literal['CustomRetriever']
     class_name: str = Field(
         ...,
-        description="Fully-qualified name of the class that will be implementing the custom retriever strategy. The format is `source_..`.",
-        examples=["source_railz.components.MyCustomRetriever"],
-        title="Class Name",
+        description='Fully-qualified name of the class that will be implementing the custom retriever strategy. The format is `source_..`.',
+        examples=['source_railz.components.MyCustomRetriever'],
+        title='Class Name',
     )
-    parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters")
+    parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters')


 class CustomPartitionRouter(BaseModel):
     class Config:
         extra = Extra.allow

-    type: Literal["CustomPartitionRouter"]
+    type: Literal['CustomPartitionRouter']
     class_name: str = Field(
         ...,
-        description="Fully-qualified name of the class that will be implementing the custom partition router. The format is `source_..`.",
-        examples=["source_railz.components.MyCustomPartitionRouter"],
-        title="Class Name",
+        description='Fully-qualified name of the class that will be implementing the custom partition router. The format is `source_..`.',
+        examples=['source_railz.components.MyCustomPartitionRouter'],
+        title='Class Name',
     )
-    parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters")
+    parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters')


 class CustomTransformation(BaseModel):
     class Config:
         extra = Extra.allow

-    type: Literal["CustomTransformation"]
+    type: Literal['CustomTransformation']
     class_name: str = Field(
         ...,
-        description="Fully-qualified name of the class that will be implementing the custom transformation. The format is `source_..`.",
-        examples=["source_railz.components.MyCustomTransformation"],
-        title="Class Name",
+        description='Fully-qualified name of the class that will be implementing the custom transformation. The format is `source_..`.',
+        examples=['source_railz.components.MyCustomTransformation'],
+        title='Class Name',
     )
-    parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters")
+    parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters')


 class RefreshTokenUpdater(BaseModel):
     refresh_token_name: Optional[str] = Field(
-        "refresh_token",
-        description="The name of the property which contains the updated refresh token in the response from the token refresh endpoint.",
-        examples=["refresh_token"],
-        title="Refresh Token Property Name",
+        'refresh_token',
+        description='The name of the property which contains the updated refresh token in the response from the token refresh endpoint.',
+        examples=['refresh_token'],
+        title='Refresh Token Property Name',
     )
     access_token_config_path: Optional[List[str]] = Field(
-        ["credentials", "access_token"],
-        description="Config path to the access token. Make sure the field actually exists in the config.",
-        examples=[["credentials", "access_token"], ["access_token"]],
-        title="Config Path To Access Token",
+        ['credentials', 'access_token'],
+        description='Config path to the access token. Make sure the field actually exists in the config.',
+        examples=[['credentials', 'access_token'], ['access_token']],
+        title='Config Path To Access Token',
     )
     refresh_token_config_path: Optional[List[str]] = Field(
-        ["credentials", "refresh_token"],
-        description="Config path to the access token. Make sure the field actually exists in the config.",
-        examples=[["credentials", "refresh_token"], ["refresh_token"]],
-        title="Config Path To Refresh Token",
+        ['credentials', 'refresh_token'],
+        description='Config path to the refresh token. Make sure the field actually exists in the config.',
+        examples=[['credentials', 'refresh_token'], ['refresh_token']],
+        title='Config Path To Refresh Token',
     )
     token_expiry_date_config_path: Optional[List[str]] = Field(
-        ["credentials", "token_expiry_date"],
-        description="Config path to the expiry date. Make sure actually exists in the config.",
-        examples=[["credentials", "token_expiry_date"]],
-        title="Config Path To Expiry Date",
+        ['credentials', 'token_expiry_date'],
+        description='Config path to the expiry date. Make sure it actually exists in the config.',
+        examples=[['credentials', 'token_expiry_date']],
+        title='Config Path To Expiry Date',
     )


 class OAuthAuthenticator(BaseModel):
-    type: Literal["OAuthAuthenticator"]
+    type: Literal['OAuthAuthenticator']
     client_id: str = Field(
         ...,
-        description="The OAuth client ID. Fill it in the user inputs.",
+        description='The OAuth client ID. Fill it in the user inputs.',
         examples=["{{ config['client_id }}", "{{ config['credentials']['client_id }}"],
-        title="Client ID",
+        title='Client ID',
     )
     client_secret: str = Field(
         ...,
-        description="The OAuth client secret. Fill it in the user inputs.",
+        description='The OAuth client secret. Fill it in the user inputs.',
         examples=[
             "{{ config['client_secret }}",
             "{{ config['credentials']['client_secret }}",
         ],
-        title="Client Secret",
+        title='Client Secret',
     )
     refresh_token: Optional[str] = Field(
         None,
-        description="Credential artifact used to get a new access token.",
+        description='Credential artifact used to get a new access token.',
         examples=[
             "{{ config['refresh_token'] }}",
             "{{ config['credentials]['refresh_token'] }}",
         ],
-        title="Refresh Token",
+        title='Refresh Token',
     )
     token_refresh_endpoint: str = Field(
         ...,
-        description="The full URL to call to obtain a new access token.",
-        examples=["https://connect.squareup.com/oauth2/token"],
-        title="Token Refresh Endpoint",
+        description='The full URL to call to obtain a new access token.',
+        examples=['https://connect.squareup.com/oauth2/token'],
+        title='Token Refresh Endpoint',
     )
     access_token_name: Optional[str] = Field(
-        "access_token",
-        description="The name of the property which contains the access token in the response from the token refresh endpoint.",
-        examples=["access_token"],
-        title="Access Token Property Name",
+        'access_token',
+        description='The name of the property which contains the access token in the response from the token refresh endpoint.',
+        examples=['access_token'],
+        title='Access Token Property Name',
     )
     expires_in_name: Optional[str] = Field(
-        "expires_in",
-        description="The name of the property which contains the expiry date in the response from the token refresh endpoint.",
-        examples=["expires_in"],
-        title="Token Expiry Property Name",
+        'expires_in',
+        description='The name of the property which contains the expiry date in the response from the token refresh endpoint.',
+        examples=['expires_in'],
+        title='Token Expiry Property Name',
     )
     grant_type: Optional[str] = Field(
-        "refresh_token",
-        description="Specifies the OAuth2 grant type. If set to refresh_token, the refresh_token needs to be provided as well. For client_credentials, only client id and secret are required. Other grant types are not officially supported.",
-        examples=["refresh_token", "client_credentials"],
-        title="Grant Type",
+        'refresh_token',
+        description='Specifies the OAuth2 grant type. If set to refresh_token, the refresh_token needs to be provided as well. For client_credentials, only client id and secret are required. 
Other grant types are not officially supported.', + examples=['refresh_token', 'client_credentials'], + title='Grant Type', ) refresh_request_body: Optional[Dict[str, Any]] = Field( None, - description="Body of the request sent to get a new access token.", + description='Body of the request sent to get a new access token.', examples=[ { - "applicationId": "{{ config['application_id'] }}", - "applicationSecret": "{{ config['application_secret'] }}", - "token": "{{ config['token'] }}", + 'applicationId': "{{ config['application_id'] }}", + 'applicationSecret': "{{ config['application_secret'] }}", + 'token': "{{ config['token'] }}", } ], - title="Refresh Request Body", + title='Refresh Request Body', ) scopes: Optional[List[str]] = Field( None, - description="List of scopes that should be granted to the access token.", - examples=[["crm.list.read", "crm.objects.contacts.read", "crm.schema.contacts.read"]], - title="Scopes", + description='List of scopes that should be granted to the access token.', + examples=[ + ['crm.list.read', 'crm.objects.contacts.read', 'crm.schema.contacts.read'] + ], + title='Scopes', ) token_expiry_date: Optional[str] = Field( None, - description="The access token expiry date.", - examples=["2023-04-06T07:12:10.421833+00:00", 1680842386], - title="Token Expiry Date", + description='The access token expiry date.', + examples=['2023-04-06T07:12:10.421833+00:00', 1680842386], + title='Token Expiry Date', ) token_expiry_date_format: Optional[str] = Field( None, - description="The format of the time to expiration datetime. Provide it if the time is returned as a date-time string instead of seconds.", - examples=["%Y-%m-%d %H:%M:%S.%f+00:00"], - title="Token Expiry Date Format", + description='The format of the time to expiration datetime. Provide it if the time is returned as a date-time string instead of seconds.', + examples=['%Y-%m-%d %H:%M:%S.%f+00:00'], + title='Token Expiry Date Format', ) refresh_token_updater: Optional[RefreshTokenUpdater] = Field( None, - description="When the token updater is defined, new refresh tokens, access tokens and the access token expiry date are written back from the authentication response to the config object. This is important if the refresh token can only used once.", - title="Token Updater", + description='When the token updater is defined, new refresh tokens, access tokens and the access token expiry date are written back from the authentication response to the config object. 
This is important if the refresh token can only be used once.',
+        title='Token Updater',
     )
-    parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters")
+    parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters')


 class ExponentialBackoffStrategy(BaseModel):
-    type: Literal["ExponentialBackoffStrategy"]
+    type: Literal['ExponentialBackoffStrategy']
     factor: Optional[Union[float, str]] = Field(
         5,
-        description="Multiplicative constant applied on each retry.",
-        examples=[5, 5.5, "10"],
-        title="Factor",
+        description='Multiplicative constant applied on each retry.',
+        examples=[5, 5.5, '10'],
+        title='Factor',
     )
-    parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters")
+    parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters')


 class SessionTokenRequestBearerAuthenticator(BaseModel):
-    type: Literal["Bearer"]
+    type: Literal['Bearer']


 class HttpMethod(Enum):
-    GET = "GET"
-    POST = "POST"
+    GET = 'GET'
+    POST = 'POST'


 class Action(Enum):
-    SUCCESS = "SUCCESS"
-    FAIL = "FAIL"
-    RETRY = "RETRY"
-    IGNORE = "IGNORE"
+    SUCCESS = 'SUCCESS'
+    FAIL = 'FAIL'
+    RETRY = 'RETRY'
+    IGNORE = 'IGNORE'


 class HttpResponseFilter(BaseModel):
-    type: Literal["HttpResponseFilter"]
+    type: Literal['HttpResponseFilter']
     action: Action = Field(
         ...,
-        description="Action to execute if a response matches the filter.",
-        examples=["SUCCESS", "FAIL", "RETRY", "IGNORE"],
-        title="Action",
+        description='Action to execute if a response matches the filter.',
+        examples=['SUCCESS', 'FAIL', 'RETRY', 'IGNORE'],
+        title='Action',
     )
     error_message: Optional[str] = Field(
         None,
-        description="Error Message to display if the response matches the filter.",
-        title="Error Message",
+        description='Error Message to display if the response matches the filter.',
+        title='Error Message',
     )
     error_message_contains: Optional[str] = Field(
         None,
-        description="Match the response if its error message contains the substring.",
-        example=["This API operation is not enabled for this site"],
-        title="Error Message Substring",
+        description='Match the response if its error message contains the substring.',
+        example=['This API operation is not enabled for this site'],
+        title='Error Message Substring',
     )
     http_codes: Optional[List[int]] = Field(
         None,
-        description="Match the response if its HTTP code is included in this list.",
+        description='Match the response if its HTTP code is included in this list.',
         examples=[[420, 429], [500]],
-        title="HTTP Codes",
+        title='HTTP Codes',
     )
     predicate: Optional[str] = Field(
         None,
-        description="Match the response if the predicate evaluates to true.",
+        description='Match the response if the predicate evaluates to true.',
         examples=[
             "{{ 'Too much requests' in response }}",
             "{{ 'error_code' in response and response['error_code'] == 'ComplexityException' }}",
         ],
-        title="Predicate",
+        title='Predicate',
     )
-    parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters")
+    parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters')


 class InlineSchemaLoader(BaseModel):
-    type: Literal["InlineSchemaLoader"]
+    type: Literal['InlineSchemaLoader']
     schema_: Optional[Dict[str, Any]] = Field(
         None,
-        alias="schema",
+        alias='schema',
         description='Describes a streams\' schema. 
Refer to the Data Types documentation for more details on which types are valid.', - title="Schema", + title='Schema', ) class JsonFileSchemaLoader(BaseModel): - type: Literal["JsonFileSchemaLoader"] + type: Literal['JsonFileSchemaLoader'] file_path: Optional[str] = Field( None, description="Path to the JSON file defining the schema. The path is relative to the connector module's root.", - example=["./schemas/users.json"], - title="File Path", + example=['./schemas/users.json'], + title='File Path', ) - parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters") + parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') class JsonDecoder(BaseModel): - type: Literal["JsonDecoder"] + type: Literal['JsonDecoder'] class MinMaxDatetime(BaseModel): - type: Literal["MinMaxDatetime"] + type: Literal['MinMaxDatetime'] datetime: str = Field( ..., - description="Datetime value.", - examples=["2021-01-01", "2021-01-01T00:00:00Z", "{{ config['start_time'] }}"], - title="Datetime", + description='Datetime value.', + examples=['2021-01-01', '2021-01-01T00:00:00Z', "{{ config['start_time'] }}"], + title='Datetime', ) datetime_format: Optional[str] = Field( - "", + '', description='Format of the datetime value. Defaults to "%Y-%m-%dT%H:%M:%S.%f%z" if left empty. Use placeholders starting with "%" to describe the format the API is using. The following placeholders are available:\n * **%s**: Epoch unix timestamp - `1686218963`\n * **%ms**: Epoch unix timestamp - `1686218963123`\n * **%a**: Weekday (abbreviated) - `Sun`\n * **%A**: Weekday (full) - `Sunday`\n * **%w**: Weekday (decimal) - `0` (Sunday), `6` (Saturday)\n * **%d**: Day of the month (zero-padded) - `01`, `02`, ..., `31`\n * **%b**: Month (abbreviated) - `Jan`\n * **%B**: Month (full) - `January`\n * **%m**: Month (zero-padded) - `01`, `02`, ..., `12`\n * **%y**: Year (without century, zero-padded) - `00`, `01`, ..., `99`\n * **%Y**: Year (with century) - `0001`, `0002`, ..., `9999`\n * **%H**: Hour (24-hour, zero-padded) - `00`, `01`, ..., `23`\n * **%I**: Hour (12-hour, zero-padded) - `01`, `02`, ..., `12`\n * **%p**: AM/PM indicator\n * **%M**: Minute (zero-padded) - `00`, `01`, ..., `59`\n * **%S**: Second (zero-padded) - `00`, `01`, ..., `59`\n * **%f**: Microsecond (zero-padded to 6 digits) - `000000`, `000001`, ..., `999999`\n * **%z**: UTC offset - `(empty)`, `+0000`, `-04:00`\n * **%Z**: Time zone name - `(empty)`, `UTC`, `GMT`\n * **%j**: Day of the year (zero-padded) - `001`, `002`, ..., `366`\n * **%U**: Week number of the year (Sunday as first day) - `00`, `01`, ..., `53`\n * **%W**: Week number of the year (Monday as first day) - `00`, `01`, ..., `53`\n * **%c**: Date and time representation - `Tue Aug 16 21:30:00 1988`\n * **%x**: Date representation - `08/16/1988`\n * **%X**: Time representation - `21:30:00`\n * **%%**: Literal \'%\' character\n\n Some placeholders depend on the locale of the underlying system - in most cases this locale is configured as en/US. For more information see the [Python documentation](https://docs.python.org/3/library/datetime.html#strftime-and-strptime-format-codes).\n', - examples=["%Y-%m-%dT%H:%M:%S.%f%z", "%Y-%m-%d", "%s"], - title="Datetime Format", + examples=['%Y-%m-%dT%H:%M:%S.%f%z', '%Y-%m-%d', '%s'], + title='Datetime Format', ) max_datetime: Optional[str] = Field( None, - description="Ceiling applied on the datetime value. 
Must be formatted with the datetime_format field.", - examples=["2021-01-01T00:00:00Z", "2021-01-01"], - title="Max Datetime", + description='Ceiling applied on the datetime value. Must be formatted with the datetime_format field.', + examples=['2021-01-01T00:00:00Z', '2021-01-01'], + title='Max Datetime', ) min_datetime: Optional[str] = Field( None, - description="Floor applied on the datetime value. Must be formatted with the datetime_format field.", - examples=["2010-01-01T00:00:00Z", "2010-01-01"], - title="Min Datetime", + description='Floor applied on the datetime value. Must be formatted with the datetime_format field.', + examples=['2010-01-01T00:00:00Z', '2010-01-01'], + title='Min Datetime', ) - parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters") + parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') class NoAuth(BaseModel): - type: Literal["NoAuth"] - parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters") + type: Literal['NoAuth'] + parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') class NoPagination(BaseModel): - type: Literal["NoPagination"] + type: Literal['NoPagination'] class OAuthConfigSpecification(BaseModel): class Config: extra = Extra.allow - oauth_user_input_from_connector_config_specification: Optional[Dict[str, Any]] = Field( + oauth_user_input_from_connector_config_specification: Optional[ + Dict[str, Any] + ] = Field( None, description="OAuth specific blob. This is a Json Schema used to validate Json configurations used as input to OAuth.\nMust be a valid non-nested JSON that refers to properties from ConnectorSpecification.connectionSpecification\nusing special annotation 'path_in_connector_config'.\nThese are input values the user is entering through the UI to authenticate to the connector, that might also shared\nas inputs for syncing data via the connector.\nExamples:\nif no connector values is shared during oauth flow, oauth_user_input_from_connector_config_specification=[]\nif connector values such as 'app_id' inside the top level are used to generate the API url for the oauth flow,\n oauth_user_input_from_connector_config_specification={\n app_id: {\n type: string\n path_in_connector_config: ['app_id']\n }\n }\nif connector values such as 'info.app_id' nested inside another object are used to generate the API url for the oauth flow,\n oauth_user_input_from_connector_config_specification={\n app_id: {\n type: string\n path_in_connector_config: ['info', 'app_id']\n }\n }", examples=[ - {"app_id": {"type": "string", "path_in_connector_config": ["app_id"]}}, + {'app_id': {'type': 'string', 'path_in_connector_config': ['app_id']}}, { - "app_id": { - "type": "string", - "path_in_connector_config": ["info", "app_id"], + 'app_id': { + 'type': 'string', + 'path_in_connector_config': ['info', 'app_id'], } }, ], - title="OAuth user input", + title='OAuth user input', ) complete_oauth_output_specification: Optional[Dict[str, Any]] = Field( None, description="OAuth specific blob. 
This is a Json Schema used to validate Json configurations produced by the OAuth flows as they are\nreturned by the distant OAuth APIs.\nMust be a valid JSON describing the fields to merge back to `ConnectorSpecification.connectionSpecification`.\nFor each field, a special annotation `path_in_connector_config` can be specified to determine where to merge it,\nExamples:\n complete_oauth_output_specification={\n refresh_token: {\n type: string,\n path_in_connector_config: ['credentials', 'refresh_token']\n }\n }", examples=[ { - "refresh_token": { - "type": "string,", - "path_in_connector_config": ["credentials", "refresh_token"], + 'refresh_token': { + 'type': 'string,', + 'path_in_connector_config': ['credentials', 'refresh_token'], } } ], - title="OAuth output specification", + title='OAuth output specification', ) complete_oauth_server_input_specification: Optional[Dict[str, Any]] = Field( None, - description="OAuth specific blob. This is a Json Schema used to validate Json configurations persisted as Airbyte Server configurations.\nMust be a valid non-nested JSON describing additional fields configured by the Airbyte Instance or Workspace Admins to be used by the\nserver when completing an OAuth flow (typically exchanging an auth code for refresh token).\nExamples:\n complete_oauth_server_input_specification={\n client_id: {\n type: string\n },\n client_secret: {\n type: string\n }\n }", - examples=[{"client_id": {"type": "string"}, "client_secret": {"type": "string"}}], - title="OAuth input specification", + description='OAuth specific blob. This is a Json Schema used to validate Json configurations persisted as Airbyte Server configurations.\nMust be a valid non-nested JSON describing additional fields configured by the Airbyte Instance or Workspace Admins to be used by the\nserver when completing an OAuth flow (typically exchanging an auth code for refresh token).\nExamples:\n complete_oauth_server_input_specification={\n client_id: {\n type: string\n },\n client_secret: {\n type: string\n }\n }', + examples=[ + {'client_id': {'type': 'string'}, 'client_secret': {'type': 'string'}} + ], + title='OAuth input specification', ) complete_oauth_server_output_specification: Optional[Dict[str, Any]] = Field( None, description="OAuth specific blob. This is a Json Schema used to validate Json configurations persisted as Airbyte Server configurations that\nalso need to be merged back into the connector configuration at runtime.\nThis is a subset configuration of `complete_oauth_server_input_specification` that filters fields out to retain only the ones that\nare necessary for the connector to function with OAuth. 
(some fields could be used during oauth flows but not needed afterwards, therefore\nthey would be listed in the `complete_oauth_server_input_specification` but not `complete_oauth_server_output_specification`)\nMust be a valid non-nested JSON describing additional fields configured by the Airbyte Instance or Workspace Admins to be used by the\nconnector when using OAuth flow APIs.\nThese fields are to be merged back to `ConnectorSpecification.connectionSpecification`.\nFor each field, a special annotation `path_in_connector_config` can be specified to determine where to merge it,\nExamples:\n complete_oauth_server_output_specification={\n client_id: {\n type: string,\n path_in_connector_config: ['credentials', 'client_id']\n },\n client_secret: {\n type: string,\n path_in_connector_config: ['credentials', 'client_secret']\n }\n }",
         examples=[
             {
-                "client_id": {
-                    "type": "string,",
-                    "path_in_connector_config": ["credentials", "client_id"],
+                'client_id': {
+                    'type': 'string,',
+                    'path_in_connector_config': ['credentials', 'client_id'],
                 },
-                "client_secret": {
-                    "type": "string,",
-                    "path_in_connector_config": ["credentials", "client_secret"],
+                'client_secret': {
+                    'type': 'string,',
+                    'path_in_connector_config': ['credentials', 'client_secret'],
                 },
             }
         ],
-        title="OAuth server output specification",
+        title='OAuth server output specification',
     )


 class OffsetIncrement(BaseModel):
-    type: Literal["OffsetIncrement"]
+    type: Literal['OffsetIncrement']
     page_size: Optional[Union[int, str]] = Field(
         None,
-        description="The number of records to include in each pages.",
+        description='The number of records to include in each page.',
         examples=[100, "{{ config['page_size'] }}"],
-        title="Limit",
+        title='Limit',
     )
     inject_on_first_request: Optional[bool] = Field(
         False,
-        description="Using the `offset` with value `0` during the first request",
-        title="Inject Offset",
+        description='Using the `offset` with value `0` during the first request',
+        title='Inject Offset',
     )
-    parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters")
+    parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters')


 class PageIncrement(BaseModel):
-    type: Literal["PageIncrement"]
+    type: Literal['PageIncrement']
     page_size: Optional[int] = Field(
         None,
-        description="The number of records to include in each pages.",
-        examples=[100, "100"],
-        title="Page Size",
+        description='The number of records to include in each page.',
+        examples=[100, '100'],
+        title='Page Size',
     )
     start_from_page: Optional[int] = Field(
         0,
-        description="Index of the first page to request.",
+        description='Index of the first page to request.',
         examples=[0, 1],
-        title="Start From Page",
+        title='Start From Page',
     )
     inject_on_first_request: Optional[bool] = Field(
         False,
-        description="Using the `page number` with value defined by `start_from_page` during the first request",
-        title="Inject Page Number",
+        description='Using the `page number` with value defined by `start_from_page` during the first request',
+        title='Inject Page Number',
     )
-    parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters")
+    parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters')


 class PrimaryKey(BaseModel):
     __root__: Union[str, List[str], List[List[str]]] = Field(
         ...,
-        description="The stream field to be used to distinguish unique records. Can either be a single field, an array of fields representing a composite key, or an array of arrays representing a composite key where the fields are nested fields.",
-        examples=["id", ["code", "type"]],
-        title="Primary Key",
+        description='The stream field to be used to distinguish unique records. Can either be a single field, an array of fields representing a composite key, or an array of arrays representing a composite key where the fields are nested fields.',
+        examples=['id', ['code', 'type']],
+        title='Primary Key',
     )


 class RecordFilter(BaseModel):
-    type: Literal["RecordFilter"]
+    type: Literal['RecordFilter']
     condition: Optional[str] = Field(
-        "",
-        description="The predicate to filter a record. Records will be removed if evaluated to False.",
+        '',
+        description='The predicate to filter a record. Records will be removed if evaluated to False.',
         examples=[
             "{{ record['created_at'] >= stream_interval['start_time'] }}",
             "{{ record.status in ['active', 'expired'] }}",
         ],
     )
-    parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters")
+    parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters')


 class SchemaNormalization(Enum):
-    None_ = "None"
-    Default = "Default"
+    None_ = 'None'
+    Default = 'Default'


 class RemoveFields(BaseModel):
-    type: Literal["RemoveFields"]
+    type: Literal['RemoveFields']
     condition: Optional[str] = Field(
-        "",
-        description="The predicate to filter a property by a property value. Property will be removed if it is empty OR expression is evaluated to True.",
+        '',
+        description='The predicate to filter a property by a property value. Property will be removed if it is empty OR expression is evaluated to True.',
        examples=[
             "{{ property|string == '' }}",
-            "{{ property is integer }}",
-            "{{ property|length > 5 }}",
+            '{{ property is integer }}',
+            '{{ property|length > 5 }}',
             "{{ property == 'some_string_to_match' }}",
         ],
     )
     field_pointers: List[List[str]] = Field(
         ...,
-        description="Array of paths defining the field to remove. Each item is an array whose field describe the path of a field to remove.",
-        examples=[["tags"], [["content", "html"], ["content", "plain_text"]]],
-        title="Field Paths",
+        description='Array of paths defining the field to remove. Each item is an array whose fields describe the path of a field to remove.',
+        examples=[['tags'], [['content', 'html'], ['content', 'plain_text']]],
+        title='Field Paths',
     )


 class RequestPath(BaseModel):
-    type: Literal["RequestPath"]
+    type: Literal['RequestPath']


 class InjectInto(Enum):
-    request_parameter = "request_parameter"
-    header = "header"
-    body_data = "body_data"
-    body_json = "body_json"
+    request_parameter = 'request_parameter'
+    header = 'header'
+    body_data = 'body_data'
+    body_json = 'body_json'


 class RequestOption(BaseModel):
-    type: Literal["RequestOption"]
+    type: Literal['RequestOption']
     field_name: str = Field(
         ...,
-        description="Configures which key should be used in the location that the descriptor is being injected into",
-        examples=["segment_id"],
-        title="Request Option",
+        description='Configures which key should be used in the location that the descriptor is being injected into',
+        examples=['segment_id'],
+        title='Request Option',
     )
     inject_into: InjectInto = Field(
         ...,
-        description="Configures where the descriptor should be set on the HTTP requests. 
Note that request parameters that are already encoded in the URL path will not be duplicated.", - examples=["request_parameter", "header", "body_data", "body_json"], - title="Inject Into", + description='Configures where the descriptor should be set on the HTTP requests. Note that request parameters that are already encoded in the URL path will not be duplicated.', + examples=['request_parameter', 'header', 'body_data', 'body_json'], + title='Inject Into', ) @@ -640,106 +646,106 @@ class Config: class LegacySessionTokenAuthenticator(BaseModel): - type: Literal["LegacySessionTokenAuthenticator"] + type: Literal['LegacySessionTokenAuthenticator'] header: str = Field( ..., - description="The name of the session token header that will be injected in the request", - examples=["X-Session"], - title="Session Request Header", + description='The name of the session token header that will be injected in the request', + examples=['X-Session'], + title='Session Request Header', ) login_url: str = Field( ..., - description="Path of the login URL (do not include the base URL)", - examples=["session"], - title="Login Path", + description='Path of the login URL (do not include the base URL)', + examples=['session'], + title='Login Path', ) session_token: Optional[str] = Field( None, - description="Session token to use if using a pre-defined token. Not needed if authenticating with username + password pair", + description='Session token to use if using a pre-defined token. Not needed if authenticating with username + password pair', example=["{{ config['session_token'] }}"], - title="Session Token", + title='Session Token', ) session_token_response_key: str = Field( ..., - description="Name of the key of the session token to be extracted from the response", - examples=["id"], - title="Response Token Response Key", + description='Name of the key of the session token to be extracted from the response', + examples=['id'], + title='Response Token Response Key', ) username: Optional[str] = Field( None, - description="Username used to authenticate and obtain a session token", + description='Username used to authenticate and obtain a session token', examples=[" {{ config['username'] }}"], - title="Username", + title='Username', ) password: Optional[str] = Field( - "", - description="Password used to authenticate and obtain a session token", - examples=["{{ config['password'] }}", ""], - title="Password", + '', + description='Password used to authenticate and obtain a session token', + examples=["{{ config['password'] }}", ''], + title='Password', ) validate_session_url: str = Field( ..., - description="Path of the URL to use to validate that the session token is valid (do not include the base URL)", - examples=["user/current"], - title="Validate Session Path", + description='Path of the URL to use to validate that the session token is valid (do not include the base URL)', + examples=['user/current'], + title='Validate Session Path', ) - parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters") + parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') class ValueType(Enum): - string = "string" - number = "number" - integer = "integer" - boolean = "boolean" + string = 'string' + number = 'number' + integer = 'integer' + boolean = 'boolean' class WaitTimeFromHeader(BaseModel): - type: Literal["WaitTimeFromHeader"] + type: Literal['WaitTimeFromHeader'] header: str = Field( ..., - description="The name of the response header defining how long to wait before retrying.", - 
examples=["Retry-After"], - title="Response Header Name", + description='The name of the response header defining how long to wait before retrying.', + examples=['Retry-After'], + title='Response Header Name', ) regex: Optional[str] = Field( None, - description="Optional regex to apply on the header to extract its value. The regex should define a capture group defining the wait time.", - examples=["([-+]?\\d+)"], - title="Extraction Regex", + description='Optional regex to apply on the header to extract its value. The regex should define a capture group defining the wait time.', + examples=['([-+]?\\d+)'], + title='Extraction Regex', ) - parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters") + parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') class WaitUntilTimeFromHeader(BaseModel): - type: Literal["WaitUntilTimeFromHeader"] + type: Literal['WaitUntilTimeFromHeader'] header: str = Field( ..., - description="The name of the response header defining how long to wait before retrying.", - examples=["wait_time"], - title="Response Header", + description='The name of the response header defining how long to wait before retrying.', + examples=['wait_time'], + title='Response Header', ) min_wait: Optional[Union[float, str]] = Field( None, - description="Minimum time to wait before retrying.", - examples=[10, "60"], - title="Minimum Wait Time", + description='Minimum time to wait before retrying.', + examples=[10, '60'], + title='Minimum Wait Time', ) regex: Optional[str] = Field( None, - description="Optional regex to apply on the header to extract its value. The regex should define a capture group defining the wait time.", - examples=["([-+]?\\d+)"], - title="Extraction Regex", + description='Optional regex to apply on the header to extract its value. The regex should define a capture group defining the wait time.', + examples=['([-+]?\\d+)'], + title='Extraction Regex', ) - parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters") + parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') class AddedFieldDefinition(BaseModel): - type: Literal["AddedFieldDefinition"] + type: Literal['AddedFieldDefinition'] path: List[str] = Field( ..., - description="List of strings defining the path where to add the value on the record.", - examples=[["segment_id"], ["metadata", "segment_id"]], - title="Path", + description='List of strings defining the path where to add the value on the record.', + examples=[['segment_id'], ['metadata', 'segment_id']], + title='Path', ) value: str = Field( ..., @@ -749,185 +755,187 @@ class AddedFieldDefinition(BaseModel): "{{ record['MetaData']['LastUpdatedTime'] }}", "{{ stream_partition['segment_id'] }}", ], - title="Value", + title='Value', ) value_type: Optional[ValueType] = Field( None, - description="Type of the value. If not specified, the type will be inferred from the value.", - title="Value Type", + description='Type of the value. 
If not specified, the type will be inferred from the value.', + title='Value Type', ) - parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters") + parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') class AddFields(BaseModel): - type: Literal["AddFields"] + type: Literal['AddFields'] fields: List[AddedFieldDefinition] = Field( ..., - description="List of transformations (path and corresponding value) that will be added to the record.", - title="Fields", + description='List of transformations (path and corresponding value) that will be added to the record.', + title='Fields', ) - parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters") + parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') class ApiKeyAuthenticator(BaseModel): - type: Literal["ApiKeyAuthenticator"] + type: Literal['ApiKeyAuthenticator'] api_token: Optional[str] = Field( None, - description="The API key to inject in the request. Fill it in the user inputs.", + description='The API key to inject in the request. Fill it in the user inputs.', examples=["{{ config['api_key'] }}", "Token token={{ config['api_key'] }}"], - title="API Key", + title='API Key', ) header: Optional[str] = Field( None, - description="The name of the HTTP header that will be set to the API key. This setting is deprecated, use inject_into instead. Header and inject_into can not be defined at the same time.", - examples=["Authorization", "Api-Token", "X-Auth-Token"], - title="Header Name", + description='The name of the HTTP header that will be set to the API key. This setting is deprecated, use inject_into instead. Header and inject_into can not be defined at the same time.', + examples=['Authorization', 'Api-Token', 'X-Auth-Token'], + title='Header Name', ) inject_into: Optional[RequestOption] = Field( None, - description="Configure how the API Key will be sent in requests to the source API. Either inject_into or header has to be defined.", + description='Configure how the API Key will be sent in requests to the source API. 
Either inject_into or header has to be defined.',
         examples=[
-            {"inject_into": "header", "field_name": "Authorization"},
-            {"inject_into": "request_parameter", "field_name": "authKey"},
+            {'inject_into': 'header', 'field_name': 'Authorization'},
+            {'inject_into': 'request_parameter', 'field_name': 'authKey'},
         ],
-        title="Inject API Key Into Outgoing HTTP Request",
+        title='Inject API Key Into Outgoing HTTP Request',
     )
-    parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters")
+    parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters')


 class AuthFlow(BaseModel):
-    auth_flow_type: Optional[AuthFlowType] = Field(None, description="The type of auth to use", title="Auth flow type")
+    auth_flow_type: Optional[AuthFlowType] = Field(
+        None, description='The type of auth to use', title='Auth flow type'
+    )
     predicate_key: Optional[List[str]] = Field(
         None,
-        description="JSON path to a field in the connectorSpecification that should exist for the advanced auth to be applicable.",
-        examples=[["credentials", "auth_type"]],
-        title="Predicate key",
+        description='JSON path to a field in the connectorSpecification that should exist for the advanced auth to be applicable.',
+        examples=[['credentials', 'auth_type']],
+        title='Predicate key',
     )
     predicate_value: Optional[str] = Field(
         None,
-        description="Value of the predicate_key fields for the advanced auth to be applicable.",
-        examples=["Oauth"],
-        title="Predicate value",
+        description='Value of the predicate_key fields for the advanced auth to be applicable.',
+        examples=['Oauth'],
+        title='Predicate value',
     )
     oauth_config_specification: Optional[OAuthConfigSpecification] = None


 class CursorPagination(BaseModel):
-    type: Literal["CursorPagination"]
+    type: Literal['CursorPagination']
     cursor_value: str = Field(
         ...,
-        description="Value of the cursor defining the next page to fetch.",
+        description='Value of the cursor defining the next page to fetch.',
         examples=[
-            "{{ headers.link.next.cursor }}",
+            '{{ headers.link.next.cursor }}',
             "{{ last_records[-1]['key'] }}",
             "{{ response['nextPage'] }}",
         ],
-        title="Cursor Value",
+        title='Cursor Value',
     )
     page_size: Optional[int] = Field(
         None,
-        description="The number of records to include in each pages.",
+        description='The number of records to include in each page.',
         examples=[100],
-        title="Page Size",
+        title='Page Size',
     )
     stop_condition: Optional[str] = Field(
         None,
-        description="Template string evaluating when to stop paginating.",
+        description='Template string evaluating when to stop paginating.',
         examples=[
-            "{{ response.data.has_more is false }}",
+            '{{ response.data.has_more is false }}',
             "{{ 'next' not in headers['link'] }}",
         ],
-        title="Stop Condition",
+        title='Stop Condition',
     )
     decoder: Optional[JsonDecoder] = Field(
         None,
-        description="Component decoding the response so records can be extracted.",
-        title="Decoder",
+        description='Component decoding the response so records can be extracted.',
+        title='Decoder',
     )
-    parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters")
+    parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters')


 class DatetimeBasedCursor(BaseModel):
-    type: Literal["DatetimeBasedCursor"]
+    type: Literal['DatetimeBasedCursor']
     cursor_field: str = Field(
         ...,
-        description="The location of the value on a record that will be used as a bookmark during sync. To ensure no data loss, the API must return records in ascending order based on the cursor field. 
Nested fields are not supported, so the field must be at the top level of the record. You can use a combination of Add Field and Remove Field transformations to move the nested field to the top.", - examples=["created_at", "{{ config['record_cursor'] }}"], - title="Cursor Field", + description='The location of the value on a record that will be used as a bookmark during sync. To ensure no data loss, the API must return records in ascending order based on the cursor field. Nested fields are not supported, so the field must be at the top level of the record. You can use a combination of Add Field and Remove Field transformations to move the nested field to the top.', + examples=['created_at', "{{ config['record_cursor'] }}"], + title='Cursor Field', ) datetime_format: str = Field( ..., - description="The datetime format used to format the datetime values that are sent in outgoing requests to the API. Use placeholders starting with \"%\" to describe the format the API is using. The following placeholders are available:\n * **%s**: Epoch unix timestamp - `1686218963`\n * **%ms**: Epoch unix timestamp (milliseconds) - `1686218963123`\n * **%a**: Weekday (abbreviated) - `Sun`\n * **%A**: Weekday (full) - `Sunday`\n * **%w**: Weekday (decimal) - `0` (Sunday), `6` (Saturday)\n * **%d**: Day of the month (zero-padded) - `01`, `02`, ..., `31`\n * **%b**: Month (abbreviated) - `Jan`\n * **%B**: Month (full) - `January`\n * **%m**: Month (zero-padded) - `01`, `02`, ..., `12`\n * **%y**: Year (without century, zero-padded) - `00`, `01`, ..., `99`\n * **%Y**: Year (with century) - `0001`, `0002`, ..., `9999`\n * **%H**: Hour (24-hour, zero-padded) - `00`, `01`, ..., `23`\n * **%I**: Hour (12-hour, zero-padded) - `01`, `02`, ..., `12`\n * **%p**: AM/PM indicator\n * **%M**: Minute (zero-padded) - `00`, `01`, ..., `59`\n * **%S**: Second (zero-padded) - `00`, `01`, ..., `59`\n * **%f**: Microsecond (zero-padded to 6 digits) - `000000`\n * **%z**: UTC offset - `(empty)`, `+0000`, `-04:00`\n * **%Z**: Time zone name - `(empty)`, `UTC`, `GMT`\n * **%j**: Day of the year (zero-padded) - `001`, `002`, ..., `366`\n * **%U**: Week number of the year (starting Sunday) - `00`, ..., `53`\n * **%W**: Week number of the year (starting Monday) - `00`, ..., `53`\n * **%c**: Date and time - `Tue Aug 16 21:30:00 1988`\n * **%x**: Date standard format - `08/16/1988`\n * **%X**: Time standard format - `21:30:00`\n * **%%**: Literal '%' character\n\n Some placeholders depend on the locale of the underlying system - in most cases this locale is configured as en/US. For more information see the [Python documentation](https://docs.python.org/3/library/datetime.html#strftime-and-strptime-format-codes).\n", - examples=["%Y-%m-%dT%H:%M:%S.%f%z", "%Y-%m-%d", "%s", "%ms"], - title="Outgoing Datetime Format", + description='The datetime format used to format the datetime values that are sent in outgoing requests to the API. Use placeholders starting with "%" to describe the format the API is using. 
The following placeholders are available:\n * **%s**: Epoch unix timestamp - `1686218963`\n * **%ms**: Epoch unix timestamp (milliseconds) - `1686218963123`\n * **%a**: Weekday (abbreviated) - `Sun`\n * **%A**: Weekday (full) - `Sunday`\n * **%w**: Weekday (decimal) - `0` (Sunday), `6` (Saturday)\n * **%d**: Day of the month (zero-padded) - `01`, `02`, ..., `31`\n * **%b**: Month (abbreviated) - `Jan`\n * **%B**: Month (full) - `January`\n * **%m**: Month (zero-padded) - `01`, `02`, ..., `12`\n * **%y**: Year (without century, zero-padded) - `00`, `01`, ..., `99`\n * **%Y**: Year (with century) - `0001`, `0002`, ..., `9999`\n * **%H**: Hour (24-hour, zero-padded) - `00`, `01`, ..., `23`\n * **%I**: Hour (12-hour, zero-padded) - `01`, `02`, ..., `12`\n * **%p**: AM/PM indicator\n * **%M**: Minute (zero-padded) - `00`, `01`, ..., `59`\n * **%S**: Second (zero-padded) - `00`, `01`, ..., `59`\n * **%f**: Microsecond (zero-padded to 6 digits) - `000000`\n * **%z**: UTC offset - `(empty)`, `+0000`, `-04:00`\n * **%Z**: Time zone name - `(empty)`, `UTC`, `GMT`\n * **%j**: Day of the year (zero-padded) - `001`, `002`, ..., `366`\n * **%U**: Week number of the year (starting Sunday) - `00`, ..., `53`\n * **%W**: Week number of the year (starting Monday) - `00`, ..., `53`\n * **%c**: Date and time - `Tue Aug 16 21:30:00 1988`\n * **%x**: Date standard format - `08/16/1988`\n * **%X**: Time standard format - `21:30:00`\n * **%%**: Literal \'%\' character\n\n Some placeholders depend on the locale of the underlying system - in most cases this locale is configured as en/US. For more information see the [Python documentation](https://docs.python.org/3/library/datetime.html#strftime-and-strptime-format-codes).\n', + examples=['%Y-%m-%dT%H:%M:%S.%f%z', '%Y-%m-%d', '%s', '%ms'], + title='Outgoing Datetime Format', ) start_datetime: Union[str, MinMaxDatetime] = Field( ..., - description="The datetime that determines the earliest record that should be synced.", - examples=["2020-01-1T00:00:00Z", "{{ config['start_time'] }}"], - title="Start Datetime", + description='The datetime that determines the earliest record that should be synced.', + examples=['2020-01-1T00:00:00Z', "{{ config['start_time'] }}"], + title='Start Datetime', ) cursor_datetime_formats: Optional[List[str]] = Field( None, - description="The possible formats for the cursor field, in order of preference. The first format that matches the cursor field value will be used to parse it. If not provided, the `datetime_format` will be used.", - title="Cursor Datetime Formats", + description='The possible formats for the cursor field, in order of preference. The first format that matches the cursor field value will be used to parse it. If not provided, the `datetime_format` will be used.', + title='Cursor Datetime Formats', ) cursor_granularity: Optional[str] = Field( None, - description="Smallest increment the datetime_format has (ISO 8601 duration) that is used to ensure the start of a slice does not overlap with the end of the previous one, e.g. for %Y-%m-%d the granularity should be P1D, for %Y-%m-%dT%H:%M:%SZ the granularity should be PT1S. Given this field is provided, `step` needs to be provided as well.", - examples=["PT1S"], - title="Cursor Granularity", + description='Smallest increment the datetime_format has (ISO 8601 duration) that is used to ensure the start of a slice does not overlap with the end of the previous one, e.g. for %Y-%m-%d the granularity should be P1D, for %Y-%m-%dT%H:%M:%SZ the granularity should be PT1S. 
Given this field is provided, `step` needs to be provided as well.', + examples=['PT1S'], + title='Cursor Granularity', ) end_datetime: Optional[Union[str, MinMaxDatetime]] = Field( None, - description="The datetime that determines the last record that should be synced. If not provided, `{{ now_utc() }}` will be used.", - examples=["2021-01-1T00:00:00Z", "{{ now_utc() }}", "{{ day_delta(-1) }}"], - title="End Datetime", + description='The datetime that determines the last record that should be synced. If not provided, `{{ now_utc() }}` will be used.', + examples=['2021-01-1T00:00:00Z', '{{ now_utc() }}', '{{ day_delta(-1) }}'], + title='End Datetime', ) end_time_option: Optional[RequestOption] = Field( None, - description="Optionally configures how the end datetime will be sent in requests to the source API.", - title="Inject End Time Into Outgoing HTTP Request", + description='Optionally configures how the end datetime will be sent in requests to the source API.', + title='Inject End Time Into Outgoing HTTP Request', ) is_data_feed: Optional[bool] = Field( None, - description="A data feed API is an API that does not allow filtering and paginates the content from the most recent to the least recent. Given this, the CDK needs to know when to stop paginating and this field will generate a stop condition for pagination.", - title="Whether the target API is formatted as a data feed", + description='A data feed API is an API that does not allow filtering and paginates the content from the most recent to the least recent. Given this, the CDK needs to know when to stop paginating and this field will generate a stop condition for pagination.', + title='Whether the target API is formatted as a data feed', ) lookback_window: Optional[str] = Field( None, - description="Time interval before the start_datetime to read data for, e.g. P1M for looking back one month.", - examples=["P1D", "P{{ config['lookback_days'] }}D"], - title="Lookback Window", + description='Time interval before the start_datetime to read data for, e.g. P1M for looking back one month.', + examples=['P1D', "P{{ config['lookback_days'] }}D"], + title='Lookback Window', ) partition_field_end: Optional[str] = Field( None, - description="Name of the partition start time field.", - examples=["ending_time"], - title="Partition Field End", + description='Name of the partition start time field.', + examples=['ending_time'], + title='Partition Field End', ) partition_field_start: Optional[str] = Field( None, - description="Name of the partition end time field.", - examples=["starting_time"], - title="Partition Field Start", + description='Name of the partition end time field.', + examples=['starting_time'], + title='Partition Field Start', ) start_time_option: Optional[RequestOption] = Field( None, - description="Optionally configures how the start datetime will be sent in requests to the source API.", - title="Inject Start Time Into Outgoing HTTP Request", + description='Optionally configures how the start datetime will be sent in requests to the source API.', + title='Inject Start Time Into Outgoing HTTP Request', ) step: Optional[str] = Field( None, - description="The size of the time window (ISO8601 duration). Given this field is provided, `cursor_granularity` needs to be provided as well.", - examples=["P1W", "{{ config['step_increment'] }}"], - title="Step", + description='The size of the time window (ISO8601 duration). 
Given this field is provided, `cursor_granularity` needs to be provided as well.', + examples=['P1W', "{{ config['step_increment'] }}"], + title='Step', ) - parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters") + parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') class DefaultErrorHandler(BaseModel): - type: Literal["DefaultErrorHandler"] + type: Literal['DefaultErrorHandler'] backoff_strategies: Optional[ List[ Union[ @@ -940,143 +948,145 @@ class DefaultErrorHandler(BaseModel): ] ] = Field( None, - description="List of backoff strategies to use to determine how long to wait before retrying a retryable request.", - title="Backoff Strategies", + description='List of backoff strategies to use to determine how long to wait before retrying a retryable request.', + title='Backoff Strategies', ) max_retries: Optional[int] = Field( 5, - description="The maximum number of time to retry a retryable request before giving up and failing.", + description='The maximum number of time to retry a retryable request before giving up and failing.', examples=[5, 0, 10], - title="Max Retry Count", + title='Max Retry Count', ) response_filters: Optional[List[HttpResponseFilter]] = Field( None, description="List of response filters to iterate on when deciding how to handle an error. When using an array of multiple filters, the filters will be applied sequentially and the response will be selected if it matches any of the filter's predicate.", - title="Response Filters", + title='Response Filters', ) - parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters") + parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') class DefaultPaginator(BaseModel): - type: Literal["DefaultPaginator"] - pagination_strategy: Union[CursorPagination, CustomPaginationStrategy, OffsetIncrement, PageIncrement] = Field( + type: Literal['DefaultPaginator'] + pagination_strategy: Union[ + CursorPagination, CustomPaginationStrategy, OffsetIncrement, PageIncrement + ] = Field( ..., - description="Strategy defining how records are paginated.", - title="Pagination Strategy", + description='Strategy defining how records are paginated.', + title='Pagination Strategy', ) decoder: Optional[JsonDecoder] = Field( None, - description="Component decoding the response so records can be extracted.", - title="Decoder", + description='Component decoding the response so records can be extracted.', + title='Decoder', ) page_size_option: Optional[RequestOption] = None page_token_option: Optional[Union[RequestOption, RequestPath]] = None - parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters") + parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') class DpathExtractor(BaseModel): - type: Literal["DpathExtractor"] + type: Literal['DpathExtractor'] field_path: List[str] = Field( ..., description='List of potentially nested fields describing the full path of the field to extract. Use "*" to extract all values from an array. 
See more info in the [docs](https://docs.airbyte.com/connector-development/config-based/understanding-the-yaml-file/record-selector).', examples=[ - ["data"], - ["data", "records"], - ["data", "{{ parameters.name }}"], - ["data", "*", "record"], + ['data'], + ['data', 'records'], + ['data', '{{ parameters.name }}'], + ['data', '*', 'record'], ], - title="Field Path", + title='Field Path', ) decoder: Optional[JsonDecoder] = Field( None, - description="Component decoding the response so records can be extracted.", - title="Decoder", + description='Component decoding the response so records can be extracted.', + title='Decoder', ) - parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters") + parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') class SessionTokenRequestApiKeyAuthenticator(BaseModel): - type: Literal["ApiKey"] + type: Literal['ApiKey'] inject_into: RequestOption = Field( ..., - description="Configure how the API Key will be sent in requests to the source API.", + description='Configure how the API Key will be sent in requests to the source API.', examples=[ - {"inject_into": "header", "field_name": "Authorization"}, - {"inject_into": "request_parameter", "field_name": "authKey"}, + {'inject_into': 'header', 'field_name': 'Authorization'}, + {'inject_into': 'request_parameter', 'field_name': 'authKey'}, ], - title="Inject API Key Into Outgoing HTTP Request", + title='Inject API Key Into Outgoing HTTP Request', ) class ListPartitionRouter(BaseModel): - type: Literal["ListPartitionRouter"] + type: Literal['ListPartitionRouter'] cursor_field: str = Field( ..., description='While iterating over list values, the name of field used to reference a list value. The partition value can be accessed with string interpolation. e.g. 
"{{ stream_partition[\'my_key\'] }}" where "my_key" is the value of the cursor_field.', - examples=["section", "{{ config['section_key'] }}"], - title="Current Partition Value Identifier", + examples=['section', "{{ config['section_key'] }}"], + title='Current Partition Value Identifier', ) values: Union[str, List[str]] = Field( ..., - description="The list of attributes being iterated over and used as input for the requests made to the source API.", - examples=[["section_a", "section_b", "section_c"], "{{ config['sections'] }}"], - title="Partition Values", + description='The list of attributes being iterated over and used as input for the requests made to the source API.', + examples=[['section_a', 'section_b', 'section_c'], "{{ config['sections'] }}"], + title='Partition Values', ) request_option: Optional[RequestOption] = Field( None, - description="A request option describing where the list value should be injected into and under what field name if applicable.", - title="Inject Partition Value Into Outgoing HTTP Request", + description='A request option describing where the list value should be injected into and under what field name if applicable.', + title='Inject Partition Value Into Outgoing HTTP Request', ) - parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters") + parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') class RecordSelector(BaseModel): - type: Literal["RecordSelector"] + type: Literal['RecordSelector'] extractor: Union[CustomRecordExtractor, DpathExtractor] - record_filter: Optional[Union[RecordFilter, CustomRecordFilter]] = Field( + record_filter: Optional[Union[CustomRecordFilter, RecordFilter]] = Field( None, - description="Responsible for filtering records to be emitted by the Source.", - title="Record Filter", + description='Responsible for filtering records to be emitted by the Source.', + title='Record Filter', ) schema_normalization: Optional[SchemaNormalization] = SchemaNormalization.None_ - parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters") + parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') class Spec(BaseModel): - type: Literal["Spec"] + type: Literal['Spec'] connection_specification: Dict[str, Any] = Field( ..., - description="A connection specification describing how a the connector can be configured.", - title="Connection Specification", + description='A connection specification describing how a the connector can be configured.', + title='Connection Specification', ) documentation_url: Optional[str] = Field( None, description="URL of the connector's documentation page.", - examples=["https://docs.airbyte.com/integrations/sources/dremio"], - title="Documentation URL", + examples=['https://docs.airbyte.com/integrations/sources/dremio'], + title='Documentation URL', ) advanced_auth: Optional[AuthFlow] = Field( None, - description="Advanced specification for configuring the authentication flow.", - title="Advanced Auth", + description='Advanced specification for configuring the authentication flow.', + title='Advanced Auth', ) class CompositeErrorHandler(BaseModel): - type: Literal["CompositeErrorHandler"] + type: Literal['CompositeErrorHandler'] error_handlers: List[Union[CompositeErrorHandler, DefaultErrorHandler]] = Field( ..., - description="List of error handlers to iterate on to determine how to handle a failed response.", - title="Error Handlers", + description='List of error handlers to iterate on to determine how to handle a failed response.', + title='Error Handlers', 
) - parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters") + parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') class DeclarativeSource(BaseModel): class Config: extra = Extra.forbid - type: Literal["DeclarativeSource"] + type: Literal['DeclarativeSource'] check: CheckStream streams: List[DeclarativeStream] version: str @@ -1085,7 +1095,7 @@ class Config: spec: Optional[Spec] = None metadata: Optional[Dict[str, Any]] = Field( None, - description="For internal Airbyte use only - DO NOT modify manually. Used by consumers of declarative manifests for storing related metadata.", + description='For internal Airbyte use only - DO NOT modify manually. Used by consumers of declarative manifests for storing related metadata.', ) @@ -1093,12 +1103,12 @@ class SelectiveAuthenticator(BaseModel): class Config: extra = Extra.allow - type: Literal["SelectiveAuthenticator"] + type: Literal['SelectiveAuthenticator'] authenticator_selection_path: List[str] = Field( ..., - description="Path of the field in config with selected authenticator name", - examples=[["auth"], ["auth", "type"]], - title="Authenticator Selection Path", + description='Path of the field in config with selected authenticator name', + examples=[['auth'], ['auth', 'type']], + title='Authenticator Selection Path', ) authenticators: Dict[ str, @@ -1114,109 +1124,119 @@ class Config: ], ] = Field( ..., - description="Authenticators to select from.", + description='Authenticators to select from.', examples=[ { - "authenticators": { - "token": "#/definitions/ApiKeyAuthenticator", - "oauth": "#/definitions/OAuthAuthenticator", + 'authenticators': { + 'token': '#/definitions/ApiKeyAuthenticator', + 'oauth': '#/definitions/OAuthAuthenticator', } } ], - title="Authenticators", + title='Authenticators', ) - parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters") + parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') class DeclarativeStream(BaseModel): class Config: extra = Extra.allow - type: Literal["DeclarativeStream"] + type: Literal['DeclarativeStream'] retriever: Union[CustomRetriever, SimpleRetriever] = Field( ..., - description="Component used to coordinate how records are extracted across stream slices and request pages.", - title="Retriever", + description='Component used to coordinate how records are extracted across stream slices and request pages.', + title='Retriever', ) - incremental_sync: Optional[Union[CustomIncrementalSync, DatetimeBasedCursor]] = Field( + incremental_sync: Optional[ + Union[CustomIncrementalSync, DatetimeBasedCursor] + ] = Field( None, - description="Component used to fetch data incrementally based on a time field in the data.", - title="Incremental Sync", + description='Component used to fetch data incrementally based on a time field in the data.', + title='Incremental Sync', + ) + name: Optional[str] = Field( + '', description='The stream name.', example=['Users'], title='Name' + ) + primary_key: Optional[PrimaryKey] = Field( + '', description='The primary key of the stream.', title='Primary Key' ) - name: Optional[str] = Field("", description="The stream name.", example=["Users"], title="Name") - primary_key: Optional[PrimaryKey] = Field("", description="The primary key of the stream.", title="Primary Key") schema_loader: Optional[Union[InlineSchemaLoader, JsonFileSchemaLoader]] = Field( None, - description="Component used to retrieve the schema for the current stream.", - title="Schema Loader", + description='Component used to retrieve 
the schema for the current stream.', + title='Schema Loader', ) - transformations: Optional[List[Union[AddFields, CustomTransformation, RemoveFields]]] = Field( + transformations: Optional[ + List[Union[AddFields, CustomTransformation, RemoveFields]] + ] = Field( None, - description="A list of transformations to be applied to each output record.", - title="Transformations", + description='A list of transformations to be applied to each output record.', + title='Transformations', ) - parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters") + parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') class SessionTokenAuthenticator(BaseModel): - type: Literal["SessionTokenAuthenticator"] + type: Literal['SessionTokenAuthenticator'] login_requester: HttpRequester = Field( ..., - description="Description of the request to perform to obtain a session token to perform data requests. The response body is expected to be a JSON object with a session token property.", + description='Description of the request to perform to obtain a session token to perform data requests. The response body is expected to be a JSON object with a session token property.', examples=[ { - "type": "HttpRequester", - "url_base": "https://my_api.com", - "path": "/login", - "authenticator": { - "type": "BasicHttpAuthenticator", - "username": "{{ config.username }}", - "password": "{{ config.password }}", + 'type': 'HttpRequester', + 'url_base': 'https://my_api.com', + 'path': '/login', + 'authenticator': { + 'type': 'BasicHttpAuthenticator', + 'username': '{{ config.username }}', + 'password': '{{ config.password }}', }, } ], - title="Login Requester", + title='Login Requester', ) session_token_path: List[str] = Field( ..., - description="The path in the response body returned from the login requester to the session token.", - examples=[["access_token"], ["result", "token"]], - title="Session Token Path", + description='The path in the response body returned from the login requester to the session token.', + examples=[['access_token'], ['result', 'token']], + title='Session Token Path', ) expiration_duration: Optional[str] = Field( None, - description="The duration in ISO 8601 duration notation after which the session token expires, starting from the time it was obtained. Omitting it will result in the session token being refreshed for every request.", - examples=["PT1H", "P1D"], - title="Expiration Duration", + description='The duration in ISO 8601 duration notation after which the session token expires, starting from the time it was obtained. 
Omitting it will result in the session token being refreshed for every request.', + examples=['PT1H', 'P1D'], + title='Expiration Duration', ) - request_authentication: Union[SessionTokenRequestApiKeyAuthenticator, SessionTokenRequestBearerAuthenticator] = Field( + request_authentication: Union[ + SessionTokenRequestApiKeyAuthenticator, SessionTokenRequestBearerAuthenticator + ] = Field( ..., - description="Authentication method to use for requests sent to the API, specifying how to inject the session token.", - title="Data Request Authentication", + description='Authentication method to use for requests sent to the API, specifying how to inject the session token.', + title='Data Request Authentication', ) - parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters") + parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') class HttpRequester(BaseModel): - type: Literal["HttpRequester"] + type: Literal['HttpRequester'] url_base: str = Field( ..., - description="Base URL of the API source. Do not put sensitive information (e.g. API tokens) into this field - Use the Authentication component for this.", + description='Base URL of the API source. Do not put sensitive information (e.g. API tokens) into this field - Use the Authentication component for this.', examples=[ - "https://connect.squareup.com/v2", + 'https://connect.squareup.com/v2', "{{ config['base_url'] or 'https://app.posthog.com'}}/api/", ], - title="API Base URL", + title='API Base URL', ) path: str = Field( ..., - description="Path the specific API endpoint that this stream represents. Do not put sensitive information (e.g. API tokens) into this field - Use the Authentication component for this.", + description='Path the specific API endpoint that this stream represents. Do not put sensitive information (e.g. API tokens) into this field - Use the Authentication component for this.', examples=[ - "/products", + '/products', "/quotes/{{ stream_partition['id'] }}/quote_line_groups", "/trades/{{ config['symbol_id'] }}/history", ], - title="URL Path", + title='URL Path', ) authenticator: Optional[ Union[ @@ -1232,97 +1252,101 @@ class HttpRequester(BaseModel): ] ] = Field( None, - description="Authentication method to use for requests sent to the API.", - title="Authenticator", + description='Authentication method to use for requests sent to the API.', + title='Authenticator', ) - error_handler: Optional[Union[DefaultErrorHandler, CustomErrorHandler, CompositeErrorHandler]] = Field( + error_handler: Optional[ + Union[DefaultErrorHandler, CustomErrorHandler, CompositeErrorHandler] + ] = Field( None, - description="Error handler component that defines how to handle errors.", - title="Error Handler", + description='Error handler component that defines how to handle errors.', + title='Error Handler', ) http_method: Optional[HttpMethod] = Field( HttpMethod.GET, - description="The HTTP method used to fetch data from the source (can be GET or POST).", - examples=["GET", "POST"], - title="HTTP Method", + description='The HTTP method used to fetch data from the source (can be GET or POST).', + examples=['GET', 'POST'], + title='HTTP Method', ) request_body_data: Optional[Union[str, Dict[str, str]]] = Field( None, - description="Specifies how to populate the body of the request with a non-JSON payload. Plain text will be sent as is, whereas objects will be converted to a urlencoded form.", + description='Specifies how to populate the body of the request with a non-JSON payload. 
Plain text will be sent as is, whereas objects will be converted to a urlencoded form.', examples=[ '[{"clause": {"type": "timestamp", "operator": 10, "parameters":\n [{"value": {{ stream_interval[\'start_time\'] | int * 1000 }} }]\n }, "orderBy": 1, "columnName": "Timestamp"}]/\n' ], - title="Request Body Payload (Non-JSON)", + title='Request Body Payload (Non-JSON)', ) request_body_json: Optional[Union[str, Dict[str, Any]]] = Field( None, - description="Specifies how to populate the body of the request with a JSON payload. Can contain nested objects.", + description='Specifies how to populate the body of the request with a JSON payload. Can contain nested objects.', examples=[ - {"sort_order": "ASC", "sort_field": "CREATED_AT"}, - {"key": "{{ config['value'] }}"}, - {"sort": {"field": "updated_at", "order": "ascending"}}, + {'sort_order': 'ASC', 'sort_field': 'CREATED_AT'}, + {'key': "{{ config['value'] }}"}, + {'sort': {'field': 'updated_at', 'order': 'ascending'}}, ], - title="Request Body JSON Payload", + title='Request Body JSON Payload', ) request_headers: Optional[Union[str, Dict[str, str]]] = Field( None, - description="Return any non-auth headers. Authentication headers will overwrite any overlapping headers returned from this method.", - examples=[{"Output-Format": "JSON"}, {"Version": "{{ config['version'] }}"}], - title="Request Headers", + description='Return any non-auth headers. Authentication headers will overwrite any overlapping headers returned from this method.', + examples=[{'Output-Format': 'JSON'}, {'Version': "{{ config['version'] }}"}], + title='Request Headers', ) request_parameters: Optional[Union[str, Dict[str, str]]] = Field( None, - description="Specifies the query parameters that should be set on an outgoing HTTP request given the inputs.", + description='Specifies the query parameters that should be set on an outgoing HTTP request given the inputs.', examples=[ - {"unit": "day"}, + {'unit': 'day'}, { - "query": 'last_event_time BETWEEN TIMESTAMP "{{ stream_interval.start_time }}" AND TIMESTAMP "{{ stream_interval.end_time }}"' + 'query': 'last_event_time BETWEEN TIMESTAMP "{{ stream_interval.start_time }}" AND TIMESTAMP "{{ stream_interval.end_time }}"' }, - {"searchIn": "{{ ','.join(config.get('search_in', [])) }}"}, - {"sort_by[asc]": "updated_at"}, + {'searchIn': "{{ ','.join(config.get('search_in', [])) }}"}, + {'sort_by[asc]': 'updated_at'}, ], - title="Query Parameters", + title='Query Parameters', ) use_cache: Optional[bool] = Field( False, - description="Enables stream requests caching. This field is automatically set by the CDK.", - title="Use Cache", + description='Enables stream requests caching. This field is automatically set by the CDK.', + title='Use Cache', ) - parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters") + parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') class ParentStreamConfig(BaseModel): - type: Literal["ParentStreamConfig"] + type: Literal['ParentStreamConfig'] parent_key: str = Field( ..., - description="The primary key of records from the parent stream that will be used during the retrieval of records for the current substream. This parent identifier field is typically a characteristic of the child records being extracted from the source API.", - examples=["id", "{{ config['parent_record_id'] }}"], - title="Parent Key", + description='The primary key of records from the parent stream that will be used during the retrieval of records for the current substream. 
This parent identifier field is typically a characteristic of the child records being extracted from the source API.', + examples=['id', "{{ config['parent_record_id'] }}"], + title='Parent Key', + ) + stream: DeclarativeStream = Field( + ..., description='Reference to the parent stream.', title='Parent Stream' ) - stream: DeclarativeStream = Field(..., description="Reference to the parent stream.", title="Parent Stream") partition_field: str = Field( ..., - description="While iterating over parent records during a sync, the parent_key value can be referenced by using this field.", - examples=["parent_id", "{{ config['parent_partition_field'] }}"], - title="Current Parent Key Value Identifier", + description='While iterating over parent records during a sync, the parent_key value can be referenced by using this field.', + examples=['parent_id', "{{ config['parent_partition_field'] }}"], + title='Current Parent Key Value Identifier', ) request_option: Optional[RequestOption] = Field( None, - description="A request option describing where the parent key value should be injected into and under what field name if applicable.", - title="Request Option", + description='A request option describing where the parent key value should be injected into and under what field name if applicable.', + title='Request Option', ) - parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters") + parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') class SimpleRetriever(BaseModel): - type: Literal["SimpleRetriever"] + type: Literal['SimpleRetriever'] record_selector: RecordSelector = Field( ..., - description="Component that describes how to extract records from a HTTP response.", + description='Component that describes how to extract records from a HTTP response.', ) requester: Union[CustomRequester, HttpRequester] = Field( ..., - description="Requester component that describes how to prepare HTTP requests to send to the source API.", + description='Requester component that describes how to prepare HTTP requests to send to the source API.', ) paginator: Optional[Union[DefaultPaginator, NoPagination]] = Field( None, @@ -1337,24 +1361,28 @@ class SimpleRetriever(BaseModel): CustomPartitionRouter, ListPartitionRouter, SubstreamPartitionRouter, - List[Union[CustomPartitionRouter, ListPartitionRouter, SubstreamPartitionRouter]], + List[ + Union[ + CustomPartitionRouter, ListPartitionRouter, SubstreamPartitionRouter + ] + ], ] ] = Field( [], - description="PartitionRouter component that describes how to partition the stream, enabling incremental syncs and checkpointing.", - title="Partition Router", + description='PartitionRouter component that describes how to partition the stream, enabling incremental syncs and checkpointing.', + title='Partition Router', ) - parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters") + parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') class SubstreamPartitionRouter(BaseModel): - type: Literal["SubstreamPartitionRouter"] + type: Literal['SubstreamPartitionRouter'] parent_stream_configs: List[ParentStreamConfig] = Field( ..., - description="Specifies which parent streams are being iterated over and how parent records should be used to partition the child stream data set.", - title="Parent Stream Configs", + description='Specifies which parent streams are being iterated over and how parent records should be used to partition the child stream data set.', + title='Parent Stream Configs', ) - parameters: Optional[Dict[str, 
Any]] = Field(None, alias="$parameters") + parameters: Optional[Dict[str, Any]] = Field(None, alias='$parameters') CompositeErrorHandler.update_forward_refs() diff --git a/airbyte-cdk/python/build.gradle b/airbyte-cdk/python/build.gradle index 61f355742382..d1531c04d445 100644 --- a/airbyte-cdk/python/build.gradle +++ b/airbyte-cdk/python/build.gradle @@ -15,7 +15,7 @@ def generateComponentManifestClassFiles = tasks.register('generateComponentManif generateComponentManifestClassFiles.configure { dependsOn generateCodeGeneratorImage } -tasks.register('generate').configure { +tasks.named('assemble').configure { dependsOn generateComponentManifestClassFiles } From f1924e56b5862d47e15d2d5b4bfeff23c25d1bb8 Mon Sep 17 00:00:00 2001 From: Ella Rohm-Ensing Date: Tue, 27 Feb 2024 11:04:45 -0800 Subject: [PATCH 007/172] unpin source-github in cloud (#35546) --- airbyte-integrations/connectors/source-github/metadata.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/airbyte-integrations/connectors/source-github/metadata.yaml b/airbyte-integrations/connectors/source-github/metadata.yaml index 9ff2bc163d6a..f9819d6c3283 100644 --- a/airbyte-integrations/connectors/source-github/metadata.yaml +++ b/airbyte-integrations/connectors/source-github/metadata.yaml @@ -24,7 +24,6 @@ data: packageName: airbyte-source-github registries: cloud: - dockerImageTag: 1.5.7 enabled: true oss: enabled: true From ae39cbe82fe8d87f43b5ef70438fceef41d5f0f5 Mon Sep 17 00:00:00 2001 From: Akash Kulkarni <113392464+akashkulk@users.noreply.github.com> Date: Tue, 27 Feb 2024 13:54:30 -0800 Subject: [PATCH 008/172] Fix error message for saved offset not valid. (#35675) --- .../connectors/source-mongodb-v2/metadata.yaml | 2 +- .../integrations/source/mongodb/cdc/MongoDbCdcInitializer.java | 2 +- airbyte-integrations/connectors/source-mysql/metadata.yaml | 2 +- .../source/mysql/initialsync/MySqlInitialReadUtil.java | 2 +- airbyte-integrations/connectors/source-postgres/metadata.yaml | 2 +- .../source/postgres/cdc/PostgresCdcCtidInitializer.java | 2 +- docs/integrations/sources/mongodb-v2.md | 3 ++- docs/integrations/sources/mysql.md | 1 + docs/integrations/sources/postgres.md | 3 ++- 9 files changed, 11 insertions(+), 8 deletions(-) diff --git a/airbyte-integrations/connectors/source-mongodb-v2/metadata.yaml b/airbyte-integrations/connectors/source-mongodb-v2/metadata.yaml index 7cfabf8d314a..9a2ef15cb43e 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/metadata.yaml +++ b/airbyte-integrations/connectors/source-mongodb-v2/metadata.yaml @@ -5,7 +5,7 @@ data: connectorSubtype: database connectorType: source definitionId: b2e713cd-cc36-4c0a-b5bd-b47cb8a0561e - dockerImageTag: 1.2.13 + dockerImageTag: 1.2.14 dockerRepository: airbyte/source-mongodb-v2 documentationUrl: https://docs.airbyte.com/integrations/sources/mongodb-v2 githubIssueLabel: source-mongodb-v2 diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitializer.java b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitializer.java index 279eb053f3f2..3230092cc0c2 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitializer.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitializer.java @@ -119,7 +119,7 @@ public List> createCdcIterators( 
AirbyteTraceMessageUtility.emitAnalyticsTrace(cdcCursorInvalidMessage()); if (config.shouldFailSyncOnInvalidCursor()) { throw new ConfigErrorException( - "Saved offset is not valid. Please reset the connection, and then increase oplog retention or reduce sync frequency to prevent his from happening in the future. See https://docs.airbyte.com/integrations/sources/mongodb-v2#mongodb-oplog-and-change-streams for more details"); + "Saved offset is not valid. Please reset the connection, and then increase oplog retention and/or increase sync frequency to prevent this from happening in the future. See https://docs.airbyte.com/integrations/sources/mongodb-v2#mongodb-oplog-and-change-streams for more details"); } LOGGER.info("Saved offset is not valid. Airbyte will trigger a full refresh."); // If the offset in the state is invalid, reset the state to the initial STATE diff --git a/airbyte-integrations/connectors/source-mysql/metadata.yaml b/airbyte-integrations/connectors/source-mysql/metadata.yaml index f80f0c555620..c3072a70cac7 100644 --- a/airbyte-integrations/connectors/source-mysql/metadata.yaml +++ b/airbyte-integrations/connectors/source-mysql/metadata.yaml @@ -9,7 +9,7 @@ data: connectorSubtype: database connectorType: source definitionId: 435bb9a5-7887-4809-aa58-28c27df0d7ad - dockerImageTag: 3.3.11 + dockerImageTag: 3.3.12 dockerRepository: airbyte/source-mysql documentationUrl: https://docs.airbyte.com/integrations/sources/mysql githubIssueLabel: source-mysql diff --git a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialReadUtil.java b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialReadUtil.java index 47aa83ee09fc..58c2e0780eaf 100644 --- a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialReadUtil.java +++ b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialReadUtil.java @@ -117,7 +117,7 @@ public static List> getCdcReadIterators(fi if (!sourceConfig.get("replication_method").has(INVALID_CDC_CURSOR_POSITION_PROPERTY) || sourceConfig.get("replication_method").get( INVALID_CDC_CURSOR_POSITION_PROPERTY).asText().equals(FAIL_SYNC_OPTION)) { throw new ConfigErrorException( - "Saved offset no longer present on the server. Please reset the connection, and then increase binlog retention or reduce sync frequency.
See https://docs.airbyte.com/integrations/sources/mysql/mysql-troubleshooting#under-cdc-incremental-mode-there-are-still-full-refresh-syncs for more details."); } LOGGER.warn("Saved offset no longer present on the server, Airbyte is going to trigger a sync from scratch"); } diff --git a/airbyte-integrations/connectors/source-postgres/metadata.yaml b/airbyte-integrations/connectors/source-postgres/metadata.yaml index 22a65152d31f..44cb753419f5 100644 --- a/airbyte-integrations/connectors/source-postgres/metadata.yaml +++ b/airbyte-integrations/connectors/source-postgres/metadata.yaml @@ -9,7 +9,7 @@ data: connectorSubtype: database connectorType: source definitionId: decd338e-5647-4c0b-adf4-da0e75f5a750 - dockerImageTag: 3.3.12 + dockerImageTag: 3.3.13 dockerRepository: airbyte/source-postgres documentationUrl: https://docs.airbyte.com/integrations/sources/postgres githubIssueLabel: source-postgres diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/cdc/PostgresCdcCtidInitializer.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/cdc/PostgresCdcCtidInitializer.java index 45c20156aab2..5d6baced6c9e 100644 --- a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/cdc/PostgresCdcCtidInitializer.java +++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/cdc/PostgresCdcCtidInitializer.java @@ -117,7 +117,7 @@ public static List> cdcCtidIteratorsCombin if (!sourceConfig.get("replication_method").has(INVALID_CDC_CURSOR_POSITION_PROPERTY) || sourceConfig.get("replication_method").get( INVALID_CDC_CURSOR_POSITION_PROPERTY).asText().equals(FAIL_SYNC_OPTION)) { throw new ConfigErrorException( - "Saved offset is before replication slot's confirmed lsn. Please reset the connection, and then increase WAL retention or reduce sync frequency to prevent this from happening in the future. See https://docs.airbyte.com/integrations/sources/postgres/postgres-troubleshooting#under-cdc-incremental-mode-there-are-still-full-refresh-syncs for more details."); + "Saved offset is before replication slot's confirmed lsn. Please reset the connection, and then increase WAL retention and/or increase sync frequency to prevent this from happening in the future. See https://docs.airbyte.com/integrations/sources/postgres/postgres-troubleshooting#under-cdc-incremental-mode-there-are-still-full-refresh-syncs for more details."); } LOGGER.warn("Saved offset is before Replication slot's confirmed_flush_lsn, Airbyte will trigger sync from scratch"); } else if (!isDebugMode(sourceConfig) && PostgresUtils.shouldFlushAfterSync(sourceConfig)) { diff --git a/docs/integrations/sources/mongodb-v2.md b/docs/integrations/sources/mongodb-v2.md index c51d5b6d64f1..4c785556d7c0 100644 --- a/docs/integrations/sources/mongodb-v2.md +++ b/docs/integrations/sources/mongodb-v2.md @@ -214,7 +214,8 @@ For more information regarding configuration parameters, please see [MongoDb Doc | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:----------------------------------------------------------------------------------------------------------| -| 1.2.13 | 2024-02-22 | [35569](https://github.com/airbytehq/airbyte/pull/35569) | Fix logging bug. | +| 1.2.14 | 2024-02-27 | [35675](https://github.com/airbytehq/airbyte/pull/35675) | Fix invalid cdc error message. 
| +| 1.2.13 | 2024-02-22 | [35569](https://github.com/airbytehq/airbyte/pull/35569) | Fix logging bug. | | 1.2.12 | 2024-02-21 | [35526](https://github.com/airbytehq/airbyte/pull/35526) | Improve error handling. | | 1.2.11 | 2024-02-20 | [35375](https://github.com/airbytehq/airbyte/pull/35375) | Add config to throw an error on invalid CDC position and enable it by default. | | 1.2.10 | 2024-02-13 | [35036](https://github.com/airbytehq/airbyte/pull/34751) | Emit analytics message for invalid CDC cursor. | diff --git a/docs/integrations/sources/mysql.md b/docs/integrations/sources/mysql.md index 26aafa4f12df..f8d7de3fed39 100644 --- a/docs/integrations/sources/mysql.md +++ b/docs/integrations/sources/mysql.md @@ -223,6 +223,7 @@ Any database or table encoding combination of charset and collation is supported | Version | Date | Pull Request | Subject | |:--------|:-----------|:-----------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------| +| 3.3.12 | 2024-02-27 | [35675](https://github.com/airbytehq/airbyte/pull/35675) | Fix invalid cdc error message. | | 3.3.11 | 2024-02-23 | [35527](https://github.com/airbytehq/airbyte/pull/35527) | Adopt 0.23.1 and shutdown timeouts. | | 3.3.10 | 2024-02-22 | [35569](https://github.com/airbytehq/airbyte/pull/35569) | Fix logging bug. | | 3.3.9 | 2024-02-21 | [35525](https://github.com/airbytehq/airbyte/pull/35338) | Adopt 0.21.4 and reduce cdc state compression threshold to 1MB. | diff --git a/docs/integrations/sources/postgres.md b/docs/integrations/sources/postgres.md index 56ddc87ab7bb..b302ad829718 100644 --- a/docs/integrations/sources/postgres.md +++ b/docs/integrations/sources/postgres.md @@ -292,7 +292,8 @@ According to Postgres [documentation](https://www.postgresql.org/docs/14/datatyp | Version | Date | Pull Request | Subject | |---------|------------|----------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| 3.3.12 | 2024-02-22 | [35569](https://github.com/airbytehq/airbyte/pull/35569) | Fix logging bug. | +| 3.3.13 | 2024-02-27 | [35675](https://github.com/airbytehq/airbyte/pull/35675) | Fix invalid cdc error message. | +| 3.3.12 | 2024-02-22 | [35569](https://github.com/airbytehq/airbyte/pull/35569) | Fix logging bug. | | 3.3.11 | 2024-02-20 | [35304](https://github.com/airbytehq/airbyte/pull/35304) | Add config to throw an error on invalid CDC position and enable it by default. | | 3.3.10 | 2024-02-13 | [35036](https://github.com/airbytehq/airbyte/pull/34751) | Emit analytics message for invalid CDC cursor. 
| | 3.3.9 | 2024-02-13 | [35224](https://github.com/airbytehq/airbyte/pull/35224) | Adopt CDK 0.20.4 | From 4390db5c7786098c521601f9399f3d938724f390 Mon Sep 17 00:00:00 2001 From: Rodi Reich Zilberman <867491+rodireich@users.noreply.github.com> Date: Tue, 27 Feb 2024 14:22:28 -0800 Subject: [PATCH 009/172] [source-mongodb-v2] remove default connection string options (#35673) (#35677) Co-authored-by: David Wallace --- .../connectors/source-mongodb-v2/metadata.yaml | 2 +- .../source/mongodb/MongoConnectionUtils.java | 2 +- .../cdc/MongoDbDebeziumPropertiesManager.java | 9 ++------- .../source/mongodb/MongoConnectionUtilsTest.java | 13 ------------- .../MongoDbDebeziumPropertiesManagerTest.java | 16 ++++------------ docs/integrations/sources/mongodb-v2.md | 1 + 6 files changed, 9 insertions(+), 34 deletions(-) diff --git a/airbyte-integrations/connectors/source-mongodb-v2/metadata.yaml b/airbyte-integrations/connectors/source-mongodb-v2/metadata.yaml index 9a2ef15cb43e..70c2882ab0a0 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/metadata.yaml +++ b/airbyte-integrations/connectors/source-mongodb-v2/metadata.yaml @@ -5,7 +5,7 @@ data: connectorSubtype: database connectorType: source definitionId: b2e713cd-cc36-4c0a-b5bd-b47cb8a0561e - dockerImageTag: 1.2.14 + dockerImageTag: 1.2.15 dockerRepository: airbyte/source-mongodb-v2 documentationUrl: https://docs.airbyte.com/integrations/sources/mongodb-v2 githubIssueLabel: source-mongodb-v2 diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/MongoConnectionUtils.java b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/MongoConnectionUtils.java index 7e48aa7468a4..d527cc1a24ea 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/MongoConnectionUtils.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/MongoConnectionUtils.java @@ -50,7 +50,7 @@ public static MongoClient createMongoClient(final MongoDbSourceConfig config) { } private static String buildConnectionString(final MongoDbSourceConfig config) { - return MongoDbDebeziumPropertiesManager.buildConnectionString(config.getDatabaseConfig(), true); + return MongoDbDebeziumPropertiesManager.buildConnectionString(config.getDatabaseConfig()); } } diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbDebeziumPropertiesManager.java b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbDebeziumPropertiesManager.java index c715be6080cd..b9a5b3708e6d 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbDebeziumPropertiesManager.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbDebeziumPropertiesManager.java @@ -52,7 +52,7 @@ public MongoDbDebeziumPropertiesManager(final Properties properties, protected Properties getConnectionConfiguration(final JsonNode config) { final Properties properties = new Properties(); - properties.setProperty(MONGODB_CONNECTION_STRING_KEY, buildConnectionString(config, false)); + properties.setProperty(MONGODB_CONNECTION_STRING_KEY, buildConnectionString(config)); properties.setProperty(MONGODB_CONNECTION_MODE_KEY, MONGODB_CONNECTION_MODE_VALUE); if 
(config.has(USERNAME_CONFIGURATION_KEY)) { @@ -106,10 +106,9 @@ public static String normalizeName(final String name) { * removing any values accidentally copied and pasted from the MongoDB Atlas UI. * * @param config The connector configuration. - * @param useSecondary Whether to use the secondary for reads. * @return The connection string. */ - public static String buildConnectionString(final JsonNode config, final boolean useSecondary) { + public static String buildConnectionString(final JsonNode config) { final String connectionString = config.get(CONNECTION_STRING_CONFIGURATION_KEY) .asText() .trim() @@ -117,10 +116,6 @@ public static String buildConnectionString(final JsonNode config, final boolean .replaceAll(CREDENTIALS_PLACEHOLDER, ""); final StringBuilder builder = new StringBuilder(); builder.append(connectionString); - builder.append("?retryWrites=false&provider=airbyte&tls=true"); - if (useSecondary) { - builder.append("&readPreference=secondary"); - } return builder.toString(); } diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/MongoConnectionUtilsTest.java b/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/MongoConnectionUtilsTest.java index 08180f774d34..277841134d8a 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/MongoConnectionUtilsTest.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/MongoConnectionUtilsTest.java @@ -10,7 +10,6 @@ import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; -import com.mongodb.ReadPreference; import com.mongodb.ServerAddress; import com.mongodb.client.MongoClient; import com.mongodb.client.internal.MongoClientImpl; @@ -40,9 +39,6 @@ void testCreateMongoClient() { assertNotNull(mongoClient); assertEquals(List.of(new ServerAddress(host, port)), ((MongoClientImpl) mongoClient).getSettings().getClusterSettings().getHosts()); - assertEquals(ReadPreference.secondaryPreferred(), ((MongoClientImpl) mongoClient).getSettings().getReadPreference()); - assertEquals(false, ((MongoClientImpl) mongoClient).getSettings().getRetryWrites()); - assertEquals(true, ((MongoClientImpl) mongoClient).getSettings().getSslSettings().isEnabled()); assertEquals(List.of("sync", MongoConstants.DRIVER_NAME), ((MongoClientImpl) mongoClient).getMongoDriverInformation().getDriverNames()); assertEquals(username, ((MongoClientImpl) mongoClient).getSettings().getCredential().getUserName()); assertEquals(password, new String(((MongoClientImpl) mongoClient).getSettings().getCredential().getPassword())); @@ -68,9 +64,6 @@ void testCreateMongoClientWithQuotesInConnectionString() { assertNotNull(mongoClient); assertEquals(List.of(new ServerAddress(host, port)), ((MongoClientImpl) mongoClient).getSettings().getClusterSettings().getHosts()); - assertEquals(ReadPreference.secondaryPreferred(), ((MongoClientImpl) mongoClient).getSettings().getReadPreference()); - assertEquals(false, ((MongoClientImpl) mongoClient).getSettings().getRetryWrites()); - assertEquals(true, ((MongoClientImpl) mongoClient).getSettings().getSslSettings().isEnabled()); assertEquals(List.of("sync", MongoConstants.DRIVER_NAME), ((MongoClientImpl) mongoClient).getMongoDriverInformation().getDriverNames()); assertEquals(username, ((MongoClientImpl) mongoClient).getSettings().getCredential().getUserName()); 
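// Note: MongoCredential stores the password as a char[], hence the new String(...) wrapper in the comparison below.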
assertEquals(password, new String(((MongoClientImpl) mongoClient).getSettings().getCredential().getPassword())); @@ -89,9 +82,6 @@ void testCreateMongoClientWithoutCredentials() { assertNotNull(mongoClient); assertEquals(List.of(new ServerAddress(host, port)), ((MongoClientImpl) mongoClient).getSettings().getClusterSettings().getHosts()); - assertEquals(ReadPreference.secondaryPreferred(), ((MongoClientImpl) mongoClient).getSettings().getReadPreference()); - assertEquals(false, ((MongoClientImpl) mongoClient).getSettings().getRetryWrites()); - assertEquals(true, ((MongoClientImpl) mongoClient).getSettings().getSslSettings().isEnabled()); assertEquals(List.of("sync", MongoConstants.DRIVER_NAME), ((MongoClientImpl) mongoClient).getMongoDriverInformation().getDriverNames()); assertNull(((MongoClientImpl) mongoClient).getSettings().getCredential()); } @@ -115,9 +105,6 @@ void testCreateMongoClientWithCredentialPlaceholderInConnectionString() { assertNotNull(mongoClient); assertEquals(List.of(new ServerAddress(host, port)), ((MongoClientImpl) mongoClient).getSettings().getClusterSettings().getHosts()); - assertEquals(ReadPreference.secondaryPreferred(), ((MongoClientImpl) mongoClient).getSettings().getReadPreference()); - assertEquals(false, ((MongoClientImpl) mongoClient).getSettings().getRetryWrites()); - assertEquals(true, ((MongoClientImpl) mongoClient).getSettings().getSslSettings().isEnabled()); assertEquals(List.of("sync", MongoConstants.DRIVER_NAME), ((MongoClientImpl) mongoClient).getMongoDriverInformation().getDriverNames()); assertEquals(username, ((MongoClientImpl) mongoClient).getSettings().getCredential().getUserName()); assertEquals(password, new String(((MongoClientImpl) mongoClient).getSettings().getCredential().getPassword())); diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbDebeziumPropertiesManagerTest.java b/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbDebeziumPropertiesManagerTest.java index c635a8e37dde..f095f676d0ee 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbDebeziumPropertiesManagerTest.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbDebeziumPropertiesManagerTest.java @@ -50,7 +50,7 @@ class MongoDbDebeziumPropertiesManagerTest { private static final String DATABASE_NAME = "test_database"; private static final Path PATH = Path.of("."); - public static final String EXPECTED_CONNECTION_STRING = "mongodb://localhost:27017/?retryWrites=false&provider=airbyte&tls=true"; + public static final String EXPECTED_CONNECTION_STRING = "mongodb://localhost:27017/"; @Test void testDebeziumProperties() { @@ -185,7 +185,7 @@ void testNormalizeName() { @Test void testCreateConnectionString() { final JsonNode config = createConfiguration(Optional.of("username"), Optional.of("password"), Optional.of("admin")); - final String connectionString = MongoDbDebeziumPropertiesManager.buildConnectionString(config, false); + final String connectionString = MongoDbDebeziumPropertiesManager.buildConnectionString(config); assertNotNull(connectionString); assertEquals(EXPECTED_CONNECTION_STRING, connectionString); } @@ -193,26 +193,18 @@ void testCreateConnectionString() { @Test void testCreateConnectionStringQuotedString() { final JsonNode config = createConfiguration(Optional.of("username"), 
Optional.of("password"), Optional.of("admin")); - final String connectionString = MongoDbDebeziumPropertiesManager.buildConnectionString(config, false); + final String connectionString = MongoDbDebeziumPropertiesManager.buildConnectionString(config); ((ObjectNode) config).put(CONNECTION_STRING_CONFIGURATION_KEY, "\"" + config.get(CONNECTION_STRING_CONFIGURATION_KEY) + "\""); assertNotNull(connectionString); assertEquals(EXPECTED_CONNECTION_STRING, connectionString); } - @Test - void testCreateConnectionStringUseSecondary() { - final JsonNode config = createConfiguration(Optional.of("username"), Optional.of("password"), Optional.of("admin")); - final String connectionString = MongoDbDebeziumPropertiesManager.buildConnectionString(config, true); - assertNotNull(connectionString); - assertEquals("mongodb://localhost:27017/?retryWrites=false&provider=airbyte&tls=true&readPreference=secondary", connectionString); - } - @Test void testCreateConnectionStringPlaceholderCredentials() { final JsonNode config = createConfiguration(Optional.of("username"), Optional.of("password"), Optional.of("admin")); ((ObjectNode) config).put(CONNECTION_STRING_CONFIGURATION_KEY, config.get(CONNECTION_STRING_CONFIGURATION_KEY).asText() .replaceAll("mongodb://", "mongodb://" + CREDENTIALS_PLACEHOLDER)); - final String connectionString = MongoDbDebeziumPropertiesManager.buildConnectionString(config, false); + final String connectionString = MongoDbDebeziumPropertiesManager.buildConnectionString(config); assertNotNull(connectionString); assertEquals(EXPECTED_CONNECTION_STRING, connectionString); } diff --git a/docs/integrations/sources/mongodb-v2.md b/docs/integrations/sources/mongodb-v2.md index 4c785556d7c0..f13754901c73 100644 --- a/docs/integrations/sources/mongodb-v2.md +++ b/docs/integrations/sources/mongodb-v2.md @@ -214,6 +214,7 @@ For more information regarding configuration parameters, please see [MongoDb Doc | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:----------------------------------------------------------------------------------------------------------| +| 1.2.15 | 2024-02-27 | [35673](https://github.com/airbytehq/airbyte/pull/35673) | Consume user provided connection string. | | 1.2.14 | 2024-02-27 | [35675](https://github.com/airbytehq/airbyte/pull/35675) | Fix invalid cdc error message. | | 1.2.13 | 2024-02-22 | [35569](https://github.com/airbytehq/airbyte/pull/35569) | Fix logging bug. | | 1.2.12 | 2024-02-21 | [35526](https://github.com/airbytehq/airbyte/pull/35526) | Improve error handling. 
| From 3e389098289587e57a9783d456e4d47a9fb5e938 Mon Sep 17 00:00:00 2001 From: Bindi Pankhudi Date: Tue, 27 Feb 2024 14:52:08 -0800 Subject: [PATCH 010/172] Docs/PyAirbyte: Added basic demo notebook link (#35678) Co-authored-by: bindipankhudi --- docs/using-airbyte/airbyte-lib/getting-started.mdx | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/using-airbyte/airbyte-lib/getting-started.mdx b/docs/using-airbyte/airbyte-lib/getting-started.mdx index 50baa2320b10..dec1c6978f9e 100644 --- a/docs/using-airbyte/airbyte-lib/getting-started.mdx +++ b/docs/using-airbyte/airbyte-lib/getting-started.mdx @@ -40,6 +40,7 @@ for name, records in result.streams.items(): ## Quickstarts +* [Basic Demo](https://github.com/airbytehq/quickstarts/blob/main/airbyte_lib_notebooks/AirbyteLib_Basic_Features_Demo.ipynb) * [CoinAPI](https://github.com/airbytehq/quickstarts/blob/main/airbyte_lib_notebooks/AirbyteLib_CoinAPI_Demo.ipynb) * [GA4](https://github.com/airbytehq/quickstarts/blob/main/airbyte_lib_notebooks/AirbyteLib_GA4_Demo.ipynb) * [Shopify](https://github.com/airbytehq/quickstarts/blob/main/airbyte_lib_notebooks/PyAirbyte_Shopify_Demo.ipynb) From 3d7ab9da0c83cb24a8b3ed3de26c2994d7a03d4b Mon Sep 17 00:00:00 2001 From: Evan Tahler Date: Tue, 27 Feb 2024 15:54:25 -0800 Subject: [PATCH 011/172] Fix doc headers on sub-pages (#35683) --- docusaurus/src/remark/docsHeaderDecoration.js | 3 +- docusaurus/src/remark/specDecoration.js | 65 ++++++++++++------- docusaurus/src/remark/utils.js | 50 ++++++++++---- 3 files changed, 82 insertions(+), 36 deletions(-) diff --git a/docusaurus/src/remark/docsHeaderDecoration.js b/docusaurus/src/remark/docsHeaderDecoration.js index 26109925264a..615286fd798c 100644 --- a/docusaurus/src/remark/docsHeaderDecoration.js +++ b/docusaurus/src/remark/docsHeaderDecoration.js @@ -10,7 +10,8 @@ const toAttributes = (props) => const plugin = () => { const transformer = async (ast, vfile) => { - if (!isDocsPage(vfile)) return; + const docsPageInfo = isDocsPage(vfile); + if (!docsPageInfo.isDocsPage) return; const registryEntry = await getRegistryEntry(vfile); diff --git a/docusaurus/src/remark/specDecoration.js b/docusaurus/src/remark/specDecoration.js index 2817de919f71..0474e252764b 100644 --- a/docusaurus/src/remark/specDecoration.js +++ b/docusaurus/src/remark/specDecoration.js @@ -15,19 +15,30 @@ async function injectSpecSchema(ast) { visit(ast, "mdxJsxFlowElement", (node) => { if (node.name !== "SpecSchema" && node.name !== "AirbyteLibExample") return; - const connectorName = node.attributes.find((attr) => attr.name === "connector").value; - const connectorSpec = registry.find((c) => c.dockerRepository_oss === `airbyte/${connectorName}`).spec_oss.connectionSpecification; + const connectorName = node.attributes.find( + (attr) => attr.name === "connector" + ).value; + const connectorSpec = registry.find( + (c) => c.dockerRepository_oss === `airbyte/${connectorName}` + ).spec_oss.connectionSpecification; node.attributes.push({ type: "mdxJsxAttribute", name: "specJSON", - value: JSON.stringify(connectorSpec) + value: JSON.stringify(connectorSpec), }); }); } async function injectDefaultAirbyteLibSection(vfile, ast) { const registryEntry = await getRegistryEntry(vfile); - if (!isDocsPage(vfile) || !registryEntry || !isPypiConnector(registryEntry) || vfile.value.includes("## Usage with airbyte-lib")) { + const docsPageInfo = isDocsPage(vfile); + + if ( + !docsPageInfo.isTrueDocsPage || + !registryEntry || + !isPypiConnector(registryEntry) || + vfile.value.includes("## Usage with 
airbyte-lib") + ) { return; } const connectorName = registryEntry.dockerRepository_oss.split("/").pop(); @@ -36,31 +47,41 @@ async function injectDefaultAirbyteLibSection(vfile, ast) { visit(ast, "heading", (node, index, parent) => { if (!added && isChangelogHeading(node)) { added = true; - parent.children.splice(index, 0, { - type: "heading", - depth: 2, - children: [{ type: "text", value: "Reference" }] - }, { - type: "mdxJsxFlowElement", - name: "SpecSchema", - attributes: [ - { - type: "mdxJsxAttribute", - name: "connector", - value: connectorName - }, - ] - }); + parent.children.splice( + index, + 0, + { + type: "heading", + depth: 2, + children: [{ type: "text", value: "Reference" }], + }, + { + type: "mdxJsxFlowElement", + name: "SpecSchema", + attributes: [ + { + type: "mdxJsxAttribute", + name: "connector", + value: connectorName, + }, + ], + } + ); } }); if (!added) { - throw new Error(`Could not find a changelog heading in ${vfile.path} to add the default airbyte-lib section. This connector won't have a reference section. Make sure there is either a ## Changelog section or add a manual reference section.`); + throw new Error( + `Could not find a changelog heading in ${vfile.path} to add the default airbyte-lib section. This connector won't have a reference section. Make sure there is either a ## Changelog section or add a manual reference section.` + ); } } function isChangelogHeading(node) { - return node.depth === 2 && node.children.length === 1 && node.children[0].value.toLowerCase() === "changelog"; + return ( + node.depth === 2 && + node.children.length === 1 && + node.children[0].value.toLowerCase() === "changelog" + ); } - module.exports = plugin; diff --git a/docusaurus/src/remark/utils.js b/docusaurus/src/remark/utils.js index bd5544ff0fe4..02c4d856d2e3 100644 --- a/docusaurus/src/remark/utils.js +++ b/docusaurus/src/remark/utils.js @@ -1,29 +1,53 @@ const { catalog } = require("../connector_registry"); +// the migration guide and troubleshooting guide are not connectors, but also not in a sub-folder, e.g. /integrations/sources/mssql-migrations +const connectorPageAlternativeEndings = ["-migrations", "-troubleshooting"]; +const connectorPageAlternativeEndingsRegExp = new RegExp( + connectorPageAlternativeEndings.join("|"), + "gi" +); + const isDocsPage = (vfile) => { + let response = { isDocsPage: false, isTrueDocsPage: false }; + if ( - !vfile.path.includes("integrations/sources") && - !vfile.path.includes("integrations/destinations") + vfile.path.includes("integrations/sources") || + vfile.path.includes("integrations/destinations") ) { - return false; + response.isDocsPage = true; + response.isTrueDocsPage = true; } - // skip the root files in integrations/source and integrations/destinations - if (vfile.path.includes("README.md")) { - return false; + if (response.isDocsPage === true) { + for (const ending of connectorPageAlternativeEndings) { + if (vfile.path.includes(ending)) { + response.isTrueDocsPage = false; + } + } } - if (vfile.path.includes("-migrations.md")) { - return false; - } - - return true; + return response; }; const getRegistryEntry = async (vfile) => { + if ( + !vfile.path.includes("integrations/sources") && + !vfile.path.includes("integrations/destinations") + ) { + return; + } + + // troubleshooting pages are sub-pages, but migration pages are not? 
+ // ["sources", "mysql"] vs ["sources", "mysql", "troubleshooting"] vs ["sources", "mysql-migrations"] const pathParts = vfile.path.split("/"); - const connectorName = pathParts.pop().split(".")[0]; - const connectorType = pathParts.pop(); + while (pathParts[0] !== "integrations") pathParts.shift(); + pathParts.shift(); + const connectorType = pathParts.shift(); + const connectorName = pathParts + .shift() + .split(".")[0] + .replace(connectorPageAlternativeEndingsRegExp, ""); + const dockerRepository = `airbyte/${connectorType.replace( /s$/, "" From 0a3fe3dc8e71ded329e831adc6b0439c0c9beb7b Mon Sep 17 00:00:00 2001 From: Evan Tahler Date: Tue, 27 Feb 2024 17:09:08 -0800 Subject: [PATCH 012/172] Archive unpopular / unsupported destinations (#35359) Co-authored-by: Ella Rohm-Ensing --- .../connectors/destination-amazon-sqs/metadata.yaml | 6 +++--- .../destination-bigquery-denormalized/metadata.yaml | 2 +- .../connectors/destination-cassandra/metadata.yaml | 4 ++-- .../connectors/destination-cumulio/metadata.yaml | 6 +++--- .../connectors/destination-databend/metadata.yaml | 6 +++--- .../connectors/destination-doris/metadata.yaml | 4 ++-- .../connectors/destination-exasol/metadata.yaml | 4 ++-- .../connectors/destination-firebolt/metadata.yaml | 6 +++--- .../connectors/destination-keen/metadata.yaml | 6 +++--- .../connectors/destination-kinesis/metadata.yaml | 6 +++--- .../destination-mariadb-columnstore/metadata.yaml | 6 +++--- .../connectors/destination-meilisearch/metadata.yaml | 6 +++--- .../connectors/destination-mqtt/metadata.yaml | 6 +++--- .../connectors/destination-pulsar/metadata.yaml | 6 +++--- .../connectors/destination-r2/metadata.yaml | 4 ++-- .../connectors/destination-rabbitmq/metadata.yaml | 6 +++--- .../connectors/destination-redpanda/metadata.yaml | 4 ++-- .../connectors/destination-rockset/metadata.yaml | 6 +++--- .../destination-scaffold-destination-python/metadata.yaml | 2 +- .../connectors/destination-scylla/metadata.yaml | 6 +++--- .../connectors/destination-selectdb/metadata.yaml | 4 ++-- .../connectors/destination-tidb/metadata.yaml | 4 ++-- .../connectors/destination-timeplus/metadata.yaml | 6 +++--- .../connectors/destination-vertica/metadata.yaml | 6 +++--- .../connectors/destination-xata/metadata.yaml | 6 +++--- .../connectors/destination-yugabytedb/metadata.yaml | 4 ++-- 26 files changed, 66 insertions(+), 66 deletions(-) diff --git a/airbyte-integrations/connectors/destination-amazon-sqs/metadata.yaml b/airbyte-integrations/connectors/destination-amazon-sqs/metadata.yaml index 8b6fa7635281..3676f4f05113 100644 --- a/airbyte-integrations/connectors/destination-amazon-sqs/metadata.yaml +++ b/airbyte-integrations/connectors/destination-amazon-sqs/metadata.yaml @@ -10,9 +10,9 @@ data: name: Amazon SQS registries: cloud: - enabled: false # hide Amazon SQS Destination https://github.com/airbytehq/airbyte/issues/16316 + enabled: false oss: - enabled: true + enabled: false releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/destinations/amazon-sqs tags: @@ -20,5 +20,5 @@ data: ab_internal: sl: 100 ql: 200 - supportLevel: community + supportLevel: archived metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-bigquery-denormalized/metadata.yaml b/airbyte-integrations/connectors/destination-bigquery-denormalized/metadata.yaml index a651fabef37f..a10b99ed6aa7 100644 --- a/airbyte-integrations/connectors/destination-bigquery-denormalized/metadata.yaml +++ 
b/airbyte-integrations/connectors/destination-bigquery-denormalized/metadata.yaml @@ -31,5 +31,5 @@ data: ab_internal: sl: 100 ql: 300 - supportLevel: community + supportLevel: archived metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-cassandra/metadata.yaml b/airbyte-integrations/connectors/destination-cassandra/metadata.yaml index 825ba9edb491..7b6e8b0d152e 100644 --- a/airbyte-integrations/connectors/destination-cassandra/metadata.yaml +++ b/airbyte-integrations/connectors/destination-cassandra/metadata.yaml @@ -10,9 +10,9 @@ data: name: Cassandra registries: cloud: - enabled: false # hide Cassandra Destination https://github.com/airbytehq/airbyte-cloud/issues/2606 + enabled: false oss: - enabled: true + enabled: false releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/destinations/cassandra tags: diff --git a/airbyte-integrations/connectors/destination-cumulio/metadata.yaml b/airbyte-integrations/connectors/destination-cumulio/metadata.yaml index d45dd2ff46db..bef0baecb57e 100644 --- a/airbyte-integrations/connectors/destination-cumulio/metadata.yaml +++ b/airbyte-integrations/connectors/destination-cumulio/metadata.yaml @@ -10,9 +10,9 @@ data: name: Cumul.io registries: cloud: - enabled: true + enabled: false oss: - enabled: true + enabled: false releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/destinations/cumulio tags: @@ -20,5 +20,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: community + supportLevel: archived metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-databend/metadata.yaml b/airbyte-integrations/connectors/destination-databend/metadata.yaml index 4b2de407755e..59633494896d 100644 --- a/airbyte-integrations/connectors/destination-databend/metadata.yaml +++ b/airbyte-integrations/connectors/destination-databend/metadata.yaml @@ -10,9 +10,9 @@ data: name: Databend registries: cloud: - enabled: true + enabled: false oss: - enabled: true + enabled: false releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/destinations/databend tags: @@ -20,5 +20,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: community + supportLevel: archived metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-doris/metadata.yaml b/airbyte-integrations/connectors/destination-doris/metadata.yaml index 86f0098edb73..6ba856f9dc09 100644 --- a/airbyte-integrations/connectors/destination-doris/metadata.yaml +++ b/airbyte-integrations/connectors/destination-doris/metadata.yaml @@ -12,7 +12,7 @@ data: cloud: enabled: false oss: - enabled: true + enabled: false releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/destinations/doris tags: @@ -20,5 +20,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: community + supportLevel: archived metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-exasol/metadata.yaml b/airbyte-integrations/connectors/destination-exasol/metadata.yaml index 45c7215bafee..90b0a6b5a82d 100644 --- a/airbyte-integrations/connectors/destination-exasol/metadata.yaml +++ b/airbyte-integrations/connectors/destination-exasol/metadata.yaml @@ -11,7 +11,7 @@ data: cloud: enabled: false oss: - enabled: true + enabled: false releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/destinations/exasol tags: @@ -19,5 +19,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: community + supportLevel: archived metadataSpecVersion: "1.0" diff 
--git a/airbyte-integrations/connectors/destination-firebolt/metadata.yaml b/airbyte-integrations/connectors/destination-firebolt/metadata.yaml index 942439e480c1..bc04b2e4a36c 100644 --- a/airbyte-integrations/connectors/destination-firebolt/metadata.yaml +++ b/airbyte-integrations/connectors/destination-firebolt/metadata.yaml @@ -10,9 +10,9 @@ data: name: Firebolt registries: cloud: - enabled: true + enabled: false oss: - enabled: true + enabled: false releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/destinations/firebolt supportsDbt: true @@ -21,5 +21,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: community + supportLevel: archived metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-keen/metadata.yaml b/airbyte-integrations/connectors/destination-keen/metadata.yaml index 64963c7874be..3eade610c13f 100644 --- a/airbyte-integrations/connectors/destination-keen/metadata.yaml +++ b/airbyte-integrations/connectors/destination-keen/metadata.yaml @@ -10,9 +10,9 @@ data: name: Chargify (Keen) registries: cloud: - enabled: true + enabled: false oss: - enabled: true + enabled: false releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/destinations/keen tags: @@ -20,5 +20,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: community + supportLevel: archived metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-kinesis/metadata.yaml b/airbyte-integrations/connectors/destination-kinesis/metadata.yaml index 144113267bf6..2487a9162b88 100644 --- a/airbyte-integrations/connectors/destination-kinesis/metadata.yaml +++ b/airbyte-integrations/connectors/destination-kinesis/metadata.yaml @@ -10,9 +10,9 @@ data: name: Kinesis registries: cloud: - enabled: true + enabled: false oss: - enabled: true + enabled: false releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/destinations/kinesis tags: @@ -20,5 +20,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: community + supportLevel: archived metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/metadata.yaml b/airbyte-integrations/connectors/destination-mariadb-columnstore/metadata.yaml index dbb6081f1f5f..049d4f5af9de 100644 --- a/airbyte-integrations/connectors/destination-mariadb-columnstore/metadata.yaml +++ b/airbyte-integrations/connectors/destination-mariadb-columnstore/metadata.yaml @@ -10,9 +10,9 @@ data: name: MariaDB ColumnStore registries: cloud: - enabled: false # hide MariaDB Destination https://github.com/airbytehq/airbyte-cloud/issues/2611 + enabled: false oss: - enabled: true + enabled: false releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/destinations/mariadb-columnstore tags: @@ -20,5 +20,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: community + supportLevel: archived metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-meilisearch/metadata.yaml b/airbyte-integrations/connectors/destination-meilisearch/metadata.yaml index 79a5f5851984..1b30cb150c98 100644 --- a/airbyte-integrations/connectors/destination-meilisearch/metadata.yaml +++ b/airbyte-integrations/connectors/destination-meilisearch/metadata.yaml @@ -10,9 +10,9 @@ data: name: MeiliSearch registries: cloud: - enabled: false # hide MeiliSearch Destination https://github.com/airbytehq/airbyte/issues/16313 + enabled: false oss: - enabled: true + enabled: false releaseStage: alpha documentationUrl: 
https://docs.airbyte.com/integrations/destinations/meilisearch tags: @@ -20,5 +20,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: community + supportLevel: archived metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-mqtt/metadata.yaml b/airbyte-integrations/connectors/destination-mqtt/metadata.yaml index 85d5e49baa42..b99536f01874 100644 --- a/airbyte-integrations/connectors/destination-mqtt/metadata.yaml +++ b/airbyte-integrations/connectors/destination-mqtt/metadata.yaml @@ -10,9 +10,9 @@ data: name: MQTT registries: cloud: - enabled: false # hide MQTT Destination https://github.com/airbytehq/airbyte-cloud/issues/2613 + enabled: false oss: - enabled: true + enabled: false releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/destinations/mqtt tags: @@ -20,5 +20,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: community + supportLevel: archived metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-pulsar/metadata.yaml b/airbyte-integrations/connectors/destination-pulsar/metadata.yaml index d910ddb6f8b9..a83610be5921 100644 --- a/airbyte-integrations/connectors/destination-pulsar/metadata.yaml +++ b/airbyte-integrations/connectors/destination-pulsar/metadata.yaml @@ -10,9 +10,9 @@ data: name: Pulsar registries: cloud: - enabled: false # hide Pulsar Destination https://github.com/airbytehq/airbyte-cloud/issues/2614 + enabled: false oss: - enabled: true + enabled: false releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/destinations/pulsar tags: @@ -20,5 +20,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: community + supportLevel: archived metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-r2/metadata.yaml b/airbyte-integrations/connectors/destination-r2/metadata.yaml index 13e4ae9a353b..d9a0911792a7 100644 --- a/airbyte-integrations/connectors/destination-r2/metadata.yaml +++ b/airbyte-integrations/connectors/destination-r2/metadata.yaml @@ -12,7 +12,7 @@ data: cloud: enabled: false oss: - enabled: true + enabled: false releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/destinations/r2 tags: @@ -20,5 +20,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: community + supportLevel: archived metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-rabbitmq/metadata.yaml b/airbyte-integrations/connectors/destination-rabbitmq/metadata.yaml index 4a5dcdbb4f45..56bd8775f7d2 100644 --- a/airbyte-integrations/connectors/destination-rabbitmq/metadata.yaml +++ b/airbyte-integrations/connectors/destination-rabbitmq/metadata.yaml @@ -10,9 +10,9 @@ data: name: RabbitMQ registries: cloud: - enabled: false # hide RabbitMQ Destination https://github.com/airbytehq/airbyte/issues/16315 + enabled: false oss: - enabled: true + enabled: false releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/destinations/rabbitmq tags: @@ -20,5 +20,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: community + supportLevel: archived metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-redpanda/metadata.yaml b/airbyte-integrations/connectors/destination-redpanda/metadata.yaml index 6ad22c5a4a55..eb046e009954 100644 --- a/airbyte-integrations/connectors/destination-redpanda/metadata.yaml +++ b/airbyte-integrations/connectors/destination-redpanda/metadata.yaml @@ -12,7 +12,7 @@ data: cloud: enabled: false oss: - enabled: true + enabled: false 
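The same three-line change repeats across every destination in this patch: both registry flags flip to `enabled: false` and `supportLevel` moves from `community` to `archived`. A small validation sketch for that pattern, assuming PyYAML and the `data.registries.*.enabled` / `data.supportLevel` layout visible in these hunks:

```python
# A sketch for spot-checking the archival pattern applied in this patch;
# the metadata.yaml layout is taken from the hunks above.
import yaml

def is_fully_archived(metadata_path: str) -> bool:
    with open(metadata_path) as f:
        data = yaml.safe_load(f)["data"]
    registries = data.get("registries", {})
    return (
        data.get("supportLevel") == "archived"
        and not registries.get("cloud", {}).get("enabled", False)
        and not registries.get("oss", {}).get("enabled", False)
    )
```

Walking the changed `destination-*/metadata.yaml` files with a check like this would confirm the patch applied the pattern uniformly.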
releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/destinations/redpanda tags: @@ -20,5 +20,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: community + supportLevel: archived metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-rockset/metadata.yaml b/airbyte-integrations/connectors/destination-rockset/metadata.yaml index 9c1839c55999..281376e03f2c 100644 --- a/airbyte-integrations/connectors/destination-rockset/metadata.yaml +++ b/airbyte-integrations/connectors/destination-rockset/metadata.yaml @@ -9,9 +9,9 @@ data: name: Rockset registries: cloud: - enabled: false # hide Rockset Destination https://github.com/airbytehq/airbyte-cloud/issues/2615 + enabled: false oss: - enabled: true + enabled: false releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/destinations/rockset tags: @@ -19,5 +19,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: community + supportLevel: archived metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-scaffold-destination-python/metadata.yaml b/airbyte-integrations/connectors/destination-scaffold-destination-python/metadata.yaml index 20b876b5d86b..6665273ac1a7 100644 --- a/airbyte-integrations/connectors/destination-scaffold-destination-python/metadata.yaml +++ b/airbyte-integrations/connectors/destination-scaffold-destination-python/metadata.yaml @@ -23,7 +23,7 @@ data: name: Scaffold Destination Python releaseDate: TODO releaseStage: alpha - supportLevel: community + supportLevel: archived documentationUrl: https://docs.airbyte.com/integrations/destinations/scaffold-destination-python tags: - language:python diff --git a/airbyte-integrations/connectors/destination-scylla/metadata.yaml b/airbyte-integrations/connectors/destination-scylla/metadata.yaml index c1c8e06d6a5e..fb5797bd3675 100644 --- a/airbyte-integrations/connectors/destination-scylla/metadata.yaml +++ b/airbyte-integrations/connectors/destination-scylla/metadata.yaml @@ -10,9 +10,9 @@ data: name: Scylla registries: cloud: - enabled: false # hide Scylla Destination https://github.com/airbytehq/airbyte-cloud/issues/2617 + enabled: false oss: - enabled: true + enabled: false releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/destinations/scylla tags: @@ -20,5 +20,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: community + supportLevel: archived metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-selectdb/metadata.yaml b/airbyte-integrations/connectors/destination-selectdb/metadata.yaml index fe717849c4a5..ec29978a603a 100644 --- a/airbyte-integrations/connectors/destination-selectdb/metadata.yaml +++ b/airbyte-integrations/connectors/destination-selectdb/metadata.yaml @@ -12,7 +12,7 @@ data: cloud: enabled: false oss: - enabled: true + enabled: false releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/destinations/selectdb tags: @@ -20,5 +20,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: community + supportLevel: archived metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-tidb/metadata.yaml b/airbyte-integrations/connectors/destination-tidb/metadata.yaml index 1d7ff079f914..0101bc0c560a 100644 --- a/airbyte-integrations/connectors/destination-tidb/metadata.yaml +++ b/airbyte-integrations/connectors/destination-tidb/metadata.yaml @@ -16,7 +16,7 @@ data: cloud: enabled: false oss: - enabled: true + enabled: false releaseStage: alpha documentationUrl: 
https://docs.airbyte.com/integrations/destinations/tidb supportsDbt: true @@ -25,5 +25,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: community + supportLevel: archived metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-timeplus/metadata.yaml b/airbyte-integrations/connectors/destination-timeplus/metadata.yaml index f1b8331630ab..9cb94f5d8584 100644 --- a/airbyte-integrations/connectors/destination-timeplus/metadata.yaml +++ b/airbyte-integrations/connectors/destination-timeplus/metadata.yaml @@ -10,9 +10,9 @@ data: name: Timeplus registries: cloud: - enabled: true + enabled: false oss: - enabled: true + enabled: false releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/destinations/timeplus tags: @@ -20,5 +20,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: community + supportLevel: archived metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-vertica/metadata.yaml b/airbyte-integrations/connectors/destination-vertica/metadata.yaml index ba8ce1298b98..f3e3f04864fc 100644 --- a/airbyte-integrations/connectors/destination-vertica/metadata.yaml +++ b/airbyte-integrations/connectors/destination-vertica/metadata.yaml @@ -1,9 +1,9 @@ data: registries: cloud: - enabled: true + enabled: false oss: - enabled: true + enabled: false connectorSubtype: database connectorType: destination definitionId: ca81ee7c-3163-9678-af40-094cc31e5e42 @@ -20,5 +20,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: community + supportLevel: archived metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-xata/metadata.yaml b/airbyte-integrations/connectors/destination-xata/metadata.yaml index 967f1ec2a534..cdb67521b926 100644 --- a/airbyte-integrations/connectors/destination-xata/metadata.yaml +++ b/airbyte-integrations/connectors/destination-xata/metadata.yaml @@ -1,9 +1,9 @@ data: registries: cloud: - enabled: true + enabled: false oss: - enabled: true + enabled: false connectorSubtype: database connectorType: destination definitionId: 2a51c92d-0fb4-4e54-94d2-cce631f24d1f @@ -20,5 +20,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: community + supportLevel: archived metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-yugabytedb/metadata.yaml b/airbyte-integrations/connectors/destination-yugabytedb/metadata.yaml index 29f00360f6d1..a6bba3494783 100644 --- a/airbyte-integrations/connectors/destination-yugabytedb/metadata.yaml +++ b/airbyte-integrations/connectors/destination-yugabytedb/metadata.yaml @@ -12,7 +12,7 @@ data: cloud: enabled: false oss: - enabled: true + enabled: false releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/destinations/yugabytedb tags: @@ -20,5 +20,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: community + supportLevel: archived metadataSpecVersion: "1.0" From 6f5c9a331440159c002726563be33124d9b5b4a0 Mon Sep 17 00:00:00 2001 From: Bindi Pankhudi Date: Tue, 27 Feb 2024 17:54:26 -0800 Subject: [PATCH 013/172] Docs/PyAirbyte: created pyairbyte folder for docs (#35690) Co-authored-by: bindipankhudi --- .../{airbyte-lib => pyairbyte}/getting-started.mdx | 14 +++++++------- docusaurus/redirects.yml | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) rename docs/using-airbyte/{airbyte-lib => pyairbyte}/getting-started.mdx (76%) diff --git a/docs/using-airbyte/airbyte-lib/getting-started.mdx b/docs/using-airbyte/pyairbyte/getting-started.mdx similarity index 76% rename from 
docs/using-airbyte/airbyte-lib/getting-started.mdx rename to docs/using-airbyte/pyairbyte/getting-started.mdx index dec1c6978f9e..86d0d13adff9 100644 --- a/docs/using-airbyte/airbyte-lib/getting-started.mdx +++ b/docs/using-airbyte/pyairbyte/getting-started.mdx @@ -20,7 +20,7 @@ pip install 'git+https://github.com/airbytehq/PyAirbyte.git' Data can be extracted from sources and loaded into caches: -Try with Colab +Try with Colab ```python import airbyte as ab @@ -40,12 +40,12 @@ for name, records in result.streams.items(): ## Quickstarts -* [Basic Demo](https://github.com/airbytehq/quickstarts/blob/main/airbyte_lib_notebooks/AirbyteLib_Basic_Features_Demo.ipynb) -* [CoinAPI](https://github.com/airbytehq/quickstarts/blob/main/airbyte_lib_notebooks/AirbyteLib_CoinAPI_Demo.ipynb) -* [GA4](https://github.com/airbytehq/quickstarts/blob/main/airbyte_lib_notebooks/AirbyteLib_GA4_Demo.ipynb) -* [Shopify](https://github.com/airbytehq/quickstarts/blob/main/airbyte_lib_notebooks/PyAirbyte_Shopify_Demo.ipynb) -* [GitHub](https://github.com/airbytehq/quickstarts/blob/main/airbyte_lib_notebooks/AirbyteLib_Github_Incremental_Demo.ipynb) -* [Postgres (cache)](https://github.com/airbytehq/quickstarts/blob/main/airbyte_lib_notebooks/PyAirbyte_Postgres_Custom_Cache_Demo.ipynb) +* [Basic Demo](https://github.com/airbytehq/quickstarts/blob/main/pyairbyte_notebooks/PyAirbyte_Basic_Features_Demo.ipynb) +* [CoinAPI](https://github.com/airbytehq/quickstarts/blob/main/pyairbyte_notebooks/PyAirbyte_CoinAPI_Demo.ipynb) +* [GA4](https://github.com/airbytehq/quickstarts/blob/main/pyairbyte_notebooks/PyAirbyte_GA4_Demo.ipynb) +* [Shopify](https://github.com/airbytehq/quickstarts/blob/main/pyairbyte_notebooks/PyAirbyte_Shopify_Demo.ipynb) +* [GitHub](https://github.com/airbytehq/quickstarts/blob/main/pyairbyte_notebooks/PyAirbyte_Github_Incremental_Demo.ipynb) +* [Postgres (cache)](https://github.com/airbytehq/quickstarts/blob/main/pyairbyte_notebooks/PyAirbyte_Postgres_Custom_Cache_Demo.ipynb) ## API Reference diff --git a/docusaurus/redirects.yml b/docusaurus/redirects.yml index b5f2bf9e8afc..d7b11e6a2110 100644 --- a/docusaurus/redirects.yml +++ b/docusaurus/redirects.yml @@ -95,4 +95,4 @@ to: /access-management/sso - from: - /pyairbyte - to: /using-airbyte/airbyte-lib/getting-started + to: /using-airbyte/pyairbyte/getting-started From 91f0218e21f34e4100e40c225b987580fd4b2b32 Mon Sep 17 00:00:00 2001 From: Anton Karpets Date: Wed, 28 Feb 2024 11:05:20 +0200 Subject: [PATCH 014/172] =?UTF-8?q?=F0=9F=8F=A5Source=20Stripe:=20fix=20ex?= =?UTF-8?q?pected=20records=20(#35692)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../integration_tests/expected_records.jsonl | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/airbyte-integrations/connectors/source-stripe/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-stripe/integration_tests/expected_records.jsonl index c6602a1a5df7..836d18df9e2b 100644 --- a/airbyte-integrations/connectors/source-stripe/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-stripe/integration_tests/expected_records.jsonl @@ -47,8 +47,8 @@ {"stream": "products", "data": {"id": "prod_KouQ5ez86yREmB", "object": "product", "active": true, "attributes": [], "created": 1640124902, "default_price": "price_1K9GbqEcXtiJtvvhJ3lZe4i5", "description": null, "features": [], "images": [], "livemode": false, "metadata": {}, "name": "edgao-test-product", "package_dimensions": 
null, "shippable": null, "statement_descriptor": null, "tax_code": "txcd_10000000", "type": "service", "unit_label": null, "updated": 1696839715, "url": null}, "emitted_at": 1697627307635} {"stream": "products", "data": {"id": "prod_NHcKselSHfKdfc", "object": "product", "active": true, "attributes": [], "created": 1675345504, "default_price": "price_1MX364EcXtiJtvvhE3WgTl4O", "description": "Test Product 1 description", "features": [], "images": ["https://files.stripe.com/links/MDB8YWNjdF8xSndub2lFY1h0aUp0dnZofGZsX3Rlc3RfdjBOT09UaHRiNVl2WmJ6clNYRUlmcFFD00cCBRNHnV"], "livemode": false, "metadata": {}, "name": "Test Product 1", "package_dimensions": null, "shippable": null, "statement_descriptor": null, "tax_code": "txcd_10301000", "type": "service", "unit_label": null, "updated": 1696839789, "url": null}, "emitted_at": 1697627307877} {"stream": "products", "data": {"id": "prod_NCgx1XP2IFQyKF", "object": "product", "active": true, "attributes": [], "created": 1674209524, "default_price": null, "description": null, "features": [], "images": [], "livemode": false, "metadata": {}, "name": "tu", "package_dimensions": null, "shippable": null, "statement_descriptor": null, "tax_code": "txcd_10000000", "type": "service", "unit_label": null, "updated": 1696839225, "url": null}, "emitted_at": 1697627307879} -{"stream": "subscriptions", "data": {"id": "sub_1O2Dg0EcXtiJtvvhz7Q4zS0n", "object": "subscription", "application": null, "application_fee_percent": null, "automatic_tax": {"enabled": true, "liability": {"type": "self"}}, "billing_cycle_anchor": 1697550676.0, "billing_cycle_anchor_config": null, "billing_thresholds": null, "cancel_at": null, "cancel_at_period_end": false, "canceled_at": 1697550676.0, "cancellation_details": {"comment": null, "feedback": null, "reason": "cancellation_requested"}, "collection_method": "charge_automatically", "created": 1697550676, "currency": "usd", "current_period_end": 1705499476.0, "current_period_start": 1702821076, "customer": "cus_NGoTFiJFVbSsvZ", "days_until_due": null, "default_payment_method": null, "default_source": null, "default_tax_rates": [], "description": null, "discount": null, "ended_at": 1705329724.0, "invoice_settings": {"account_tax_ids": null, "issuer": {"type": "self"}}, "items": {"object": "list", "data": [{"id": "si_OptSP2o3XZUBpx", "object": "subscription_item", "billing_thresholds": null, "created": 1697550677, "metadata": {}, "plan": {"id": "price_1MSHZoEcXtiJtvvh6O8TYD8T", "object": "plan", "active": true, "aggregate_usage": null, "amount": 600, "amount_decimal": "600", "billing_scheme": "per_unit", "created": 1674209524, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": null, "product": "prod_NCgx1XP2IFQyKF", "tiers_mode": null, "transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "price": {"id": "price_1MSHZoEcXtiJtvvh6O8TYD8T", "object": "price", "active": true, "billing_scheme": "per_unit", "created": 1674209524, "currency": "usd", "custom_unit_amount": null, "livemode": false, "lookup_key": null, "metadata": {}, "nickname": null, "product": "prod_NCgx1XP2IFQyKF", "recurring": {"aggregate_usage": null, "interval": "month", "interval_count": 1, "trial_period_days": null, "usage_type": "licensed"}, "tax_behavior": "exclusive", "tiers_mode": null, "transform_quantity": null, "type": "recurring", "unit_amount": 600, "unit_amount_decimal": "600"}, "quantity": 1, "subscription": "sub_1O2Dg0EcXtiJtvvhz7Q4zS0n", "tax_rates": []}], "has_more": false, 
"total_count": 1.0, "url": "/v1/subscription_items?subscription=sub_1O2Dg0EcXtiJtvvhz7Q4zS0n"}, "latest_invoice": "in_1OOKkUEcXtiJtvvheUUavyuB", "livemode": false, "metadata": {}, "next_pending_invoice_item_invoice": null, "on_behalf_of": null, "pause_collection": null, "payment_settings": {"payment_method_options": null, "payment_method_types": null, "save_default_payment_method": null}, "pending_invoice_item_interval": null, "pending_setup_intent": null, "pending_update": null, "plan": {"id": "price_1MSHZoEcXtiJtvvh6O8TYD8T", "object": "plan", "active": true, "aggregate_usage": null, "amount": 600, "amount_decimal": "600", "billing_scheme": "per_unit", "created": 1674209524, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": null, "product": "prod_NCgx1XP2IFQyKF", "tiers_mode": null, "transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "quantity": 1, "schedule": "sub_sched_1O2Dg0EcXtiJtvvh7GtbtIhP", "start_date": 1697550676, "status": "canceled", "test_clock": null, "transfer_data": null, "trial_end": null, "trial_settings": {"end_behavior": {"missing_payment_method": "create_invoice"}}, "trial_start": null, "updated": 1697550676}, "emitted_at": 1707158969393} -{"stream":"subscription_schedule","data":{"id":"sub_sched_1O2Dg0EcXtiJtvvh7GtbtIhP","object":"subscription_schedule","application":null,"canceled_at":"1705329724","completed_at":null,"created":1697550676,"current_phase":null,"customer":"cus_NGoTFiJFVbSsvZ","default_settings":{"application_fee_percent":null,"automatic_tax":{"enabled":false, "liability": null},"billing_cycle_anchor":"automatic","billing_thresholds":null,"collection_method":"charge_automatically","default_payment_method":null,"default_source":null,"description":"Test Test","invoice_settings":"{'account_tax_ids': None, 'days_until_due': None, 'issuer': {'type': 'self'}}","on_behalf_of":null,"transfer_data":null},"end_behavior":"cancel","livemode":false,"metadata":{},"phases":[{"add_invoice_items":[],"application_fee_percent":null,"automatic_tax":{"enabled":true, "liability": {"type": "self"}},"billing_cycle_anchor":null,"billing_thresholds":null,"collection_method":"charge_automatically","coupon":null,"currency":"usd","default_payment_method":null,"default_tax_rates":[],"description":"Test Test","end_date":1705499476,"invoice_settings":"{'account_tax_ids': None, 'days_until_due': None, 'issuer': None}","items":[{"billing_thresholds":null,"metadata":{},"plan":"price_1MSHZoEcXtiJtvvh6O8TYD8T","price":"price_1MSHZoEcXtiJtvvh6O8TYD8T","quantity":1,"tax_rates":[]}],"metadata":{},"on_behalf_of":null,"proration_behavior":"create_prorations","start_date":1697550676,"transfer_data":null,"trial_end":null}],"released_at":null,"released_subscription":null,"renewal_interval":null,"status":"canceled","subscription":"sub_1O2Dg0EcXtiJtvvhz7Q4zS0n","test_clock":null,"updated":1697550676},"emitted_at":1705636378620} +{"stream": "subscriptions", "data": {"id": "sub_1OoDDUEcXtiJtvvh4elaXYFT", "object": "subscription", "application": null, "application_fee_percent": null, "automatic_tax": {"enabled": true, "liability": {"type": "self"}}, "billing_cycle_anchor": 1708988652.0, "billing_cycle_anchor_config": null, "billing_thresholds": null, "cancel_at": null, "cancel_at_period_end": false, "canceled_at": null, "cancellation_details": {"comment": null, "feedback": null, "reason": null}, "collection_method": "charge_automatically", "created": 1708988652, "currency": "usd", "current_period_end": 1711494252.0, 
"current_period_start": 1708988652, "customer": "cus_NGoTFiJFVbSsvZ", "days_until_due": null, "default_payment_method": null, "default_source": null, "default_tax_rates": [], "description": null, "discount": null, "ended_at": null, "invoice_settings": {"account_tax_ids": null, "issuer": {"type": "self"}}, "items": {"object": "list", "data": [{"id": "si_PdUBTrsn2C4ShU", "object": "subscription_item", "billing_thresholds": null, "created": 1708988653, "metadata": {}, "plan": {"id": "price_1MSHZoEcXtiJtvvh6O8TYD8T", "object": "plan", "active": true, "aggregate_usage": null, "amount": 600, "amount_decimal": "600", "billing_scheme": "per_unit", "created": 1674209524, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": null, "product": "prod_NCgx1XP2IFQyKF", "tiers_mode": null, "transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "price": {"id": "price_1MSHZoEcXtiJtvvh6O8TYD8T", "object": "price", "active": true, "billing_scheme": "per_unit", "created": 1674209524, "currency": "usd", "custom_unit_amount": null, "livemode": false, "lookup_key": null, "metadata": {}, "nickname": null, "product": "prod_NCgx1XP2IFQyKF", "recurring": {"aggregate_usage": null, "interval": "month", "interval_count": 1, "trial_period_days": null, "usage_type": "licensed"}, "tax_behavior": "exclusive", "tiers_mode": null, "transform_quantity": null, "type": "recurring", "unit_amount": 600, "unit_amount_decimal": "600"}, "quantity": 1, "subscription": "sub_1OoDDUEcXtiJtvvh4elaXYFT", "tax_rates": []}], "has_more": false, "total_count": 1.0, "url": "/v1/subscription_items?subscription=sub_1OoDDUEcXtiJtvvh4elaXYFT"}, "latest_invoice": "in_1OoDDUEcXtiJtvvhxhAEJymZ", "livemode": false, "metadata": {}, "next_pending_invoice_item_invoice": null, "on_behalf_of": null, "pause_collection": null, "payment_settings": {"payment_method_options": null, "payment_method_types": null, "save_default_payment_method": "off"}, "pending_invoice_item_interval": null, "pending_setup_intent": null, "pending_update": null, "plan": {"id": "price_1MSHZoEcXtiJtvvh6O8TYD8T", "object": "plan", "active": true, "aggregate_usage": null, "amount": 600, "amount_decimal": "600", "billing_scheme": "per_unit", "created": 1674209524, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": null, "product": "prod_NCgx1XP2IFQyKF", "tiers_mode": null, "transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "quantity": 1, "schedule": "sub_sched_1OoDexEcXtiJtvvhVrIXDZrd", "start_date": 1708988652, "status": "active", "test_clock": null, "transfer_data": null, "trial_end": null, "trial_settings": {"end_behavior": {"missing_payment_method": "create_invoice"}}, "trial_start": null, "updated": 1708988652}, "emitted_at": 1709108424696} +{"stream": "subscription_schedule", "data": {"id": "sub_sched_1OoDexEcXtiJtvvhVrIXDZrd", "object": "subscription_schedule", "application": null, "canceled_at": null, "completed_at": null, "created": 1708990355, "current_phase": {"end_date": 1735603200, "start_date": 1708988652}, "customer": "cus_NGoTFiJFVbSsvZ", "default_settings": {"application_fee_percent": null, "automatic_tax": {"enabled": true, "liability": {"type": "self"}}, "billing_cycle_anchor": "automatic", "billing_thresholds": null, "collection_method": "charge_automatically", "default_payment_method": null, "default_source": null, "description": null, "invoice_settings": "{'account_tax_ids': None, 'days_until_due': None, 
'issuer': {'type': 'self'}}", "on_behalf_of": null, "transfer_data": null}, "end_behavior": "cancel", "livemode": false, "metadata": {}, "phases": [{"add_invoice_items": [], "application_fee_percent": null, "automatic_tax": {"enabled": true, "liability": {"type": "self"}}, "billing_cycle_anchor": null, "billing_thresholds": null, "collection_method": "charge_automatically", "coupon": null, "currency": "usd", "default_payment_method": null, "default_tax_rates": [], "description": null, "end_date": 1735603200, "invoice_settings": "{'account_tax_ids': None, 'days_until_due': None, 'issuer': None}", "items": [{"billing_thresholds": null, "metadata": {}, "plan": "price_1MSHZoEcXtiJtvvh6O8TYD8T", "price": "price_1MSHZoEcXtiJtvvh6O8TYD8T", "quantity": 1, "tax_rates": []}], "metadata": {}, "on_behalf_of": null, "proration_behavior": "create_prorations", "start_date": 1708988652, "transfer_data": null, "trial_end": null}, {"add_invoice_items": [], "application_fee_percent": null, "automatic_tax": {"enabled": true, "liability": {"type": "self"}}, "billing_cycle_anchor": null, "billing_thresholds": null, "collection_method": "charge_automatically", "coupon": null, "currency": "usd", "default_payment_method": null, "default_tax_rates": [], "description": null, "end_date": 1742947200, "invoice_settings": "{'account_tax_ids': None, 'days_until_due': None, 'issuer': None}", "items": [{"billing_thresholds": null, "metadata": {}, "plan": "price_1MSHZoEcXtiJtvvh6O8TYD8T", "price": "price_1MSHZoEcXtiJtvvh6O8TYD8T", "quantity": 1, "tax_rates": []}], "metadata": {}, "on_behalf_of": null, "proration_behavior": "create_prorations", "start_date": 1735603200, "transfer_data": null, "trial_end": null}], "released_at": null, "released_subscription": null, "renewal_interval": null, "status": "active", "subscription": "sub_1OoDDUEcXtiJtvvh4elaXYFT", "test_clock": null, "updated": 1708990355}, "emitted_at": 1709108832609} {"stream": "transfers", "data": {"id": "tr_1NH18zEcXtiJtvvhnd827cNO", "object": "transfer", "amount": 10000, "amount_reversed": 0, "balance_transaction": "txn_1NH190EcXtiJtvvhBO3PeR7p", "created": 1686301085, "currency": "usd", "description": null, "destination": "acct_1Jx8unEYmRTj5on1", "destination_payment": "py_1NH18zEYmRTj5on1GkCCsqLK", "livemode": false, "metadata": {}, "reversals": {"object": "list", "data": [], "has_more": false, "total_count": 0.0, "url": "/v1/transfers/tr_1NH18zEcXtiJtvvhnd827cNO/reversals"}, "reversed": false, "source_transaction": null, "source_type": "card", "transfer_group": null, "updated": 1686301085}, "emitted_at": 1697627313262} {"stream": "transfers", "data": {"id": "tr_1NGoaCEcXtiJtvvhjmHtOGOm", "object": "transfer", "amount": 100, "amount_reversed": 100, "balance_transaction": "txn_1NGoaDEcXtiJtvvhsZrNMsdJ", "created": 1686252800, "currency": "usd", "description": null, "destination": "acct_1Jx8unEYmRTj5on1", "destination_payment": "py_1NGoaCEYmRTj5on1LAlAIG3a", "livemode": false, "metadata": {}, "reversals": {"object": "list", "data": [{"id": "trr_1NGolCEcXtiJtvvhOYPck3CP", "object": "transfer_reversal", "amount": 100, "balance_transaction": "txn_1NGolCEcXtiJtvvhZRy4Kd5S", "created": 1686253482, "currency": "usd", "destination_payment_refund": "pyr_1NGolBEYmRTj5on1STal3rmp", "metadata": {}, "source_refund": null, "transfer": "tr_1NGoaCEcXtiJtvvhjmHtOGOm"}], "has_more": false, "total_count": 1.0, "url": "/v1/transfers/tr_1NGoaCEcXtiJtvvhjmHtOGOm/reversals"}, "reversed": true, "source_transaction": null, "source_type": "card", "transfer_group": "ORDER10", "updated": 
1686252800}, "emitted_at": 1697627313264} {"stream": "refunds", "data": {"id": "re_3MVuZyEcXtiJtvvh0A6rSbeJ", "object": "refund", "amount": 200000, "balance_transaction": "txn_3MVuZyEcXtiJtvvh0v0QyAMx", "charge": "ch_3MVuZyEcXtiJtvvh0tiVC7DI", "created": 1675074488, "currency": "usd", "destination_details": {"card": {"reference": "5871771120000631", "reference_status": "available", "reference_type": "acquirer_reference_number", "type": "refund"}, "type": "card"}, "metadata": {}, "payment_intent": "pi_3MVuZyEcXtiJtvvh07Ehi4cx", "reason": "fraudulent", "receipt_number": "3278-5368", "source_transfer_reversal": null, "status": "succeeded", "transfer_reversal": null}, "emitted_at": 1701882752716} @@ -68,6 +68,6 @@ {"stream": "invoice_line_items", "data": {"id": "il_1K9GKLEcXtiJtvvhhHaYMebN", "object": "line_item", "amount": 8400, "amount_excluding_tax": 8400, "currency": "usd", "description": "a box of parsnips", "discount_amounts": [], "discountable": true, "discounts": [], "invoice_item": "ii_1K9GKLEcXtiJtvvhmr2AYOAx", "livemode": false, "metadata": {}, "period": {"end": 1640123817, "start": 1640123817}, "plan": null, "price": {"id": "price_1K9GKLEcXtiJtvvhXbrg33lq", "object": "price", "active": false, "billing_scheme": "per_unit", "created": 1640123817, "currency": "usd", "custom_unit_amount": null, "livemode": false, "lookup_key": null, "metadata": {}, "nickname": null, "product": "prod_Kou8cQxtIpF1p7", "recurring": null, "tax_behavior": "unspecified", "tiers_mode": null, "transform_quantity": null, "type": "one_time", "unit_amount": 8400, "unit_amount_decimal": "8400"}, "proration": false, "proration_details": {"credited_items": null}, "quantity": 1, "subscription": null, "tax_amounts": [], "tax_rates": [], "type": "invoiceitem", "unit_amount_excluding_tax": "8400", "invoice_id": "in_1K9GK0EcXtiJtvvhSo2LvGqT"}, "emitted_at": 1697627336438} {"stream": "invoice_line_items", "data": {"id": "il_1MX384EcXtiJtvvh3j2K123f", "object": "line_item", "amount": 6000, "amount_excluding_tax": 6000, "currency": "usd", "description": "Test Product 1", "discount_amounts": [{"amount": 500, "discount": "di_1MX384EcXtiJtvvhkOrY57Ep"}], "discountable": true, "discounts": ["di_1MX384EcXtiJtvvhkOrY57Ep"], "invoice_item": "ii_1MX384EcXtiJtvvhguyn3iYb", "livemode": false, "metadata": {}, "period": {"end": 1675345628, "start": 1675345628}, "plan": null, "price": {"id": "price_1MX364EcXtiJtvvhE3WgTl4O", "object": "price", "active": true, "billing_scheme": "per_unit", "created": 1675345504, "currency": "usd", "custom_unit_amount": null, "livemode": false, "lookup_key": null, "metadata": {}, "nickname": null, "product": "prod_NHcKselSHfKdfc", "recurring": null, "tax_behavior": "exclusive", "tiers_mode": null, "transform_quantity": null, "type": "one_time", "unit_amount": 2000, "unit_amount_decimal": "2000"}, "proration": false, "proration_details": {"credited_items": null}, "quantity": 3, "subscription": null, "tax_amounts": [{"amount": 0, "inclusive": false, "tax_rate": "txr_1MX384EcXtiJtvvhAhVE20Ii", "taxability_reason": "not_collecting", "taxable_amount": 0}], "tax_rates": [], "type": "invoiceitem", "unit_amount_excluding_tax": "2000", "invoice_id": "in_1MX37hEcXtiJtvvhRSl1KbQm"}, "emitted_at": 1697627336446} {"stream": "invoice_line_items", "data": {"id": "il_1MX2yfEcXtiJtvvhiunY2j1x", "object": "line_item", "amount": 25200, "amount_excluding_tax": 25200, "currency": "usd", "description": "edgao-test-product", "discount_amounts": [{"amount": 2520, "discount": "di_1MX2ysEcXtiJtvvh8ORqRVKm"}], "discountable": true, 
"discounts": ["di_1MX2ysEcXtiJtvvh8ORqRVKm"], "invoice_item": "ii_1MX2yfEcXtiJtvvhfhyOG7SP", "livemode": false, "metadata": {}, "period": {"end": 1675345045, "start": 1675345045}, "plan": null, "price": {"id": "price_1K9GbqEcXtiJtvvhJ3lZe4i5", "object": "price", "active": true, "billing_scheme": "per_unit", "created": 1640124902, "currency": "usd", "custom_unit_amount": null, "livemode": false, "lookup_key": null, "metadata": {}, "nickname": null, "product": "prod_KouQ5ez86yREmB", "recurring": null, "tax_behavior": "inclusive", "tiers_mode": null, "transform_quantity": null, "type": "one_time", "unit_amount": 12600, "unit_amount_decimal": "12600"}, "proration": false, "proration_details": {"credited_items": null}, "quantity": 2, "subscription": null, "tax_amounts": [{"amount": 0, "inclusive": true, "tax_rate": "txr_1MX2yfEcXtiJtvvhVcMEMTRj", "taxability_reason": "not_collecting", "taxable_amount": 0}], "tax_rates": [], "type": "invoiceitem", "unit_amount_excluding_tax": "12600", "invoice_id": "in_1MX2yFEcXtiJtvvhMXhUCgKx"}, "emitted_at": 1697627336449} -{"stream": "subscription_items", "data": {"id": "si_OptSP2o3XZUBpx", "object": "subscription_item", "billing_thresholds": null, "created": 1697550677, "metadata": {}, "plan": {"id": "price_1MSHZoEcXtiJtvvh6O8TYD8T", "object": "plan", "active": true, "aggregate_usage": null, "amount": 600, "amount_decimal": "600", "billing_scheme": "per_unit", "created": 1674209524, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": null, "product": "prod_NCgx1XP2IFQyKF", "tiers_mode": null, "transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "price": {"id": "price_1MSHZoEcXtiJtvvh6O8TYD8T", "object": "price", "active": true, "billing_scheme": "per_unit", "created": 1674209524, "currency": "usd", "custom_unit_amount": null, "livemode": false, "lookup_key": null, "metadata": {}, "nickname": null, "product": "prod_NCgx1XP2IFQyKF", "recurring": {"aggregate_usage": null, "interval": "month", "interval_count": 1, "trial_period_days": null, "usage_type": "licensed"}, "tax_behavior": "exclusive", "tiers_mode": null, "transform_quantity": null, "type": "recurring", "unit_amount": 600, "unit_amount_decimal": "600"}, "quantity": 1, "subscription": "sub_1O2Dg0EcXtiJtvvhz7Q4zS0n", "tax_rates": []}, "emitted_at": 1697627337431} +{"stream": "subscription_items", "data": {"id": "si_PdUBTrsn2C4ShU", "object": "subscription_item", "billing_thresholds": null, "created": 1708988653, "metadata": {}, "plan": {"id": "price_1MSHZoEcXtiJtvvh6O8TYD8T", "object": "plan", "active": true, "aggregate_usage": null, "amount": 600, "amount_decimal": "600", "billing_scheme": "per_unit", "created": 1674209524, "currency": "usd", "interval": "month", "interval_count": 1, "livemode": false, "metadata": {}, "nickname": null, "product": "prod_NCgx1XP2IFQyKF", "tiers_mode": null, "transform_usage": null, "trial_period_days": null, "usage_type": "licensed"}, "price": {"id": "price_1MSHZoEcXtiJtvvh6O8TYD8T", "object": "price", "active": true, "billing_scheme": "per_unit", "created": 1674209524, "currency": "usd", "custom_unit_amount": null, "livemode": false, "lookup_key": null, "metadata": {}, "nickname": null, "product": "prod_NCgx1XP2IFQyKF", "recurring": {"aggregate_usage": null, "interval": "month", "interval_count": 1, "trial_period_days": null, "usage_type": "licensed"}, "tax_behavior": "exclusive", "tiers_mode": null, "transform_quantity": null, "type": "recurring", "unit_amount": 600, "unit_amount_decimal": 
"600"}, "quantity": 1, "subscription": "sub_1OoDDUEcXtiJtvvh4elaXYFT", "tax_rates": []}, "emitted_at": 1709108968914} {"stream": "transfer_reversals", "data": {"id": "trr_1NGolCEcXtiJtvvhOYPck3CP", "object": "transfer_reversal", "amount": 100, "balance_transaction": "txn_1NGolCEcXtiJtvvhZRy4Kd5S", "created": 1686253482, "currency": "usd", "destination_payment_refund": "pyr_1NGolBEYmRTj5on1STal3rmp", "metadata": {}, "source_refund": null, "transfer": "tr_1NGoaCEcXtiJtvvhjmHtOGOm"}, "emitted_at": 1697627338960} -{"stream": "usage_records", "data": {"id": "sis_1OUqWiEcXtiJtvvh3WGqc4Vk", "object": "usage_record_summary", "invoice": null, "livemode": false, "period": {"end": null, "start": 1702821076}, "subscription_item": "si_OptSP2o3XZUBpx", "total_usage": 1}, "emitted_at": 1700233660884} +{"stream": "usage_records", "data": {"id": "sis_1OoiXpEcXtiJtvvhsC9WWdfe", "object": "usage_record_summary", "invoice": null, "livemode": false, "period": {"end": null, "start": 1708988652}, "subscription_item": "si_PdUBTrsn2C4ShU", "total_usage": 1}, "emitted_at": 1709109077213} From 33f6759004fed84ae9e1bfae8cc302e9f052c98d Mon Sep 17 00:00:00 2001 From: Anton Karpets Date: Wed, 28 Feb 2024 12:28:13 +0200 Subject: [PATCH 015/172] =?UTF-8?q?=F0=9F=8F=A5Source=20Recharge:=20fix=20?= =?UTF-8?q?expected=20records=20(#35695)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../source-recharge/integration_tests/expected_records.jsonl | 2 +- .../integration_tests/expected_records_orders_modern_api.jsonl | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/airbyte-integrations/connectors/source-recharge/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-recharge/integration_tests/expected_records.jsonl index 491a25b9836a..0187e149bfd8 100644 --- a/airbyte-integrations/connectors/source-recharge/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-recharge/integration_tests/expected_records.jsonl @@ -10,5 +10,5 @@ {"stream": "products", "data": {"collection_id": null, "created_at": "2021-05-13T08:08:28", "discount_amount": 5.0, "discount_type": "percentage", "handle": "airbit-box-corner-short-sleeve-t-shirt", "id": 1853649, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_small.jpg"}, "product_id": 6642695864491, "shopify_product_id": 6642695864491, "subscription_defaults": {"apply_cutoff_date_to_checkout": false, "charge_interval_frequency": 30, "cutoff_day_of_month": null, "cutoff_day_of_week": null, "expire_after_specific_number_of_charges": null, "modifiable_properties": [], "number_charges_until_expiration": null, "order_day_of_month": null, "order_day_of_week": null, "order_interval_frequency_options": ["30"], "order_interval_unit": "day", "storefront_purchase_options": "subscription_and_onetime"}, "title": "Airbit Box Corner Short sleeve t-shirt", "updated_at": "2021-05-13T08:08:28"}, "emitted_at": 1706644170248} {"stream": "products", "data": {"collection_id": null, "created_at": "2021-05-13T07:27:34", "discount_amount": 
5.0, "discount_type": "percentage", "handle": "i-make-beats-wool-blend-snapback", "id": 1853639, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/c_black1_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/c_black1_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/c_black1.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/c_black1_small.jpg"}, "product_id": 6644278001835, "shopify_product_id": 6644278001835, "subscription_defaults": {"apply_cutoff_date_to_checkout": false, "charge_interval_frequency": 30, "cutoff_day_of_month": null, "cutoff_day_of_week": null, "expire_after_specific_number_of_charges": null, "modifiable_properties": [], "number_charges_until_expiration": null, "order_day_of_month": null, "order_day_of_week": null, "order_interval_frequency_options": ["30"], "order_interval_unit": "day", "storefront_purchase_options": "subscription_and_onetime"}, "title": "I Make Beats Wool Blend Snapback", "updated_at": "2021-05-13T07:27:34"}, "emitted_at": 1706644170251} {"stream": "products", "data": {"collection_id": null, "created_at": "2021-05-13T08:20:10", "discount_amount": 0.0, "discount_type": "percentage", "handle": "new-mug", "id": 1853655, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/m_black_red_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/m_black_red_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/m_black_red.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/m_black_red_small.jpg"}, "product_id": 6688261701803, "shopify_product_id": 6688261701803, "subscription_defaults": {"apply_cutoff_date_to_checkout": false, "charge_interval_frequency": 30, "cutoff_day_of_month": null, "cutoff_day_of_week": null, "expire_after_specific_number_of_charges": null, "modifiable_properties": [], "number_charges_until_expiration": null, "order_day_of_month": null, "order_day_of_week": null, "order_interval_frequency_options": ["30"], "order_interval_unit": "day", "storefront_purchase_options": "subscription_and_onetime"}, "title": "NEW!!! 
MUG", "updated_at": "2021-05-13T08:20:10"}, "emitted_at": 1706644170252} -{"stream": "shop", "data": {"shop": {"allow_customers_to_skip_delivery": 1, "checkout_logo_url": null, "created_at": "Wed, 21 Apr 2021 11:44:38 GMT", "currency": "USD", "customer_portal_domain": "", "disabled_currencies_historical": [], "domain": "airbyte.myshopify.com", "email": "integration-test@airbyte.io", "enabled_presentment_currencies": ["USD"], "enabled_presentment_currencies_symbols": [{"currency": "USD", "location": "before", "suffix": " USD", "symbol": "$"}], "external_platform": "shopify", "iana_timezone": "Europe/Zaporozhye", "id": 126593, "my_shopify_domain": "airbyte.myshopify.com", "name": "airbyte", "payment_processor": "shopify_payments", "platform_domain": "airbyte.myshopify.com", "shop_email": "integration-test@airbyte.io", "shop_phone": "1111111111", "subscriptions_enabled": 1, "test_mode": false, "timezone": "(GMT+02:00) Europe/Zaporozhye", "updated_at": "Thu, 13 Jul 2023 15:26:57 GMT"}, "store": {"checkout_logo_url": null, "checkout_platform": "shopify", "created_at": "Wed, 21 Apr 2021 11:44:38 GMT", "currency": "USD", "customer_portal_domain": "", "disabled_currencies_historical": [], "domain": "airbyte.myshopify.com", "email": "integration-test@airbyte.io", "enabled_presentment_currencies": ["USD"], "enabled_presentment_currencies_symbols": [{"currency": "USD", "location": "before", "suffix": " USD", "symbol": "$"}], "external_platform": "shopify", "iana_timezone": "Europe/Zaporozhye", "id": 126593, "my_shopify_domain": "airbyte.myshopify.com", "name": "airbyte", "payment_processor": "shopify_payments", "platform_domain": "airbyte.myshopify.com", "shop_email": "integration-test@airbyte.io", "shop_phone": "1111111111", "subscriptions_enabled": 1, "test_mode": false, "timezone": "(GMT+02:00) Europe/Zaporozhye", "updated_at": "Thu, 13 Jul 2023 15:26:57 GMT"}}, "emitted_at": 1706644179022} +{"stream": "shop", "data": {"shop": {"allow_customers_to_skip_delivery": 1, "checkout_logo_url": null, "created_at": "Wed, 21 Apr 2021 11:44:38 GMT", "currency": "USD", "customer_portal_domain": "", "disabled_currencies_historical": [], "domain": "airbyte.myshopify.com", "email": "integration-test@airbyte.io", "enabled_presentment_currencies": ["USD"], "enabled_presentment_currencies_symbols": [{"currency": "USD", "location": "before", "suffix": " USD", "symbol": "$"}], "external_platform": "shopify", "iana_timezone": "Europe/Zaporozhye", "id": 126593, "my_shopify_domain": "airbyte.myshopify.com", "name": "airbyte", "payment_processor": "shopify_payments", "platform_domain": "airbyte.myshopify.com", "shop_email": "integration-test@airbyte.io", "shop_phone": "1111111111", "subscriptions_enabled": 1, "test_mode": false, "timezone": "(GMT+02:00) Europe/Zaporozhye", "updated_at": "Tue, 30 Jan 2024 18:56:54 GMT"}, "store": {"checkout_logo_url": null, "checkout_platform": "shopify", "created_at": "Wed, 21 Apr 2021 11:44:38 GMT", "currency": "USD", "customer_portal_domain": "", "disabled_currencies_historical": [], "domain": "airbyte.myshopify.com", "email": "integration-test@airbyte.io", "enabled_presentment_currencies": ["USD"], "enabled_presentment_currencies_symbols": [{"currency": "USD", "location": "before", "suffix": " USD", "symbol": "$"}], "external_platform": "shopify", "iana_timezone": "Europe/Zaporozhye", "id": 126593, "my_shopify_domain": "airbyte.myshopify.com", "name": "airbyte", "payment_processor": "shopify_payments", "platform_domain": "airbyte.myshopify.com", "shop_email": 
"integration-test@airbyte.io", "shop_phone": "1111111111", "subscriptions_enabled": 1, "test_mode": false, "timezone": "(GMT+02:00) Europe/Zaporozhye", "updated_at": "Tue, 30 Jan 2024 18:56:54 GMT"}}, "emitted_at": 1709114164153} {"stream": "subscriptions", "data": {"id": 153601366, "address_id": 69282975, "customer_id": 64962974, "analytics_data": {"utm_params": []}, "cancellation_reason": null, "cancellation_reason_comments": null, "cancelled_at": null, "charge_interval_frequency": "365", "created_at": "2021-05-13T09:46:47+00:00", "expire_after_specific_number_of_charges": null, "external_product_id": {"ecommerce": "6642695864491"}, "external_variant_id": {"ecommerce": "39684723835051"}, "has_queued_charges": 1, "is_prepaid": false, "is_skippable": true, "is_swappable": false, "max_retries_reached": 0, "next_charge_scheduled_at": "2024-05-12", "order_day_of_month": null, "order_day_of_week": null, "order_interval_frequency": "365", "order_interval_unit": "day", "presentment_currency": "USD", "price": 24.3, "product_title": "Airbit Box Corner Short sleeve t-shirt", "properties": [], "quantity": 1, "sku": null, "sku_override": false, "status": "active", "updated_at": "2023-05-13T04:07:32+00:00", "variant_title": "L / City Green"}, "emitted_at": 1706644181724} diff --git a/airbyte-integrations/connectors/source-recharge/integration_tests/expected_records_orders_modern_api.jsonl b/airbyte-integrations/connectors/source-recharge/integration_tests/expected_records_orders_modern_api.jsonl index 9962d32adf33..e4230976f8bc 100644 --- a/airbyte-integrations/connectors/source-recharge/integration_tests/expected_records_orders_modern_api.jsonl +++ b/airbyte-integrations/connectors/source-recharge/integration_tests/expected_records_orders_modern_api.jsonl @@ -10,5 +10,5 @@ {"stream": "products", "data": {"collection_id": null, "created_at": "2021-05-13T08:08:28", "discount_amount": 5.0, "discount_type": "percentage", "handle": "airbit-box-corner-short-sleeve-t-shirt", "id": 1853649, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/t_neon_green_47f548d4-fda5-4e21-8066-1d4caadbe581_small.jpg"}, "product_id": 6642695864491, "shopify_product_id": 6642695864491, "subscription_defaults": {"apply_cutoff_date_to_checkout": false, "charge_interval_frequency": 30, "cutoff_day_of_month": null, "cutoff_day_of_week": null, "expire_after_specific_number_of_charges": null, "modifiable_properties": [], "number_charges_until_expiration": null, "order_day_of_month": null, "order_day_of_week": null, "order_interval_frequency_options": ["30"], "order_interval_unit": "day", "storefront_purchase_options": "subscription_and_onetime"}, "title": "Airbit Box Corner Short sleeve t-shirt", "updated_at": "2021-05-13T08:08:28"}, "emitted_at": 1706644311039} {"stream": "products", "data": {"collection_id": null, "created_at": "2021-05-13T07:27:34", "discount_amount": 5.0, "discount_type": "percentage", "handle": "i-make-beats-wool-blend-snapback", "id": 1853639, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/c_black1_large.jpg", "medium": 
"https://cdn.shopify.com/s/files/1/0565/0628/6251/products/c_black1_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/c_black1.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/c_black1_small.jpg"}, "product_id": 6644278001835, "shopify_product_id": 6644278001835, "subscription_defaults": {"apply_cutoff_date_to_checkout": false, "charge_interval_frequency": 30, "cutoff_day_of_month": null, "cutoff_day_of_week": null, "expire_after_specific_number_of_charges": null, "modifiable_properties": [], "number_charges_until_expiration": null, "order_day_of_month": null, "order_day_of_week": null, "order_interval_frequency_options": ["30"], "order_interval_unit": "day", "storefront_purchase_options": "subscription_and_onetime"}, "title": "I Make Beats Wool Blend Snapback", "updated_at": "2021-05-13T07:27:34"}, "emitted_at": 1706644311045} {"stream": "products", "data": {"collection_id": null, "created_at": "2021-05-13T08:20:10", "discount_amount": 0.0, "discount_type": "percentage", "handle": "new-mug", "id": 1853655, "images": {"large": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/m_black_red_large.jpg", "medium": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/m_black_red_medium.jpg", "original": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/m_black_red.jpg", "small": "https://cdn.shopify.com/s/files/1/0565/0628/6251/products/m_black_red_small.jpg"}, "product_id": 6688261701803, "shopify_product_id": 6688261701803, "subscription_defaults": {"apply_cutoff_date_to_checkout": false, "charge_interval_frequency": 30, "cutoff_day_of_month": null, "cutoff_day_of_week": null, "expire_after_specific_number_of_charges": null, "modifiable_properties": [], "number_charges_until_expiration": null, "order_day_of_month": null, "order_day_of_week": null, "order_interval_frequency_options": ["30"], "order_interval_unit": "day", "storefront_purchase_options": "subscription_and_onetime"}, "title": "NEW!!! 
MUG", "updated_at": "2021-05-13T08:20:10"}, "emitted_at": 1706644311046} -{"stream": "shop", "data": {"shop": {"allow_customers_to_skip_delivery": 1, "checkout_logo_url": null, "created_at": "Wed, 21 Apr 2021 11:44:38 GMT", "currency": "USD", "customer_portal_domain": "", "disabled_currencies_historical": [], "domain": "airbyte.myshopify.com", "email": "integration-test@airbyte.io", "enabled_presentment_currencies": ["USD"], "enabled_presentment_currencies_symbols": [{"currency": "USD", "location": "before", "suffix": " USD", "symbol": "$"}], "external_platform": "shopify", "iana_timezone": "Europe/Zaporozhye", "id": 126593, "my_shopify_domain": "airbyte.myshopify.com", "name": "airbyte", "payment_processor": "shopify_payments", "platform_domain": "airbyte.myshopify.com", "shop_email": "integration-test@airbyte.io", "shop_phone": "1111111111", "subscriptions_enabled": 1, "test_mode": false, "timezone": "(GMT+02:00) Europe/Zaporozhye", "updated_at": "Thu, 13 Jul 2023 15:26:57 GMT"}, "store": {"checkout_logo_url": null, "checkout_platform": "shopify", "created_at": "Wed, 21 Apr 2021 11:44:38 GMT", "currency": "USD", "customer_portal_domain": "", "disabled_currencies_historical": [], "domain": "airbyte.myshopify.com", "email": "integration-test@airbyte.io", "enabled_presentment_currencies": ["USD"], "enabled_presentment_currencies_symbols": [{"currency": "USD", "location": "before", "suffix": " USD", "symbol": "$"}], "external_platform": "shopify", "iana_timezone": "Europe/Zaporozhye", "id": 126593, "my_shopify_domain": "airbyte.myshopify.com", "name": "airbyte", "payment_processor": "shopify_payments", "platform_domain": "airbyte.myshopify.com", "shop_email": "integration-test@airbyte.io", "shop_phone": "1111111111", "subscriptions_enabled": 1, "test_mode": false, "timezone": "(GMT+02:00) Europe/Zaporozhye", "updated_at": "Thu, 13 Jul 2023 15:26:57 GMT"}}, "emitted_at": 1706644319680} +{"stream": "shop", "data": {"shop": {"allow_customers_to_skip_delivery": 1, "checkout_logo_url": null, "created_at": "Wed, 21 Apr 2021 11:44:38 GMT", "currency": "USD", "customer_portal_domain": "", "disabled_currencies_historical": [], "domain": "airbyte.myshopify.com", "email": "integration-test@airbyte.io", "enabled_presentment_currencies": ["USD"], "enabled_presentment_currencies_symbols": [{"currency": "USD", "location": "before", "suffix": " USD", "symbol": "$"}], "external_platform": "shopify", "iana_timezone": "Europe/Zaporozhye", "id": 126593, "my_shopify_domain": "airbyte.myshopify.com", "name": "airbyte", "payment_processor": "shopify_payments", "platform_domain": "airbyte.myshopify.com", "shop_email": "integration-test@airbyte.io", "shop_phone": "1111111111", "subscriptions_enabled": 1, "test_mode": false, "timezone": "(GMT+02:00) Europe/Zaporozhye", "updated_at": "Tue, 30 Jan 2024 18:56:54 GMT"}, "store": {"checkout_logo_url": null, "checkout_platform": "shopify", "created_at": "Wed, 21 Apr 2021 11:44:38 GMT", "currency": "USD", "customer_portal_domain": "", "disabled_currencies_historical": [], "domain": "airbyte.myshopify.com", "email": "integration-test@airbyte.io", "enabled_presentment_currencies": ["USD"], "enabled_presentment_currencies_symbols": [{"currency": "USD", "location": "before", "suffix": " USD", "symbol": "$"}], "external_platform": "shopify", "iana_timezone": "Europe/Zaporozhye", "id": 126593, "my_shopify_domain": "airbyte.myshopify.com", "name": "airbyte", "payment_processor": "shopify_payments", "platform_domain": "airbyte.myshopify.com", "shop_email": 
"integration-test@airbyte.io", "shop_phone": "1111111111", "subscriptions_enabled": 1, "test_mode": false, "timezone": "(GMT+02:00) Europe/Zaporozhye", "updated_at": "Tue, 30 Jan 2024 18:56:54 GMT"}}, "emitted_at": 1709114209054} {"stream": "subscriptions", "data": {"id": 153601366, "address_id": 69282975, "customer_id": 64962974, "analytics_data": {"utm_params": []}, "cancellation_reason": null, "cancellation_reason_comments": null, "cancelled_at": null, "charge_interval_frequency": "365", "created_at": "2021-05-13T09:46:47+00:00", "expire_after_specific_number_of_charges": null, "external_product_id": {"ecommerce": "6642695864491"}, "external_variant_id": {"ecommerce": "39684723835051"}, "has_queued_charges": 1, "is_prepaid": false, "is_skippable": true, "is_swappable": false, "max_retries_reached": 0, "next_charge_scheduled_at": "2024-05-12", "order_day_of_month": null, "order_day_of_week": null, "order_interval_frequency": "365", "order_interval_unit": "day", "presentment_currency": "USD", "price": 24.3, "product_title": "Airbit Box Corner Short sleeve t-shirt", "properties": [], "quantity": 1, "sku": null, "sku_override": false, "status": "active", "updated_at": "2023-05-13T04:07:32+00:00", "variant_title": "L / City Green"}, "emitted_at": 1706644322400} From 7ac622c74c168dafcdbdd63c2186411495416267 Mon Sep 17 00:00:00 2001 From: Artem Inzhyyants <36314070+artem1205@users.noreply.github.com> Date: Wed, 28 Feb 2024 12:38:12 +0100 Subject: [PATCH 016/172] Source Monday: fix empty activity logs extractor (#35696) Signed-off-by: Artem Inzhyyants --- .../connectors/source-monday/metadata.yaml | 2 +- .../connectors/source-monday/pyproject.toml | 2 +- .../source-monday/source_monday/extractor.py | 3 +- .../integrations/monday_requests/__init__.py | 2 + .../request_authenticators/__init__.py | 2 + .../api_token_authenticator.py | 2 - .../integrations/monday_responses/__init__.py | 4 +- .../monday_responses/records/__init__.py | 2 + .../integrations/test_teams_stream.py | 41 ++++++++------- .../unit_tests/integrations/utils.py | 6 +-- .../unit_tests/test_components.py | 52 ++++++++----------- .../unit_tests/test_extractor.py | 39 ++++++-------- .../unit_tests/test_graphql_requester.py | 25 ++++----- .../test_item_pagination_strategy.py | 9 ++-- docs/integrations/sources/monday.md | 1 + 15 files changed, 90 insertions(+), 102 deletions(-) diff --git a/airbyte-integrations/connectors/source-monday/metadata.yaml b/airbyte-integrations/connectors/source-monday/metadata.yaml index abda37283e00..f93fca602b41 100644 --- a/airbyte-integrations/connectors/source-monday/metadata.yaml +++ b/airbyte-integrations/connectors/source-monday/metadata.yaml @@ -10,7 +10,7 @@ data: connectorSubtype: api connectorType: source definitionId: 80a54ea2-9959-4040-aac1-eee42423ec9b - dockerImageTag: 2.0.3 + dockerImageTag: 2.0.4 releases: breakingChanges: 2.0.0: diff --git a/airbyte-integrations/connectors/source-monday/pyproject.toml b/airbyte-integrations/connectors/source-monday/pyproject.toml index d40e70477332..7a6e9d54a01f 100644 --- a/airbyte-integrations/connectors/source-monday/pyproject.toml +++ b/airbyte-integrations/connectors/source-monday/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "2.0.3" +version = "2.0.4" name = "source-monday" description = "Source implementation for Monday." 
authors = [ "Airbyte ",] diff --git a/airbyte-integrations/connectors/source-monday/source_monday/extractor.py b/airbyte-integrations/connectors/source-monday/source_monday/extractor.py index 830aafc5cf9a..1c1917e4b9d2 100644 --- a/airbyte-integrations/connectors/source-monday/source_monday/extractor.py +++ b/airbyte-integrations/connectors/source-monday/source_monday/extractor.py @@ -41,9 +41,8 @@ def extract_records(self, response: requests.Response) -> List[Record]: return result for board_data in response_body["data"]["boards"]: - if not isinstance(board_data, dict): + if not isinstance(board_data, dict) or not board_data.get("activity_logs"): continue - for record in board_data.get("activity_logs", []): json_data = json.loads(record["data"]) new_record = record diff --git a/airbyte-integrations/connectors/source-monday/unit_tests/integrations/monday_requests/__init__.py b/airbyte-integrations/connectors/source-monday/unit_tests/integrations/monday_requests/__init__.py index 85ff5a122786..7b6858c0cfbb 100644 --- a/airbyte-integrations/connectors/source-monday/unit_tests/integrations/monday_requests/__init__.py +++ b/airbyte-integrations/connectors/source-monday/unit_tests/integrations/monday_requests/__init__.py @@ -1 +1,3 @@ from .teams_requests_builder import TeamsRequestBuilder + +__all__ = ["TeamsRequestBuilder"] diff --git a/airbyte-integrations/connectors/source-monday/unit_tests/integrations/monday_requests/request_authenticators/__init__.py b/airbyte-integrations/connectors/source-monday/unit_tests/integrations/monday_requests/request_authenticators/__init__.py index c670d8c3e6c7..1f3c5a215740 100644 --- a/airbyte-integrations/connectors/source-monday/unit_tests/integrations/monday_requests/request_authenticators/__init__.py +++ b/airbyte-integrations/connectors/source-monday/unit_tests/integrations/monday_requests/request_authenticators/__init__.py @@ -1 +1,3 @@ from .api_token_authenticator import ApiTokenAuthenticator + +__all__ = ["ApiTokenAuthenticator"] diff --git a/airbyte-integrations/connectors/source-monday/unit_tests/integrations/monday_requests/request_authenticators/api_token_authenticator.py b/airbyte-integrations/connectors/source-monday/unit_tests/integrations/monday_requests/request_authenticators/api_token_authenticator.py index dcd4fdcf3b54..dcea8f89ad79 100644 --- a/airbyte-integrations/connectors/source-monday/unit_tests/integrations/monday_requests/request_authenticators/api_token_authenticator.py +++ b/airbyte-integrations/connectors/source-monday/unit_tests/integrations/monday_requests/request_authenticators/api_token_authenticator.py @@ -1,7 +1,5 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-import base64 - from .authenticator import Authenticator diff --git a/airbyte-integrations/connectors/source-monday/unit_tests/integrations/monday_responses/__init__.py b/airbyte-integrations/connectors/source-monday/unit_tests/integrations/monday_responses/__init__.py index 3b3481e236d4..3dcc74d79f7c 100644 --- a/airbyte-integrations/connectors/source-monday/unit_tests/integrations/monday_responses/__init__.py +++ b/airbyte-integrations/connectors/source-monday/unit_tests/integrations/monday_responses/__init__.py @@ -1,2 +1,4 @@ -from .teams_response_builder import TeamsResponseBuilder from .error_response_builder import ErrorResponseBuilder +from .teams_response_builder import TeamsResponseBuilder + +__all__ = ["ErrorResponseBuilder", "TeamsResponseBuilder"] diff --git a/airbyte-integrations/connectors/source-monday/unit_tests/integrations/monday_responses/records/__init__.py b/airbyte-integrations/connectors/source-monday/unit_tests/integrations/monday_responses/records/__init__.py index dcbc806ca42a..b57ff0fb28d7 100644 --- a/airbyte-integrations/connectors/source-monday/unit_tests/integrations/monday_responses/records/__init__.py +++ b/airbyte-integrations/connectors/source-monday/unit_tests/integrations/monday_responses/records/__init__.py @@ -1 +1,3 @@ from .teams_record_builder import TeamsRecordBuilder + +__all__ = ["TeamsRecordBuilder"] diff --git a/airbyte-integrations/connectors/source-monday/unit_tests/integrations/test_teams_stream.py b/airbyte-integrations/connectors/source-monday/unit_tests/integrations/test_teams_stream.py index ff8322903b5c..4e91ea4dca9f 100644 --- a/airbyte-integrations/connectors/source-monday/unit_tests/integrations/test_teams_stream.py +++ b/airbyte-integrations/connectors/source-monday/unit_tests/integrations/test_teams_stream.py @@ -33,7 +33,7 @@ def test_given_one_page_when_read_teams_then_return_records(self, http_mocker): http_mocker.get( TeamsRequestBuilder.teams_endpoint(api_token_authenticator).build(), - TeamsResponseBuilder.teams_response().with_record(TeamsRecordBuilder.teams_record()).build() + TeamsResponseBuilder.teams_response().with_record(TeamsRecordBuilder.teams_record()).build(), ) output = read_stream("teams", SyncMode.full_refresh, self._config) @@ -50,17 +50,18 @@ def test_given_retryable_error_and_one_page_when_read_teams_then_return_records( TeamsRequestBuilder.teams_endpoint(api_token_authenticator).build(), [ ErrorResponseBuilder.response_with_status(200).build(), - TeamsResponseBuilder.teams_response().with_record(TeamsRecordBuilder.teams_record()).build() - ] + TeamsResponseBuilder.teams_response().with_record(TeamsRecordBuilder.teams_record()).build(), + ], ) - with patch('time.sleep', return_value=None): + with patch("time.sleep", return_value=None): output = read_stream("teams", SyncMode.full_refresh, self._config) assert len(output.records) == 1 error_logs = [ - error for error in get_log_messages_by_log_level(output.logs, LogLevel.INFO) + error + for error in get_log_messages_by_log_level(output.logs, LogLevel.INFO) if f'Response Code: 200, Response Text: {json.dumps({"error_code": "ComplexityException", "status_code": 200})}' in error ] assert len(error_logs) == 1 @@ -73,17 +74,17 @@ def test_given_retryable_error_when_read_teams_then_stop_syncing(self, http_mock api_token_authenticator = self.get_authenticator(self._config) http_mocker.get( - TeamsRequestBuilder.teams_endpoint(api_token_authenticator).build(), - ErrorResponseBuilder.response_with_status(200).build() + 
TeamsRequestBuilder.teams_endpoint(api_token_authenticator).build(), ErrorResponseBuilder.response_with_status(200).build() ) - with patch('time.sleep', return_value=None): + with patch("time.sleep", return_value=None): output = read_stream("teams", SyncMode.full_refresh, self._config) - + assert len(output.records) == 0 error_logs = [ - error for error in get_log_messages_by_log_level(output.logs, LogLevel.INFO) + error + for error in get_log_messages_by_log_level(output.logs, LogLevel.INFO) if f'Response Code: 200, Response Text: {json.dumps({"error_code": "ComplexityException", "status_code": 200})}' in error ] assert len(error_logs) == 5 @@ -96,17 +97,17 @@ def test_given_retryable_500_error_when_read_teams_then_stop_syncing(self, http_ api_token_authenticator = self.get_authenticator(self._config) http_mocker.get( - TeamsRequestBuilder.teams_endpoint(api_token_authenticator).build(), - ErrorResponseBuilder.response_with_status(500).build() + TeamsRequestBuilder.teams_endpoint(api_token_authenticator).build(), ErrorResponseBuilder.response_with_status(500).build() ) - with patch('time.sleep', return_value=None): + with patch("time.sleep", return_value=None): output = read_stream("teams", SyncMode.full_refresh, self._config) - + assert len(output.records) == 0 error_logs = [ - error for error in get_log_messages_by_log_level(output.logs, LogLevel.INFO) + error + for error in get_log_messages_by_log_level(output.logs, LogLevel.INFO) if f'Response Code: 500, Response Text: {json.dumps({"error_message": "Internal server error", "status_code": 500})}' in error ] assert len(error_logs) == 5 @@ -119,17 +120,17 @@ def test_given_403_error_when_read_teams_then_ignore_the_stream(self, http_mocke api_token_authenticator = self.get_authenticator(self._config) http_mocker.get( - TeamsRequestBuilder.teams_endpoint(api_token_authenticator).build(), - ErrorResponseBuilder.response_with_status(403).build() + TeamsRequestBuilder.teams_endpoint(api_token_authenticator).build(), ErrorResponseBuilder.response_with_status(403).build() ) - with patch('time.sleep', return_value=None): + with patch("time.sleep", return_value=None): output = read_stream("teams", SyncMode.full_refresh, self._config) assert len(output.records) == 0 error_logs = [ - error for error in get_log_messages_by_log_level(output.logs, LogLevel.INFO) - if f'Ignoring response for failed request with error message None' in error + error + for error in get_log_messages_by_log_level(output.logs, LogLevel.INFO) + if "Ignoring response for failed request with error message None" in error ] assert len(error_logs) == 1 diff --git a/airbyte-integrations/connectors/source-monday/unit_tests/integrations/utils.py b/airbyte-integrations/connectors/source-monday/unit_tests/integrations/utils.py index 473ddcc61f0f..eab0deb8d5b1 100644 --- a/airbyte-integrations/connectors/source-monday/unit_tests/integrations/utils.py +++ b/airbyte-integrations/connectors/source-monday/unit_tests/integrations/utils.py @@ -12,11 +12,7 @@ def read_stream( - stream_name: str, - sync_mode: SyncMode, - config: Dict[str, Any], - state: Optional[Dict[str, Any]] = None, - expecting_exception: bool = False + stream_name: str, sync_mode: SyncMode, config: Dict[str, Any], state: Optional[Dict[str, Any]] = None, expecting_exception: bool = False ) -> EntrypointOutput: catalog = CatalogBuilder().with_stream(stream_name, sync_mode).build() return read(SourceMonday(), config, catalog, state, expecting_exception) diff --git 
a/airbyte-integrations/connectors/source-monday/unit_tests/test_components.py b/airbyte-integrations/connectors/source-monday/unit_tests/test_components.py index 670aff5e4e44..30571cbd43f7 100644 --- a/airbyte-integrations/connectors/source-monday/unit_tests/test_components.py +++ b/airbyte-integrations/connectors/source-monday/unit_tests/test_components.py @@ -104,7 +104,6 @@ def test_null_records(caplog): @pytest.fixture def mock_parent_stream(): - def mock_parent_stream_slices(*args, **kwargs): return iter([{"ids": [123]}]) @@ -121,28 +120,28 @@ def mock_parent_stream_slices(*args, **kwargs): return mock_stream -@pytest.mark.parametrize("stream_state, parent_records, expected_slices", + +@pytest.mark.parametrize( + "stream_state, parent_records, expected_slices", [ ({}, [], [{}]), ( {"updated_at": "2022-01-01T00:00:00Z"}, - [AirbyteMessage( - type=Type.RECORD, - record={ "data": {"id": 123, "name": "Sample Record", "updated_at": "2023-01-01T00:00:00Z"}, "stream": "projects", "emitted_at": 1632095449} - )], - [{'parent_stream_id': [123]}] + [ + AirbyteMessage( + type=Type.RECORD, + record={ + "data": {"id": 123, "name": "Sample Record", "updated_at": "2023-01-01T00:00:00Z"}, + "stream": "projects", + "emitted_at": 1632095449, + }, + ) + ], + [{"parent_stream_id": [123]}], ), - ( - {"updated_at": "2022-01-01T00:00:00Z"}, - AirbyteMessage(type=Type.LOG), - [] - ) - ], - ids=[ - "no stream state", - "successfully read parent record", - "skip non_record AirbyteMessage" - ] + ({"updated_at": "2022-01-01T00:00:00Z"}, AirbyteMessage(type=Type.LOG), []), + ], + ids=["no stream state", "successfully read parent record", "skip non_record AirbyteMessage"], ) def test_read_parent_stream(mock_parent_stream, stream_state, parent_records, expected_slices): @@ -151,17 +150,13 @@ def test_read_parent_stream(mock_parent_stream, stream_state, parent_records, ex parameters={}, cursor_field="updated_at", parent_stream_configs=[mock_parent_stream.parent_config], - nested_items_per_page=10 + nested_items_per_page=10, ) mock_parent_stream.read_records = MagicMock(return_value=parent_records) slicer.parent_cursor_field = "updated_at" - slices = list(slicer.read_parent_stream( - sync_mode=SyncMode.full_refresh, - cursor_field="updated_at", - stream_state=stream_state - )) + slices = list(slicer.read_parent_stream(sync_mode=SyncMode.full_refresh, cursor_field="updated_at", stream_state=stream_state)) assert slices == expected_slices @@ -173,14 +168,11 @@ def test_set_initial_state(): parameters={}, cursor_field="updated_at_int", parent_stream_configs=[MagicMock(parent_stream_name="parent_stream")], - nested_items_per_page=10 + nested_items_per_page=10, ) - initial_stream_state = { - "updated_at_int": 1662459010, - "parent_stream": {"parent_cursor_field": 1662459011} - } + initial_stream_state = {"updated_at_int": 1662459010, "parent_stream": {"parent_cursor_field": 1662459011}} - expected_state = { "updated_at_int": 1662459010 } + expected_state = {"updated_at_int": 1662459010} slicer.set_initial_state(initial_stream_state) assert slicer._state == expected_state diff --git a/airbyte-integrations/connectors/source-monday/unit_tests/test_extractor.py b/airbyte-integrations/connectors/source-monday/unit_tests/test_extractor.py index 869c7ab7bbca..4e61708797a1 100644 --- a/airbyte-integrations/connectors/source-monday/unit_tests/test_extractor.py +++ b/airbyte-integrations/connectors/source-monday/unit_tests/test_extractor.py @@ -11,19 +11,7 @@ def test_extract_records(): # Mock the response response = MagicMock() 
response_body = { - "data": { - "boards": [ - { - "activity_logs": [ - { - "data": "{\"pulse_id\": 123}", - "entity": "pulse", - "created_at": "16367386880000000" - } - ] - } - ] - } + "data": {"boards": [{"activity_logs": [{"data": '{"pulse_id": 123}', "entity": "pulse", "created_at": "16367386880000000"}]}]} } response.json.return_value = response_body @@ -36,18 +24,21 @@ def test_extract_records(): assert records[0]["created_at_int"] == 1636738688 +def test_empty_activity_logs_extract_records(): + response = MagicMock() + response_body = {"data": {"boards": [{"activity_logs": None}]}} + + response.json.return_value = response_body + extractor = MondayActivityExtractor(parameters={}) + records = extractor.extract_records(response) + + assert len(records) == 0 + + def test_extract_records_incremental(): # Mock the response response = MagicMock() - response_body = { - "data": { - "boards": [ - { - "id": 1 - } - ] - } - } + response_body = {"data": {"boards": [{"id": 1}]}} response.json.return_value = response_body extractor = MondayIncrementalItemsExtractor( @@ -55,9 +46,9 @@ def test_extract_records_incremental(): field_path=["data", "ccccc"], config=MagicMock(), field_path_pagination=["data", "bbbb"], - field_path_incremental=["data", "boards", "*"] + field_path_incremental=["data", "boards", "*"], ) records = extractor.extract_records(response) # Assertions - assert records == [{'id': 1}] + assert records == [{"id": 1}] diff --git a/airbyte-integrations/connectors/source-monday/unit_tests/test_graphql_requester.py b/airbyte-integrations/connectors/source-monday/unit_tests/test_graphql_requester.py index b4f46146b6bc..d3d1295df997 100644 --- a/airbyte-integrations/connectors/source-monday/unit_tests/test_graphql_requester.py +++ b/airbyte-integrations/connectors/source-monday/unit_tests/test_graphql_requester.py @@ -105,6 +105,7 @@ def monday_requester(): nested_limit=InterpolatedString.create("100", parameters={"name": "activity_logs"}), ) + def test_get_schema_root_properties(mocker, monday_requester): mock_schema = { "properties": { @@ -113,29 +114,29 @@ def test_get_schema_root_properties(mocker, monday_requester): "pulse_id": {"type": "integer"}, "board_id": {"type": "integer"}, "other_field": {"type": "string"}, - "yet_another_field": {"type": "boolean"} + "yet_another_field": {"type": "boolean"}, } } - mocker.patch.object(JsonFileSchemaLoader, 'get_json_schema', return_value=mock_schema) + mocker.patch.object(JsonFileSchemaLoader, "get_json_schema", return_value=mock_schema) requester = monday_requester result_schema = requester._get_schema_root_properties() - assert result_schema == { - "other_field": { "type": "string" }, - "yet_another_field": { "type": "boolean" } - } + assert result_schema == {"other_field": {"type": "string"}, "yet_another_field": {"type": "boolean"}} def test_build_activity_query(mocker, monday_requester): - mock_stream_state = { "updated_at_int": 1636738688 } - object_arguments = { "stream_state": mock_stream_state } - mocker.patch.object(MondayGraphqlRequester, '_get_object_arguments', return_value="stream_state:{{ stream_state['updated_at_int'] }}") + mock_stream_state = {"updated_at_int": 1636738688} + object_arguments = {"stream_state": mock_stream_state} + mocker.patch.object(MondayGraphqlRequester, "_get_object_arguments", return_value="stream_state:{{ stream_state['updated_at_int'] }}") requester = monday_requester result = requester._build_activity_query(object_name="activity_logs", field_schema={}, sub_page=None, **object_arguments) - assert 
result == "boards(stream_state:{{ stream_state['updated_at_int'] }}){activity_logs(stream_state:{{ stream_state['updated_at_int'] }}){}}" + assert ( + result + == "boards(stream_state:{{ stream_state['updated_at_int'] }}){activity_logs(stream_state:{{ stream_state['updated_at_int'] }}){}}" + ) def test_build_items_incremental_query(monday_requester): @@ -149,11 +150,11 @@ def test_build_items_incremental_query(monday_requester): built_query = monday_requester._build_items_incremental_query(object_name, field_schema, stream_slice) - assert built_query == 'items(limit:100,ids:[1, 2, 3]){id,name}' + assert built_query == "items(limit:100,ids:[1, 2, 3]){id,name}" def test_get_request_headers(monday_requester): headers = monday_requester.get_request_headers() - assert headers == {'API-Version': '2024-01'} + assert headers == {"API-Version": "2024-01"} diff --git a/airbyte-integrations/connectors/source-monday/unit_tests/test_item_pagination_strategy.py b/airbyte-integrations/connectors/source-monday/unit_tests/test_item_pagination_strategy.py index 979c72284713..fc2fcfc48221 100644 --- a/airbyte-integrations/connectors/source-monday/unit_tests/test_item_pagination_strategy.py +++ b/airbyte-integrations/connectors/source-monday/unit_tests/test_item_pagination_strategy.py @@ -41,19 +41,20 @@ def test_item_pagination_strategy(response_json, last_records, expected): assert strategy.next_page_token(response, last_records) == expected + @pytest.mark.parametrize( ("response_json", "last_records", "expected"), [ pytest.param( - {"data": {"boards": [{"items_page": {"cursor": "bla", "items":[{"id": "1"}]}}]}}, + {"data": {"boards": [{"items_page": {"cursor": "bla", "items": [{"id": "1"}]}}]}}, [], - (1, 'bla'), + (1, "bla"), id="test_cursor_in_first_request", ), pytest.param( - {"data": {"next_items_page": {"cursor": "bla2", "items":[{"id": "1"}]}}}, + {"data": {"next_items_page": {"cursor": "bla2", "items": [{"id": "1"}]}}}, [], - (1, 'bla2'), + (1, "bla2"), id="test_cursor_in_next_page", ), pytest.param( diff --git a/docs/integrations/sources/monday.md b/docs/integrations/sources/monday.md index 065bc3df1c89..f8a98c6a258d 100644 --- a/docs/integrations/sources/monday.md +++ b/docs/integrations/sources/monday.md @@ -74,6 +74,7 @@ The Monday connector should not run into Monday API limitations under normal usa | Version | Date | Pull Request | Subject | |:--------|:-----------|:----------------------------------------------------------|:------------------------------------------------------------------------| +| 2.0.4 | 2024-02-28 | [35696](https://github.com/airbytehq/airbyte/pull/35696) | Fix extraction for `null` value in stream `Activity logs` | | 2.0.3 | 2024-02-21 | [35506](https://github.com/airbytehq/airbyte/pull/35506) | Support for column values of the mirror type for the `Items` stream. | | 2.0.2 | 2024-02-12 | [35146](https://github.com/airbytehq/airbyte/pull/35146) | Manage dependencies with Poetry. 
| | 2.0.1 | 2024-02-08 | [35016](https://github.com/airbytehq/airbyte/pull/35016) | Migrated to the latest airbyte cdk | From a24a208c774b9b37bb64da383e71f0a8765f0624 Mon Sep 17 00:00:00 2001 From: Anton Karpets Date: Wed, 28 Feb 2024 15:20:46 +0200 Subject: [PATCH 017/172] =?UTF-8?q?=F0=9F=8F=A5Source=20Mixpanel:=20fix=20?= =?UTF-8?q?expected=20records=20(#35697)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../integration_tests/expected_records.jsonl | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/airbyte-integrations/connectors/source-mixpanel/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-mixpanel/integration_tests/expected_records.jsonl index 1dc048c3757e..3b576d1a580f 100644 --- a/airbyte-integrations/connectors/source-mixpanel/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-mixpanel/integration_tests/expected_records.jsonl @@ -2,11 +2,11 @@ {"stream": "engage", "data": {"distinct_id": "123@gmail.com", "email": "123@gmail.com", "name": "123", "123": "123456", "last_seen": "2023-01-01T00:00:00", "how are you": "just fine"}, "emitted_at": 1695642956746} {"stream": "engage", "data": {"distinct_id": "integration-test@airbyte.io", "name": "Integration Test1", "test": "test", "email": "integration-test@airbyte.io", "last_seen": "2023-01-01T00:00:00"}, "emitted_at": 1695642956748} {"stream": "engage", "data": {"distinct_id": "integration-test.db4415.mp-service-account", "name": "test", "test": "test", "last_seen": "2023-01-01T00:00:00"}, "emitted_at": 1695642956749} -{"stream": "funnels", "data": {"funnel_id": 36152117, "name": "test", "date": "2023-01-01", "steps": [{"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "Purchase", "goal": "Purchase", "step_label": "Purchase", "overall_conv_ratio": 1, "step_conv_ratio": 1}, {"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "$custom_event:1305068", "goal": "$custom_event:1305068", "step_label": "111", "custom_event": true, "custom_event_id": 1305068, "overall_conv_ratio": 0, "step_conv_ratio": 0}], "analysis": {"completion": 0, "starting_amount": 0, "steps": 2, "worst": 1}}, "emitted_at": 1695642317451} -{"stream": "funnels", "data": {"funnel_id": 36152117, "name": "test", "date": "2023-01-02", "steps": [{"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "Purchase", "goal": "Purchase", "step_label": "Purchase", "overall_conv_ratio": 1, "step_conv_ratio": 1}, {"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "$custom_event:1305068", "goal": "$custom_event:1305068", "step_label": "111", "custom_event": true, "custom_event_id": 1305068, "overall_conv_ratio": 0, "step_conv_ratio": 0}], "analysis": {"completion": 0, "starting_amount": 0, "steps": 2, "worst": 1}}, "emitted_at": 1695642317453} -{"stream": "funnels", "data": {"funnel_id": 36152117, "name": "test", "date": "2023-01-03", "steps": [{"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "Purchase", "goal": "Purchase", "step_label": "Purchase", "overall_conv_ratio": 1, "step_conv_ratio": 1}, {"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "$custom_event:1305068", "goal": "$custom_event:1305068", "step_label": "111", "custom_event": true, "custom_event_id": 1305068, "overall_conv_ratio": 0, "step_conv_ratio": 0}], "analysis": {"completion": 0, "starting_amount": 0, "steps": 2, "worst": 1}}, "emitted_at": 1695642317453} -{"stream": 
"revenue", "data": {"date": "2023-01-01", "amount": 0.0, "count": 3, "paid_count": 0}, "emitted_at": 1695644343316} -{"stream": "revenue", "data": {"date": "2023-01-02", "amount": 0.0, "count": 3, "paid_count": 0}, "emitted_at": 1695644343317} -{"stream": "revenue", "data": {"date": "2023-01-03", "amount": 0.0, "count": 3, "paid_count": 0}, "emitted_at": 1695644343317} +{"stream": "funnels", "data": {"funnel_id": 36152117, "name": "test", "date": "2024-02-26", "steps": [{"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "Purchase", "goal": "Purchase", "step_label": "Purchase", "overall_conv_ratio": 1, "step_conv_ratio": 1}, {"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "$custom_event:1305068", "goal": "$custom_event:1305068", "step_label": "111", "custom_event": true, "custom_event_id": 1305068, "overall_conv_ratio": 0, "step_conv_ratio": 0}], "analysis": {"completion": 0, "starting_amount": 0, "steps": 2, "worst": 1}}, "emitted_at": 1709117161812} +{"stream": "funnels", "data": {"funnel_id": 36152117, "name": "test", "date": "2024-02-27", "steps": [{"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "Purchase", "goal": "Purchase", "step_label": "Purchase", "overall_conv_ratio": 1, "step_conv_ratio": 1}, {"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "$custom_event:1305068", "goal": "$custom_event:1305068", "step_label": "111", "custom_event": true, "custom_event_id": 1305068, "overall_conv_ratio": 0, "step_conv_ratio": 0}], "analysis": {"completion": 0, "starting_amount": 0, "steps": 2, "worst": 1}}, "emitted_at": 1709117161812} +{"stream": "funnels", "data": {"funnel_id": 36152117, "name": "test", "date": "2024-02-28", "steps": [{"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "Purchase", "goal": "Purchase", "step_label": "Purchase", "overall_conv_ratio": 1, "step_conv_ratio": 1}, {"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "$custom_event:1305068", "goal": "$custom_event:1305068", "step_label": "111", "custom_event": true, "custom_event_id": 1305068, "overall_conv_ratio": 0, "step_conv_ratio": 0}], "analysis": {"completion": 0, "starting_amount": 0, "steps": 2, "worst": 1}}, "emitted_at": 1709117161812} +{"stream": "revenue", "data": {"date": "2024-02-26", "amount": 0.0, "count": 3, "paid_count": 0}, "emitted_at": 1695644343316} +{"stream": "revenue", "data": {"date": "2024-02-27", "amount": 0.0, "count": 3, "paid_count": 0}, "emitted_at": 1695644343317} +{"stream": "revenue", "data": {"date": "2024-02-28", "amount": 0.0, "count": 3, "paid_count": 0}, "emitted_at": 1695644343317} {"stream": "cohort_members", "data": {"distinct_id": "integration-test@airbyte.io", "name": "Integration Test1", "test": "test", "email": "integration-test@airbyte.io", "last_seen": "2023-01-01T00:00:00", "cohort_id": 1478097}, "emitted_at": 1695644214153} {"stream": "cohort_members", "data": {"distinct_id": "integration-test.db4415.mp-service-account", "name": "test", "test": "test", "last_seen": "2023-01-01T00:00:00", "cohort_id": 1478097}, "emitted_at": 1695644214154} From 30190eae5468aa517ab3090292dc063a45bfcd33 Mon Sep 17 00:00:00 2001 From: Akash Kulkarni <113392464+akashkulk@users.noreply.github.com> Date: Wed, 28 Feb 2024 10:48:20 -0800 Subject: [PATCH 018/172] [Source-mssql] : Add config to throw an error on invalid CDC position (#35566) --- .../connectors/source-mssql/build.gradle | 2 +- .../connectors/source-mssql/metadata.yaml | 2 +- .../source/mssql/MsSqlSpecConstants.java | 
15 +++++++++++++++ .../mssql/initialsync/MssqlInitialReadUtil.java | 10 ++++++++++ .../source-mssql/src/main/resources/spec.json | 8 ++++++++ .../test-integration/resources/expected_spec.json | 8 ++++++++ .../source/mssql/MsSQLTestDatabase.java | 6 +++++- docs/integrations/sources/mssql.md | 1 + 8 files changed, 49 insertions(+), 3 deletions(-) create mode 100644 airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MsSqlSpecConstants.java diff --git a/airbyte-integrations/connectors/source-mssql/build.gradle b/airbyte-integrations/connectors/source-mssql/build.gradle index 034bc22d127a..bb7f720685cf 100644 --- a/airbyte-integrations/connectors/source-mssql/build.gradle +++ b/airbyte-integrations/connectors/source-mssql/build.gradle @@ -3,7 +3,7 @@ plugins { } airbyteJavaConnector { - cdkVersionRequired = '0.19.0' + cdkVersionRequired = '0.21.4' features = ['db-sources'] useLocalCdk = false } diff --git a/airbyte-integrations/connectors/source-mssql/metadata.yaml b/airbyte-integrations/connectors/source-mssql/metadata.yaml index 69ff2c08b161..65ebccd83038 100644 --- a/airbyte-integrations/connectors/source-mssql/metadata.yaml +++ b/airbyte-integrations/connectors/source-mssql/metadata.yaml @@ -9,7 +9,7 @@ data: connectorSubtype: database connectorType: source definitionId: b5ea17b1-f170-46dc-bc31-cc744ca984c1 - dockerImageTag: 3.7.3 + dockerImageTag: 3.7.4 dockerRepository: airbyte/source-mssql documentationUrl: https://docs.airbyte.com/integrations/sources/mssql githubIssueLabel: source-mssql diff --git a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MsSqlSpecConstants.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MsSqlSpecConstants.java new file mode 100644 index 000000000000..e5e6eca91f41 --- /dev/null +++ b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MsSqlSpecConstants.java @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.source.mssql; + +// Constants defined in +// airbyte-integrations/connectors/source-mssql/src/main/resources/spec.json. 
+public class MsSqlSpecConstants { + + public static final String INVALID_CDC_CURSOR_POSITION_PROPERTY = "invalid_cdc_cursor_position_behavior"; + public static final String FAIL_SYNC_OPTION = "Fail sync"; + public static final String RESYNC_DATA_OPTION = "Re-sync data"; + +} diff --git a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialReadUtil.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialReadUtil.java index 24945d25430e..46bd00400a50 100644 --- a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialReadUtil.java +++ b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialReadUtil.java @@ -4,6 +4,9 @@ package io.airbyte.integrations.source.mssql.initialsync; +import static io.airbyte.cdk.db.DbAnalyticsUtils.cdcCursorInvalidMessage; +import static io.airbyte.integrations.source.mssql.MsSqlSpecConstants.FAIL_SYNC_OPTION; +import static io.airbyte.integrations.source.mssql.MsSqlSpecConstants.INVALID_CDC_CURSOR_POSITION_PROPERTY; import static io.airbyte.integrations.source.mssql.MssqlCdcHelper.getDebeziumProperties; import static io.airbyte.integrations.source.mssql.MssqlQueryUtils.getTableSizeInfoForStreams; import static io.airbyte.integrations.source.mssql.MssqlQueryUtils.prettyPrintConfiguredAirbyteStreamList; @@ -28,6 +31,7 @@ import io.airbyte.cdk.integrations.source.relationaldb.models.CursorBasedStatus; import io.airbyte.cdk.integrations.source.relationaldb.models.OrderedColumnLoadStatus; import io.airbyte.cdk.integrations.source.relationaldb.state.StateManager; +import io.airbyte.commons.exceptions.ConfigErrorException; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.util.AutoCloseableIterator; import io.airbyte.commons.util.AutoCloseableIterators; @@ -112,6 +116,12 @@ public static List> getCdcReadIterators(fi savedOffset.isPresent() && mssqlDebeziumStateUtil.savedOffsetStillPresentOnServer(database, savedOffset.get()); if (!savedOffsetStillPresentOnServer) { + AirbyteTraceMessageUtility.emitAnalyticsTrace(cdcCursorInvalidMessage()); + if (!sourceConfig.get("replication_method").has(INVALID_CDC_CURSOR_POSITION_PROPERTY) || sourceConfig.get("replication_method").get( + INVALID_CDC_CURSOR_POSITION_PROPERTY).asText().equals(FAIL_SYNC_OPTION)) { + throw new ConfigErrorException( + "Saved offset no longer present on the server. Please reset the connection, and then increase binlog retention and/or increase sync frequency."); + } LOGGER.warn("Saved offset no longer present on the server, Airbyte is going to trigger a sync from scratch"); } diff --git a/airbyte-integrations/connectors/source-mssql/src/main/resources/spec.json b/airbyte-integrations/connectors/source-mssql/src/main/resources/spec.json index 005311b9e5a8..7a040718f402 100644 --- a/airbyte-integrations/connectors/source-mssql/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/source-mssql/src/main/resources/spec.json @@ -140,6 +140,14 @@ "min": 120, "max": 1200, "order": 3 + }, + "invalid_cdc_cursor_position_behavior": { + "type": "string", + "title": "Invalid CDC position behavior (Advanced)", + "description": "Determines whether Airbyte should fail or re-sync data in case of a stale/invalid cursor value in the WAL.
If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh, which could lead to higher cloud costs and data loss.", + "enum": ["Fail sync", "Re-sync data"], + "default": "Fail sync", + "order": 4 } } }, diff --git a/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/expected_spec.json index c2f000494ee4..78d7147d7e43 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/expected_spec.json +++ b/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/expected_spec.json @@ -140,6 +140,14 @@ "min": 120, "max": 1200, "order": 3 + }, + "invalid_cdc_cursor_position_behavior": { + "type": "string", + "title": "Invalid CDC position behavior (Advanced)", + "description": "Determines whether Airbyte should fail or re-sync data in case of a stale/invalid cursor value in the WAL. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh, which could lead to higher cloud costs and data loss.", + "enum": ["Fail sync", "Re-sync data"], + "default": "Fail sync", + "order": 4 } } }, diff --git a/airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSQLTestDatabase.java b/airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSQLTestDatabase.java index a9deadded8e1..76483b0ef961 100644 --- a/airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSQLTestDatabase.java +++ b/airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSQLTestDatabase.java @@ -4,6 +4,9 @@ package io.airbyte.integrations.source.mssql; +import static io.airbyte.integrations.source.mssql.MsSqlSpecConstants.INVALID_CDC_CURSOR_POSITION_PROPERTY; +import static io.airbyte.integrations.source.mssql.MsSqlSpecConstants.RESYNC_DATA_OPTION; + import io.airbyte.cdk.db.factory.DatabaseDriver; import io.airbyte.cdk.db.jdbc.JdbcUtils; import io.airbyte.cdk.testutils.TestDatabase; @@ -259,7 +262,8 @@ public MsSQLConfigBuilder withCdcReplication() { return with("is_test", true) .with("replication_method", Map.of( "method", "CDC", - "initial_waiting_seconds", DEFAULT_CDC_REPLICATION_INITIAL_WAIT.getSeconds())); + "initial_waiting_seconds", DEFAULT_CDC_REPLICATION_INITIAL_WAIT.getSeconds(), + INVALID_CDC_CURSOR_POSITION_PROPERTY, RESYNC_DATA_OPTION)); } public MsSQLConfigBuilder withSchemas(final String...
schemas) { diff --git a/docs/integrations/sources/mssql.md b/docs/integrations/sources/mssql.md index 164b087f6fdf..a384947e78cb 100644 --- a/docs/integrations/sources/mssql.md +++ b/docs/integrations/sources/mssql.md @@ -342,6 +342,7 @@ WHERE actor_definition_id ='b5ea17b1-f170-46dc-bc31-cc744ca984c1' AND (configura | Version | Date | Pull Request | Subject | |:--------|:-----------|:------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------| +| 3.7.4 | 2024-02-26 | [35566](https://github.com/airbytehq/airbyte/pull/35566) | Add config to throw an error on invalid CDC position. | | 3.7.3 | 2024-02-23 | [35596](https://github.com/airbytehq/airbyte/pull/35596) | Fix a logger issue | | 3.7.2 | 2024-02-21 | [35368](https://github.com/airbytehq/airbyte/pull/35368) | Change query syntax to make it compatible with Azure SQL Managed Instance. | | 3.7.1 | 2024-02-20 | [35405](https://github.com/airbytehq/airbyte/pull/35405) | Change query syntax to make it compatible with Azure Synapse. | From a3d1bb1844c3ade3c7ff110232254dbeb986228e Mon Sep 17 00:00:00 2001 From: ambirdsall Date: Wed, 28 Feb 2024 18:58:17 +0000 Subject: [PATCH 019/172] =?UTF-8?q?=F0=9F=A4=96=20Bump=20patch=20version?= =?UTF-8?q?=20of=20Python=20CDK?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- airbyte-cdk/python/.bumpversion.cfg | 2 +- airbyte-cdk/python/CHANGELOG.md | 3 +++ airbyte-cdk/python/Dockerfile | 4 ++-- airbyte-cdk/python/setup.py | 2 +- 4 files changed, 7 insertions(+), 4 deletions(-) diff --git a/airbyte-cdk/python/.bumpversion.cfg b/airbyte-cdk/python/.bumpversion.cfg index cae50f52aa81..eb6f22c65e87 100644 --- a/airbyte-cdk/python/.bumpversion.cfg +++ b/airbyte-cdk/python/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.67.0 +current_version = 0.67.1 commit = False [bumpversion:file:setup.py] diff --git a/airbyte-cdk/python/CHANGELOG.md b/airbyte-cdk/python/CHANGELOG.md index aafd9d33a8a7..d2698bdced66 100644 --- a/airbyte-cdk/python/CHANGELOG.md +++ b/airbyte-cdk/python/CHANGELOG.md @@ -1,5 +1,8 @@ # Changelog +## 0.67.1 +Fix handling of tab-separated CSVs + ## 0.67.0 Low-code: Add CustomRecordFilter diff --git a/airbyte-cdk/python/Dockerfile b/airbyte-cdk/python/Dockerfile index 2c1ea428129c..5ad0969929cf 100644 --- a/airbyte-cdk/python/Dockerfile +++ b/airbyte-cdk/python/Dockerfile @@ -10,7 +10,7 @@ RUN apk --no-cache upgrade \ && apk --no-cache add tzdata build-base # install airbyte-cdk -RUN pip install --prefix=/install airbyte-cdk==0.67.0 +RUN pip install --prefix=/install airbyte-cdk==0.67.1 # build a clean environment FROM base @@ -32,5 +32,5 @@ ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] # needs to be the same as CDK -LABEL io.airbyte.version=0.67.0 +LABEL io.airbyte.version=0.67.1 LABEL io.airbyte.name=airbyte/source-declarative-manifest diff --git a/airbyte-cdk/python/setup.py b/airbyte-cdk/python/setup.py index b441974dc1bb..2e7ff9cc4583 100644 --- a/airbyte-cdk/python/setup.py +++ b/airbyte-cdk/python/setup.py @@ -36,7 +36,7 @@ name="airbyte-cdk", # The version of the airbyte-cdk package is used at runtime to validate manifests. That validation must be # updated if our semver format changes such as using release candidate versions.
- version="0.67.0", + version="0.67.1", description="A framework for writing Airbyte Connectors.", long_description=README, long_description_content_type="text/markdown", From ed61d27e0d88e256d886cb4a437a6ab0f4b4ed43 Mon Sep 17 00:00:00 2001 From: Anton Karpets Date: Wed, 28 Feb 2024 20:58:29 +0200 Subject: [PATCH 020/172] =?UTF-8?q?=F0=9F=8F=A5Source=20Tiktok=20Marketing?= =?UTF-8?q?:=20update=20unit=20tests=20(#35698)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../unit_tests/streams_test.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/airbyte-integrations/connectors/source-tiktok-marketing/unit_tests/streams_test.py b/airbyte-integrations/connectors/source-tiktok-marketing/unit_tests/streams_test.py index eca654baa6f7..f4a4d10aed0c 100644 --- a/airbyte-integrations/connectors/source-tiktok-marketing/unit_tests/streams_test.py +++ b/airbyte-integrations/connectors/source-tiktok-marketing/unit_tests/streams_test.py @@ -2,6 +2,7 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # +from decimal import Decimal from unittest.mock import MagicMock, PropertyMock, patch import pendulum @@ -19,6 +20,7 @@ BasicReports, CampaignsReports, Daily, + FullRefreshTiktokStream, Hourly, Lifetime, ReportGranularity, @@ -273,3 +275,12 @@ def test_no_next_page_token(requests_mock): requests_mock.get(url, json={"data": {"page_info": {}}}) test_response = requests.get(url) assert stream.next_page_token(test_response) is None + + +@pytest.mark.parametrize( + ("original_value", "expected_value"), + (("-", None), (26.10, Decimal(26.10)), ("some_str", "some_str")), +) +def test_transform_function(original_value, expected_value): + field_schema = {} + assert FullRefreshTiktokStream.transform_function(original_value, field_schema) == expected_value From 543eaeabc5576959fa619da75132d0ce76010d64 Mon Sep 17 00:00:00 2001 From: Christo Grabowski <108154848+ChristoGrab@users.noreply.github.com> Date: Wed, 28 Feb 2024 14:18:38 -0500 Subject: [PATCH 021/172] Source Linnworks: re-enable in registry (#35557) --- .../connectors/source-linnworks/README.md | 66 +- .../connectors/source-linnworks/metadata.yaml | 10 +- .../connectors/source-linnworks/poetry.lock | 1271 +++++++++++++++++ .../source-linnworks/pyproject.toml | 29 + .../source-linnworks/requirements.txt | 1 - .../connectors/source-linnworks/setup.py | 44 - docs/integrations/sources/linnworks.md | 1 + 7 files changed, 1335 insertions(+), 87 deletions(-) create mode 100644 airbyte-integrations/connectors/source-linnworks/poetry.lock create mode 100644 airbyte-integrations/connectors/source-linnworks/pyproject.toml delete mode 100644 airbyte-integrations/connectors/source-linnworks/requirements.txt delete mode 100644 airbyte-integrations/connectors/source-linnworks/setup.py diff --git a/airbyte-integrations/connectors/source-linnworks/README.md b/airbyte-integrations/connectors/source-linnworks/README.md index d8557c27c0ce..eae9b5f359b8 100644 --- a/airbyte-integrations/connectors/source-linnworks/README.md +++ b/airbyte-integrations/connectors/source-linnworks/README.md @@ -7,51 +7,39 @@ For information about how to use this connector within Airbyte, see [the documen ### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** +* Python (~=3.9) +* Poetry (~=1.7) - installation instructions [here](https://python-poetry.org/docs/#installation) -#### Minimum Python version required `= 3.7.0` +### Installing the connector -#### Build & Activate Virtual 
Environment and install dependencies
-
-From this connector directory, create a virtual environment:

```bash
python -m venv .venv
```

This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your
development environment of choice. To activate it from the terminal, run:
+From this connector directory, run:

```bash
-source .venv/bin/activate
-pip install -r requirements.txt
-pip install '.[tests]'
+poetry install --with dev
```

-If you are in an IDE, follow your IDE's instructions to activate the virtualenv.
+### Create credentials

-Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is
-used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`.
-If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything
-should work as you expect.

-#### Create credentials

**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/linnworks)
to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_linnworks/spec.yaml` file.
Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of
accidentally checking in sensitive information.
See `integration_tests/sample_config.json` for a sample config file.

-**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source linnworks test creds`
-and place them into `secrets/config.json`.
-
### Locally running the connector

```bash
-python main.py spec
-python main.py check --config secrets/config.json
-python main.py discover --config secrets/config.json
-python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json
+poetry run source-linnworks spec
+poetry run source-linnworks check --config secrets/config.json
+poetry run source-linnworks discover --config secrets/config.json
+poetry run source-linnworks read --config secrets/config.json --catalog integration_tests/configured_catalog.json
+```
+
+### Running unit tests
+
+To run unit tests locally, from the connector directory run:
+
+```bash
+poetry run pytest unit_tests
```

### Locally running the connector docker image
@@ -151,13 +139,17 @@ airbyte-ci connectors --name=source-linnworks test
Customize the `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information.
If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`.
-## Dependency Management
+### Dependency Management

-All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
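This hunk migrates the connector from the `setup.py`/`requirements.txt` layout to Poetry, and the commit's diffstat adds a 29-line `pyproject.toml` whose contents are not shown in this excerpt. As a rough sketch of the shape such a Poetry-managed `pyproject.toml` takes: only the package name and version (0.1.7, from `metadata.yaml` below) and the CDK version (0.67.0, from the `poetry.lock` below) come from this patch; every other pin and the script entry point are illustrative assumptions, not the file's actual contents.

```toml
# Hypothetical sketch of a Poetry-managed connector pyproject.toml.
# Grounded in this patch: package name, version 0.1.7 (metadata.yaml),
# airbyte-cdk 0.67.0 (poetry.lock). All other pins and the script
# entry point are illustrative assumptions.
[tool.poetry]
name = "airbyte-source-linnworks"
version = "0.1.7"
description = "Source implementation for Linnworks."
packages = [{ include = "source_linnworks" }]

[tool.poetry.dependencies]
python = "^3.9,<3.12"
airbyte-cdk = "0.67.0"

[tool.poetry.group.dev.dependencies]
pytest = "^6.2"
requests-mock = "^1.9"

[tool.poetry.scripts]
# would back the `poetry run source-linnworks ...` commands shown earlier
source-linnworks = "source_linnworks.run:run"
```

Running `poetry install --with dev` resolves tables like these into the `poetry.lock` file created below, and `poetry add` edits them in place.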
-We split dependencies between two groups:
+All of your dependencies should be managed via Poetry.
+
+To add a new dependency, run:
+
+```bash
+poetry add <package-name>
+```
-* required for your connector to work need to go to `MAIN_REQUIREMENTS` list.
-* required for the testing need to go to `TEST_REQUIREMENTS` list
+Please commit the changes to `pyproject.toml` and `poetry.lock` files.

### Publishing a new version of the connector
diff --git a/airbyte-integrations/connectors/source-linnworks/metadata.yaml b/airbyte-integrations/connectors/source-linnworks/metadata.yaml
index b2bf2f742b85..22f9617db685 100644
--- a/airbyte-integrations/connectors/source-linnworks/metadata.yaml
+++ b/airbyte-integrations/connectors/source-linnworks/metadata.yaml
@@ -7,22 +7,22 @@ data:
   connectorSubtype: api
   connectorType: source
   definitionId: 7b86879e-26c5-4ef6-a5ce-2be5c7b46d1e
-  dockerImageTag: 0.1.6
+  dockerImageTag: 0.1.7
   dockerRepository: airbyte/source-linnworks
   documentationUrl: https://docs.airbyte.com/integrations/sources/linnworks
   githubIssueLabel: source-linnworks
   icon: linnworks.svg
   license: MIT
   name: Linnworks
-  registries: # Removed from registries due to LEGACY STATE
+  registries:
     cloud:
-      enabled: false
+      enabled: true
     oss:
-      enabled: false
+      enabled: true
   releaseStage: alpha
   remoteRegistries:
     pypi:
-      enabled: false
+      enabled: true
       packageName: airbyte-source-linnworks
   supportLevel: community
   tags:
diff --git a/airbyte-integrations/connectors/source-linnworks/poetry.lock b/airbyte-integrations/connectors/source-linnworks/poetry.lock
new file mode 100644
index 000000000000..1ddca697292e
--- /dev/null
+++ b/airbyte-integrations/connectors/source-linnworks/poetry.lock
@@ -0,0 +1,1271 @@
+# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
+
+[[package]]
+name = "airbyte-cdk"
+version = "0.67.0"
+description = "A framework for writing Airbyte Connectors."
+optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte-cdk-0.67.0.tar.gz", hash = "sha256:cbbff1b3895c89313764a721870bb293a396c74bad8dd6e5c36a0c3b0a2f6a10"}, + {file = "airbyte_cdk-0.67.0-py3-none-any.whl", hash = "sha256:2082c859536a2450c03b89dba1bbdab21bad314fbf5ef6d2e86fefc4ba935373"}, +] + +[package.dependencies] +airbyte-protocol-models = "0.5.1" +backoff = "*" +cachetools = "*" +Deprecated = ">=1.2,<2.0" +dpath = ">=2.0.1,<2.1.0" +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<1.0" +jsonschema = ">=3.2.0,<3.3.0" +pendulum = "<3.0.0" +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +python-dateutil = "*" +PyYAML = ">=6.0.1" +requests = "*" +requests-cache = "*" +wcmatch = "8.4" + +[package.extras] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.5.1" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "atomicwrites" +version = "1.4.1" +description = "Atomic file writes." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash 
= "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = 
"charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." 
+optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonref" +version = "0.3.0" +description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +optional = false +python-versions = ">=3.3,<4.0" +files = [ + {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, + {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = 
"MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "multidict" +version = "6.0.5" +description = "multidict implementation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, + {file = 
"multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, + {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, + {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, + {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = 
"sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, + {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, + {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, + {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, + {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, + {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, + {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, + {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, + {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, + 
{file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, + {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, + {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, + {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, + {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = 
"sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"},
+    {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"},
+]
+
+[package.extras]
+docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"]
+
+[[package]]
+name = "pluggy"
+version = "1.4.0"
+description = "plugin and hook calling mechanisms for python"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"},
+    {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"},
+]
+
+[package.extras]
+dev = ["pre-commit", "tox"]
+testing = ["pytest", "pytest-benchmark"]
+
+[[package]]
+name = "py"
+version = "1.11.0"
+description = "library with cross-python path, ini-parsing, io, code, log facilities"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+files = [
+    {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
+    {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
+]
+
+[[package]]
+name = "pydantic"
+version = "1.10.14"
+description = "Data validation and settings management using python type hints"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"},
+    {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"},
+    {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"},
+    {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"},
+    {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"},
+    {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"},
+    {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"},
+    {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"},
+    {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"},
+    {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"},
+    {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"},
+    {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"},
+    {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"},
+    {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"},
+    {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"},
+    {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"},
+    {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"},
+    {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"},
+    {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"},
+    {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"},
+    {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"},
+    {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"},
+    {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"},
+    {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"},
+    {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"},
+    {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"},
+    {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"},
+    {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"},
+    {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"},
+    {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"},
+    {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"},
+    {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"},
+    {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"},
+    {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"},
+    {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"},
+    {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"},
+]
+
+[package.dependencies]
+typing-extensions = ">=4.2.0"
+
+[package.extras]
+dotenv = ["python-dotenv (>=0.10.4)"]
+email = ["email-validator (>=1.0.3)"]
+
+[[package]]
+name = "pyrate-limiter"
+version = "3.1.1"
+description = "Python Rate-Limiter using Leaky-Bucket Algorithm"
+optional = false
+python-versions = ">=3.8,<4.0"
+files = [
+    {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"},
+    {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"},
+]
+
+[package.extras]
+all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"]
+docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"]
+
+[[package]]
+name = "pyrsistent"
+version = "0.20.0"
+description = "Persistent/Functional/Immutable data structures"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"},
+    {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"},
+    {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"},
+    {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"},
+    {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"},
+    {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"},
+    {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"},
+    {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"},
+    {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"},
+    {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"},
+    {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"},
+    {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"},
+    {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"},
+    {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"},
+    {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"},
+    {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"},
+    {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"},
+    {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"},
+    {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"},
+    {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"},
+    {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"},
+    {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"},
+    {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"},
+    {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"},
+    {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"},
+    {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"},
+    {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"},
+    {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"},
+    {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"},
+    {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"},
+    {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"},
+    {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"},
+]
+
+[[package]]
+name = "pytest"
+version = "6.2.5"
+description = "pytest: simple powerful testing with Python"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"},
+    {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"},
+]
+
+[package.dependencies]
+atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""}
+attrs = ">=19.2.0"
+colorama = {version = "*", markers = "sys_platform == \"win32\""}
+iniconfig = "*"
+packaging = "*"
+pluggy = ">=0.12,<2.0"
+py = ">=1.8.2"
+toml = "*"
+
+[package.extras]
+testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"]
+
+[[package]]
+name = "pytest-mock"
+version = "3.12.0"
+description = "Thin-wrapper around the mock package for easier use with pytest"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"},
+    {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"},
+]
+
+[package.dependencies]
+pytest = ">=5.0"
+
+[package.extras]
+dev = ["pre-commit", "pytest-asyncio", "tox"]
+
+[[package]]
+name = "python-dateutil"
+version = "2.8.2"
+description = "Extensions to the standard Python datetime module"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
+files = [
+    {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
+    {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
+]
+
+[package.dependencies]
+six = ">=1.5"
+
+[[package]]
+name = "pytzdata"
+version = "2020.1"
+description = "The Olson timezone database for Python."
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+    {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"},
+    {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"},
+]
+
+[[package]]
+name = "pyyaml"
+version = "6.0.1"
+description = "YAML parser and emitter for Python"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"},
+    {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"},
+    {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
+    {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
+    {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
+    {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
+    {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
+    {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
+    {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
+    {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"},
+    {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
+    {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
+    {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
+    {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
+    {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
+    {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
+    {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
+    {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
+    {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
+    {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
+    {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
+    {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
+    {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
+    {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
+    {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
+    {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
+    {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"},
+    {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"},
+    {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"},
+    {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"},
+    {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"},
+    {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"},
+    {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"},
+    {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"},
+    {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"},
+    {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"},
+    {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
+    {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
+    {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
+    {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
+    {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
+    {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
+    {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
+    {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"},
+    {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
+    {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
+    {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
+    {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
+    {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
+    {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
+    {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
+]
+
+[[package]]
+name = "requests"
+version = "2.31.0"
+description = "Python HTTP for Humans."
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
+    {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
+]
+
+[package.dependencies]
+certifi = ">=2017.4.17"
+charset-normalizer = ">=2,<4"
+idna = ">=2.5,<4"
+urllib3 = ">=1.21.1,<3"
+
+[package.extras]
+socks = ["PySocks (>=1.5.6,!=1.5.7)"]
+use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
+
+[[package]]
+name = "requests-cache"
+version = "1.2.0"
+description = "A persistent cache for python requests"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"},
+    {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"},
+]
+
+[package.dependencies]
+attrs = ">=21.2"
+cattrs = ">=22.2"
+platformdirs = ">=2.5"
+requests = ">=2.22"
+url-normalize = ">=1.4"
+urllib3 = ">=1.25.5"
+
+[package.extras]
+all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"]
+bson = ["bson (>=0.5)"]
+docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"]
+dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"]
+json = ["ujson (>=5.4)"]
+mongodb = ["pymongo (>=3)"]
+redis = ["redis (>=3)"]
+security = ["itsdangerous (>=2.0)"]
+yaml = ["pyyaml (>=6.0.1)"]
+
+[[package]]
+name = "requests-mock"
+version = "1.11.0"
+description = "Mock out responses from the requests package"
+optional = false
+python-versions = "*"
+files = [
+    {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"},
+    {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"},
+]
+
+[package.dependencies]
+requests = ">=2.3,<3"
+six = "*"
+
+[package.extras]
+fixture = ["fixtures"]
+test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"]
+
+[[package]]
+name = "setuptools"
+version = "69.1.1"
+description = "Easily download, build, install, upgrade, and uninstall Python packages"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56"},
+    {file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8"},
+]
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
+testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
+testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
+
+[[package]]
+name = "six"
+version = "1.16.0"
+description = "Python 2 and 3 compatibility utilities"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+files = [
+    {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
+    {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
+]
+
+[[package]]
+name = "toml"
+version = "0.10.2"
+description = "Python Library for Tom's Obvious, Minimal Language"
+optional = false
+python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
+files = [
+    {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
+    {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
+]
+
+[[package]]
+name = "typing-extensions"
+version = "4.10.0"
+description = "Backported and Experimental Type Hints for Python 3.8+"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"},
+    {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"},
+]
+
+[[package]]
+name = "url-normalize"
+version = "1.4.3"
+description = "URL normalization for Python"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
+files = [
+    {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"},
+    {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"},
+]
+
+[package.dependencies]
+six = "*"
+
+[[package]]
+name = "urllib3"
+version = "1.26.18"
+description = "HTTP library with thread-safe connection pooling, file post, and more."
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
+files = [
+    {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"},
+    {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"},
+]
+
+[package.extras]
+brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
+secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
+socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
+
+[[package]]
+name = "urllib3"
+version = "2.2.1"
+description = "HTTP library with thread-safe connection pooling, file post, and more."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"},
+    {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"},
+]
+
+[package.extras]
+brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
+h2 = ["h2 (>=4,<5)"]
+socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
+zstd = ["zstandard (>=0.18.0)"]
+
+[[package]]
+name = "vcrpy"
+version = "6.0.1"
+description = "Automatically mock your HTTP interactions to simplify and speed up testing"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "vcrpy-6.0.1.tar.gz", hash = "sha256:9e023fee7f892baa0bbda2f7da7c8ac51165c1c6e38ff8688683a12a4bde9278"},
+]
+
+[package.dependencies]
+PyYAML = "*"
+urllib3 = {version = "<2", markers = "platform_python_implementation == \"PyPy\" or python_version < \"3.10\""}
+wrapt = "*"
+yarl = "*"
+
+[package.extras]
+tests = ["Werkzeug (==2.0.3)", "aiohttp", "boto3", "httplib2", "httpx", "pytest", "pytest-aiohttp", "pytest-asyncio", "pytest-cov", "pytest-httpbin", "requests (>=2.22.0)", "tornado", "urllib3"]
+
+[[package]]
+name = "wcmatch"
+version = "8.4"
+description = "Wildcard/glob file name matcher."
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"},
+    {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"},
+]
+
+[package.dependencies]
+bracex = ">=2.1.1"
+
+[[package]]
+name = "wrapt"
+version = "1.16.0"
+description = "Module for decorators, wrappers and monkey patching."
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"},
+    {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"},
+    {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"},
+    {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"},
+    {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"},
+    {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"},
+    {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"},
+    {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"},
+    {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"},
+    {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"},
+    {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"},
+    {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"},
+    {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"},
+    {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"},
+    {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"},
+    {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"},
+    {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"},
+    {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"},
+    {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"},
+    {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"},
+    {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"},
+    {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"},
+    {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"},
+    {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"},
+    {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"},
+    {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"},
+    {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"},
+    {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"},
+    {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"},
+    {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"},
+    {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"},
+    {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"},
+    {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"},
+    {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"},
+    {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"},
+    {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"},
+    {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"},
+    {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"},
+    {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"},
+    {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"},
+    {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"},
+    {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"},
+    {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"},
+    {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"},
+    {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"},
+    {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"},
+    {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"},
+    {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"},
+    {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"},
+    {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"},
+    {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"},
+    {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"},
+    {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"},
+    {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"},
+    {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"},
+    {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"},
+    {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"},
+    {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"},
+    {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"},
+    {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"},
+    {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"},
+    {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"},
+    {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"},
+    {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"},
+    {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"},
+    {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"},
+    {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"},
+    {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"},
+    {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"},
+    {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"},
+]
+
+[[package]]
+name = "yarl"
+version = "1.9.4"
+description = "Yet another URL library"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"},
+    {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"},
+    {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"},
+    {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"},
+    {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"},
+    {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"},
+    {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"},
+    {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"},
+    {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"},
+    {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"},
+    {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"},
+    {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"},
+    {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"},
+    {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"},
+    {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"},
+    {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"},
+    {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"},
+    {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"},
+    {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"},
+    {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"},
+    {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"},
+    {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"},
+    {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"},
+    {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"},
+    {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"},
+    {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"},
+    {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"},
+    {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"},
+    {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"},
+    {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"},
+    {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"},
+    {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"},
+    {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"},
+    {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"},
+    {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"},
+    {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"},
+    {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"},
+    {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"},
+    {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"},
+    {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"},
+    {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"},
+    {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"},
+    {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"},
+    {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"},
+    {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"},
+    {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"},
+    {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"},
+    {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"},
+    {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"},
+    {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"},
+    {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"},
+    {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"},
+    {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"},
+    {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"},
+    {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"},
+    {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"},
+    {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"},
+    {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"},
+    {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"},
+    {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"},
+    {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"},
+    {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"},
+    {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"},
+    {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"},
+    {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"},
+    {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"},
+    {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"},
+    {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"},
+    {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"},
+    {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"},
+    {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"},
+    {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"},
+    {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"},
+    {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"},
+    {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"},
+    {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"},
+    {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"},
+    {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"},
+    {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"},
+    {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"},
+    {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"},
+    {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"},
+    {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"},
+    {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"},
+    {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"},
+    {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"},
+    {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"},
+    {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"},
+    {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"},
+    {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"},
+]
+
+[package.dependencies]
+idna = ">=2.0"
+multidict = ">=4.0"
+
+[metadata]
+lock-version = "2.0"
+python-versions = "^3.9,<3.12"
+content-hash = "c3c6bdac38beb6788cbb86c7f869719982303043362b4b91a6367c4fdc089035"
diff --git a/airbyte-integrations/connectors/source-linnworks/pyproject.toml b/airbyte-integrations/connectors/source-linnworks/pyproject.toml
new file mode 100644
index 000000000000..27343a20c92c
--- /dev/null
+++ b/airbyte-integrations/connectors/source-linnworks/pyproject.toml
@@ -0,0 +1,29 @@
+[build-system]
+requires = [ "poetry-core>=1.0.0",]
+build-backend = "poetry.core.masonry.api"
+
+[tool.poetry]
+version = "0.1.7"
+name = "source-linnworks"
+description = "Source implementation for Linnworks."
+authors = [ "Airbyte <contact@airbyte.io>",]
+license = "MIT"
+readme = "README.md"
+documentation = "https://docs.airbyte.com/integrations/sources/linnworks"
+homepage = "https://airbyte.com"
+repository = "https://github.com/airbytehq/airbyte"
+[[tool.poetry.packages]]
+include = "source_linnworks"
+
+[tool.poetry.dependencies]
+python = "^3.9,<3.12"
+airbyte-cdk = "^0.67.0"
+vcrpy = "^6.0.0"
+
+[tool.poetry.scripts]
+source-linnworks = "source_linnworks.run:run"
+
+[tool.poetry.group.dev.dependencies]
+requests-mock = "^1.9.3"
+pytest-mock = "^3.6.1"
+pytest = "^6.1"
diff --git a/airbyte-integrations/connectors/source-linnworks/requirements.txt b/airbyte-integrations/connectors/source-linnworks/requirements.txt
deleted file mode 100644
index d6e1198b1ab1..000000000000
--- a/airbyte-integrations/connectors/source-linnworks/requirements.txt
+++ /dev/null
@@ -1 +0,0 @@
--e .
diff --git a/airbyte-integrations/connectors/source-linnworks/setup.py b/airbyte-integrations/connectors/source-linnworks/setup.py deleted file mode 100644 index a2d048bbd31a..000000000000 --- a/airbyte-integrations/connectors/source-linnworks/setup.py +++ /dev/null @@ -1,44 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = ["airbyte-cdk", "vcrpy"] - -TEST_REQUIREMENTS = [ - "pytest~=6.1", - "pytest-mock~=3.6.1", - "requests-mock~=1.9.3", -] - -setup( - entry_points={ - "console_scripts": [ - "source-linnworks=source_linnworks.run:run", - ], - }, - name="source_linnworks", - description="Source implementation for Linnworks.", - author="Labanoras Tech", - author_email="jv@labanoras.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={ - "": [ - # Include yaml files in the package (if any) - "*.yml", - "*.yaml", - # Include all json files in the package, up to 4 levels deep - "*.json", - "*/*.json", - "*/*/*.json", - "*/*/*/*.json", - "*/*/*/*/*.json", - ] - }, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/docs/integrations/sources/linnworks.md b/docs/integrations/sources/linnworks.md index 47f9aa6257b3..2effa53a9a59 100644 --- a/docs/integrations/sources/linnworks.md +++ b/docs/integrations/sources/linnworks.md @@ -71,6 +71,7 @@ Rate limits for the Linnworks API vary across endpoints. Use the [links in the * | Version | Date | Pull Request | Subject | | :------ | :--------- | :------------------------------------------------------- | :-------------------------------------------------------------------------- | +| 0.1.7 | 2024-02-22 | [35557](https://github.com/airbytehq/airbyte/pull/35557) | Manage dependencies with Poetry | | 0.1.6 | 2024-01-31 | [34717](https://github.com/airbytehq/airbyte/pull/34717) | Update CDK and migrate to base image | | 0.1.5 | 2022-11-20 | [19865](https://github.com/airbytehq/airbyte/pull/19865) | Bump Version | | 0.1.4 | 2021-11-24 | [8226](https://github.com/airbytehq/airbyte/pull/8226) | Source Linnworks: improve streams ProcessedOrders and ProcessedOrderDetails | From 357c2d686d8e82949661fab8126b01d27c2ee36a Mon Sep 17 00:00:00 2001 From: Maxime Carbonneau-Leclerc <3360483+maxi297@users.noreply.github.com> Date: Wed, 28 Feb 2024 15:35:24 -0500 Subject: [PATCH 022/172] Issue 35112/relax cats when not primary key (#35645) --- .../connector_acceptance_test/config.py | 32 +---- .../tests/test_core.py | 116 ++++-------------- .../tests/test_full_refresh.py | 35 ------ .../utils/__init__.py | 3 +- .../utils/asserts.py | 2 +- .../utils/compare.py | 12 +- .../sample_files/acceptance-test-config.yml | 2 - .../unit_tests/test_asserts.py | 4 +- .../unit_tests/test_core.py | 71 ++++++----- .../unit_tests/test_global_fixtures.py | 47 ------- .../unit_tests/test_test_full_refresh.py | 108 ---------------- .../unit_tests/test_utils.py | 14 --- .../acceptance-test-config.yml.hbs | 2 - .../connector-acceptance-tests-reference.md | 12 +- 14 files changed, 73 insertions(+), 387 deletions(-) diff --git a/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/config.py b/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/config.py index a0d62f646163..39e60f4ce531 100644 --- a/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/config.py +++ b/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/config.py @@ -91,24 +91,7 @@ class 
Config: bypass_reason: Optional[str] = Field(description="Reason why this test is bypassed.") path: Optional[Path] = Field(description="File with expected records") - extra_fields: bool = Field(False, description="Allow records to have other fields") exact_order: bool = Field(False, description="Ensure that records produced in exact same order") - extra_records: bool = Field( - True, - description="Allow connector to produce extra records, but still enforce all records from the expected file to be produced", - ) - - @validator("exact_order", always=True) - def validate_exact_order(cls, exact_order, values): - if "extra_fields" in values and values["extra_fields"] and not exact_order: - raise ValueError("exact_order must be on if extra_fields enabled") - return exact_order - - @validator("extra_records", always=True) - def validate_extra_records(cls, extra_records, values): - if "extra_fields" in values and values["extra_fields"] and extra_records: - raise ValueError("extra_records must be off if extra_fields enabled") - return extra_records @validator("path", always=True) def no_bypass_reason_when_path_is_set(cls, path, values): @@ -132,11 +115,6 @@ class IgnoredFieldsConfiguration(BaseConfig): bypass_reason: Optional[str] = Field(default=None, description="Reason why this field is considered ignored.") -ignored_fields: Optional[Mapping[str, List[IgnoredFieldsConfiguration]]] = Field( - description="For each stream, list of fields path ignoring in sequential reads test" -) - - class NoPrimaryKeyConfiguration(BaseConfig): name: str bypass_reason: Optional[str] = Field(default=None, description="Reason why this stream does not support a primary key") @@ -196,7 +174,6 @@ class BasicReadTestConfig(BaseConfig): ) expect_trace_message_on_failure: bool = Field(True, description="Ensure that a trace message is emitted when the connector crashes") timeout_seconds: int = timeout_seconds - ignored_fields: Optional[Mapping[str, List[IgnoredFieldsConfiguration]]] = ignored_fields file_types: Optional[FileTypesConfig] = Field( default_factory=FileTypesConfig, description="For file-based connectors, unsupported by source file types can be configured or a test can be skipped at all", @@ -214,7 +191,9 @@ class FullRefreshConfig(BaseConfig): configured_catalog_path: Optional[str] = configured_catalog_path timeout_seconds: int = timeout_seconds deployment_mode: Optional[str] = deployment_mode - ignored_fields: Optional[Mapping[str, List[IgnoredFieldsConfiguration]]] = ignored_fields + ignored_fields: Optional[Mapping[str, List[IgnoredFieldsConfiguration]]] = Field( + description="For each stream, list of fields path ignoring in sequential reads test" + ) class FutureStateConfig(BaseConfig): @@ -341,11 +320,6 @@ def migrate_legacy_to_current_config(legacy_config: dict) -> dict: basic_read_tests["empty_streams"] = [ {"name": empty_stream_name} for empty_stream_name in basic_read_tests.get("empty_streams", []) ] - if "ignored_fields" in basic_read_tests: - basic_read_tests["ignored_fields"] = { - stream: [{"name": field_name} for field_name in ignore_fields] - for stream, ignore_fields in basic_read_tests["ignored_fields"].items() - } for full_refresh_test in migrated_config["acceptance_tests"].get("full_refresh", {}).get("tests", []): if "ignored_fields" in full_refresh_test: full_refresh_test["ignored_fields"] = { diff --git a/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/tests/test_core.py 
b/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/tests/test_core.py index ce2df96dc858..d51238071272 100644 --- a/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/tests/test_core.py +++ b/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/tests/test_core.py @@ -50,7 +50,7 @@ SpecTestConfig, UnsupportedFileTypeConfig, ) -from connector_acceptance_test.utils import ConnectorRunner, SecretDict, delete_fields, filter_output, make_hashable, verify_records_schema +from connector_acceptance_test.utils import ConnectorRunner, SecretDict, filter_output, make_hashable, verify_records_schema from connector_acceptance_test.utils.backward_compatibility import CatalogDiffChecker, SpecDiffChecker, validate_previous_configs from connector_acceptance_test.utils.common import ( build_configured_catalog_from_custom_catalog, @@ -58,7 +58,6 @@ find_all_values_for_key_in_schema, find_keyword_schema, ) -from connector_acceptance_test.utils.compare import diff_dicts from connector_acceptance_test.utils.json_schema_helper import ( JsonSchemaHelper, flatten_tuples, @@ -888,13 +887,13 @@ def _validate_records_structure(records: List[AirbyteRecordMessage], configured_ ), f" Record {record} from {record.stream} stream with fields {record_fields} should have some fields mentioned by json schema: {schema_pathes}" @staticmethod - def _validate_schema(records: List[AirbyteRecordMessage], configured_catalog: ConfiguredAirbyteCatalog, fail_on_extra_columns: Boolean): + def _validate_schema(records: List[AirbyteRecordMessage], configured_catalog: ConfiguredAirbyteCatalog): """ Check if data type and structure in records matches the one in json_schema of the stream in catalog """ TestBasicRead._validate_records_structure(records, configured_catalog) bar = "-" * 80 - streams_errors = verify_records_schema(records, configured_catalog, fail_on_extra_columns) + streams_errors = verify_records_schema(records, configured_catalog) for stream_name, errors in streams_errors.items(): errors = map(str, errors.values()) str_errors = f"\n{bar}\n".join(errors) @@ -978,10 +977,7 @@ def _validate_expected_records( stream_name=stream_name, actual=actual, expected=expected, - extra_fields=flags.extra_fields, exact_order=flags.exact_order, - extra_records=flags.extra_records, - ignored_fields=ignored_field_names, detailed_logger=detailed_logger, configured_catalog=configured_catalog, ) @@ -1071,9 +1067,7 @@ async def test_read( assert records, "At least one record should be read using provided catalog" if should_validate_schema: - self._validate_schema( - records=records, configured_catalog=configured_catalog, fail_on_extra_columns=should_fail_on_extra_columns - ) + self._validate_schema(records=records, configured_catalog=configured_catalog) self._validate_empty_streams(records=records, configured_catalog=configured_catalog, allowed_empty_streams=empty_streams) for pks, record in primary_keys_for_records(streams=configured_catalog.streams, records=records): @@ -1130,30 +1124,12 @@ async def test_airbyte_trace_message_on_failure(self, connector_config, inputs: assert len(error_trace_messages) >= 1, "Connector should emit at least one error trace message" - @staticmethod - def remove_extra_fields(record: Any, spec: Any) -> Any: - """Remove keys from record that spec doesn't have, works recursively""" - if not isinstance(spec, Mapping): - return record - - assert isinstance(record, Mapping), "Record or part of it is not a dictionary, but expected record is." 
- result = {} - - for k, v in spec.items(): - assert k in record, "Record or part of it doesn't have attribute that has expected record." - result[k] = TestBasicRead.remove_extra_fields(record[k], v) - - return result - @staticmethod def compare_records( stream_name: str, actual: List[Mapping[str, Any]], expected: List[Mapping[str, Any]], - extra_fields: bool, exact_order: bool, - extra_records: bool, - ignored_fields: List[str], detailed_logger: Logger, configured_catalog: ConfiguredAirbyteCatalog, ): @@ -1178,76 +1154,26 @@ def compare_records( assert ( not expected_but_not_found ), f"Expected to see those primary keys in the actual response for stream {stream_name} but they were not found." - else: - TestBasicRead.legacy_compare_records( - stream_name, actual, expected, extra_fields, exact_order, extra_records, ignored_fields, detailed_logger - ) + elif len(expected) > len(actual): + if exact_order: + detailed_logger.warning("exact_order is `True` but validation without primary key does not consider order") - @staticmethod - def legacy_compare_records( - stream_name: str, - actual: List[Mapping[str, Any]], - expected: List[Mapping[str, Any]], - extra_fields: bool, - exact_order: bool, - extra_records: bool, - ignored_fields: List[str], - detailed_logger: Logger, - ): - if exact_order: - if ignored_fields: - for item in actual: - delete_fields(item, ignored_fields) - for item in expected: - delete_fields(item, ignored_fields) - - cleaned_actual = [] - if extra_fields: - for r1, r2 in zip(expected, actual): - if r1 and r2: - cleaned_actual.append(TestBasicRead.remove_extra_fields(r2, r1)) - else: - break - - cleaned_actual = cleaned_actual or actual - complete_diff = "\n".join( - diff_dicts(cleaned_actual if not extra_records else cleaned_actual[: len(expected)], expected, use_markup=False) - ) - for r1, r2 in zip(expected, cleaned_actual): - if r1 is None: - assert extra_records, f"Stream {stream_name}: There are more records than expected, but extra_records is off" - break - - # to avoid printing the diff twice, we avoid the == operator here (see plugin.pytest_assertrepr_compare) - equals = r1 == r2 - assert equals, f"Stream {stream_name}: Mismatch of record order or values\nDiff actual vs expected:{complete_diff}" - else: - _make_hashable = functools.partial(make_hashable, exclude_fields=ignored_fields) if ignored_fields else make_hashable - expected = set(map(_make_hashable, expected)) - actual = set(map(_make_hashable, actual)) + expected = set(map(make_hashable, expected)) + actual = set(map(make_hashable, actual)) missing_expected = set(expected) - set(actual) - if missing_expected: - extra = set(actual) - set(expected) - msg = f"Stream {stream_name}: All expected records must be produced" - detailed_logger.info(msg) - detailed_logger.info("missing:") - detailed_logger.log_json_list(sorted(missing_expected, key=lambda record: str(record.get("ID", "0")))) - detailed_logger.info("expected:") - detailed_logger.log_json_list(sorted(expected, key=lambda record: str(record.get("ID", "0")))) - detailed_logger.info("actual:") - detailed_logger.log_json_list(sorted(actual, key=lambda record: str(record.get("ID", "0")))) - detailed_logger.info("extra:") - detailed_logger.log_json_list(sorted(extra, key=lambda record: str(record.get("ID", "0")))) - pytest.fail(msg) - - if not extra_records: - extra_actual = set(actual) - set(expected) - if extra_actual: - msg = f"Stream {stream_name}: There are more records than expected, but extra_records is off" - detailed_logger.info(msg) - 
detailed_logger.log_json_list(extra_actual) - pytest.fail(msg) + extra = set(actual) - set(expected) + msg = f"Expected to have at least as many records as expected for stream {stream_name}." + detailed_logger.info(msg) + detailed_logger.info("missing:") + detailed_logger.log_json_list(sorted(missing_expected)) + detailed_logger.info("expected:") + detailed_logger.log_json_list(sorted(expected)) + detailed_logger.info("actual:") + detailed_logger.log_json_list(sorted(actual)) + detailed_logger.info("extra:") + detailed_logger.log_json_list(sorted(extra)) + pytest.fail(msg) @staticmethod def group_by_stream(records: List[AirbyteRecordMessage]) -> MutableMapping[str, List[MutableMapping]]: diff --git a/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/tests/test_full_refresh.py b/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/tests/test_full_refresh.py index 103bd8b3aed8..038b5e7bc9b6 100644 --- a/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/tests/test_full_refresh.py +++ b/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/tests/test_full_refresh.py @@ -51,38 +51,6 @@ def assert_emitted_at_increase_on_subsequent_runs(self, first_read_records, seco assert max_emitted_at_first_read < min_emitted_at_second_read, "emitted_at should increase on subsequent runs" - def assert_two_sequential_reads_produce_same_or_subset_records( - self, records_1, records_2, configured_catalog, ignored_fields, detailed_logger - ): - records_by_stream_1 = defaultdict(list) - for record in records_1: - records_by_stream_1[record.stream].append(record.data) - - records_by_stream_2 = defaultdict(list) - for record in records_2: - records_by_stream_2[record.stream].append(record.data) - - pks_by_stream = primary_keys_by_stream(configured_catalog) - - for stream in records_by_stream_1: - if pks_by_stream.get(stream): - serializer = partial(primary_keys_only, pks=pks_by_stream.get(stream)) - else: - serializer = partial(make_hashable, exclude_fields=[field.name for field in ignored_fields.get(stream, [])]) - stream_records_1 = records_by_stream_1.get(stream) - stream_records_2 = records_by_stream_2.get(stream) - if not set(map(serializer, stream_records_1)).issubset(set(map(serializer, stream_records_2))): - missing_records = set(map(serializer, stream_records_1)) - (set(map(serializer, stream_records_2))) - msg = f"{stream}: the two sequential reads should produce either equal set of records or one of them is a strict subset of the other" - detailed_logger.info(msg) - detailed_logger.info("First read") - detailed_logger.log_json_list(stream_records_1) - detailed_logger.info("Second read") - detailed_logger.log_json_list(stream_records_2) - detailed_logger.info("Missing records") - detailed_logger.log_json_list(missing_records) - pytest.fail(msg) - async def test_sequential_reads( self, connector_config: SecretDict, @@ -106,6 +74,3 @@ async def test_sequential_reads( records_2 = [message.record for message in output_2 if message.type == Type.RECORD] self.assert_emitted_at_increase_on_subsequent_runs(records_1, records_2) - self.assert_two_sequential_reads_produce_same_or_subset_records( - records_1, records_2, configured_catalog, ignored_fields, detailed_logger - ) diff --git a/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/utils/__init__.py index
5144353f2af7..144352b36a40 100644 --- a/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/utils/__init__.py +++ b/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/utils/__init__.py @@ -12,7 +12,7 @@ load_config, load_yaml_or_json_path, ) -from .compare import delete_fields, diff_dicts, make_hashable +from .compare import diff_dicts, make_hashable from .connector_runner import ConnectorRunner from .json_schema_helper import JsonSchemaHelper @@ -30,5 +30,4 @@ "verify_records_schema", "build_configured_catalog_from_custom_catalog", "build_configured_catalog_from_discovered_catalog_and_empty_streams", - "delete_fields", ] diff --git a/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/utils/asserts.py b/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/utils/asserts.py index 36a3e01c1158..df276b655fd7 100644 --- a/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/utils/asserts.py +++ b/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/utils/asserts.py @@ -80,7 +80,7 @@ def check(self, instance, format): def verify_records_schema( - records: List[AirbyteRecordMessage], catalog: ConfiguredAirbyteCatalog, fail_on_extra_columns: bool + records: List[AirbyteRecordMessage], catalog: ConfiguredAirbyteCatalog ) -> Mapping[str, Mapping[str, ValidationError]]: """Check records against their schemas from the catalog, yield error messages. Only first record with error will be yielded for each stream. diff --git a/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/utils/compare.py b/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/utils/compare.py index e71b1bafdc55..6ae6940f2d1d 100644 --- a/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/utils/compare.py +++ b/airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/utils/compare.py @@ -79,15 +79,7 @@ class ListWithHashMixin(HashMixin, list): pass -def delete_fields(obj: Mapping, path_list: List[str]) -> None: - for path in path_list: - try: - dpath.util.delete(obj, path) - except dpath.exceptions.PathNotFound: - pass - - -def make_hashable(obj, exclude_fields: List[str] = None) -> str: +def make_hashable(obj) -> str: """ Simplify comparison of nested dicts/lists :param obj value for comparison @@ -95,8 +87,6 @@ def make_hashable(obj, exclude_fields: List[str] = None) -> str: """ if isinstance(obj, Mapping): # If value is Mapping, some fields can be excluded - if exclude_fields: - delete_fields(obj, exclude_fields) return DictWithHashMixin(obj) if isinstance(obj, List): return ListWithHashMixin(obj) diff --git a/airbyte-integrations/bases/connector-acceptance-test/sample_files/acceptance-test-config.yml b/airbyte-integrations/bases/connector-acceptance-test/sample_files/acceptance-test-config.yml index dc6a5ea47839..566151455330 100644 --- a/airbyte-integrations/bases/connector-acceptance-test/sample_files/acceptance-test-config.yml +++ b/airbyte-integrations/bases/connector-acceptance-test/sample_files/acceptance-test-config.yml @@ -15,9 +15,7 @@ tests: empty_streams: [] expect_records: path: "integration_tests/expected_records.jsonl" - extra_fields: no exact_order: no - extra_records: yes incremental: - config_path: "secrets/config.json" configured_catalog_path: "sample_files/configured_catalog.json" diff --git 
a/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_asserts.py b/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_asserts.py index 0b78b93148c6..fa92179b9385 100644 --- a/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_asserts.py +++ b/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_asserts.py @@ -73,7 +73,7 @@ def test_verify_records_schema(configured_catalog: ConfiguredAirbyteCatalog): records = [AirbyteRecordMessage(stream="my_stream", data=record, emitted_at=0) for record in records] - streams_with_errors = verify_records_schema(records, configured_catalog, fail_on_extra_columns=False) + streams_with_errors = verify_records_schema(records, configured_catalog) errors = [error.message for error in streams_with_errors["my_stream"].values()] assert "my_stream" in streams_with_errors @@ -114,7 +114,7 @@ def test_verify_records_schema(configured_catalog: ConfiguredAirbyteCatalog): ) def test_validate_records_format(record, configured_catalog, valid): records = [AirbyteRecordMessage(stream="my_stream", data=record, emitted_at=0)] - streams_with_errors = verify_records_schema(records, configured_catalog, fail_on_extra_columns=False) + streams_with_errors = verify_records_schema(records, configured_catalog) if valid: assert not streams_with_errors else: diff --git a/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_core.py b/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_core.py index fd7f8b020a0b..22395658e043 100644 --- a/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_core.py +++ b/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_core.py @@ -596,15 +596,17 @@ def test_configured_catalog_fixture(mocker, test_strictness_level, configured_ca @pytest.mark.parametrize( - "schema, ignored_fields, expect_records_config, record, expected_records_by_stream, primary_key, expectation", + "schema, ignored_fields, expect_records_config, records, expected_records_by_stream, primary_key, expectation", [ - ({"type": "object"}, {}, _DEFAULT_RECORD_CONFIG, {"aa": 23}, {}, None, does_not_raise()), - ({"type": "object"}, {}, _DEFAULT_RECORD_CONFIG, {}, {}, None, does_not_raise()), + # given no expected records and actual has one empty record + ({"type": "object"}, {}, _DEFAULT_RECORD_CONFIG, [{}], {}, None, does_not_raise()), + # given no expected records but actual has one record that match schema + ({"type": "object"}, {}, _DEFAULT_RECORD_CONFIG, [{"aa": 23}], {}, None, does_not_raise()), ( {"type": "object", "properties": {"created": {"type": "string"}}}, {}, _DEFAULT_RECORD_CONFIG, - {"aa": 23}, + [{"aa": 23}], {}, None, pytest.raises(AssertionError, match="should have some fields mentioned by json schema"), @@ -613,7 +615,7 @@ def test_configured_catalog_fixture(mocker, test_strictness_level, configured_ca {"type": "object", "properties": {"created": {"type": "string"}}}, {}, _DEFAULT_RECORD_CONFIG, - {"created": "23"}, + [{"created": "23"}], {}, None, does_not_raise(), @@ -622,7 +624,7 @@ def test_configured_catalog_fixture(mocker, test_strictness_level, configured_ca {"type": "object", "properties": {"created": {"type": "string"}}}, {}, _DEFAULT_RECORD_CONFIG, - {"root": {"created": "23"}}, + [{"root": {"created": "23"}}], {}, None, pytest.raises(AssertionError, match="should have some fields mentioned by json schema"), @@ -632,57 +634,70 @@ def test_configured_catalog_fixture(mocker, test_strictness_level, configured_ca {"type": "object", 
"properties": {"shop": {"type": ["null", "object"]}, "store": {"type": ["null", "object"]}}}, {}, _DEFAULT_RECORD_CONFIG, - {"shop": {"a": "23"}, "store": {"b": "23"}}, + [{"shop": {"a": "23"}, "store": {"b": "23"}}], {}, None, does_not_raise(), ), - # Fail when expected and actual records are not equal + # Given stream without primary key with different record ( {"type": "object"}, {}, _DEFAULT_RECORD_CONFIG, - {"constant_field": "must equal", "fast_changing_field": [{"field": 2}]}, + [{"constant_field": "must equal", "fast_changing_field": [{"field": 2}]}], {"test_stream": [{"constant_field": "must equal", "fast_changing_field": [{"field": 1}]}]}, None, - pytest.raises(Failed, match="Stream test_stream: All expected records must be produced"), + does_not_raise(), ), - # Expected and Actual records are not equal but we ignore fast changing field + # Given stream without primary key and more actual than expected ( {"type": "object"}, - {"test_stream": [IgnoredFieldsConfiguration(name="fast_changing_field/*/field", bypass_reason="test")]}, + {}, _DEFAULT_RECORD_CONFIG, - {"constant_field": "must equal", "fast_changing_field": [{"field": 2}]}, + [ + {"constant_field": "must equal", "fast_changing_field": [{"field": 1}]}, + {"constant_field": "must equal", "fast_changing_field": [{"field": 2}]} + ], {"test_stream": [{"constant_field": "must equal", "fast_changing_field": [{"field": 1}]}]}, None, does_not_raise(), ), - # Fail when expected and actual records are not equal and exact_order=True + # Expected and Actual records are not equal but we ignore fast changing field ( {"type": "object"}, - {}, - ExpectedRecordsConfig(extra_fields=False, exact_order=True, extra_records=True, path="foobar"), - {"constant_field": "must equal", "fast_changing_field": [{"field": 2}]}, + {"test_stream": [IgnoredFieldsConfiguration(name="fast_changing_field/*/field", bypass_reason="test")]}, + _DEFAULT_RECORD_CONFIG, + [{"constant_field": "must equal", "fast_changing_field": [{"field": 2}]}], {"test_stream": [{"constant_field": "must equal", "fast_changing_field": [{"field": 1}]}]}, None, - pytest.raises(AssertionError, match="Stream test_stream: Mismatch of record order or values"), + does_not_raise(), ), # Expected and Actual records are not equal but we ignore fast changing field (for case when exact_order=True) ( {"type": "object"}, {"test_stream": [IgnoredFieldsConfiguration(name="fast_changing_field/*/field", bypass_reason="test")]}, - ExpectedRecordsConfig(extra_fields=False, exact_order=True, extra_records=True, path="foobar"), - {"constant_field": "must equal", "fast_changing_field": [{"field": 1}]}, + ExpectedRecordsConfig(exact_order=True, path="foobar"), + [{"constant_field": "must equal", "fast_changing_field": [{"field": 1}]}], {"test_stream": [{"constant_field": "must equal", "fast_changing_field": [{"field": 2}]}]}, None, does_not_raise(), ), + # Expected is in actual but not in order (for case when exact_order=True) + ( + {"type": "object"}, + {"test_stream": [IgnoredFieldsConfiguration(name="fast_changing_field/*/field", bypass_reason="test")]}, + ExpectedRecordsConfig(exact_order=True, path="foobar"), + [{"constant_field": "not in order"}, {"constant_field": "must equal"}], + {"test_stream": [{"constant_field": "must equal"}]}, + None, + does_not_raise(), + ), # Match by primary key ( {"type": "object"}, {}, _DEFAULT_RECORD_CONFIG, - {"primary_key": "a primary_key"}, + [{"primary_key": "a primary_key"}], {"test_stream": [{"primary_key": "a primary_key"}]}, [["primary_key"]], does_not_raise(), 
@@ -692,7 +707,7 @@ def test_configured_catalog_fixture(mocker, test_strictness_level, configured_ca {"type": "object"}, {}, _DEFAULT_RECORD_CONFIG, - {"primary_key": "a primary_key", "a field that should be ignored": "ignored value"}, + [{"primary_key": "a primary_key", "a field that should be ignored": "ignored value"}], {"test_stream": [{"primary_key": "a primary_key"}]}, [["primary_key"]], does_not_raise(), @@ -702,7 +717,7 @@ def test_configured_catalog_fixture(mocker, test_strictness_level, configured_ca {"type": "object"}, {}, _DEFAULT_RECORD_CONFIG, - {"primary_key": "a primary_key", "matching key": "value 1"}, + [{"primary_key": "a primary_key", "matching key": "value 1"}], {"test_stream": [{"primary_key": "a primary_key", "non matching key": "value 2"}]}, [["primary_key"]], does_not_raise(), @@ -712,7 +727,7 @@ def test_configured_catalog_fixture(mocker, test_strictness_level, configured_ca {"type": "object"}, {}, _DEFAULT_RECORD_CONFIG, - {"top_level_field": {"child_field": "a primary_key"}, "matching key": "value 1"}, + [{"top_level_field": {"child_field": "a primary_key"}, "matching key": "value 1"}], {"test_stream": [{"top_level_field": {"child_field": "a primary_key"}, "matching key": "value 1"}]}, [["top_level_field", "child_field"]], does_not_raise(), @@ -722,7 +737,7 @@ def test_configured_catalog_fixture(mocker, test_strictness_level, configured_ca {"type": "object"}, {}, _DEFAULT_RECORD_CONFIG, - {"primary_key_1": "a primary_key_1", "primary_key_2": "a primary_key_2"}, + [{"primary_key_1": "a primary_key_1", "primary_key_2": "a primary_key_2"}], {"test_stream": [{"primary_key_1": "a primary_key_1", "primary_key_2": "a primary_key_2"}]}, [["primary_key_1"], ["primary_key_2"]], does_not_raise(), @@ -732,14 +747,14 @@ def test_configured_catalog_fixture(mocker, test_strictness_level, configured_ca {"type": "object"}, {}, _DEFAULT_RECORD_CONFIG, - {"primary_key_1": "a primary_key_1", "primary_key_2_1": {"primary_key_2_2": "primary_key_2"}}, + [{"primary_key_1": "a primary_key_1", "primary_key_2_1": {"primary_key_2_2": "primary_key_2"}}], {"test_stream": [{"primary_key_1": "a primary_key_1", "primary_key_2_1": {"primary_key_2_2": "primary_key_2"}}]}, [["primary_key_1"], ["primary_key_2_1", "primary_key_2_2"]], does_not_raise(), ), ], ) -async def test_read(mocker, schema, ignored_fields, expect_records_config, record, expected_records_by_stream, primary_key, expectation): +async def test_read(mocker, schema, ignored_fields, expect_records_config, records, expected_records_by_stream, primary_key, expectation): configured_catalog = ConfiguredAirbyteCatalog( streams=[ ConfiguredAirbyteStream( @@ -756,7 +771,7 @@ async def test_read(mocker, schema, ignored_fields, expect_records_config, recor ) docker_runner_mock = mocker.MagicMock( call_read=mocker.AsyncMock( - return_value=[AirbyteMessage(type=Type.RECORD, record=AirbyteRecordMessage(stream="test_stream", data=record, emitted_at=111))] + return_value=[AirbyteMessage(type=Type.RECORD, record=AirbyteRecordMessage(stream="test_stream", data=record, emitted_at=111)) for record in records] ) ) t = test_core.TestBasicRead() diff --git a/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_global_fixtures.py b/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_global_fixtures.py index ad7c45046304..c6c617be8693 100644 --- a/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_global_fixtures.py +++ 
b/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_global_fixtures.py @@ -60,53 +60,6 @@ def test_empty_streams_fixture(mocker, test_strictness_level, basic_read_test_co conftest.pytest.fail.assert_not_called() -@pytest.mark.parametrize( - "test_strictness_level, basic_read_test_config, expect_test_failure", - [ - pytest.param( - Config.TestStrictnessLevel.low, - BasicReadTestConfig(config_path="config_path", ignored_fields={"test_stream": [IgnoredFieldsConfiguration(name="ignore_me")]}), - False, - id="[LOW test strictness level] Ignored fields can be declared without bypass_reason.", - ), - pytest.param( - Config.TestStrictnessLevel.low, - BasicReadTestConfig( - config_path="config_path", - ignored_fields={"test_stream": [IgnoredFieldsConfiguration(name="ignore_me", bypass_reason="test")]}, - ), - False, - id="[LOW test strictness level] Ignored fields can be declared with a bypass_reason.", - ), - pytest.param( - Config.TestStrictnessLevel.high, - BasicReadTestConfig(config_path="config_path", ignored_fields={"test_stream": [IgnoredFieldsConfiguration(name="ignore_me")]}), - True, - id="[HIGH test strictness level] Ignored fields can't be declared without bypass_reason.", - ), - pytest.param( - Config.TestStrictnessLevel.high, - BasicReadTestConfig( - config_path="config_path", - ignored_fields={"test_stream": [IgnoredFieldsConfiguration(name="ignore_me", bypass_reason="test")]}, - ), - False, - id="[HIGH test strictness level] Ignored fields can be declared with a bypass_reason.", - ), - ], -) -def test_ignored_fields_fixture(mocker, test_strictness_level, basic_read_test_config, expect_test_failure): - mocker.patch.object(conftest.pytest, "fail") - # Pytest prevents fixture to be directly called. Using __wrapped__ allows us to call the actual function before it's been wrapped by the decorator. 
- assert ( - conftest.ignored_fields_fixture.__wrapped__(basic_read_test_config, test_strictness_level) == basic_read_test_config.ignored_fields - ) - if expect_test_failure: - conftest.pytest.fail.assert_called_once() - else: - conftest.pytest.fail.assert_not_called() - - TEST_AIRBYTE_STREAM_A = AirbyteStream(name="test_stream_a", json_schema={"k": "v"}, supported_sync_modes=[SyncMode.full_refresh]) TEST_AIRBYTE_STREAM_B = AirbyteStream(name="test_stream_b", json_schema={"k": "v"}, supported_sync_modes=[SyncMode.full_refresh]) TEST_AIRBYTE_STREAM_C = AirbyteStream(name="test_stream_c", json_schema={"k": "v"}, supported_sync_modes=[SyncMode.full_refresh]) diff --git a/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_test_full_refresh.py b/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_test_full_refresh.py index 0d913e3a1daa..d767ffa3a0f6 100644 --- a/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_test_full_refresh.py +++ b/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_test_full_refresh.py @@ -67,82 +67,6 @@ def get_default_catalog(schema, **kwargs): no_fail_context = does_not_raise() -ignored_fields_test_cases = [ - pytest.param( - {"type": "object", "properties": {"created": {"type": "string"}}}, - {"created": "23"}, - {"created": "23"}, - no_fail_context, - id="no_ignored_fields_present", - ), - pytest.param( - { - "type": "object", - "properties": { - "created": {"type": "string"}, - "ignore_me": {"type": "string"}, - }, - }, - {"created": "23"}, - {"created": "23", "ignore_me": "23"}, - no_fail_context, - id="with_ignored_field", - ), - pytest.param( - { - "type": "object", - "required": ["created", "DONT_ignore_me"], - "properties": { - "created": {"type": "string"}, - "DONT_ignore_me": {"type": "string"}, - "ignore_me": {"type": "string"}, - }, - }, - {"created": "23"}, - {"created": "23", "DONT_ignore_me": "23", "ignore_me": "hello"}, - fail_context, - id="ignore_field_present_but_a_required_is_not", - ), -] - - -@pytest.mark.parametrize( - "schema, record, expected_record, fail_context", - ignored_fields_test_cases, -) -async def test_read_with_ignore_fields(mocker, schema, record, expected_record, fail_context): - catalog = get_default_catalog(schema) - input_config = ReadTestConfigWithIgnoreFields() - docker_runner_mock = mocker.MagicMock() - - sequence_of_docker_callread_results = [record, expected_record] - - # Ignored fields should work both ways - for first, second in ( - sequence_of_docker_callread_results, - list(reversed(sequence_of_docker_callread_results)), - ): - - docker_runner_mock = mocker.MagicMock( - call_read=mocker.AsyncMock( - side_effect=[ - record_message_from_record([first], emitted_at=111), - record_message_from_record([second], emitted_at=112), - ] - ) - ) - - t = _TestFullRefresh() - with fail_context: - await t.test_sequential_reads( - ignored_fields=input_config.ignored_fields, - connector_config=mocker.MagicMock(), - configured_catalog=catalog, - docker_runner=docker_runner_mock, - detailed_logger=mocker.MagicMock(), - ) - - recordset_comparison_test_cases = [ pytest.param( [["id"]], @@ -209,38 +133,6 @@ async def test_read_with_ignore_fields(mocker, schema, record, expected_record, ] -@pytest.mark.parametrize( - "primary_key, first_read_records, second_read_records, fail_context", - recordset_comparison_test_cases, -) -async def test_recordset_comparison(mocker, primary_key, first_read_records, second_read_records, fail_context): - schema = { - "type": "object", - 
"properties": {"id": {"type": "integer"}, "first_name": {"type": "string"}, "last_name": {"type": "string"}}, - } - catalog = get_default_catalog(schema, primary_key=primary_key) - input_config = ReadTestConfigWithIgnoreFields() - - docker_runner_mock = mocker.MagicMock( - call_read=mocker.AsyncMock( - side_effect=[ - record_message_from_record(first_read_records, emitted_at=111), - record_message_from_record(second_read_records, emitted_at=112), - ] - ) - ) - - t = _TestFullRefresh() - with fail_context: - await t.test_sequential_reads( - ignored_fields=input_config.ignored_fields, - connector_config=mocker.MagicMock(), - configured_catalog=catalog, - docker_runner=docker_runner_mock, - detailed_logger=mocker.MagicMock(), - ) - - @pytest.mark.parametrize( "schema, records_1, records_2, expectation", [ diff --git a/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_utils.py b/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_utils.py index cc994347268a..b99ae8389b22 100644 --- a/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_utils.py +++ b/airbyte-integrations/bases/connector-acceptance-test/unit_tests/test_utils.py @@ -164,20 +164,6 @@ def test_compare_two_records_nested_with_different_orders(obj1, obj2, is_same): assert output_diff, f"{obj1} shouldnt be equal to {obj2}" -def test_exclude_fields(): - """Test that check ignoring fields""" - data = [ - sorted_data(), - ] - ignored_fields = [ - "organization_id", - ] - serializer = partial(make_hashable, exclude_fields=ignored_fields) - output = map(serializer, data) - for item in output: - assert "organization_id" not in item - - class MockContainer: def __init__(self, status: dict, iter_logs: Iterable): self.wait = Mock(return_value=status) diff --git a/airbyte-integrations/connector-templates/connector_acceptance_test_files/acceptance-test-config.yml.hbs b/airbyte-integrations/connector-templates/connector_acceptance_test_files/acceptance-test-config.yml.hbs index bd7e9a9ac06a..0cae90d53d99 100644 --- a/airbyte-integrations/connector-templates/connector_acceptance_test_files/acceptance-test-config.yml.hbs +++ b/airbyte-integrations/connector-templates/connector_acceptance_test_files/acceptance-test-config.yml.hbs @@ -22,9 +22,7 @@ acceptance_tests: # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file # expect_records: # path: "integration_tests/expected_records.jsonl" -# extra_fields: no # exact_order: no -# extra_records: yes incremental: bypass_reason: "This connector does not implement incremental sync" # TODO uncomment this block this block if your connector implements incremental sync: diff --git a/docs/connector-development/testing-connectors/connector-acceptance-tests-reference.md b/docs/connector-development/testing-connectors/connector-acceptance-tests-reference.md index 5c38d69cc276..375146b7d836 100644 --- a/docs/connector-development/testing-connectors/connector-acceptance-tests-reference.md +++ b/docs/connector-development/testing-connectors/connector-acceptance-tests-reference.md @@ -200,9 +200,7 @@ Set `validate_data_points=True` if possible. 
This validation is going to be enab | `expect_records` | object | None | Compare produced records with expected records, see details below | | `expect_records.path` | string | | File with expected records | | `expect_records.bypass_reason` | string | | Explain why this test is bypassed | -| `expect_records.extra_fields` | boolean | False | Allow output records to have other fields i.e: expected records are a subset | | `expect_records.exact_order` | boolean | False | Ensure that records produced in exact same order | -| `expect_records.extra_records` | boolean | True | Allow connector to produce extra records, but still enforce all records from the expected file to be produced | | `file_types` | object | None | Configure file-based connectors specific tests | | `file_types.skip_test` | boolean | False | Skip file-based connectors specific tests for the current config with a `bypass_reason` | | `file_types.bypass_reason` | string | None | Reason why file-based connectors specific tests are skipped | @@ -210,15 +208,7 @@ Set `validate_data_points=True` if possible. This validation is going to be enab | `file_types.unsupported_types[0].extension` | string | | File type in `.csv` format which cannot be added to a test account | | `file_types.unsupported_types[0].bypass_reason` | string | None | Reason why this file type cannot be added to a test account | -`expect_records` is a nested configuration, if omitted - the part of the test responsible for record matching will be skipped. Due to the fact that we can't identify records without primary keys, only the following flag combinations are supported: - -| extra_fields | exact_order | extra_records | -| :----------- | :---------- | :------------ | -| x | x | | -| | x | x | -| | x | | -| | | x | -| | | | +`expect_records` is a nested configuration, if omitted - the part of the test responsible for record matching will be skipped. ### Schema format checking From 16336c6ebc7dca16f3a4f04c81e99610c2a7e905 Mon Sep 17 00:00:00 2001 From: Alexandre Cuoci Date: Wed, 28 Feb 2024 15:37:30 -0500 Subject: [PATCH 023/172] Helm setup instruction update (#35681) --- docs/enterprise-setup/implementation-guide.md | 157 +++++++++++++----- 1 file changed, 112 insertions(+), 45 deletions(-) diff --git a/docs/enterprise-setup/implementation-guide.md b/docs/enterprise-setup/implementation-guide.md index cde0f05d7349..1634853a8582 100644 --- a/docs/enterprise-setup/implementation-guide.md +++ b/docs/enterprise-setup/implementation-guide.md @@ -13,6 +13,8 @@ Airbyte Self-Managed Enterprise must be deployed using Kubernetes. This is to en ## Prerequisites +### Infrastructure Prerequisites + For a production-ready deployment of Self-Managed Enterprise, various infrastructure components are required. We recommend deploying to Amazon EKS or Google Kubernetes Engine. The following diagram illustrates a typical Airbyte deployment running on AWS: ![AWS Architecture Diagram](./assets/self-managed-enterprise-aws.png) @@ -28,7 +30,7 @@ Prior to deploying Self-Managed Enterprise, we recommend having each of the foll | External Secrets Manager | [Amazon Secrets Manager](/operator-guides/configuring-airbyte#secrets) for storing connector secrets. | -We also require you to install and configure the following Kubernetes tooling: +We require you to install and configure the following Kubernetes tooling: 1. Install `helm` by following [these instructions](https://helm.sh/docs/intro/install/) 2. Install `kubectl` by following [these instructions](https://kubernetes.io/docs/tasks/tools/). 
3. Configure `kubectl` to connect to your cluster by using `kubectl config use-context my-cluster-name`: @@ -59,33 +61,75 @@ We also require you to install and configure the following Kubernetes tooling: -## Deploy Airbyte Enterprise +We also require you to create a Kubernetes namespace for your Airbyte deployment: + ``` +kubectl create namespace airbyte +``` + +## Installation Steps -### Add Airbyte Helm Repository +### Step 1: Add Airbyte Helm Repository Follow these instructions to add the Airbyte helm repository: 1. Run `helm repo add airbyte https://airbytehq.github.io/helm-charts`, where `airbyte` is the name of the repository that will be indexed locally. 2. Perform the repo indexing process, and ensure your helm repository is up-to-date by running `helm repo update`. 3. You can then browse all charts uploaded to your repository by running `helm search repo airbyte`. -### Clone & Configure Airbyte +### Step 2: Create your Helm Values File -1. `git clone` the latest revision of the [airbyte-platform repository](https://github.com/airbytehq/airbyte-platform) +1. Create a new `airbyte` directory. Inside, create an empty `airbyte.yml` file. -2. Create a new `airbyte.yml` file in the `configs` directory of the `airbyte-platform` folder. You may also copy `airbyte.sample.yml` to use as a template: -```sh -cp configs/airbyte.sample.yml configs/airbyte.yml +
+Template airbyte.yml file + +``` +webapp-url: # example: localhost:8080 + +initial-user: + email: + first-name: + last-name: + username: # your existing Airbyte instance username + password: # your existing Airbyte instance password + +license-key: + +# Enables Self-Managed Enterprise. +# Do not make modifications to this section. + +global: + edition: "pro" + +keycloak: + enabled: true + bypassInit: false + +keycloak-setup: + enabled: true + +server: + env_vars: + API_AUTHORIZATION_ENABLED: "true" ``` -3. Add your Airbyte Self-Managed Enterprise license key to your `airbyte.yml`. +
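Editorial note: before continuing, it can be worth rendering the chart against this file locally. The following sketch assumes the `airbyte` repository alias added in Step 1; it performs no installation, so YAML or templating mistakes surface before deploy time:

```sh
# Dry-render the chart with the values file; nothing is installed.
helm template airbyte/airbyte --set-file airbyteYml="./airbyte.yml" > /dev/null
```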
+ ### Step 3: Configure your Deployment #### Configure User Authentication 1. Fill in the contents of the `initial-user` block. These credentials create an initial user with admin permissions. You should store these credentials in a secure location. 2. Add your Airbyte Self-Managed Enterprise license key to your `airbyte.yml` in the `license-key` field. 3. To enable SSO authentication, add [SSO auth details](/access-management/sso) to your `airbyte.yml` file.
Configuring auth in your airbyte.yml file - To configure SSO with Okta, add the following at the end of your `airbyte.yml` file: @@ -122,9 +166,9 @@ See the [following guide](/access-management/sso-providers/azure-entra-id) on ho -To configure basic auth (deploy without SSO), remove the entire `auth:` section from your airbyte.yml config file. You will authenticate with the instance admin user and password included in the your `airbyte.yml`. +To modify auth configurations on an existing deployment (after Airbyte has been installed at least once), you will need to `helm upgrade` Airbyte with the additional Helm flag `--set keycloak-setup.env_vars.KEYCLOAK_RESET_REALM=true`, which sets the corresponding environment variable. As this also resets the list of Airbyte users and permissions, please use this with caution.
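Editorial note: for concreteness, a sketch of what that one-off reset invocation could look like, assuming the `airbyte` namespace and `airbyte-enterprise` release name used later in this guide; drop the flag again on the next upgrade:

```sh
# Hypothetical realm-reset upgrade; combine with your usual flags,
# then run a normal upgrade afterwards.
helm upgrade \
--namespace airbyte \
--install "airbyte-enterprise" \
"airbyte/airbyte" \
--set-file airbyteYml="./airbyte.yml" \
--set keycloak-setup.env_vars.KEYCLOAK_RESET_REALM=true
```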
@@ -132,25 +176,17 @@ To modify auth configurations after Airbyte is installed, you will need to redep For Self-Managed Enterprise deployments, we recommend using a dedicated database instance for better reliability, and backups (such as AWS RDS or GCP Cloud SQL) instead of the default internal Postgres database (`airbyte/db`) that Airbyte spins up within the Kubernetes cluster. -:::info -Currently, Airbyte requires connection to a Postgres 13 instance. -::: - We assume in the following that you've already configured a Postgres instance:
External database setup steps -1. In the `charts/airbyte/values.yaml` file, disable the default Postgres database (`airbyte/db`): +1. Add external database details to your `airbyte.yml` file. This disables the default internal Postgres database (`airbyte/db`), and configures the external Postgres database: ```yaml postgresql: enabled: false -``` -2. In the `charts/airbyte/values.yaml` file, enable and configure the external Postgres database: - -```yaml externalDatabase: host: ## Database host user: ## Non-root username for the Airbyte database @@ -158,9 +194,12 @@ externalDatabase: port: 5432 ## Database port number ``` -For the non-root user's password which has database access, you may use `password`, `existingSecret` or `jdbcUrl`. We recommend using `existingSecret`, or injecting sensitive fields from your own external secret store. Each of these parameters is mutually exclusive: +2. For the non-root user's password which has database access, you may use `password`, `existingSecret` or `jdbcUrl`. We recommend using `existingSecret`, or injecting sensitive fields from your own external secret store. Each of these parameters is mutually exclusive: ```yaml +postgresql: + enabled: false + externalDatabase: ... password: ## Password for non-root database user @@ -180,24 +219,18 @@ For Self-Managed Enterprise deployments, we recommend spinning up standalone log
External log storage setup steps -1. In the `charts/airbyte/values.yaml` file, disable the default Minio instance (`airbyte/minio`): -```yaml -minio: - enabled: false -``` - -2. In the `charts/airbyte/values.yaml` file, enable and configure external log storage: +To do this, add external log storage details to your `airbyte.yml` file. This disables the default internal Minio instance (`airbyte/minio`), and configures external log storage: ```yaml +minio: + enabled: false + global: - ... log4jConfig: "log4j2-no-minio.xml" - logs: storage: type: "S3" @@ -223,7 +256,7 @@ global: For each of `accessKey` and `secretKey`, the `password` and `existingSecret` fields are mutually exclusive. -3. Ensure your access key is tied to an IAM user with the [following policies](https://docs.aws.amazon.com/AmazonS3/latest/userguide/example-policies-s3.html#iam-policy-ex0), allowing the user access to S3 storage: +Then, ensure your access key is tied to an IAM user with the [following policies](https://docs.aws.amazon.com/AmazonS3/latest/userguide/example-policies-s3.html#iam-policy-ex0), allowing the user access to S3 storage: ```yaml { @@ -259,10 +292,11 @@ For each of `accessKey` and `secretKey`, the `password` and `existingSecret` fie ```yaml +minio: + enabled: false + global: - ... log4jConfig: "log4j2-no-minio.xml" - logs: storage: type: "GCS" @@ -282,7 +316,6 @@ Note that the `credentials` and `credentialsJson` fields are mutually exclusive.
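Editorial note: both the database `existingSecret` and the storage `accessKey`/`secretKey` `existingSecret` fields reference a Kubernetes secret that you create yourself before deploying. A hypothetical example follows; the secret and key names are illustrative, not mandated by the chart, and must match whatever your values file references:

```sh
# Create the referenced secret in the deployment's namespace.
kubectl create secret generic airbyte-config-secrets \
  --namespace airbyte \
  --from-literal=database-password='<database password>' \
  --from-literal=s3-access-key-id='<access key id>' \
  --from-literal=s3-secret-access-key='<secret access key>'
```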
- #### Configuring Ingress To access the Airbyte UI, you will need to manually attach an ingress configuration to your deployment. The following is a skimmed down definition of an ingress resource you could use for Self-Managed Enterprise: @@ -393,23 +426,57 @@ Once this is complete, ensure that the value of the `webapp-url` field in your ` You may configure ingress using a load balancer or an API Gateway. We do not currently support most service meshes (such as Istio). If you are having networking issues after fully deploying Airbyte, please verify that firewalls or lacking permissions are not interfering with pod-pod communication. Please also verify that deployed pods have the right permissions to make requests to your external database. -### Install Airbyte Enterprise +### Step 4: Deploy Self-Managed Enterprise -Install Airbyte Enterprise on helm using the following command: +Install Airbyte Self-Managed Enterprise on helm using the following command: ```sh -./tools/bin/install_airbyte_pro_on_helm.sh +helm install \ +--namespace airbyte \ +"airbyte-enterprise" \ +"airbyte/airbyte" \ +--set-file airbyteYml="./airbyte.yml" ``` -The default release name is `airbyte-pro`. You can change this via the `RELEASE_NAME` environment -variable. +The default release name is `airbyte-enterprise`. You can change this by modifying the above `helm install` command. + +## Updating Self-Managed Enterprise + +Upgrade Airbyte Self-Managed Enterprise by: + +1. Running `helm repo update`. This pulls an up-to-date version of our helm charts, which is tied to a version of the Airbyte platform. +2. Re-installing Airbyte Self-Managed Enterprise: + +```sh +helm upgrade \ +--namespace airbyte \ +--install "airbyte-enterprise" \ +"airbyte/airbyte" \ +--set-file airbyteYml="./airbyte.yml" +``` -### Customizing your Airbyte Enterprise Deployment +## Customizing your Deployment -In order to customize your deployment, you need to create `values.yaml` file in a local folder and populate it with default configuration override values. A `values.yaml` example can be located in [charts/airbyte](https://github.com/airbytehq/airbyte-platform/blob/main/charts/airbyte/values.yaml) folder of the Airbyte repository. +In order to customize your deployment, you need to create an additional `values.yaml` file in your `airbyte` directory, and populate it with configuration override values. A thorough `values.yaml` example including many configurations can be located in [charts/airbyte](https://github.com/airbytehq/airbyte-platform/blob/main/charts/airbyte/values.yaml) folder of the Airbyte repository. After specifying your own configuration, run the following command: ```sh -./tools/bin/install_airbyte_pro_on_helm.sh --values path/to/values.yaml +helm upgrade \ +--namespace airbyte \ +--install "airbyte-enterprise" \ +"airbyte/airbyte" \ + --set-file airbyteYml="./airbyte.yml" \ + --values path/to/values.yaml ``` + +### Customizing your Service Account + +You may choose to use your own service account instead of the Airbyte default, `airbyte-sa`. This may allow for better audit trails and resource management specific to your organizational policies and requirements.
+ +To do this, add the following to your `airbyte.yml`: + +``` +serviceAccount: + name: ``` From c4d9759853a7f9bd62b2fa91c6a97ad02c95632c Mon Sep 17 00:00:00 2001 From: Juan <80164312+jnr0790@users.noreply.github.com> Date: Wed, 28 Feb 2024 16:01:41 -0500 Subject: [PATCH 024/172] Update okta.md (#35707) --- docs/integrations/sources/okta.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/integrations/sources/okta.md b/docs/integrations/sources/okta.md index b4da7c3b1a83..2cb60f58492f 100644 --- a/docs/integrations/sources/okta.md +++ b/docs/integrations/sources/okta.md @@ -32,7 +32,7 @@ Okta is the complete identity solution for all your apps and people that’s uni 2. In the left navigation bar, click **Sources**. In the top-right corner, click **+ new source**. 3. On the source setup page, select **Okta** from the Source type dropdown and enter a name for this connector. 4. Add **Name** -5. Add **Okta-Domain** +5. Add **Okta Domain** (If your Okta URL is `https://MY_DOMAIN.okta.com/`, then `MY_DOMAIN` is your Okta domain.) 6. Add **Start date** (defaults to 7 days if no date is included) 7. Choose the method of authentication 8. If you select Token authentication - fill the field **Personal Api Token** From c84c61b1e50c83518f9494eaec793b29d669aa3a Mon Sep 17 00:00:00 2001 From: Juan <80164312+jnr0790@users.noreply.github.com> Date: Wed, 28 Feb 2024 16:02:01 -0500 Subject: [PATCH 025/172] Update file.md (#35598) --- docs/integrations/sources/file.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/integrations/sources/file.md b/docs/integrations/sources/file.md index a2fef7e3bbf4..aa9acea4fe9c 100644 --- a/docs/integrations/sources/file.md +++ b/docs/integrations/sources/file.md @@ -111,7 +111,7 @@ For example, if the format `CSV` is selected, then options from the [read_csv](h We would therefore provide in the `reader_options` the following json: ``` -{ "sep" : "\t", "header" : 0, "names": ["column1", "column2"], "parse_dates": ["column2"]} +{ "sep" : "\t", "header" : null, "names": ["column1", "column2"], "parse_dates": ["column2"]} ``` In case you select `JSON` format, then options from the [read_json](https://pandas.pydata.org/pandas-docs/stable/user_guide/io.html#io-json-reader) reader are available. 
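Editorial note on the change from `0` to `null` above: these options are handed straight to pandas, and with an explicit `names` list, `header=0` still consumes the first row as a header and silently drops a row of data, while JSON `null` maps to Python's `header=None` and keeps every row. A standalone sketch of the behavior, independent of the connector:

```sh
python3 - <<'EOF'
import io
import pandas as pd

data = "a\t1\nb\t2\n"
# header=None: both rows survive as data under the supplied column names.
print(pd.read_csv(io.StringIO(data), sep="\t", header=None, names=["column1", "column2"]))
EOF
```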
From 934de16a91c77b160a01a451d9dfda223a21d08a Mon Sep 17 00:00:00 2001 From: Evan Tahler Date: Wed, 28 Feb 2024 13:08:08 -0800 Subject: [PATCH 026/172] Docs index pages are not really docs pages (#35706) --- docusaurus/src/remark/utils.js | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docusaurus/src/remark/utils.js b/docusaurus/src/remark/utils.js index 02c4d856d2e3..087f1f59e46f 100644 --- a/docusaurus/src/remark/utils.js +++ b/docusaurus/src/remark/utils.js @@ -11,8 +11,9 @@ const isDocsPage = (vfile) => { let response = { isDocsPage: false, isTrueDocsPage: false }; if ( - vfile.path.includes("integrations/sources") || - vfile.path.includes("integrations/destinations") + (vfile.path.includes("integrations/sources") || + vfile.path.includes("integrations/destinations")) && + !vfile.path.toLowerCase().includes("readme.md") ) { response.isDocsPage = true; response.isTrueDocsPage = true; From 339f8d7439ba109ef8e4e20c14bbf4a859b059c7 Mon Sep 17 00:00:00 2001 From: Tyler B <104733644+tybernstein@users.noreply.github.com> Date: Wed, 28 Feb 2024 16:23:27 -0500 Subject: [PATCH 027/172] correct typo on-kubernetes-via-helm.md (#35711) --- docs/deploying-airbyte/on-kubernetes-via-helm.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/deploying-airbyte/on-kubernetes-via-helm.md b/docs/deploying-airbyte/on-kubernetes-via-helm.md index d4f974eb8030..bf6c72e0dd59 100644 --- a/docs/deploying-airbyte/on-kubernetes-via-helm.md +++ b/docs/deploying-airbyte/on-kubernetes-via-helm.md @@ -135,7 +135,7 @@ helm install --values path/to/values.yaml %release_name% airbyte/airbyte Starting from `0.39.37-alpha` we've revisited helm charts structure and separated all components of airbyte into their own independent charts, thus by allowing our developers to test single component without deploying airbyte as a whole and by upgrading single component at a time. 
-In most cases upgrade from older monolith chart to a new one should go without any issue, but if you've configured custom logging or specified custom configuration of DB or Logging then follow the instructions listed bellow
+In most cases upgrade from older monolith chart to a new one should go without any issue, but if you've configured custom logging or specified custom configuration of DB or Logging then follow the instructions listed below
 
 ### Minio migration
 

From 333adf74ec6f535bf7cf08f4b79e05b7bcebf133 Mon Sep 17 00:00:00 2001
From: Maxime Carbonneau-Leclerc <3360483+maxi297@users.noreply.github.com>
Date: Wed, 28 Feb 2024 16:35:02 -0500
Subject: [PATCH 028/172] [ISSUE #35112] clean acceptance-test-config.yml (#35708)

---
 .../acceptance-test-config.yml | 2 -
 .../source-aha/acceptance-test-config.yml | 2 -
 .../source-aircall/acceptance-test-config.yml | 2 -
 .../acceptance-test-config.yml | 22 -
 .../acceptance-test-config.yml | 10 -
 .../acceptance-test-config.yml | 33 --
 .../acceptance-test-config.yml | 2 -
 .../acceptance-test-config.yml | 2 -
 .../acceptance-test-config.yml | 2 -
 .../source-auth0/acceptance-test-config.yml | 2 -
 .../acceptance-test-config.yml | 535 ------------------
 .../acceptance-test-config.yml | 2 -
 .../acceptance-test-config.yml | 2 -
 .../acceptance-test-config.yml | 2 -
 .../acceptance-test-config.yml | 2 -
 .../acceptance-test-config.yml | 2 -
 .../acceptance-test-config.yml | 2 -
 .../acceptance-test-config.yml | 4 -
 .../acceptance-test-config.yml | 2 -
 .../source-copper/acceptance-test-config.yml | 2 -
 .../acceptance-test-config.yml | 2 -
 .../source-datadog/acceptance-test-config.yml | 2 -
 .../source-drift/acceptance-test-config.yml | 2 -
 .../source-dv-360/acceptance-test-config.yml | 2 -
 .../acceptance-test-config.yml | 56 --
 .../source-faker/acceptance-test-config.yml | 16 -
 .../source-fauna/acceptance-test-config.yml | 4 -
 .../source-file/acceptance-test-config.yml | 2 -
 .../acceptance-test-config.yml | 2 -
 .../acceptance-test-config.yml | 2 -
 .../acceptance-test-config.yml | 2 -
 .../acceptance-test-config.yml | 2 -
 .../acceptance-test-config.yml | 11 -
 .../acceptance-test-config.yml | 2 -
 .../source-getlago/acceptance-test-config.yml | 2 -
 .../source-github/acceptance-test-config.yml | 52 --
 .../source-gitlab/acceptance-test-config.yml | 27 -
 .../acceptance-test-config.yml | 91 ---
 .../acceptance-test-config.yml | 2 -
 .../acceptance-test-config.yml | 105 ----
 .../acceptance-test-config.yml | 7 -
 .../source-harness/acceptance-test-config.yml | 2 -
 .../source-hubspot/acceptance-test-config.yml | 142 -----
 .../acceptance-test-config.yml | 30 -
 .../acceptance-test-config.yml | 2 -
 .../acceptance-test-config.yml | 2 -
 .../acceptance-test-config.yml | 2 -
 .../acceptance-test-config.yml | 2 -
 .../source-jira/acceptance-test-config.yml | 6 -
 .../acceptance-test-config.yml | 2 -
 .../source-klarna/acceptance-test-config.yml | 2 -
 .../source-klaviyo/acceptance-test-config.yml | 5 -
 .../source-kyriba/acceptance-test-config.yml | 2 -
 .../acceptance-test-config.yml | 2 -
 .../acceptance-test-config.yml | 14 -
 .../acceptance-test-config.yml | 1 -
 .../acceptance-test-config.yml | 2 -
 .../acceptance-test-config.yml | 1 -
 .../source-mailgun/acceptance-test-config.yml | 2 -
 .../acceptance-test-config.yml | 2 -
 .../acceptance-test-config.yml | 2 -
 .../source-merge/acceptance-test-config.yml | 2 -
 .../acceptance-test-config.yml | 9 -
 .../source-monday/acceptance-test-config.yml | 18 -
 .../source-n8n/acceptance-test-config.yml | 2 -
 .../source-nasa/acceptance-test-config.yml | 2 -
.../source-okta/acceptance-test-config.yml | 2 - .../acceptance-test-config.yml | 2 - .../acceptance-test-config.yml | 2 - .../acceptance-test-config.yml | 2 - .../acceptance-test-config.yml | 2 - .../acceptance-test-config.yml | 9 - .../acceptance-test-config.yml | 10 - .../source-pendo/acceptance-test-config.yml | 2 - .../acceptance-test-config.yml | 2 - .../acceptance-test-config.yml | 25 - .../acceptance-test-config.yml | 2 - .../acceptance-test-config.yml | 2 - .../acceptance-test-config.yml | 2 - .../acceptance-test-config.yml | 2 - .../source-railz/acceptance-test-config.yml | 2 - .../acceptance-test-config.yml | 16 - .../acceptance-test-config.yml | 2 - .../acceptance-test-config.yml | 2 - .../acceptance-test-config.yml | 2 - .../acceptance-test-config.yml | 2 - .../acceptance-test-config.yml | 6 - .../acceptance-test-config.yml | 10 - .../acceptance-test-config.yml | 2 - .../acceptance-test-config.yml | 2 - .../acceptance-test-config.yml | 2 - .../source-secoda/acceptance-test-config.yml | 2 - .../acceptance-test-config.yml | 8 - .../acceptance-test-config.yml | 2 - .../acceptance-test-config.yml | 2 - .../source-sentry/acceptance-test-config.yml | 19 - .../source-shopify/acceptance-test-config.yml | 46 -- .../source-shortio/acceptance-test-config.yml | 2 - .../source-slack/acceptance-test-config.yml | 4 - .../source-smaily/acceptance-test-config.yml | 2 - .../acceptance-test-config.yml | 2 - .../acceptance-test-config.yml | 2 - .../acceptance-test-config.yml | 2 - .../acceptance-test-config.yml | 2 - .../source-square/acceptance-test-config.yml | 11 - .../acceptance-test-config.yml | 2 - .../source-stripe/acceptance-test-config.yml | 46 -- .../acceptance-test-config.yml | 2 - .../acceptance-test-config.yml | 32 -- .../source-tmdb/acceptance-test-config.yml | 2 - .../source-todoist/acceptance-test-config.yml | 2 - .../source-toggl/acceptance-test-config.yml | 2 - .../source-trello/acceptance-test-config.yml | 2 - .../source-twilio/acceptance-test-config.yml | 6 - .../source-vantage/acceptance-test-config.yml | 2 - .../acceptance-test-config.yml | 2 - .../acceptance-test-config.yml | 2 - .../source-wrike/acceptance-test-config.yml | 2 - .../source-xero/acceptance-test-config.yml | 11 - .../acceptance-test-config.yml | 2 - .../source-yotpo/acceptance-test-config.yml | 2 - .../source-younium/acceptance-test-config.yml | 2 - .../acceptance-test-config.yml | 2 - .../acceptance-test-config.yml | 2 - .../acceptance-test-config.yml | 13 - .../source-zenloop/acceptance-test-config.yml | 2 - .../acceptance-test-config.yml | 6 - 127 files changed, 1651 deletions(-) diff --git a/airbyte-integrations/connectors/source-activecampaign/acceptance-test-config.yml b/airbyte-integrations/connectors/source-activecampaign/acceptance-test-config.yml index 24cdda784fa8..cf3cd897b54a 100644 --- a/airbyte-integrations/connectors/source-activecampaign/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-activecampaign/acceptance-test-config.yml @@ -18,9 +18,7 @@ tests: # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file # expect_records: # path: "integration_tests/expected_records.jsonl" - # extra_fields: no # exact_order: no - # extra_records: yes # incremental: # TODO if your connector does not implement incremental sync, remove this block # - config_path: "secrets/config.json" # configured_catalog_path: "integration_tests/configured_catalog.json" diff --git 
a/airbyte-integrations/connectors/source-aha/acceptance-test-config.yml b/airbyte-integrations/connectors/source-aha/acceptance-test-config.yml index 202dffc572e8..2b725d04c958 100644 --- a/airbyte-integrations/connectors/source-aha/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-aha/acceptance-test-config.yml @@ -24,9 +24,7 @@ acceptance_tests: # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file # expect_records: # path: "integration_tests/expected_records.jsonl" - # extra_fields: no # exact_order: no - # extra_records: yes incremental: bypass_reason: "This connector does not implement incremental sync" # TODO uncomment this block this block if your connector implements incremental sync: diff --git a/airbyte-integrations/connectors/source-aircall/acceptance-test-config.yml b/airbyte-integrations/connectors/source-aircall/acceptance-test-config.yml index c72d7490f58d..74e47542f1f1 100644 --- a/airbyte-integrations/connectors/source-aircall/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-aircall/acceptance-test-config.yml @@ -23,9 +23,7 @@ acceptance_tests: bypass_reason: "Sandbox account cannot seed this stream" expect_records: path: "integration_tests/expected_records.jsonl" - extra_fields: no exact_order: no - extra_records: yes incremental: tests: - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-airtable/acceptance-test-config.yml b/airbyte-integrations/connectors/source-airtable/acceptance-test-config.yml index 19e267aea061..bd63210c2ba7 100644 --- a/airbyte-integrations/connectors/source-airtable/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-airtable/acceptance-test-config.yml @@ -31,33 +31,11 @@ acceptance_tests: - config_path: "secrets/config.json" expect_records: path: "integration_tests/expected_records.jsonl" - extra_fields: true exact_order: true - extra_records: false - ignored_fields: - users/field_type_test/tblFcp5mncufoYaR9: - - name: "attachment" - bypass_reason: "Attachments' preview links are changed frequently" - "users/50_columns/tbl01Hi93Tt6XJ0u5": - - name: "attachments" - bypass_reason: "Attachments' preview links are changed frequently" - - name: "attachments_2" - bypass_reason: "Attachments' preview links are changed frequently" - config_path: "secrets/config_oauth.json" expect_records: path: "integration_tests/expected_records.jsonl" - extra_fields: true exact_order: true - extra_records: false - ignored_fields: - users/field_type_test/tblFcp5mncufoYaR9: - - name: "attachment" - bypass_reason: "Attachments' preview links are changed frequently" - "users/50_columns/tbl01Hi93Tt6XJ0u5": - - name: "attachments" - bypass_reason: "Attachments' preview links are changed frequently" - - name: "attachments_2" - bypass_reason: "Attachments' preview links are changed frequently" full_refresh: tests: - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-amazon-ads/acceptance-test-config.yml b/airbyte-integrations/connectors/source-amazon-ads/acceptance-test-config.yml index e3eb72c4b986..eff7c3273bf1 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-amazon-ads/acceptance-test-config.yml @@ -29,16 +29,6 @@ acceptance_tests: bypass_reason: "can't populate stream because it requires real ad campaign" - name: sponsored_display_creatives bypass_reason: "can't populate 
stream because it requires real ad campaign" - ignored_fields: - sponsored_product_campaigns: - - name: dailyBudget - bypass_reason: "can be updated, also it is sometimes integer, sometimes float" - sponsored_product_ad_group_suggested_keywords: - - name: suggestedKeywords - bypass_reason: "value can be changed because it is real-life recommendation from Amazon" - sponsored_product_ad_group_bid_recommendations: - - name: suggestedBid - bypass_reason: "value can be changed because it is real-life recommendation from Amazon" timeout_seconds: 2400 expect_records: path: integration_tests/expected_records.jsonl diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/acceptance-test-config.yml b/airbyte-integrations/connectors/source-amazon-seller-partner/acceptance-test-config.yml index c4278cb6df5e..2d84d9683a7f 100644 --- a/airbyte-integrations/connectors/source-amazon-seller-partner/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/acceptance-test-config.yml @@ -25,42 +25,9 @@ acceptance_tests: tests: - config_path: "secrets/config.json" timeout_seconds: 3600 - ignored_fields: - GET_MERCHANT_LISTINGS_ALL_DATA: - - name: "dataEndTime" - bypass_reason: "This field is used as a cursor field and depends on today's date, so it changes every day" - GET_FLAT_FILE_OPEN_LISTINGS_DATA: - - name: "dataEndTime" - bypass_reason: "This field is used as a cursor field and depends on today's date, so it changes every day" - GET_MERCHANTS_LISTINGS_FYP_REPORT: - - name: "dataEndTime" - bypass_reason: "This field is used as a cursor field and depends on today's date, so it changes every day" - GET_MERCHANT_LISTINGS_DATA: - - name: "dataEndTime" - bypass_reason: "This field is used as a cursor field and depends on today's date, so it changes every day" - GET_MERCHANT_LISTINGS_INACTIVE_DATA: - - name: "dataEndTime" - bypass_reason: "This field is used as a cursor field and depends on today's date, so it changes every day" - GET_RESTOCK_INVENTORY_RECOMMENDATIONS_REPORT: - - name: "dataEndTime" - bypass_reason: "This field is used as a cursor field and depends on today's date, so it changes every day" - GET_V2_SETTLEMENT_REPORT_DATA_FLAT_FILE: - - name: "dataEndTime" - bypass_reason: "This field is used as a cursor field and depends on today's date, so it changes every day" - GET_MERCHANT_LISTINGS_DATA_BACK_COMPAT: - - name: "dataEndTime" - bypass_reason: "This field is used as a cursor field and depends on today's date, so it changes every day" - GET_XML_BROWSE_TREE_DATA: - - name: "dataEndTime" - bypass_reason: "This field is used as a cursor field and depends on today's date, so it changes every day" - ListFinancialEvents: - - name: "PostedBefore" - bypass_reason: "This field is used as a cursor field and depends on today's date, so it changes every day" expect_records: path: "integration_tests/expected_records.jsonl" - extra_fields: no exact_order: no - extra_records: yes empty_streams: - name: GET_FLAT_FILE_ACTIONABLE_ORDER_DATA_SHIPPING bypass_reason: "Data cannot be seeded in the test account, integration tests added for the stream instead" diff --git a/airbyte-integrations/connectors/source-amazon-sqs/acceptance-test-config.yml b/airbyte-integrations/connectors/source-amazon-sqs/acceptance-test-config.yml index 5123308bb931..5a42061de78e 100644 --- a/airbyte-integrations/connectors/source-amazon-sqs/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-amazon-sqs/acceptance-test-config.yml @@ -17,9 +17,7 @@ tests: 
empty_streams: [] # expect_records: # path: "integration_tests/expected_records.jsonl" - # extra_fields: no # exact_order: no - # extra_records: yes full_refresh: - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-amplitude/acceptance-test-config.yml b/airbyte-integrations/connectors/source-amplitude/acceptance-test-config.yml index 79eb3c22d680..2af9f234c98c 100644 --- a/airbyte-integrations/connectors/source-amplitude/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-amplitude/acceptance-test-config.yml @@ -29,9 +29,7 @@ acceptance_tests: bypass_reason: "This stream is empty due to free subscription plan for the sandbox." expect_records: path: "integration_tests/expected_records.jsonl" - extra_fields: no exact_order: no - extra_records: yes incremental: tests: - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-appsflyer/acceptance-test-config.yml b/airbyte-integrations/connectors/source-appsflyer/acceptance-test-config.yml index 457757b481ab..a3ece8e84930 100644 --- a/airbyte-integrations/connectors/source-appsflyer/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-appsflyer/acceptance-test-config.yml @@ -18,9 +18,7 @@ tests: # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file # expect_records: # path: "integration_tests/expected_records.jsonl" - # extra_fields: no # exact_order: no - # extra_records: yes incremental: # TODO if your connector does not implement incremental sync, remove this block - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-auth0/acceptance-test-config.yml b/airbyte-integrations/connectors/source-auth0/acceptance-test-config.yml index 938d14f308bd..d7be0a4f65ea 100644 --- a/airbyte-integrations/connectors/source-auth0/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-auth0/acceptance-test-config.yml @@ -19,9 +19,7 @@ acceptance_tests: empty_streams: [] expect_records: path: "integration_tests/expected_records.jsonl" - extra_fields: no exact_order: no - extra_records: yes incremental: # bypass_reason: "Connection check getting lost" tests: diff --git a/airbyte-integrations/connectors/source-bing-ads/acceptance-test-config.yml b/airbyte-integrations/connectors/source-bing-ads/acceptance-test-config.yml index c1b5e2cc05f7..7728fca85880 100644 --- a/airbyte-integrations/connectors/source-bing-ads/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-bing-ads/acceptance-test-config.yml @@ -24,7 +24,6 @@ acceptance_tests: - config_path: secrets/config.json expect_records: path: "integration_tests/expected_records.jsonl" - extra_records: yes empty_streams: - name: account_performance_report_hourly bypass_reason: "Hourly reports are disabled, because sync is too long" @@ -90,531 +89,10 @@ acceptance_tests: bypass_reason: "This stream is tested without start date" - name: campaign_labels bypass_reason: "This stream is tested without start date" - ignored_fields: - campaigns: - - name: Status - bypass_reason: "Status can be changed" - ads: - - name: Descriptions/AssetLink/*/AssetPerformanceLabel - bypass_reason: "This field indicates the asset's performance and is dynamically updated by the API." 
- - name: Headlines/AssetLink/*/AssetPerformanceLabel - bypass_reason: "This field indicates the asset's performance and is dynamically updated by the API." - account_impression_performance_report_weekly: - - name: Ctr - bypass_reason: "dynamic field" - - name: Impressions - bypass_reason: "dynamic field" - - name: AverageCpm - bypass_reason: "dynamic field" - - name: LowQualityImpressionsPercent - bypass_reason: "dynamic field" - - name: Clicks - bypass_reason: "dynamic field" - - name: AverageCpc - bypass_reason: "dynamic field" - - name: Spend - bypass_reason: "dynamic field" - - name: ConversionRate - bypass_reason: "dynamic field" - - name: LowQualityClicksPercent - bypass_reason: "dynamic field" - - name: LowQualityImpressions - bypass_reason: "dynamic field" - - name: ReturnOnAdSpend - bypass_reason: "dynamic field" - - name: AllConversionRate - bypass_reason: "dynamic field" - - name: AllReturnOnAdSpend - bypass_reason: "dynamic field" - age_gender_audience_report_daily: - - name: Impressions - bypass_reason: "dynamic field" - age_gender_audience_report_weekly: - - name: Impressions - bypass_reason: "dynamic field" - keyword_performance_report_weekly: - - name: Mainline1Bid - bypass_reason: "dynamic field" - - name: MainlineBid - bypass_reason: "dynamic field" - - name: FirstPageBid - bypass_reason: "dynamic field" - - name: FirstPageBid - bypass_reason: "dynamic field" - - name: QualityScore - bypass_reason: "dynamic field" - - name: AdRelevance - bypass_reason: "dynamic field" - campaign_impression_performance_report_weekly: - - name: Impressions - bypass_reason: "dynamic field" - - name: LowQualityImpressions - bypass_reason: "dynamic field" - - name: LowQualityImpressionsPercent - bypass_reason: "dynamic field" - - name: Clicks - bypass_reason: "dynamic field" - - name: Ctr - bypass_reason: "dynamic field" - - name: AverageCpc - bypass_reason: "dynamic field" - - name: Spend - bypass_reason: "dynamic field" - - name: LowQualityClicksPercent - bypass_reason: "dynamic field" - - name: ReturnOnAdSpend - bypass_reason: "dynamic field" - - name: AllReturnOnAdSpend - bypass_reason: "dynamic field" - - name: AverageCpm - bypass_reason: "dynamic field" - - name: LandingPageExperience - bypass_reason: "dynamic field" - - name: CampaignStatus - bypass_reason: "dynamic field" - - name: ReturnOnAdSpend - bypass_reason: "dynamic field" - - name: AllReturnOnAdSpend - bypass_reason: "dynamic field" - - name: QualityScore - bypass_reason: "dynamic field" - - name: AdRelevance - bypass_reason: "dynamic field" - campaign_performance_report_weekly: - - name: Impressions - bypass_reason: "dynamic field" - - name: DeviceType - bypass_reason: "dynamic field" - - name: DeviceOS - bypass_reason: "dynamic field" - - name: LowQualityImpressions - bypass_reason: "dynamic field" - - name: Clicks - bypass_reason: "dynamic field" - - name: Ctr - bypass_reason: "dynamic field" - - name: Spend - bypass_reason: "dynamic field" - - name: LandingPageExperience - bypass_reason: "dynamic field" - - name: ReturnOnAdSpend - bypass_reason: "dynamic field" - - name: AllReturnOnAdSpend - bypass_reason: "dynamic field" - - name: AverageCpc - bypass_reason: "dynamic field" - - name: AverageCpm - bypass_reason: "dynamic field" - - name: LowQualityClicksPercent - bypass_reason: "dynamic field" - - name: CampaignStatus - bypass_reason: "dynamic field" - - name: QualityScore - bypass_reason: "dynamic field" - - name: AdRelevance - bypass_reason: "dynamic field" - ad_group_impression_performance_report_weekly: - - name: 
Impressions - bypass_reason: "dynamic field" - - name: DeviceType - bypass_reason: "dynamic field" - - name: HistoricalQualityScore - bypass_reason: "dynamic field" - - name: HistoricalExpectedCtr - bypass_reason: "dynamic field" - - name: HistoricalAdRelevance - bypass_reason: "dynamic field" - - name: HistoricalLandingPageExperience - bypass_reason: "dynamic field" - - name: Clicks - bypass_reason: "dynamic field" - - name: Ctr - bypass_reason: "dynamic field" - - name: AverageCpc - bypass_reason: "dynamic field" - - name: Spend - bypass_reason: "dynamic field" - - name: ConversionRate - bypass_reason: "dynamic field" - - name: ReturnOnAdSpend - bypass_reason: "dynamic field" - - name: AllConversionRate - bypass_reason: "dynamic field" - - name: AllReturnOnAdSpend - bypass_reason: "dynamic field" - - name: AverageCpm - bypass_reason: "dynamic field" - - name: LandingPageExperience - bypass_reason: "dynamic field" - - name: AdRelevance - bypass_reason: "dynamic field" - - name: CampaignStatus - bypass_reason: "dynamic field" - - name: QualityScore - bypass_reason: "dynamic field" - - name: AbsoluteTopImpressionRatePercent - bypass_reason: "dynamic field" - - name: TopImpressionRatePercent - bypass_reason: "dynamic field" - ad_group_performance_report_weekly: - - name: Impressions - bypass_reason: "dynamic field" - - name: Ctr - bypass_reason: "dynamic field" - - name: Clicks - bypass_reason: "dynamic field" - - name: AverageCpm - bypass_reason: "dynamic field" - - name: Spend - bypass_reason: "dynamic field" - - name: AllReturnOnAdSpend - bypass_reason: "dynamic field" - - name: AllConversionRate - bypass_reason: "dynamic field" - - name: AverageCpc - bypass_reason: "dynamic field" - - name: ConversionRate - bypass_reason: "dynamic field" - - name: LandingPageExperience - bypass_reason: "dynamic field" - - name: QualityScore - bypass_reason: "dynamic field" - - name: AdRelevance - bypass_reason: "dynamic field" - ad_performance_report_daily: - - name: TimePeriod - bypass_reason: "dynamic field" - - name: AbsoluteTopImpressionRatePercent - bypass_reason: "dynamic field" - - name: AdDistribution - bypass_reason: "dynamic field" - - name: DeviceType - bypass_reason: "dynamic field" - - name: Language - bypass_reason: "dynamic field" - - name: Network - bypass_reason: "dynamic field" - - name: DeviceOS - bypass_reason: "dynamic field" - - name: TopVsOther - bypass_reason: "dynamic field" - - name: Impressions - bypass_reason: "dynamic field" - - name: Ctr - bypass_reason: "dynamic field" - - name: Spend - bypass_reason: "dynamic field" - - name: AverageCpc - bypass_reason: "dynamic field" - - name: AverageCpm - bypass_reason: "dynamic field" - ad_performance_report_weekly: - - name: TimePeriod - bypass_reason: "dynamic field" - - name: AbsoluteTopImpressionRatePercent - bypass_reason: "dynamic field" - - name: AdDistribution - bypass_reason: "dynamic field" - - name: DeviceType - bypass_reason: "dynamic field" - - name: Language - bypass_reason: "dynamic field" - - name: Network - bypass_reason: "dynamic field" - - name: DeviceOS - bypass_reason: "dynamic field" - - name: TopVsOther - bypass_reason: "dynamic field" - - name: Impressions - bypass_reason: "dynamic field" - - name: Clicks - bypass_reason: "dynamic field" - - name: Ctr - bypass_reason: "dynamic field" - - name: Spent - bypass_reason: "dynamic field" - - name: ReturnOnAdSpend - bypass_reason: "dynamic field" - - name: AllReturnOnAdSpend - bypass_reason: "dynamic field" - - name: ConversionRate - bypass_reason: "dynamic field" - - 
name: AverageCpc - bypass_reason: "dynamic field" - - name: AverageCpm - bypass_reason: "dynamic field" - - name: AllConversions - bypass_reason: "dynamic field" - - name: AllConversionRate - bypass_reason: "dynamic field" - - name: AllRevenue - bypass_reason: "dynamic field" - ad_group_performance_report_daily: - - name: Impressions - bypass_reason: "dynamic field" - - name: Ctr - bypass_reason: "dynamic field" - - name: AverageCpm - bypass_reason: "dynamic field" - - name: HistoricalQualityScore - bypass_reason: "dynamic field" - - name: HistoricalExpectedCtr - bypass_reason: "dynamic field" - - name: HistoricalAdRelevance - bypass_reason: "dynamic field" - - name: HistoricalLandingPageExperience - bypass_reason: "dynamic field" - - name: LandingPageExperience - bypass_reason: "dynamic field" - - name: QualityScore - bypass_reason: "dynamic field" - - name: AdRelevance - bypass_reason: "dynamic field" - budget_summary_report: - - name: Date - bypass_reason: "dynamic field" - - name: MonthlyBudget - bypass_reason: "dynamic field" - - name: DailySpend - bypass_reason: "dynamic field" - - name: MonthToDateSpend - bypass_reason: "dynamic field" - campaign_impression_performance_report_daily: - - name: AdDistribution - bypass_reason: "dynamic field" - - name: LowQualityImpressions - bypass_reason: "dynamic field" - - name: LowQualityImpressionsPercent - bypass_reason: "dynamic field" - - name: ImpressionSharePercent - bypass_reason: "dynamic field" - - name: ImpressionLostToBudgetPercent - bypass_reason: "dynamic field" - - name: ImpressionLostToRankAggPercent - bypass_reason: "dynamic field" - - name: HistoricalQualityScore - bypass_reason: "dynamic field" - - name: HistoricalExpectedCtr - bypass_reason: "dynamic field" - - name: HistoricalAdRelevance - bypass_reason: "dynamic field" - - name: HistoricalLandingPageExperience - bypass_reason: "dynamic field" - - name: ExactMatchImpressionSharePercent - bypass_reason: "dynamic field" - - name: AbsoluteTopImpressionSharePercent - bypass_reason: "dynamic field" - - name: TopImpressionShareLostToRankPercent - bypass_reason: "dynamic field" - - name: TopImpressionShareLostToBudgetPercent - bypass_reason: "dynamic field" - - name: AbsoluteTopImpressionShareLostToRankPercent - bypass_reason: "dynamic field" - - name: AbsoluteTopImpressionShareLostToBudgetPercent - bypass_reason: "dynamic field" - - name: TopImpressionSharePercent - bypass_reason: "dynamic field" - - name: AbsoluteTopImpressionRatePercent - bypass_reason: "dynamic field" - - name: TopImpressionRatePercent - bypass_reason: "dynamic field" - - name: Ctr - bypass_reason: "dynamic field" - - name: Impressions - bypass_reason: "dynamic field" - - name: AverageCpm - bypass_reason: "dynamic field" - - name: Clicks - bypass_reason: "dynamic field" - - name: AverageCpc - bypass_reason: "dynamic field" - - name: Spend - bypass_reason: "dynamic field" - - name: LowQualityClicksPercent - bypass_reason: "dynamic field" - - name: LowQualityClicks - bypass_reason: "dynamic field" - - name: LandingPageExperience - bypass_reason: "dynamic field" - - name: LowQualityGeneralClicks - bypass_reason: "dynamic field" - - name: CampaignStatus - bypass_reason: "dynamic field" - - name: ReturnOnAdSpend - bypass_reason: "dynamic field" - - name: AllReturnOnAdSpend - bypass_reason: "dynamic field" - - name: QualityScore - bypass_reason: "dynamic field" - - name: AdRelevance - bypass_reason: "dynamic field" - account_performance_report_daily: - - name: Ctr - bypass_reason: "dynamic field" - - name: Impressions - 
bypass_reason: "dynamic field" - - name: AverageCpm - bypass_reason: "dynamic field" - account_performance_report_weekly: - - name: Clicks - bypass_reason: "dynamic field" - - name: Spend - bypass_reason: "dynamic field" - - name: ReturnOnAdSpend - bypass_reason: "dynamic field" - - name: AverageCpc - bypass_reason: "dynamic field" - - name: ConversionRate - bypass_reason: "dynamic field" - - name: LowQualityClicksPercent - bypass_reason: "dynamic field" - - name: AverageCpm - bypass_reason: "dynamic field" - - name: Impressions - bypass_reason: "dynamic field" - - name: Ctr - bypass_reason: "dynamic field" - account_impression_performance_report_daily: - - name: Ctr - bypass_reason: "dynamic field" - - name: Impressions - bypass_reason: "dynamic field" - - name: AverageCpm - bypass_reason: "dynamic field" - - name: LowQualityImpressionsPercent - bypass_reason: "dynamic field" - - name: ExactMatchImpressionSharePercent - bypass_reason: "dynamic field" - - name: AbsoluteTopImpressionSharePercent - bypass_reason: "dynamic field" - - name: TopImpressionShareLostToRankPercent - bypass_reason: "dynamic field" - - name: TopImpressionShareLostToBudgetPercent - bypass_reason: "dynamic field" - - name: AbsoluteTopImpressionShareLostToRankPercent - bypass_reason: "dynamic field" - - name: AbsoluteTopImpressionShareLostToBudgetPercent - bypass_reason: "dynamic field" - - name: TopImpressionSharePercent - bypass_reason: "dynamic field" - - name: AbsoluteTopImpressionRatePercent - bypass_reason: "dynamic field" - - name: ImpressionSharePercent - bypass_reason: "dynamic field" - - name: ImpressionLostToBudgetPercent - bypass_reason: "dynamic field" - - name: ImpressionLostToRankAggPercent - bypass_reason: "dynamic field" - ad_group_impression_performance_report_daily: - - name: Ctr - bypass_reason: "dynamic field" - - name: Impressions - bypass_reason: "dynamic field" - - name: AverageCpm - bypass_reason: "dynamic field" - - name: HistoricalQualityScore - bypass_reason: "dynamic field" - - name: HistoricalExpectedCtr - bypass_reason: "dynamic field" - - name: HistoricalAdRelevance - bypass_reason: "dynamic field" - - name: HistoricalLandingPageExperience - bypass_reason: "dynamic field" - - name: LandingPageExperience - bypass_reason: "dynamic field" - - name: ImpressionSharePercent - bypass_reason: "dynamic field" - - name: ImpressionLostToBudgetPercent - bypass_reason: "dynamic field" - - name: ImpressionLostToRankAggPercent - bypass_reason: "dynamic field" - - name: ExactMatchImpressionSharePercent - bypass_reason: "dynamic field" - - name: AdRelevance - bypass_reason: "dynamic field" - - name: CampaignStatus - bypass_reason: "dynamic field" - - name: QualityScore - bypass_reason: "dynamic field" - - name: AbsoluteTopImpressionRatePercent - bypass_reason: "dynamic field" - - name: TopImpressionRatePercent - bypass_reason: "dynamic field" - campaign_performance_report_daily: - - name: Ctr - bypass_reason: "dynamic field" - - name: Impressions - bypass_reason: "dynamic field" - - name: AverageCpm - bypass_reason: "dynamic field" - - name: HistoricalQualityScore - bypass_reason: "dynamic field" - - name: HistoricalExpectedCtr - bypass_reason: "dynamic field" - - name: HistoricalAdRelevance - bypass_reason: "dynamic field" - - name: HistoricalLandingPageExperience - bypass_reason: "dynamic field" - - name: LandingPageExperience - bypass_reason: "dynamic field" - - name: CampaignStatus - bypass_reason: "dynamic field" - - name: QualityScore - bypass_reason: "dynamic field" - - name: AdRelevance - 
bypass_reason: "dynamic field" - keyword_performance_report_daily: - - name: Language - bypass_reason: "dynamic field" - - name: Network - bypass_reason: "dynamic field" - - name: DeviceOS - bypass_reason: "dynamic field" - - name: TopVsOther - bypass_reason: "dynamic field" - - name: Impressions - bypass_reason: "dynamic field" - - name: Clicks - bypass_reason: "dynamic field" - - name: Ctr - bypass_reason: "dynamic field" - - name: Spend - bypass_reason: "dynamic field" - - name: ReturnOnAdSpend - bypass_reason: "dynamic field" - - name: AllReturnOnAdSpend - bypass_reason: "dynamic field" - - name: ConversionRate - bypass_reason: "dynamic field" - - name: AverageCpc - bypass_reason: "dynamic field" - - name: AverageCpm - bypass_reason: "dynamic field" - - name: AllConversionRate - bypass_reason: "dynamic field" - - name: Mainline1Bid - bypass_reason: "dynamic field" - - name: MainlineBid - bypass_reason: "dynamic field" - - name: HistoricalExpectedCtr - bypass_reason: "dynamic field" - - name: HistoricalAdRelevance - bypass_reason: "dynamic field" - - name: HistoricalLandingPageExperience - bypass_reason: "dynamic field" - - name: HistoricalQualityScore - bypass_reason: "dynamic field" - - name: FirstPageBid - bypass_reason: "dynamic field" - - name: QualityScore - bypass_reason: "dynamic field" - - name: AdRelevance - bypass_reason: "dynamic field" timeout_seconds: 9000 - config_path: secrets/config_no_date.json expect_records: path: "integration_tests/expected_records_no_start_date.jsonl" - extra_records: yes empty_streams: - name: app_install_ads bypass_reason: "Can not populate; new campaign with link to app needed; feature is not available yet" @@ -728,19 +206,6 @@ acceptance_tests: bypass_reason: "This stream is tested with config with start date" - name: account_impression_performance_report_weekly bypass_reason: "This stream is tested with config with start date" - ignored_fields: - campaign_labels: - - name: Modified Time - bypass_reason: "dynamic field" - keyword_labels: - - name: Modified Time - bypass_reason: "dynamic field" - ad_group_labels: - - name: Modified Time - bypass_reason: "dynamic field" - labels: - - name: Modified Time - bypass_reason: "dynamic field" timeout_seconds: 9000 full_refresh: tests: diff --git a/airbyte-integrations/connectors/source-braintree/acceptance-test-config.yml b/airbyte-integrations/connectors/source-braintree/acceptance-test-config.yml index 92fb919762f9..27dd6b7697ef 100644 --- a/airbyte-integrations/connectors/source-braintree/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-braintree/acceptance-test-config.yml @@ -20,9 +20,7 @@ acceptance_tests: bypass_reason: "No subscription in the test account" expect_records: path: integration_tests/expected_records.jsonl - extra_fields: no exact_order: no - extra_records: yes incremental: tests: - config_path: secrets/config.json diff --git a/airbyte-integrations/connectors/source-chargebee/acceptance-test-config.yml b/airbyte-integrations/connectors/source-chargebee/acceptance-test-config.yml index e7ce8a221fb0..333354b0dc91 100644 --- a/airbyte-integrations/connectors/source-chargebee/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-chargebee/acceptance-test-config.yml @@ -40,9 +40,7 @@ acceptance_tests: bypass_reason: "To be Tested with integration tests." 
expect_records: path: "integration_tests/expected_records.jsonl" - extra_fields: no exact_order: no - extra_records: yes fail_on_extra_columns: true incremental: tests: diff --git a/airbyte-integrations/connectors/source-chargify/acceptance-test-config.yml b/airbyte-integrations/connectors/source-chargify/acceptance-test-config.yml index d25e058e4a93..7a88cdc009d9 100644 --- a/airbyte-integrations/connectors/source-chargify/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-chargify/acceptance-test-config.yml @@ -26,9 +26,7 @@ acceptance_tests: # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file # expect_records: # path: "integration_tests/expected_records.jsonl" - # extra_fields: no # exact_order: no - # extra_records: yes incremental: bypass_reason: "This connector does not implement incremental sync" # TODO uncomment this block this block if your connector implements incremental sync: diff --git a/airbyte-integrations/connectors/source-chartmogul/acceptance-test-config.yml b/airbyte-integrations/connectors/source-chartmogul/acceptance-test-config.yml index aff79bf554f2..e395d1033ed4 100644 --- a/airbyte-integrations/connectors/source-chartmogul/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-chartmogul/acceptance-test-config.yml @@ -20,9 +20,7 @@ acceptance_tests: configured_catalog_path: "integration_tests/configured_catalog.json" expect_records: path: "integration_tests/expected_records.jsonl" - extra_fields: no exact_order: no - extra_records: yes fail_on_extra_columns: false full_refresh: tests: diff --git a/airbyte-integrations/connectors/source-clockify/acceptance-test-config.yml b/airbyte-integrations/connectors/source-clockify/acceptance-test-config.yml index 326def29cf01..3f59b2d1a563 100644 --- a/airbyte-integrations/connectors/source-clockify/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-clockify/acceptance-test-config.yml @@ -23,9 +23,7 @@ acceptance_tests: # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file # expect_records: # path: "integration_tests/expected_records.jsonl" - # extra_fields: no # exact_order: no - # extra_records: yes incremental: bypass_reason: "This connector does not implement incremental sync" # TODO uncomment this block this block if your connector implements incremental sync: diff --git a/airbyte-integrations/connectors/source-coinmarketcap/acceptance-test-config.yml b/airbyte-integrations/connectors/source-coinmarketcap/acceptance-test-config.yml index 29bc9e9e1a53..9e22ab330d23 100644 --- a/airbyte-integrations/connectors/source-coinmarketcap/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-coinmarketcap/acceptance-test-config.yml @@ -23,9 +23,7 @@ acceptance_tests: # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file # expect_records: # path: "integration_tests/expected_records.jsonl" - # extra_fields: no # exact_order: no - # extra_records: yes incremental: bypass_reason: "This connector does not implement incremental sync" # TODO uncomment this block this block if your connector implements incremental sync: diff --git a/airbyte-integrations/connectors/source-confluence/acceptance-test-config.yml b/airbyte-integrations/connectors/source-confluence/acceptance-test-config.yml index 6ff3ec48adc8..a43c6f34c304 
100644 --- a/airbyte-integrations/connectors/source-confluence/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-confluence/acceptance-test-config.yml @@ -21,10 +21,6 @@ acceptance_tests: expect_records: path: "integration_tests/expected_records.jsonl" fail_on_extra_columns: false - ignored_fields: - pages: - - name: body/view/value - bypass_reason: "Different class order" full_refresh: tests: - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-convertkit/acceptance-test-config.yml b/airbyte-integrations/connectors/source-convertkit/acceptance-test-config.yml index 5184a39b38df..2ab278d07640 100644 --- a/airbyte-integrations/connectors/source-convertkit/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-convertkit/acceptance-test-config.yml @@ -18,9 +18,7 @@ tests: # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file # expect_records: # path: "integration_tests/expected_records.jsonl" - # extra_fields: no # exact_order: no - # extra_records: yes # incremental: # TODO if your connector does not implement incremental sync, remove this block # - config_path: "secrets/config.json" # configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-copper/acceptance-test-config.yml b/airbyte-integrations/connectors/source-copper/acceptance-test-config.yml index 42544e846a39..04988f5c8edd 100644 --- a/airbyte-integrations/connectors/source-copper/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-copper/acceptance-test-config.yml @@ -22,9 +22,7 @@ acceptance_tests: empty_streams: [] # expect_records: # path: "integration_tests/expected_records.jsonl" - # extra_fields: no # exact_order: no - # extra_records: yes incremental: bypass_reason: "This connector does not implement incremental sync" # tests: diff --git a/airbyte-integrations/connectors/source-customer-io/acceptance-test-config.yml b/airbyte-integrations/connectors/source-customer-io/acceptance-test-config.yml index 6d56394853a4..c8f64fd70aaa 100644 --- a/airbyte-integrations/connectors/source-customer-io/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-customer-io/acceptance-test-config.yml @@ -19,9 +19,7 @@ acceptance_tests: empty_streams: [] expect_records: path: "integration_tests/expected_records.jsonl" - extra_fields: no exact_order: no - extra_records: yes # incremental: # # bypass_reason: "This connector does not implement incremental sync" # tests: diff --git a/airbyte-integrations/connectors/source-datadog/acceptance-test-config.yml b/airbyte-integrations/connectors/source-datadog/acceptance-test-config.yml index 191fa7e68daa..e59744f07a5e 100644 --- a/airbyte-integrations/connectors/source-datadog/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-datadog/acceptance-test-config.yml @@ -28,9 +28,7 @@ acceptance_tests: # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file # expect_records: # path: "integration_tests/expected_records.jsonl" - # extra_fields: no # exact_order: no - # extra_records: yes incremental: bypass_reason: "This connector does not implement incremental sync" # TODO uncomment this block this block if your connector implements incremental sync: diff --git a/airbyte-integrations/connectors/source-drift/acceptance-test-config.yml 
b/airbyte-integrations/connectors/source-drift/acceptance-test-config.yml index 08662b2c94b3..a9896eee99cd 100644 --- a/airbyte-integrations/connectors/source-drift/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-drift/acceptance-test-config.yml @@ -25,9 +25,7 @@ acceptance_tests: bypass_reason: "Sandbox account can't seed this stream" # expect_records: # path: "integration_tests/expected_records.jsonl" - # extra_fields: no # exact_order: no - # extra_records: yes incremental: bypass_reason: "This connector does not implement incremental sync" # TODO uncomment this block this block if your connector implements incremental sync: diff --git a/airbyte-integrations/connectors/source-dv-360/acceptance-test-config.yml b/airbyte-integrations/connectors/source-dv-360/acceptance-test-config.yml index ab23426c22ea..d92e6a06e982 100644 --- a/airbyte-integrations/connectors/source-dv-360/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-dv-360/acceptance-test-config.yml @@ -18,9 +18,7 @@ tests: # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file # expect_records: # path: "integration_tests/expected_records.jsonl" - # extra_fields: no # exact_order: no - # extra_records: yes incremental: # TODO if your connector does not implement incremental sync, remove this block - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-facebook-marketing/acceptance-test-config.yml b/airbyte-integrations/connectors/source-facebook-marketing/acceptance-test-config.yml index b4ec84009b78..9417cf6a8600 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-facebook-marketing/acceptance-test-config.yml @@ -26,61 +26,6 @@ acceptance_tests: basic_read: tests: - config_path: "secrets/config.json" - ignored_fields: - ad_account: - - name: age - bypass_reason: is changeable - - name: amount_spent - bypass_reason: is changeable - - name: capabilities - bypass_reason: is changeable - ad_creatives: - - name: thumbnail_url - bypass_reason: is changeable - - name: image_url - bypass_reason: is changeable - images: - - name: permalink_url - bypass_reason: is changeable - - name: url - bypass_reason: is changeable - - name: url_128 - bypass_reason: is changeable - ads_insights_demographics_dma_region: - - name: cost_per_estimated_ad_recallers - bypass_reason: can be missing - ads_insights_dma: - - name: cost_per_estimated_ad_recallers - bypass_reason: can be missing - ads_insights_age_and_gender: - - name: cost_per_estimated_ad_recallers - bypass_reason: can be missing - ads_insights_delivery_device: - - name: cost_per_estimated_ad_recallers - bypass_reason: can be missing - ads_insights_delivery_platform_and_device_platform: - - name: cost_per_estimated_ad_recallers - bypass_reason: can be missing - ads_insights_demographics_age: - - name: cost_per_estimated_ad_recallers - bypass_reason: can be missing - ads_insights_demographics_country: - - name: cost_per_estimated_ad_recallers - bypass_reason: can be missing - ads_insights_demographics_gender: - - name: cost_per_estimated_ad_recallers - bypass_reason: can be missing - ads_insights_platform_and_device: - - name: cost_per_estimated_ad_recallers - bypass_reason: can be missing - ads_insights_region: - - name: cost_per_estimated_ad_recallers - bypass_reason: can be 
missing - custom_audiences: - - name: approximate_count_lower_bound - bypass_reason: is changeable - - name: approximate_count_upper_bound - bypass_reason: is changeable empty_streams: - name: "ads_insights_action_product_id" bypass_reason: "Data cannot be seeded in the test account, integration tests added for the stream instead" @@ -89,7 +34,6 @@ acceptance_tests: timeout_seconds: 4800 expect_records: path: "integration_tests/expected_records.jsonl" - extra_records: yes incremental: tests: - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-faker/acceptance-test-config.yml b/airbyte-integrations/connectors/source-faker/acceptance-test-config.yml index 9c72cea9eb5f..72be6dd38c96 100644 --- a/airbyte-integrations/connectors/source-faker/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-faker/acceptance-test-config.yml @@ -20,22 +20,6 @@ acceptance_tests: - config_path: secrets/config.json expect_records: path: integration_tests/expected_records.jsonl - ignored_fields: - users: - - name: updated_at - bypass_reason: "dynamic field" - - name: created_at - bypass_reason: "dynamic field" - products: - - name: updated_at - bypass_reason: "dynamic field" - - name: created_at - bypass_reason: "dynamic field" - purchases: - - name: updated_at - bypass_reason: "dynamic field" - - name: created_at - bypass_reason: "dynamic field" full_refresh: tests: - config_path: secrets/config.json diff --git a/airbyte-integrations/connectors/source-fauna/acceptance-test-config.yml b/airbyte-integrations/connectors/source-fauna/acceptance-test-config.yml index 3dfadb9d8aea..f82be0d40c40 100644 --- a/airbyte-integrations/connectors/source-fauna/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-fauna/acceptance-test-config.yml @@ -20,17 +20,13 @@ tests: empty_streams: [] expect_records: path: "integration_tests/expected_records.jsonl" - extra_fields: no exact_order: yes - extra_records: no - config_path: "secrets/config-deletions.json" configured_catalog_path: "integration_tests/configured_catalog_incremental.json" empty_streams: [] expect_records: path: "integration_tests/expected_deletions_records.txt" - extra_fields: no exact_order: yes - extra_records: no incremental: - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-file/acceptance-test-config.yml b/airbyte-integrations/connectors/source-file/acceptance-test-config.yml index dde3f1091671..733a50b5aaa0 100644 --- a/airbyte-integrations/connectors/source-file/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-file/acceptance-test-config.yml @@ -25,9 +25,7 @@ acceptance_tests: - config_path: "integration_tests/config.json" expect_records: path: "integration_tests/expected_records.jsonl" - extra_fields: no exact_order: no - extra_records: yes file_types: skip_test: yes bypass_reason: "Source is not based on file based CDK" diff --git a/airbyte-integrations/connectors/source-firebase-realtime-database/acceptance-test-config.yml b/airbyte-integrations/connectors/source-firebase-realtime-database/acceptance-test-config.yml index 2cca19b47363..56e7cba71756 100644 --- a/airbyte-integrations/connectors/source-firebase-realtime-database/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-firebase-realtime-database/acceptance-test-config.yml @@ -17,9 +17,7 @@ tests: empty_streams: [] expect_records: path: "integration_tests/expected_records.txt" - 
extra_fields: no exact_order: no - extra_records: yes full_refresh: - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-firebolt/acceptance-test-config.yml b/airbyte-integrations/connectors/source-firebolt/acceptance-test-config.yml index ff09f0084a8a..16bb9670cdd8 100644 --- a/airbyte-integrations/connectors/source-firebolt/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-firebolt/acceptance-test-config.yml @@ -26,9 +26,7 @@ acceptance_tests: expect_trace_message_on_failure: false expect_records: path: "integration_tests/expected_records.jsonl" - extra_fields: no exact_order: yes - extra_records: no full_refresh: tests: - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-flexport/acceptance-test-config.yml b/airbyte-integrations/connectors/source-flexport/acceptance-test-config.yml index 6333001736de..8f7488a768b7 100644 --- a/airbyte-integrations/connectors/source-flexport/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-flexport/acceptance-test-config.yml @@ -39,9 +39,7 @@ acceptance_tests: # # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file # # expect_records: # # path: "integration_tests/expected_records.jsonl" -# # extra_fields: no # # exact_order: no -# # extra_records: yes # incremental: # bypass_reason: "This connector does not implement incremental sync" # # TODO uncomment this block this block if your connector implements incremental sync: diff --git a/airbyte-integrations/connectors/source-freshdesk/acceptance-test-config.yml b/airbyte-integrations/connectors/source-freshdesk/acceptance-test-config.yml index 199aeae9461b..f76384cf170a 100644 --- a/airbyte-integrations/connectors/source-freshdesk/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-freshdesk/acceptance-test-config.yml @@ -24,9 +24,7 @@ acceptance_tests: - config_path: "secrets/config.json" expect_records: path: "integration_tests/expected_records.jsonl" - extra_fields: no exact_order: no - extra_records: yes timeout_seconds: 600 empty_streams: - name: skills diff --git a/airbyte-integrations/connectors/source-freshsales/acceptance-test-config.yml b/airbyte-integrations/connectors/source-freshsales/acceptance-test-config.yml index cefc9ad1fffa..def0a764126a 100644 --- a/airbyte-integrations/connectors/source-freshsales/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-freshsales/acceptance-test-config.yml @@ -20,17 +20,6 @@ acceptance_tests: - config_path: "secrets/config.json" expect_records: path: "integration_tests/expected_records.jsonl" - ignored_fields: - open_deals: - - name: age - bypass_reason: "Frequently changed data" - - name: rotten_days - bypass_reason: "Frequently changed data" - won_deals: - - name: age - bypass_reason: "Frequently changed data" - - name: rotten_days - bypass_reason: "Frequently changed data" full_refresh: tests: - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-gainsight-px/acceptance-test-config.yml b/airbyte-integrations/connectors/source-gainsight-px/acceptance-test-config.yml index cb5dc426f507..e8e171ba92dd 100644 --- a/airbyte-integrations/connectors/source-gainsight-px/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-gainsight-px/acceptance-test-config.yml @@ -34,9 +34,7 @@ acceptance_tests: # TODO 
uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file # expect_records: # path: "integration_tests/expected_records.jsonl" - # extra_fields: no # exact_order: no - # extra_records: yes incremental: bypass_reason: "This connector does not implement incremental sync" # TODO uncomment this block this block if your connector implements incremental sync: diff --git a/airbyte-integrations/connectors/source-getlago/acceptance-test-config.yml b/airbyte-integrations/connectors/source-getlago/acceptance-test-config.yml index cfe7c503642d..2473993b748c 100644 --- a/airbyte-integrations/connectors/source-getlago/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-getlago/acceptance-test-config.yml @@ -26,9 +26,7 @@ acceptance_tests: # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file # expect_records: # path: "integration_tests/expected_records.jsonl" - # extra_fields: no # exact_order: no - # extra_records: yes incremental: bypass_reason: "This connector does not implement incremental sync" # TODO uncomment this block this block if your connector implements incremental sync: diff --git a/airbyte-integrations/connectors/source-github/acceptance-test-config.yml b/airbyte-integrations/connectors/source-github/acceptance-test-config.yml index 5ee30aee5036..83d184693c0d 100644 --- a/airbyte-integrations/connectors/source-github/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-github/acceptance-test-config.yml @@ -27,62 +27,10 @@ acceptance_tests: - config_path: "secrets/config.json" expect_records: path: "integration_tests/expected_records.jsonl" - extra_fields: no exact_order: no - extra_records: yes empty_streams: - name: "events" bypass_reason: "Only events created within the past 90 days can be showed. Stream is tested with integration tests." 
- ignored_fields: - contributor_activity: - - name: weeks - bypass_reason: "depend on changing data" - - name: total - bypass_reason: "depend on changing data" - workflows: - - name: created_at - bypass_reason: value may be returned in different time zones - - name: updated_at - bypass_reason: value may be returned in different time zones - workflow_jobs: - - name: steps/*/started_at - bypass_reason: "depend on changing data" - - name: steps/*/completed_at - bypass_reason: "depend on changing data" - organizations: - - name: followers - bypass_reason: "fast changing data" - - name: updated_at - bypass_reason: "fast changing data" - - name: plan - bypass_reason: "fast changing data" - - name: public_repos - bypass_reason: "fast changing data" - - name: total_private_repos - bypass_reason: "fast changing data" - - name: owned_private_repos - bypass_reason: "fast changing data" - repositories: - - name: updated_at - bypass_reason: "fast changing data" - - name: pushed_at - bypass_reason: "fast changing data" - - name: size - bypass_reason: "fast changing data" - - name: stargazers_count - bypass_reason: "fast changing data" - - name: watchers_count - bypass_reason: "fast changing data" - - name: forks_count - bypass_reason: "fast changing data" - - name: forks - bypass_reason: "fast changing data" - - name: open_issues - bypass_reason: "fast changing data" - - name: open_issues_count - bypass_reason: "fast changing data" - - name: watchers - bypass_reason: "fast changing data" incremental: tests: - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-gitlab/acceptance-test-config.yml b/airbyte-integrations/connectors/source-gitlab/acceptance-test-config.yml index 148e387d8c9f..2e4894416879 100644 --- a/airbyte-integrations/connectors/source-gitlab/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-gitlab/acceptance-test-config.yml @@ -25,15 +25,6 @@ acceptance_tests: timeout_seconds: 3600 expect_records: path: "integration_tests/expected_records.jsonl" - ignored_fields: - jobs: - - name: "user" - bypass_reason: "User object contains local_time which will be different each time test is run" - projects: - - name: "updated_at" - bypass_reason: "value can be changed" - - name: "code_suggestions" - bypass_reason: "value can be changed" - config_path: "secrets/config_with_ids.json" timeout_seconds: 3600 empty_streams: @@ -43,28 +34,10 @@ acceptance_tests: bypass_reason: "Group in this config does not have epics issues. This stream is tested in the above TC." 
        expect_records:
          path: "integration_tests/expected_records_with_ids.jsonl"
-       ignored_fields:
-         jobs:
-           - name: "user"
-             bypass_reason: "User object contains local_time which will be different each time test is run"
-         projects:
-           - name: "updated_at"
-             bypass_reason: "value can be changed"
-           - name: "code_suggestions"
-             bypass_reason: "value can be changed"
      - config_path: "secrets/config_oauth.json"
        timeout_seconds: 3600
        expect_records:
          path: "integration_tests/expected_records.jsonl"
-       ignored_fields:
-         jobs:
-           - name: "user"
-             bypass_reason: "User object contains local_time which will be different each time test is run"
-         projects:
-           - name: "updated_at"
-             bypass_reason: "value can be changed"
-           - name: "code_suggestions"
-             bypass_reason: "value can be changed"
  incremental:
    tests:
      - config_path: "secrets/config_with_ids.json"
diff --git a/airbyte-integrations/connectors/source-google-ads/acceptance-test-config.yml b/airbyte-integrations/connectors/source-google-ads/acceptance-test-config.yml
index b7b7d3ba73a6..1d1f6b7fa8af 100644
--- a/airbyte-integrations/connectors/source-google-ads/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-google-ads/acceptance-test-config.yml
@@ -22,9 +22,7 @@ acceptance_tests:
      - config_path: "secrets/config.json"
        expect_records:
          path: "integration_tests/expected_records.jsonl"
-         extra_fields: no
          exact_order: yes
-         extra_records: yes # the file with all the records is 15 MB, so comparing only 3 records
        timeout_seconds: 3600
        empty_streams:
          - name: "customer_label"
@@ -35,14 +33,6 @@ acceptance_tests:
            bypass_reason: "No data for this date range, tested in next config"
          - name: "click_view"
            bypass_reason: "Stream has data only for last 90 days, next config is used for testing it"
-       ignored_fields:
-         customer:
-           - name: customer.optimization_score_weight
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: customer.optimization_score
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: customer.pay_per_conversion_eligibility_failure_reasons
-             bypass_reason: "Value can be updated by Google Ads"
      - config_path: "secrets/config_click_view.json"
        expect_records:
          path: "integration_tests/expected_records_click.jsonl"
@@ -56,87 +46,6 @@ acceptance_tests:
            bypass_reason: "No data for this date range, tested in previous config"
          - name: "keyword_view"
            bypass_reason: "No data for this date range, tested in previous config"
-       ignored_fields:
-         account_performance_report:
-           - name: metrics.cross_device_conversions
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: metrics.all_conversions
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: metrics.all_conversions_from_interactions_rate
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: metrics.all_conversions_value
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: metrics.cost_per_all_conversions
-             bypass_reason: "Value can be updated by Google Ads"
-         ad_group:
-           - name: ad_group.url_custom_parameters
-             bypass_reason: "Value can be updated by Google Ads"
-         customer:
-           - name: customer.optimization_score_weight
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: customer.optimization_score
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: customer.pay_per_conversion_eligibility_failure_reasons
-             bypass_reason: "Value can be updated by Google Ads"
-         campaign_budget:
-           - name: campaign_budget.recommended_budget_estimated_change_weekly_interactions
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: metrics.all_conversions
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: metrics.all_conversions_from_interactions_rate
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: metrics.all_conversions_value
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: metrics.conversions
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: metrics.conversions_from_interactions_rate
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: metrics.conversions_value
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: metrics.cost_per_all_conversions
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: metrics.cost_per_conversion
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: metrics.value_per_all_conversions
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: metrics.value_per_conversion
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: metrics.cross_device_conversions
-             bypass_reason: "Value can be updated by Google Ads"
-         campaign:
-           - name: campaign.optimization_score
-             bypass_reason: "Value can be updated by Google Ads"
-         ad_group_ad_legacy:
-           - name: metrics.all_conversions_from_interactions_rate
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: metrics.all_conversions_value
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: metrics.all_conversions
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: metrics.conversions_from_interactions_rate
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: metrics.conversions_value
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: metrics.conversions
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: metrics.cost_per_all_conversions
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: metrics.cost_per_conversion
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: metrics.cost_per_current_model_attributed_conversion
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: metrics.current_model_attributed_conversions_value
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: metrics.current_model_attributed_conversions
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: metrics.value_per_all_conversions
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: metrics.value_per_conversion
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: metrics.value_per_current_model_attributed_conversion
-             bypass_reason: "Value can be updated by Google Ads"
-           - name: metrics.cross_device_conversions
-             bypass_reason: "Value can be updated by Google Ads"
  full_refresh:
    tests:
      - config_path: "secrets/config.json"
diff --git a/airbyte-integrations/connectors/source-google-pagespeed-insights/acceptance-test-config.yml b/airbyte-integrations/connectors/source-google-pagespeed-insights/acceptance-test-config.yml
index 60e5336104d9..9e9a7b99b2dd 100644
--- a/airbyte-integrations/connectors/source-google-pagespeed-insights/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-google-pagespeed-insights/acceptance-test-config.yml
@@ -25,9 +25,7 @@ acceptance_tests:
        # really works and produces multiple category request params
        expect_records:
          path: "integration_tests/expected_records.jsonl"
-         extra_fields: yes
          exact_order: yes
-         extra_records: no
  incremental:
    bypass_reason: "This connector does not implement incremental sync"
  full_refresh:
diff --git a/airbyte-integrations/connectors/source-google-search-console/acceptance-test-config.yml b/airbyte-integrations/connectors/source-google-search-console/acceptance-test-config.yml
index 09fb7528c86b..d661153c7768 100755
--- a/airbyte-integrations/connectors/source-google-search-console/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-google-search-console/acceptance-test-config.yml
@@ -26,9 +26,7 @@ acceptance_tests:
      - config_path: "secrets/config.json"
        expect_records:
          path: "integration_tests/expected_records.jsonl"
-         extra_fields: yes
          exact_order: yes
-         extra_records: no
        timeout_seconds: 3600
        empty_streams:
          - name: search_analytics_page_report
@@ -43,109 +41,6 @@ acceptance_tests:
            bypass_reason: "Fast changing data"
          - name: search_analytics_site_report_by_site
            bypass_reason: "Fast changing data"
-       ignored_fields:
-         sitemaps:
-           - name: lastDownloaded
-             bypass_reason: "URL changes upon each request for privacy/security"
-           - name: contents/0/submitted
-             bypass_reason: "URL changes upon each request for privacy/security"
-         search_analytics_by_date:
-           - name: clicks
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: impressions
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: ctr
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: position
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: date
-             bypass_reason: "Statistic shift each day."
-         search_analytics_by_country:
-           - name: clicks
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: impressions
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: ctr
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: position
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: country
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: date
-             bypass_reason: "Statistic shift each day."
-         search_analytics_by_device:
-           - name: clicks
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: impressions
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: ctr
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: position
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: device
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: date
-             bypass_reason: "Statistic shift each day."
-         search_analytics_by_page:
-           - name: clicks
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: impressions
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: ctr
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: position
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: page
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: date
-             bypass_reason: "Statistic shift each day."
-         search_analytics_by_query:
-           - name: clicks
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: impressions
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: ctr
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: position
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: query
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: date
-             bypass_reason: "Statistic shift each day."
-         search_analytics_all_fields:
-           - name: clicks
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: impressions
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: ctr
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: position
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: query
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: page
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: device
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: country
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: date
-             bypass_reason: "Statistic shift each day."
-         custom_dimensions:
-           - name: clicks
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: impressions
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: ctr
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: position
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: country
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: device
-             bypass_reason: "Data changing frequently, because we use live account."
-           - name: date
-             bypass_reason: "Statistic shift each day."
  full_refresh:
    tests:
      - config_path: "secrets/config.json"
diff --git a/airbyte-integrations/connectors/source-greenhouse/acceptance-test-config.yml b/airbyte-integrations/connectors/source-greenhouse/acceptance-test-config.yml
index 41a387794e0e..8bfb5f4eccf8 100644
--- a/airbyte-integrations/connectors/source-greenhouse/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-greenhouse/acceptance-test-config.yml
@@ -31,13 +31,6 @@ acceptance_tests:
            bypass_reason: "No data"
          - name: user_permissions
            bypass_reason: "No Data"
-       ignored_fields:
-         users:
-           - name: updated_at
-             bypass_reason: "Updated when you login to account"
-         job_posts:
-           - name: updated_at
-             bypass_reason: "Updated when you try edit without editing"
  full_refresh:
    tests:
      - config_path: "secrets/config.json"
diff --git a/airbyte-integrations/connectors/source-harness/acceptance-test-config.yml b/airbyte-integrations/connectors/source-harness/acceptance-test-config.yml
index 28456c8a61fb..b1e1a0d6e9e4 100644
--- a/airbyte-integrations/connectors/source-harness/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-harness/acceptance-test-config.yml
@@ -22,9 +22,7 @@ acceptance_tests:
    # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file
    # expect_records:
    #   path: "integration_tests/expected_records.jsonl"
-   #   extra_fields: no
    #   exact_order: no
-   #   extra_records: yes
  incremental:
    bypass_reason: "This connector does not implement incremental sync"
  # TODO uncomment this block if your connector implements incremental sync:
diff --git a/airbyte-integrations/connectors/source-hubspot/acceptance-test-config.yml b/airbyte-integrations/connectors/source-hubspot/acceptance-test-config.yml
index d0146f24d744..ec50dfc82a72 100644
--- a/airbyte-integrations/connectors/source-hubspot/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-hubspot/acceptance-test-config.yml
@@ -26,7 +26,6 @@ acceptance_tests:
      - config_path: secrets/config_oauth.json
        expect_records:
          path: integration_tests/expected_records.jsonl
-         extra_records: yes
        timeout_seconds: 3600
        empty_streams:
          - name: engagements_calls
@@ -59,147 +58,6 @@ acceptance_tests:
            bypass_reason: Unable to populate
          - name: cars_web_analytics
            bypass_reason: Unable to populate
-       ignored_fields:
-         engagements_tasks:
-           - name: hs_num_associated_*
-             bypass_reason: Floating value
-           - name: hs_object_source_*
-             bypass_reason: Floating value
-           - name: properties_hs_num_associated_*
-             bypass_reason: Floating value
-           - name: properties_hs_object_source_*
-             bypass_reason: Floating value
-           - name: properties_hs_object_source
-             bypass_reason: Floating value
-           - name: properties_hs_time_*
-             bypass_reason: Hubspot time depend on current time
-           - name: properties/hs_time_*
-             bypass_reason: Hubspot time depend on current time
-         contact_lists:
-           - name: ilsFilterBranch
-             bypass_reason: Floating fields order
-         companies:
-           - name: properties_hs_time_*
-             bypass_reason: Hubspot time depend on current time
-           - name: properties/hs_time_*
-             bypass_reason: Hubspot time depend on current time
-           - name: properties_hs_was_imported
-             bypass_reason: attribute is not stable
-           - name: properties/hs_was_imported
-             bypass_reason: attribute is not stable
-         contacts:
-           - name: properties_hs_time_*
-             bypass_reason: Hubspot time depend on current time
-           - name: properties/hs_time_*
-             bypass_reason: Hubspot time depend on current time
-           - name: properties_hs_time_in_subscriber
-             bypass_reason: Hubspot time depend on current time
-           - name: properties/hs_time_in_subscriber
-             bypass_reason: Hubspot time depend on current time
-           - name: properties_hs_latest_source_timestamp
-             bypass_reason: Hubspot time depend on current time
-           - name: properties/hs_latest_source_timestamp
-             bypass_reason: Hubspot time depend on current time
-           - name: properties_hs_predictivescoringtier
-             bypass_reason: Hubspot prediction changes
-           - name: properties/hs_predictivescoringtier
-             bypass_reason: Hubspot prediction changes
-           - name: properties_lastmodifieddate
-             bypass_reason: Hubspot time depend on current time
-           - name: properties/lastmodifieddate
-             bypass_reason: Hubspot time depend on current time
-           - name: properties_hs_time_in_lead
-             bypass_reason: Hubspot time depend on current time
-           - name: properties/hs_time_in_lead
-             bypass_reason: Hubspot time depend on current time
-           - name: properties_hs_time_in_opportunity
-             bypass_reason: Hubspot time depend on current time
-           - name: properties/hs_time_in_opportunity
-             bypass_reason: Hubspot time depend on current time
-           - name: properties_hs_was_imported
-             bypass_reason: attribute is not stable
-           - name: properties/hs_was_imported
-             bypass_reason: attribute is not stable
-           - name: updatedAt
-             bypass_reason: Hubspot time depend on current time
-           - name: properties/hs_v2_cumulative_time_*
-             bypass_reason: Hubspot time depend on current time
-           - name: properties/hs_v2_latest_time_*
-             bypass_reason: Hubspot time depend on current time
-           - name: properties_hs_v2_cumulative_time_*
-             bypass_reason: Hubspot time depend on current time
-           - name: properties_hs_v2_latest_time_*
-             bypass_reason: Hubspot time depend on current time
-         deals:
-           - name: properties_hs_time_*
-             bypass_reason: Hubspot time depend on current time
-           - name: properties/hs_time_*
-             bypass_reason: Hubspot time depend on current time
-           - name: properties_hs_acv
-             bypass_reason: value can be an integer or float
-           - name: properties/hs_acv
-             bypass_reason: value can be an integer or float
-           - name: properties_hs_arr
-             bypass_reason: value can be an integer or float
-           - name: properties/hs_arr
-             bypass_reason: value can be an integer or float
-           - name: properties_hs_mrr
-             bypass_reason: value can be an integer or float
-           - name: properties/hs_mrr
-             bypass_reason: value can be an integer or float
-           - name: properties_hs_tcv
-             bypass_reason: value can be an integer or float
-           - name: properties/hs_tcv
-             bypass_reason: value can be an integer or float
-           - name: properties_hs_num_of_associated_line_items
-             bypass_reason: value can be an integer or float
-           - name: properties/hs_num_of_associated_line_items
-             bypass_reason: value can be an integer or float
-         deals_archived:
-           - name: properties_hs_time_*
-             bypass_reason: Hubspot time depend on current time
-           - name: properties/hs_time_*
-             bypass_reason: Hubspot time depend on current time
-           - name: properties_hs_acv
-             bypass_reason: value can be an integer or float
-           - name: properties/hs_acv
-             bypass_reason: value can be an integer or float
-           - name: properties_hs_arr
-             bypass_reason: value can be an integer or float
-           - name: properties/hs_arr
-             bypass_reason: value can be an integer or float
-           - name: properties_hs_mrr
-             bypass_reason: value can be an integer or float
-           - name: properties/hs_mrr
-             bypass_reason: value can be an integer or float
-           - name: properties_hs_tcv
-             bypass_reason: value can be an integer or float
-           - name: properties/hs_tcv
-             bypass_reason: value can be an integer or float
-           - name: properties_hs_num_of_associated_line_items
-             bypass_reason: value can be an integer or float
-           - name: properties/hs_num_of_associated_line_items
-             bypass_reason: value can be an integer or float
-         tickets:
-           - name: properties_hs_time_*
-             bypass_reason: Hubspot time depend on current time
-           - name: properties/hs_time_*
-             bypass_reason: Hubspot time depend on current time
-         goals:
-           - name: properties_hs_time_*
-             bypass_reason: Hubspot time depend on current time
-           - name: properties/hs_time_*
-             bypass_reason: Hubspot time depend on current time
-           - name: properties_hs_lastmodifieddate
-             bypass_reason: Hubspot time depend on current time
-           - name: properties/hs_lastmodifieddate
-             bypass_reason: Hubspot time depend on current time
-           - name: properties_hs_kpi_value_last_calculated_at
-             bypass_reason: Hubspot time depend on current time
-           - name: properties/hs_kpi_value_last_calculated_at
-             bypass_reason: Hubspot time depend on current time
-           - name: updatedAt
-             bypass_reason: field changes too often
  full_refresh:
    tests:
      - config_path: secrets/config.json
diff --git a/airbyte-integrations/connectors/source-instagram/acceptance-test-config.yml b/airbyte-integrations/connectors/source-instagram/acceptance-test-config.yml
index cbb6e96e83a4..43b70c2d3dac 100644
--- a/airbyte-integrations/connectors/source-instagram/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-instagram/acceptance-test-config.yml
@@ -29,36 +29,6 @@ acceptance_tests:
            bypass_reason: Stories available only 24 hours
          - name: story_insights
            bypass_reason: Stories available only 24 hours, so do the insights
-       ignored_fields:
-         users:
-           - name: media_count
-             bypass_reason: Updated each time when new post was added to live account
-           - name: followers_count
-             bypass_reason: Updated each time when followers updated was added to live account
-           - name: follows_count
-             bypass_reason: Updated each time when follows updated was added to live account
-           - name: profile_picture_url
-             bypass_reason: Contains auto generated hash
-         user_lifetime_insights:
-           - name: value
-             bypass_reason: Contains PII data
-           - name: date
-             bypass_reason: Depend on current date
-         user_insights:
-           - name: date
-             bypass_reason: Anonymization for exactly for which date statistics anonymization
-           - name: online_followers
-             bypass_reason: Depend on each online user
-         media:
-           - name: like_count
-             bypass_reason: Auto updated field
-           - name: media_url
-             bypass_reason: Contains auto generated hash
-           - name: thumbnail_url
-             bypass_reason: Contains auto generated hash
-         media_insights:
-           - name: id
-             bypass_reason: For statistic anonymization
  full_refresh:
    tests:
      - config_path: "secrets/config.json"
diff --git a/airbyte-integrations/connectors/source-instatus/acceptance-test-config.yml b/airbyte-integrations/connectors/source-instatus/acceptance-test-config.yml
index 2878fb1de5da..155432d61d3d 100644
--- a/airbyte-integrations/connectors/source-instatus/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-instatus/acceptance-test-config.yml
@@ -21,9 +21,7 @@ acceptance_tests:
        configured_catalog_path: "integration_tests/configured_catalog.json"
        expect_records:
          path: "integration_tests/expected_records.txt"
-         extra_fields: yes
          exact_order: yes
-         extra_records: no
  incremental:
    bypass_reason: "This connector does not implement incremental sync"
  full_refresh:
diff --git a/airbyte-integrations/connectors/source-intruder/acceptance-test-config.yml b/airbyte-integrations/connectors/source-intruder/acceptance-test-config.yml
index 874695a762c3..c418ee9ffb10 100644
--- a/airbyte-integrations/connectors/source-intruder/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-intruder/acceptance-test-config.yml
@@ -22,9 +22,7 @@ acceptance_tests:
    # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file
    # expect_records:
    #   path: "integration_tests/expected_records.jsonl"
-   #   extra_fields: no
    #   exact_order: no
-   #   extra_records: yes
  incremental:
    bypass_reason: "This connector does not implement incremental sync"
  # TODO uncomment this block if your connector implements incremental sync:
diff --git a/airbyte-integrations/connectors/source-ip2whois/acceptance-test-config.yml b/airbyte-integrations/connectors/source-ip2whois/acceptance-test-config.yml
index 0a97d3bef967..39f6e438acd6 100644
--- a/airbyte-integrations/connectors/source-ip2whois/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-ip2whois/acceptance-test-config.yml
@@ -22,9 +22,7 @@ acceptance_tests:
    # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file
    # expect_records:
    #   path: "integration_tests/expected_records.jsonl"
-   #   extra_fields: no
    #   exact_order: no
-   #   extra_records: yes
  incremental:
    bypass_reason: "This connector does not implement incremental sync"
  # TODO uncomment this block if your connector implements incremental sync:
diff --git a/airbyte-integrations/connectors/source-iterable/acceptance-test-config.yml b/airbyte-integrations/connectors/source-iterable/acceptance-test-config.yml
index e6e5ed3decab..37106d5ff4b1 100644
--- a/airbyte-integrations/connectors/source-iterable/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-iterable/acceptance-test-config.yml
@@ -20,9 +20,7 @@ acceptance_tests:
      - config_path: "secrets/config.json"
        expect_records:
          path: "integration_tests/expected_records.jsonl"
-         extra_fields: no
          exact_order: no
-         extra_records: yes
        empty_streams:
          - name: "web_push_click"
            bypass_reason: "Can not populate; need messaging service configured"
diff --git a/airbyte-integrations/connectors/source-jira/acceptance-test-config.yml b/airbyte-integrations/connectors/source-jira/acceptance-test-config.yml
index bcce37ac8c3f..0fd75e938c9e 100644
--- a/airbyte-integrations/connectors/source-jira/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-jira/acceptance-test-config.yml
@@ -25,12 +25,6 @@ acceptance_tests:
        empty_streams:
          - name: "project_permission_schemes"
            bypass_reason: "Unable to populate. Jira doesn't support issue security for Free plan."
-       ignored_fields:
-         sprint_issues:
-           - name: updated
-             bypass_reason: "Unstable data"
-           - name: fields/updated
-             bypass_reason: "Unstable data"
        timeout_seconds: 2400
        fail_on_extra_columns: false
  incremental:
diff --git a/airbyte-integrations/connectors/source-k6-cloud/acceptance-test-config.yml b/airbyte-integrations/connectors/source-k6-cloud/acceptance-test-config.yml
index f6d6e4574572..5d00378484fb 100644
--- a/airbyte-integrations/connectors/source-k6-cloud/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-k6-cloud/acceptance-test-config.yml
@@ -18,9 +18,7 @@ tests:
    # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file
    # expect_records:
    #   path: "integration_tests/expected_records.jsonl"
-   #   extra_fields: no
    #   exact_order: no
-   #   extra_records: yes
  full_refresh:
    - config_path: "secrets/config.json"
      configured_catalog_path: "integration_tests/configured_catalog.json"
diff --git a/airbyte-integrations/connectors/source-klarna/acceptance-test-config.yml b/airbyte-integrations/connectors/source-klarna/acceptance-test-config.yml
index eca3dbc24e45..2f043e2db4eb 100644
--- a/airbyte-integrations/connectors/source-klarna/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-klarna/acceptance-test-config.yml
@@ -18,9 +18,7 @@ tests:
    # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file
    # expect_records:
    #   path: "integration_tests/expected_records.jsonl"
-   #   extra_fields: no
    #   exact_order: no
-   #   extra_records: yes
  # incremental: # TODO Implementation of incremental sync is possible
  #   - config_path: "secrets/config.json"
  #     configured_catalog_path: "integration_tests/configured_catalog.json"
diff --git a/airbyte-integrations/connectors/source-klaviyo/acceptance-test-config.yml b/airbyte-integrations/connectors/source-klaviyo/acceptance-test-config.yml
index 3558908788bd..6a408a70855a 100644
--- a/airbyte-integrations/connectors/source-klaviyo/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-klaviyo/acceptance-test-config.yml
@@ -4,11 +4,6 @@ acceptance_tests:
      - config_path: secrets/config.json
        expect_records:
          path: integration_tests/expected_records.jsonl
-         extra_records: true
-       ignored_fields:
-         email_templates:
-           - name: "attributes/html"
-             bypass_reason: unstable data
  connection:
    tests:
      - config_path: secrets/config.json
diff --git a/airbyte-integrations/connectors/source-kyriba/acceptance-test-config.yml b/airbyte-integrations/connectors/source-kyriba/acceptance-test-config.yml
index fa8d8c88fc81..eb793bb69613 100644
--- a/airbyte-integrations/connectors/source-kyriba/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-kyriba/acceptance-test-config.yml
@@ -20,9 +20,7 @@ acceptance_tests:
        timeout_seconds: 1200
        expect_records:
          path: "integration_tests/expected_records.jsonl"
-         extra_fields: no
          exact_order: no
-         extra_records: yes
        fail_on_extra_columns: true
  incremental:
    tests:
diff --git a/airbyte-integrations/connectors/source-launchdarkly/acceptance-test-config.yml b/airbyte-integrations/connectors/source-launchdarkly/acceptance-test-config.yml
index dd27120e9bdb..1cab7fc2623e 100644
--- a/airbyte-integrations/connectors/source-launchdarkly/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-launchdarkly/acceptance-test-config.yml
@@ -24,9 +24,7 @@ acceptance_tests:
    # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file
    # expect_records:
    #   path: "integration_tests/expected_records.jsonl"
-   #   extra_fields: no
    #   exact_order: no
-   #   extra_records: yes
  incremental:
    bypass_reason: "This connector does not implement incremental sync"
  # TODO uncomment this block if your connector implements incremental sync:
diff --git a/airbyte-integrations/connectors/source-linkedin-ads/acceptance-test-config.yml b/airbyte-integrations/connectors/source-linkedin-ads/acceptance-test-config.yml
index 64a8fb921408..6e1bdb9df0df 100644
--- a/airbyte-integrations/connectors/source-linkedin-ads/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-linkedin-ads/acceptance-test-config.yml
@@ -27,20 +27,6 @@ acceptance_tests:
          path: "integration_tests/expected_records.jsonl"
        fail_on_extra_columns: true
        timeout_seconds: 3600
-       ignored_fields:
-         campaign_groups:
-           - name: "lastModified"
-             bypass_reason: "Volatile data"
-         ad_campaign_analytics:
-           - name: "costInLocalCurrency"
-             bypass_reason: "Data changes too often"
-           - name: "costInUsd"
-             bypass_reason: "Data changes too often"
-         ad_creative_analytics:
-           - name: "costInLocalCurrency"
-             bypass_reason: "Data changes too often"
-           - name: "costInUsd"
-             bypass_reason: "Data changes too often"
  incremental:
    tests:
      - config_path: "secrets/config_oauth.json"
diff --git a/airbyte-integrations/connectors/source-mailchimp/acceptance-test-config.yml b/airbyte-integrations/connectors/source-mailchimp/acceptance-test-config.yml
index c7b7594f3190..c5aeffc15774 100644
--- a/airbyte-integrations/connectors/source-mailchimp/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-mailchimp/acceptance-test-config.yml
@@ -36,7 +36,6 @@ acceptance_tests:
      - config_path: "secrets/config_oauth.json"
        expect_records:
          path: "integration_tests/expected_records.jsonl"
-         extra_records: True
        fail_on_extra_columns: false
        empty_streams:
          - name: "automations"
diff --git a/airbyte-integrations/connectors/source-mailerlite/acceptance-test-config.yml b/airbyte-integrations/connectors/source-mailerlite/acceptance-test-config.yml
index 80718ac797a9..4c9c8a168e22 100644
--- a/airbyte-integrations/connectors/source-mailerlite/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-mailerlite/acceptance-test-config.yml
@@ -18,9 +18,7 @@ tests:
    # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file
    # expect_records:
    #   path: "integration_tests/expected_records.jsonl"
-   #   extra_fields: no
    #   exact_order: no
-   #   extra_records: yes
  # incremental: # TODO if your connector does not implement incremental sync, remove this block
  #   - config_path: "secrets/config.json"
  #     configured_catalog_path: "integration_tests/configured_catalog.json"
diff --git a/airbyte-integrations/connectors/source-mailersend/acceptance-test-config.yml b/airbyte-integrations/connectors/source-mailersend/acceptance-test-config.yml
index 8a400087862a..69d6145e21fe 100644
--- a/airbyte-integrations/connectors/source-mailersend/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-mailersend/acceptance-test-config.yml
@@ -22,7 +22,6 @@ acceptance_tests:
    # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file
    # expect_records:
    #   path: "integration_tests/expected_records.jsonl"
-   #   extra_fields: no
    #   exact_order: no
    #   extra_records: yes
  incremental:
diff --git a/airbyte-integrations/connectors/source-mailgun/acceptance-test-config.yml b/airbyte-integrations/connectors/source-mailgun/acceptance-test-config.yml
index cfe77380a94b..0690f37ae2e4 100644
--- a/airbyte-integrations/connectors/source-mailgun/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-mailgun/acceptance-test-config.yml
@@ -25,9 +25,7 @@ acceptance_tests:
            bypass_reason: "Sandbox account can't seed this stream"
    # expect_records:
    #   path: "integration_tests/expected_records.jsonl"
-   #   extra_fields: no
    #   exact_order: no
-   #   extra_records: yes
  incremental:
    bypass_reason: "This connector does not implement incremental sync"
    # tests:
diff --git a/airbyte-integrations/connectors/source-mailjet-mail/acceptance-test-config.yml b/airbyte-integrations/connectors/source-mailjet-mail/acceptance-test-config.yml
index a2b3df29f9a4..934c9d2c87ac 100644
--- a/airbyte-integrations/connectors/source-mailjet-mail/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-mailjet-mail/acceptance-test-config.yml
@@ -17,9 +17,7 @@ tests:
    # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file
    # expect_records:
    #   path: "integration_tests/expected_records.jsonl"
-   #   extra_fields: no
    #   exact_order: no
-   #   extra_records: yes
  full_refresh:
    - config_path: "secrets/config.json"
      configured_catalog_path: "integration_tests/configured_catalog.json"
diff --git a/airbyte-integrations/connectors/source-mailjet-sms/acceptance-test-config.yml b/airbyte-integrations/connectors/source-mailjet-sms/acceptance-test-config.yml
index 9e621b1d482e..26668fd09b82 100644
--- a/airbyte-integrations/connectors/source-mailjet-sms/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-mailjet-sms/acceptance-test-config.yml
@@ -18,9 +18,7 @@ tests:
    # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file
    # expect_records:
    #   path: "integration_tests/expected_records.jsonl"
-   #   extra_fields: no
    #   exact_order: no
-   #   extra_records: yes
  full_refresh:
    - config_path: "secrets/config.json"
      configured_catalog_path: "integration_tests/configured_catalog.json"
diff --git a/airbyte-integrations/connectors/source-merge/acceptance-test-config.yml b/airbyte-integrations/connectors/source-merge/acceptance-test-config.yml
index fbe2b8b3336b..a1444444771d 100644
--- a/airbyte-integrations/connectors/source-merge/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-merge/acceptance-test-config.yml
@@ -44,9 +44,7 @@ acceptance_tests:
    # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file
    # expect_records:
    #   path: "integration_tests/expected_records.jsonl"
-   #   extra_fields: no
    #   exact_order: no
-   #   extra_records: yes
  incremental:
    bypass_reason: "This connector does not implement incremental sync"
  # TODO uncomment this block if your connector implements incremental sync:
diff --git a/airbyte-integrations/connectors/source-mixpanel/acceptance-test-config.yml b/airbyte-integrations/connectors/source-mixpanel/acceptance-test-config.yml
index 1e733552628e..2a0097b08965 100644
--- a/airbyte-integrations/connectors/source-mixpanel/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-mixpanel/acceptance-test-config.yml
@@ -33,21 +33,12 @@ acceptance_tests:
        timeout_seconds: 9000
        expect_records:
          path: "integration_tests/expected_records.jsonl"
-         extra_fields: no
          exact_order: no
-         extra_records: yes
        empty_streams:
          - name: export
            bypass_reason: "Data expired too often"
          - name: annotations
            bypass_reason: "Data expired too often"
-       ignored_fields:
-         funnels:
-           - name: date
-             bypass_reason: "Data changes too often"
-         revenue:
-           - name: date
-             bypass_reason: "Data changes too often"
  full_refresh:
    tests:
      - config_path: "secrets/config_project_secret.json"
diff --git a/airbyte-integrations/connectors/source-monday/acceptance-test-config.yml b/airbyte-integrations/connectors/source-monday/acceptance-test-config.yml
index 01fc98c6f304..12b53f7ebca0 100644
--- a/airbyte-integrations/connectors/source-monday/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-monday/acceptance-test-config.yml
@@ -34,35 +34,17 @@ acceptance_tests:
      - config_path: "secrets/config_api_token.json"
        expect_records:
          path: "integration_tests/expected_records.jsonl"
-         extra_fields: no
          exact_order: no
-         extra_records: yes
        empty_streams:
          - name: teams
            bypass_reason: "The stream has no test data and tested with integration tests"
-       ignored_fields:
-         items:
-           - name: assets/*/public_url
-             bypass_reason: "Unstable data"
-         updates:
-           - name: assets/*/public_url
-             bypass_reason: "Unstable data"
      - config_path: "secrets/config_oauth.json"
        expect_records:
          path: "integration_tests/expected_records.jsonl"
-         extra_fields: no
          exact_order: no
-         extra_records: yes
        empty_streams:
          - name: teams
            bypass_reason: "The stream has no test data and tested with integration tests"
-       ignored_fields:
-         items:
-           - name: assets/*/public_url
-             bypass_reason: "Unstable data"
-         updates:
-           - name: assets/*/public_url
-             bypass_reason: "Unstable data"
  full_refresh:
    tests:
      - config_path: "secrets/config_api_token.json"
diff --git a/airbyte-integrations/connectors/source-n8n/acceptance-test-config.yml b/airbyte-integrations/connectors/source-n8n/acceptance-test-config.yml
index c72809a2672c..6e83613d00d6 100644
--- a/airbyte-integrations/connectors/source-n8n/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-n8n/acceptance-test-config.yml
@@ -22,9 +22,7 @@ acceptance_tests:
    # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file
    # expect_records:
    #   path: "integration_tests/expected_records.jsonl"
-   #   extra_fields: no
    #   exact_order: no
-   #   extra_records: yes
  incremental:
    bypass_reason: "This connector does not implement incremental sync"
  # TODO uncomment this block if your connector implements incremental sync:
diff --git a/airbyte-integrations/connectors/source-nasa/acceptance-test-config.yml b/airbyte-integrations/connectors/source-nasa/acceptance-test-config.yml
index 5be380ce8487..1cee69aa1025 100644
--- a/airbyte-integrations/connectors/source-nasa/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-nasa/acceptance-test-config.yml
@@ -22,9 +22,7 @@ acceptance_tests:
    # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file
    # expect_records:
    #   path: "integration_tests/expected_records.jsonl"
-   #   extra_fields: no
    #   exact_order: no
-   #   extra_records: yes
  incremental:
    # bypass_reason: "This connector does not implement incremental sync"
    tests:
diff --git a/airbyte-integrations/connectors/source-okta/acceptance-test-config.yml b/airbyte-integrations/connectors/source-okta/acceptance-test-config.yml
index e7a9d92bdad0..d34cae9702f9 100644
--- a/airbyte-integrations/connectors/source-okta/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-okta/acceptance-test-config.yml
@@ -18,9 +18,7 @@ acceptance_tests:
      - config_path: "secrets/config.json"
        expect_records:
          path: "integration_tests/expected_records.jsonl"
-         extra_fields: no
          exact_order: no
-         extra_records: yes
  full_refresh:
    tests:
      - config_path: "secrets/config.json"
diff --git a/airbyte-integrations/connectors/source-omnisend/acceptance-test-config.yml b/airbyte-integrations/connectors/source-omnisend/acceptance-test-config.yml
index 28c845e8b336..486c48f82f48 100644
--- a/airbyte-integrations/connectors/source-omnisend/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-omnisend/acceptance-test-config.yml
@@ -18,9 +18,7 @@ tests:
    # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file
    # expect_records:
    #   path: "integration_tests/expected_records.jsonl"
-   #   extra_fields: no
    #   exact_order: no
-   #   extra_records: yes
  # incremental: # TODO if your connector does not implement incremental sync, remove this block
  #   - config_path: "secrets/config.json"
  #     configured_catalog_path: "integration_tests/configured_catalog.json"
diff --git a/airbyte-integrations/connectors/source-onesignal/acceptance-test-config.yml b/airbyte-integrations/connectors/source-onesignal/acceptance-test-config.yml
index 8b4e4c7d5270..b980dc9db0b0 100644
--- a/airbyte-integrations/connectors/source-onesignal/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-onesignal/acceptance-test-config.yml
@@ -25,9 +25,7 @@ acceptance_tests:
            bypass_reason: "Sandbox account cannot seed the stream"
        expect_records:
          path: "integration_tests/expected_records.jsonl"
-         extra_fields: no
          exact_order: no
-         extra_records: yes
        fail_on_extra_columns: false
  # TODO: enable incremental test after seeding the connector
  # incremental:
diff --git a/airbyte-integrations/connectors/source-open-exchange-rates/acceptance-test-config.yml b/airbyte-integrations/connectors/source-open-exchange-rates/acceptance-test-config.yml
index 483fe518fa28..e928acf0025e 100644
--- a/airbyte-integrations/connectors/source-open-exchange-rates/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-open-exchange-rates/acceptance-test-config.yml
@@ -22,9 +22,7 @@ acceptance_tests:
        empty_streams: []
    # expect_records:
    #   path: "integration_tests/expected_records.jsonl"
-   #   extra_fields: no
    #   exact_order: no
-   #   extra_records: yes
  incremental:
    # bypass_reason: "This connector does not implement incremental sync"
    tests:
diff --git a/airbyte-integrations/connectors/source-outbrain-amplify/acceptance-test-config.yml b/airbyte-integrations/connectors/source-outbrain-amplify/acceptance-test-config.yml
index 77a5173ea17b..d791fbc0b1f7 100644
--- a/airbyte-integrations/connectors/source-outbrain-amplify/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-outbrain-amplify/acceptance-test-config.yml
@@ -55,9 +55,7 @@ acceptance_tests:
    # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file
    # expect_records:
    #   path: "integration_tests/expected_records.jsonl"
-   #   extra_fields: no
    #   exact_order: no
-   #   extra_records: yes
  # incremental:
  #   bypass_reason: "This connector does not implement incremental sync"
  # TODO uncomment this block if your connector implements incremental sync:
diff --git a/airbyte-integrations/connectors/source-paypal-transaction/acceptance-test-config.yml b/airbyte-integrations/connectors/source-paypal-transaction/acceptance-test-config.yml
index f4682c9534da..66695c691d17 100644
--- a/airbyte-integrations/connectors/source-paypal-transaction/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-paypal-transaction/acceptance-test-config.yml
@@ -47,20 +47,11 @@ acceptance_tests:
          #Have to add for testing PR CI.
          - name: list_disputes
            bypass_reason: "Disputes may not exist."
-       ignored_fields:
-         balances:
-           - name: last_refresh_time
-             bypass_reason: "field changes during every read"
-         list_products:
-           - name: description
-             bypass_reason: "Sometimes it is not contained in the response"
        timeout_seconds: 3200
        expect_records:
          path: "integration_tests/sample_files/expected_records_sandbox.jsonl"
          #path: "integration_tests/sample_files/expected_records.jsonl"
-         extra_fields: yes
          exact_order: yes
-         extra_records: no
        fail_on_extra_columns: False
  incremental:
    tests:
diff --git a/airbyte-integrations/connectors/source-paystack/acceptance-test-config.yml b/airbyte-integrations/connectors/source-paystack/acceptance-test-config.yml
index c075e0e5c3c8..bf1c7a263d72 100644
--- a/airbyte-integrations/connectors/source-paystack/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-paystack/acceptance-test-config.yml
@@ -21,16 +21,6 @@ acceptance_tests:
            bypass_reason: "unable to seed, this stream requires third party system"
          - name: settlements
            bypass_reason: "unable to seed, this stream requires third party system"
-       ignored_fields:
-         subscriptions:
-           - name: next_payment_date
-             bypass_reason: "Payment data is updated every month"
-           - name: open_invoice
-             bypass_reason: "Payment data is updated every month"
-           - name: payments_count
-             bypass_reason: "Payment data is updated every month"
-           - name: most_recent_invoice
-             bypass_reason: "Payment data is updated every month"
        expect_records:
          path: "integration_tests/expected_records.jsonl"
        fail_on_extra_columns: false
diff --git a/airbyte-integrations/connectors/source-pendo/acceptance-test-config.yml b/airbyte-integrations/connectors/source-pendo/acceptance-test-config.yml
index 67c768793f8e..b35a9ecb9f65 100644
--- a/airbyte-integrations/connectors/source-pendo/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-pendo/acceptance-test-config.yml
@@ -22,9 +22,7 @@ acceptance_tests:
    # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file
    # expect_records:
    #   path: "integration_tests/expected_records.jsonl"
-   #   extra_fields: no
    #   exact_order: no
-   #   extra_records: yes
  incremental:
    bypass_reason: "This connector does not implement incremental sync"
  # TODO uncomment this block if your connector implements incremental sync:
diff --git a/airbyte-integrations/connectors/source-persistiq/acceptance-test-config.yml b/airbyte-integrations/connectors/source-persistiq/acceptance-test-config.yml
index 913296e0e8e5..b080a679f320 100644
--- a/airbyte-integrations/connectors/source-persistiq/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-persistiq/acceptance-test-config.yml
@@ -22,9 +22,7 @@ acceptance_tests:
    # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file
    # expect_records:
    #   path: "integration_tests/expected_records.jsonl"
-   #   extra_fields: no
    #   exact_order: no
-   #   extra_records: yes
  incremental:
    bypass_reason: "This connector does not implement incremental sync"
  # TODO uncomment this block if your connector implements incremental sync:
diff --git a/airbyte-integrations/connectors/source-pinterest/acceptance-test-config.yml b/airbyte-integrations/connectors/source-pinterest/acceptance-test-config.yml
index 4eab013a5fad..c450976d9deb 100644
--- a/airbyte-integrations/connectors/source-pinterest/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-pinterest/acceptance-test-config.yml
@@ -45,33 +45,8 @@ acceptance_tests:
        timeout_seconds: 1200
        expect_records:
          path: "integration_tests/expected_records.jsonl"
-         extra_fields: no
          exact_order: no
-         extra_records: yes
        fail_on_extra_columns: false
-       ignored_fields:
-         board_pins:
-           - name: "media"
-             bypass_reason: "urls may change"
-         board_section_pins:
-           - name: "media"
-             bypass_reason: "urls may change"
-         ads:
-           - name: "updated_time"
-             bypass_reason: "can be updated"
-         ad_groups:
-           - name: "updated_time"
-             bypass_reason: "can be updated"
-         campaigns:
-           - name: "updated_time"
-             bypass_reason: "can be updated"
-         audiences:
-           - name: "size"
-             bypass_reason: "can be changed"
-           - name: "updated_timestamp"
-             bypass_reason: "can be changed"
-           - name: "created_timestamp"
-             bypass_reason: "can be changed"
  incremental:
    tests:
      - config_path: secrets/config.json
diff --git a/airbyte-integrations/connectors/source-polygon-stock-api/acceptance-test-config.yml b/airbyte-integrations/connectors/source-polygon-stock-api/acceptance-test-config.yml
index 50352654ce14..6682e8da3032 100644
--- a/airbyte-integrations/connectors/source-polygon-stock-api/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-polygon-stock-api/acceptance-test-config.yml
@@ -22,9 +22,7 @@ acceptance_tests:
    # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file
    # expect_records:
    #   path: "integration_tests/expected_records.jsonl"
-   #   extra_fields: no
    #   exact_order: no
-   #   extra_records: yes
  incremental:
    bypass_reason: "This connector does not implement incremental sync"
  # TODO uncomment this block if your connector implements incremental sync:
diff --git a/airbyte-integrations/connectors/source-postmarkapp/acceptance-test-config.yml b/airbyte-integrations/connectors/source-postmarkapp/acceptance-test-config.yml
index bd2574c01ab3..1f707c1dc9d7 100644
--- a/airbyte-integrations/connectors/source-postmarkapp/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-postmarkapp/acceptance-test-config.yml
@@ -31,9 +31,7 @@ acceptance_tests:
            bypass_reason: "Is empty in test data"
        expect_records:
          path: "integration_tests/expected_records.jsonl"
-         extra_fields: no
          exact_order: no
-         extra_records: yes
  full_refresh:
    tests:
      - config_path: "secrets/config.json"
diff --git a/airbyte-integrations/connectors/source-prestashop/acceptance-test-config.yml b/airbyte-integrations/connectors/source-prestashop/acceptance-test-config.yml
index e75738412d47..81a1f3b4b781 100644
--- a/airbyte-integrations/connectors/source-prestashop/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-prestashop/acceptance-test-config.yml
@@ -28,9 +28,7 @@ acceptance_tests:
            bypass_reason: "Can not populate"
        expect_records:
          path: "integration_tests/expected_records.jsonl"
-         extra_fields: no
          exact_order: no
-         extra_records: yes
        fail_on_extra_columns: false
  incremental:
    tests:
diff --git a/airbyte-integrations/connectors/source-quickbooks/acceptance-test-config.yml b/airbyte-integrations/connectors/source-quickbooks/acceptance-test-config.yml
index d5f58965d1bf..c773184a5540 100644
--- a/airbyte-integrations/connectors/source-quickbooks/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-quickbooks/acceptance-test-config.yml
@@ -37,9 +37,7 @@ acceptance_tests:
            bypass_reason: "unable to populate"
    # expect_records:
    #   path: "integration_tests/expected_records.jsonl"
-   #   extra_fields: no
    #   exact_order: no
-   #   extra_records: yes
  incremental:
    tests:
      - config_path: "secrets/config.json"
diff --git a/airbyte-integrations/connectors/source-railz/acceptance-test-config.yml b/airbyte-integrations/connectors/source-railz/acceptance-test-config.yml
index a2a9431e605e..5b806c1d67b6 100644
--- a/airbyte-integrations/connectors/source-railz/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-railz/acceptance-test-config.yml
@@ -20,9 +20,7 @@ acceptance_tests:
      - config_path: "secrets/config.json"
        expect_records:
          path: "integration_tests/expected_records.jsonl"
-         extra_fields: no
          exact_order: no
-         extra_records: yes
        empty_streams:
          - name: "businesses"
            bypass_reason: "Stream object fields like 'status' and 'updatedAt' can change pretty often"
diff --git a/airbyte-integrations/connectors/source-recharge/acceptance-test-config.yml b/airbyte-integrations/connectors/source-recharge/acceptance-test-config.yml
index 085c9b7cfc55..6055da4f21f1 100644
--- a/airbyte-integrations/connectors/source-recharge/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-recharge/acceptance-test-config.yml
@@ -9,18 +9,10 @@ acceptance_tests:
            bypass_reason: "volatile data"
          - name: onetimes
            bypass_reason: "no data from stream"
-       ignored_fields:
-         shop:
-           - name: shop/updated_at
-             bypass_reason: "updated after login"
-           - name: store/updated_at
-             bypass_reason: "updated after login"
        timeout_seconds: 7200
        expect_records:
          path: "integration_tests/expected_records.jsonl"
-         extra_fields: no
          exact_order: no
-         extra_records: yes
        fail_on_extra_columns: false
      - config_path: secrets/config_order_modern_api.json
        empty_streams:
@@ -30,18 +22,10 @@ acceptance_tests:
            bypass_reason: "volatile data"
          - name: onetimes
            bypass_reason: "no data from stream"
-       ignored_fields:
-         shop:
-           - name: shop/updated_at
-             bypass_reason: "updated after login"
-           - name: store/updated_at
-             bypass_reason: "updated after login"
        timeout_seconds: 7200
        expect_records:
          path: "integration_tests/expected_records_orders_modern_api.jsonl"
-         extra_fields: no
          exact_order: no
-         extra_records: yes
        fail_on_extra_columns: false
  connection:
    tests:
diff --git a/airbyte-integrations/connectors/source-recreation/acceptance-test-config.yml b/airbyte-integrations/connectors/source-recreation/acceptance-test-config.yml
index 31eacb139a5f..ddc6dbff0c22 100644
--- a/airbyte-integrations/connectors/source-recreation/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-recreation/acceptance-test-config.yml
@@ -22,9 +22,7 @@ acceptance_tests:
    # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file
    # expect_records:
    #   path: "integration_tests/expected_records.jsonl"
-   #   extra_fields: no
    #   exact_order: no
-   #   extra_records: yes
  full_refresh:
    tests:
      - config_path: "secrets/config.json"
diff --git a/airbyte-integrations/connectors/source-recruitee/acceptance-test-config.yml b/airbyte-integrations/connectors/source-recruitee/acceptance-test-config.yml
index 82aebcc49bf4..6ce31ea992e1 100644
--- a/airbyte-integrations/connectors/source-recruitee/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-recruitee/acceptance-test-config.yml
@@ -22,9 +22,7 @@ acceptance_tests:
    # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file
    # expect_records:
    #   path: "integration_tests/expected_records.jsonl"
-   #   extra_fields: no
    #   exact_order: no
-   #   extra_records: yes
  incremental:
    bypass_reason: "This connector does not implement incremental sync"
  # TODO uncomment this block if your connector implements incremental sync:
diff --git a/airbyte-integrations/connectors/source-ringcentral/acceptance-test-config.yml b/airbyte-integrations/connectors/source-ringcentral/acceptance-test-config.yml
index bc1c7ca356e0..e35afacc906c 100644
--- a/airbyte-integrations/connectors/source-ringcentral/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-ringcentral/acceptance-test-config.yml
@@ -40,9 +40,7 @@ acceptance_tests:
    # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file
    # expect_records:
    #   path: "integration_tests/expected_records.jsonl"
-   #   extra_fields: no
    #   exact_order: no
-   #   extra_records: yes
  incremental:
    bypass_reason: "This connector does not implement incremental sync"
  # TODO uncomment this block if your connector implements incremental sync:
diff --git a/airbyte-integrations/connectors/source-rki-covid/acceptance-test-config.yml b/airbyte-integrations/connectors/source-rki-covid/acceptance-test-config.yml
index 155c872a250e..40cc2c4f3b3f 100644
--- a/airbyte-integrations/connectors/source-rki-covid/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-rki-covid/acceptance-test-config.yml
@@ -22,9 +22,7 @@ tests:
    # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file
    # expect_records:
    #   path: "integration_tests/expected_records.jsonl"
-   #   extra_fields: no
    #   exact_order: no
-   #   extra_records: yes
  incremental: # TODO
    - config_path: "secrets/config.json"
      configured_catalog_path: "integration_tests/configured_catalog.json"
diff --git a/airbyte-integrations/connectors/source-salesforce/acceptance-test-config.yml b/airbyte-integrations/connectors/source-salesforce/acceptance-test-config.yml
index a78ff7a81fc8..9c5bb5ff9e9f 100644
--- a/airbyte-integrations/connectors/source-salesforce/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-salesforce/acceptance-test-config.yml
@@ -33,12 +33,6 @@ acceptance_tests:
            bypass_reason: "impossible to fill the stream with data because it is an organic traffic"
          - name: "Describe"
            bypass_reason: "Data is not permanent"
-       ignored_fields:
-         accounts:
-           - name: LastViewedDate
-             bypass_reason: The fields is being updated after any manipulations with account
-           - name: LastReferencedDate
-             bypass_reason: The fields is being updated after any manipulations with account
        fail_on_extra_columns: false
        timeout_seconds: 7200
  incremental:
diff --git a/airbyte-integrations/connectors/source-salesloft/acceptance-test-config.yml b/airbyte-integrations/connectors/source-salesloft/acceptance-test-config.yml
index fa9271151623..61ef49b481cc 100644
--- a/airbyte-integrations/connectors/source-salesloft/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-salesloft/acceptance-test-config.yml
@@ -4,16 +4,6 @@ acceptance_tests:
      - config_path: secrets/config.json
        expect_records:
          path: integration_tests/expected_records.jsonl
-       ignored_fields:
-         actions:
-           - name: updated_at
-             bypass_reason: auto-updated by provider
-         people:
-           - name: locale_utc_offset
-             bypass_reason: volatile data
-         users:
-           - name: locale_utc_offset
-             bypass_reason: volatile data
        fail_on_extra_columns: false
        empty_streams:
          - name: call_data_records
diff --git a/airbyte-integrations/connectors/source-sap-fieldglass/acceptance-test-config.yml b/airbyte-integrations/connectors/source-sap-fieldglass/acceptance-test-config.yml
index c0a0bb7a6020..71f7be95fb3e 100644
--- a/airbyte-integrations/connectors/source-sap-fieldglass/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-sap-fieldglass/acceptance-test-config.yml
@@ -22,9 +22,7 @@ acceptance_tests:
    # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file
    # expect_records:
    #   path: "integration_tests/expected_records.jsonl"
-   #   extra_fields: no
    #   exact_order: no
-   #   extra_records: yes
  incremental:
    bypass_reason: "This connector does not implement incremental sync"
  # TODO uncomment this block if your connector implements incremental sync:
diff --git a/airbyte-integrations/connectors/source-scaffold-source-http/acceptance-test-config.yml b/airbyte-integrations/connectors/source-scaffold-source-http/acceptance-test-config.yml
index 3d73e0c2b14e..6299671da202 100644
--- a/airbyte-integrations/connectors/source-scaffold-source-http/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-scaffold-source-http/acceptance-test-config.yml
@@ -22,9 +22,7 @@ acceptance_tests:
    # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file
    # expect_records:
    #   path: "integration_tests/expected_records.jsonl"
-   #   extra_fields: no
    #   exact_order: no
-   #   extra_records: yes
  incremental:
    bypass_reason: "This connector does not implement incremental sync"
  # TODO uncomment this block if your connector implements incremental sync:
diff --git a/airbyte-integrations/connectors/source-scaffold-source-python/acceptance-test-config.yml b/airbyte-integrations/connectors/source-scaffold-source-python/acceptance-test-config.yml
index e31b63a31b08..80bf5971b2d8 100644
--- a/airbyte-integrations/connectors/source-scaffold-source-python/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-scaffold-source-python/acceptance-test-config.yml
@@ -22,9 +22,7 @@ acceptance_tests:
    # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file
    # expect_records:
    #   path: "integration_tests/expected_records.jsonl"
-   #   extra_fields: no
    #   exact_order: no
-   #   extra_records: yes
  incremental:
    bypass_reason: "This connector does not implement incremental sync"
  # TODO uncomment this block if your connector implements incremental sync:
diff --git a/airbyte-integrations/connectors/source-secoda/acceptance-test-config.yml b/airbyte-integrations/connectors/source-secoda/acceptance-test-config.yml
index dacad737f3f0..593abe379ada 100644
--- a/airbyte-integrations/connectors/source-secoda/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-secoda/acceptance-test-config.yml
@@ -18,9 +18,7 @@ tests:
    # # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file
    # # expect_records:
    # #   path: "integration_tests/expected_records.jsonl"
-   # #   extra_fields: no
    # #   exact_order: no
-   # #   extra_records: yes
  # full_refresh:
  #   - config_path: "secrets/config.json"
  #     configured_catalog_path: "integration_tests/configured_catalog.json"
diff --git a/airbyte-integrations/connectors/source-sendgrid/acceptance-test-config.yml b/airbyte-integrations/connectors/source-sendgrid/acceptance-test-config.yml
index 7de7e9cc8e79..1c5ad5c0a273 100644
--- a/airbyte-integrations/connectors/source-sendgrid/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-sendgrid/acceptance-test-config.yml
@@ -26,9 +26,7 @@ acceptance_tests:
      - config_path: "secrets/config.json"
        expect_records:
          path: "integration_tests/expected_records.jsonl"
-         extra_fields: no
          exact_order: no
-         extra_records: yes
        empty_streams:
          - name: spam_reports
            bypass_reason: "can not populate"
@@ -36,12 +34,6 @@ acceptance_tests:
            bypass_reason: "can not populate"
          - name: blocks
            bypass_reason: "can not populate"
-       ignored_fields:
-         segments:
-           - name: sample_updated_at
-             bypass_reason: "depend on current date"
-           - name: next_sample_update
-             bypass_reason: "depend on current date"
        fail_on_extra_columns: false
  incremental:
    tests:
diff --git a/airbyte-integrations/connectors/source-sendinblue/acceptance-test-config.yml b/airbyte-integrations/connectors/source-sendinblue/acceptance-test-config.yml
index 106bcca9d06f..7fc6f197051e 100644
--- a/airbyte-integrations/connectors/source-sendinblue/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-sendinblue/acceptance-test-config.yml
@@ -22,9 +22,7 @@ acceptance_tests:
    # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file
    # expect_records:
    #   path: "integration_tests/expected_records.jsonl"
-   #   extra_fields: no
    #   exact_order: no
-   #   extra_records: yes
  incremental:
    tests:
      - config_path: "secrets/config.json"
diff --git a/airbyte-integrations/connectors/source-senseforce/acceptance-test-config.yml b/airbyte-integrations/connectors/source-senseforce/acceptance-test-config.yml
index 91563b71ca52..695577836ee2 100644
--- a/airbyte-integrations/connectors/source-senseforce/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-senseforce/acceptance-test-config.yml
@@ -22,9 +22,7 @@ acceptance_tests:
    # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file
    # expect_records:
    #   path: "integration_tests/expected_records.jsonl"
-   #   extra_fields: no
    #   exact_order: no
-   #   extra_records: yes
  incremental:
    tests:
      - config_path: "secrets/config.json"
diff --git a/airbyte-integrations/connectors/source-sentry/acceptance-test-config.yml b/airbyte-integrations/connectors/source-sentry/acceptance-test-config.yml
index c1bbe22949d7..e694c493e7c5 100644
--- a/airbyte-integrations/connectors/source-sentry/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-sentry/acceptance-test-config.yml
@@ -10,25 +10,6 @@ acceptance_tests:
        timeout_seconds: 1200
        expect_records:
          path: "integration_tests/expected_records.jsonl"
-       ignored_fields:
-         project_detail:
-           - name: access
-             bypass_reason: "Order access return randomly"
-           - name: features
-             bypass_reason: "Order features return randomly"
-           - name: options
-             bypass_reason: "Order options return randomly"
-           - name: organization/features
-             bypass_reason: "Order features return randomly"
-           - name: plugins/*/features
-             bypass_reason: "Order features return randomly"
-         projects:
-           - name: access
-             bypass_reason: "Order access return randomly"
-           - name: features
-             bypass_reason: "Order features return randomly"
-           - name: organization/features
-             bypass_reason: "Order features return randomly"
  connection:
    tests:
      - config_path: secrets/config.json
diff --git a/airbyte-integrations/connectors/source-shopify/acceptance-test-config.yml b/airbyte-integrations/connectors/source-shopify/acceptance-test-config.yml
index c38d1f41afaa..f1bc305b3d34 100644
--- a/airbyte-integrations/connectors/source-shopify/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-shopify/acceptance-test-config.yml
@@ -42,29 +42,6 @@ acceptance_tests:
            bypass_reason: The stream is not available for our sandbox.
          - name: disputes
            bypass_reason: The stream requires real purchases to fill in the data.
-       ignored_fields:
-         products:
-           - name: variants/*/updated_at
-             bypass_reason: Value can change as the account data is not frozen
-           - name: image/src
-             bypass_reason: May contain dynamically changed URL params
-           - name: image/updated_at
-             bypass_reason: Value can change as the account data is not frozen
-           - name: images/*/src
-             bypass_reason: May contain dynamically changed URL params
-           - name: images/*/updated_at
-             bypass_reason: Value can change as the account data is not frozen
-         products_graph_ql:
-           - name: onlineStorePreviewUrl
-             bypass_reason: Autogenerated floating URL values
-         product_variants:
-           - name: updated_at
-             bypass_reason: Value can change as the account data is not frozen
-         product_images:
-           - name: src
-             bypass_reason: May contain dynamically changed URL params
-           - name: updated_at
-             bypass_reason: Value can change as the account data is not frozen
      - config_path: "secrets/config.json"
        timeout_seconds: 4800
        expect_records:
@@ -78,29 +55,6 @@ acceptance_tests:
            bypass_reason: The stream is not available for our sandbox.
          - name: disputes
            bypass_reason: The stream requires real purchases to fill in the data.
- ignored_fields: - products: - - name: variants/*/updated_at - bypass_reason: Value can change as the account data is not frozen - - name: image/src - bypass_reason: May contain dynamically changed URL params - - name: image/updated_at - bypass_reason: Value can change as the account data is not frozen - - name: images/*/src - bypass_reason: May contain dynamically changed URL params - - name: images/*/updated_at - bypass_reason: Value can change as the account data is not frozen - products_graph_ql: - - name: onlineStorePreviewUrl - bypass_reason: Autogenerated floating URL values - product_variants: - - name: updated_at - bypass_reason: Value can change as the account data is not frozen - product_images: - - name: src - bypass_reason: May contain dynamically changed URL params - - name: updated_at - bypass_reason: Value can change as the account data is not frozen incremental: tests: - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-shortio/acceptance-test-config.yml b/airbyte-integrations/connectors/source-shortio/acceptance-test-config.yml index c08152adef62..2ae2306b3eb1 100644 --- a/airbyte-integrations/connectors/source-shortio/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-shortio/acceptance-test-config.yml @@ -23,9 +23,7 @@ acceptance_tests: bypass_reason: "Sandbox account cannot seed the stream" expect_records: path: "integration_tests/expected_records.jsonl" - extra_fields: no exact_order: no - extra_records: yes incremental: # bypass_reason: "This connector does not implement incremental sync" tests: diff --git a/airbyte-integrations/connectors/source-slack/acceptance-test-config.yml b/airbyte-integrations/connectors/source-slack/acceptance-test-config.yml index b94b7cf70bc0..2fd1ba9af04b 100644 --- a/airbyte-integrations/connectors/source-slack/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-slack/acceptance-test-config.yml @@ -26,10 +26,6 @@ acceptance_tests: expect_records: path: "integration_tests/expected_records.jsonl" timeout_seconds: 4800 - ignored_fields: - channels: - - name: updated - bypass_reason: Value can change while interacting with data full_refresh: tests: - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-smaily/acceptance-test-config.yml b/airbyte-integrations/connectors/source-smaily/acceptance-test-config.yml index 16c13b52f7f4..5d1728e19e06 100644 --- a/airbyte-integrations/connectors/source-smaily/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-smaily/acceptance-test-config.yml @@ -22,9 +22,7 @@ acceptance_tests: # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file # expect_records: # path: "integration_tests/expected_records.jsonl" - # extra_fields: no # exact_order: no - # extra_records: yes incremental: bypass_reason: "This connector does not implement incremental sync" # TODO uncomment this block this block if your connector implements incremental sync: diff --git a/airbyte-integrations/connectors/source-smartengage/acceptance-test-config.yml b/airbyte-integrations/connectors/source-smartengage/acceptance-test-config.yml index 0242ea1cf621..967995af3636 100644 --- a/airbyte-integrations/connectors/source-smartengage/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-smartengage/acceptance-test-config.yml @@ -22,9 +22,7 @@ acceptance_tests: # TODO uncomment this block to specify that the tests should 
assert the connector outputs the records provided in the input file a file # expect_records: # path: "integration_tests/expected_records.jsonl" - # extra_fields: no # exact_order: no - # extra_records: yes incremental: bypass_reason: "This connector does not implement incremental sync" # TODO uncomment this block this block if your connector implements incremental sync: diff --git a/airbyte-integrations/connectors/source-smartsheets/acceptance-test-config.yml b/airbyte-integrations/connectors/source-smartsheets/acceptance-test-config.yml index 4c19a4b688e3..7bc0533a3f13 100644 --- a/airbyte-integrations/connectors/source-smartsheets/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-smartsheets/acceptance-test-config.yml @@ -19,9 +19,7 @@ tests: empty_streams: [] expect_records: path: "integration_tests/expected_records.jsonl" - extra_fields: yes exact_order: yes - extra_records: no full_refresh: - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-snapchat-marketing/acceptance-test-config.yml b/airbyte-integrations/connectors/source-snapchat-marketing/acceptance-test-config.yml index dc72aecef099..fcba2949d1d4 100644 --- a/airbyte-integrations/connectors/source-snapchat-marketing/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-snapchat-marketing/acceptance-test-config.yml @@ -20,9 +20,7 @@ acceptance_tests: - config_path: "secrets/config.json" expect_records: path: "integration_tests/expected_records.jsonl" - extra_fields: no exact_order: no - extra_records: yes empty_streams: - name: "adaccounts_stats_lifetime" bypass_reason: "The data changes from sync to sync" diff --git a/airbyte-integrations/connectors/source-sonar-cloud/acceptance-test-config.yml b/airbyte-integrations/connectors/source-sonar-cloud/acceptance-test-config.yml index f8b18d7272fa..6106a27cb7fd 100644 --- a/airbyte-integrations/connectors/source-sonar-cloud/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-sonar-cloud/acceptance-test-config.yml @@ -22,9 +22,7 @@ acceptance_tests: # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file # expect_records: # path: "integration_tests/expected_records.jsonl" - # extra_fields: no # exact_order: no - # extra_records: yes incremental: bypass_reason: "This connector does not implement incremental sync" # TODO uncomment this block this block if your connector implements incremental sync: diff --git a/airbyte-integrations/connectors/source-square/acceptance-test-config.yml b/airbyte-integrations/connectors/source-square/acceptance-test-config.yml index 9ea5ef9f749e..5b9b6aaa6e70 100644 --- a/airbyte-integrations/connectors/source-square/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-square/acceptance-test-config.yml @@ -32,17 +32,6 @@ acceptance_tests: bypass_reason: "Not able to fill stream" expect_records: path: "integration_tests/expected_records.jsonl" - ignored_fields: - items: - - name: version - bypass_reason: "Floating data" - - name: updated_at - bypass_reason: "Floating data" - categories: - - name: version - bypass_reason: "Floating data" - - name: updated_at - bypass_reason: "Floating data" fail_on_extra_columns: false incremental: tests: diff --git a/airbyte-integrations/connectors/source-statuspage/acceptance-test-config.yml 
b/airbyte-integrations/connectors/source-statuspage/acceptance-test-config.yml index 6d8492baa303..38dad91b8b24 100644 --- a/airbyte-integrations/connectors/source-statuspage/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-statuspage/acceptance-test-config.yml @@ -27,9 +27,7 @@ acceptance_tests: # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file # expect_records: # path: "integration_tests/expected_records.jsonl" - # extra_fields: no # exact_order: no - # extra_records: yes incremental: bypass_reason: "This connector does not implement incremental sync" # TODO uncomment this block this block if your connector implements incremental sync: diff --git a/airbyte-integrations/connectors/source-stripe/acceptance-test-config.yml b/airbyte-integrations/connectors/source-stripe/acceptance-test-config.yml index af2bc7f25dfe..b58ac0b09bc5 100644 --- a/airbyte-integrations/connectors/source-stripe/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-stripe/acceptance-test-config.yml @@ -49,52 +49,6 @@ acceptance_tests: bypass_reason: "Data expires every 30 days." expect_records: path: "integration_tests/expected_records.jsonl" - ignored_fields: - invoices: - - name: invoice_pdf - bypass_reason: "URL changes upon each request for privacy/security" - - name: hosted_invoice_url - bypass_reason: "URL changes upon each request for privacy/security" - - name: lines/data/*/margins - bypass_reason: "API randomly returns this field" - charges: - - name: receipt_url - bypass_reason: "URL changes upon each request for privacy/security" - - name: receipt_number - bypass_reason: "Fast changing data" - - name: payment_method_details - bypass_reason: "Randomly added network_token field to the record" - - name: source/wallet - bypass_reason: "Randomly added network_token field to the record" - payment_intents: - - name: receipt_url - bypass_reason: "URL changes upon each request for privacy/security" - - name: charges/data/*/receipt_url - bypass_reason: "URL changes upon each request for privacy/security" - - name: charges/data/*/payment_method_details - bypass_reason: "Randomly added network_token field to the record" - credit_notes: - - name: pdf - bypass_reason: "URL changes upon each request for privacy/security" - files: - - name: links/data - bypass_reason: "Order of links/data elements changes on every request" - usage_records: - - name: id - bypass_reason: "id field is randomly generated" - invoice_line_items: - - name: margins - bypass_reason: "API randomly returns this field" - subscriptions: - - name: current_period_start - bypass_reason: "Frequently changing data" - - name: current_period_end - bypass_reason: "Frequently changing data" - - name: latest_invoice - bypass_reason: "Frequently changing data" - customers: - - name: next_invoice_sequence - bypass_reason: "Frequently changing data" incremental: tests: - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-survey-sparrow/acceptance-test-config.yml b/airbyte-integrations/connectors/source-survey-sparrow/acceptance-test-config.yml index aca7c68a633f..cf74f95ed769 100644 --- a/airbyte-integrations/connectors/source-survey-sparrow/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-survey-sparrow/acceptance-test-config.yml @@ -18,9 +18,7 @@ tests: # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input 
file a file # expect_records: # path: "integration_tests/expected_records.jsonl" - # extra_fields: no # exact_order: no - # extra_records: yes full_refresh: - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-tiktok-marketing/acceptance-test-config.yml b/airbyte-integrations/connectors/source-tiktok-marketing/acceptance-test-config.yml index f1be50fe8cff..c0fae40ebb1b 100644 --- a/airbyte-integrations/connectors/source-tiktok-marketing/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-tiktok-marketing/acceptance-test-config.yml @@ -42,24 +42,8 @@ acceptance_tests: - config_path: "secrets/config.json" expect_records: path: "integration_tests/expected_records.jsonl" - extra_fields: no exact_order: no - extra_records: yes timeout_seconds: 1200 - ignored_fields: - ads: - - name: "profile_image_url" - bypass_reason: "Volatile URLs" - creative_assets_images: - - name: "image_url" - bypass_reason: "Volatile URL params" - creative_assets_videos: - - name: "preview_url" - bypass_reason: "Volatile URL params" - - name: "video_cover_url" - bypass_reason: "Volatile URL params" - - name: "preview_url_expire_time" - bypass_reason: "Changes over time" empty_streams: - name: ads_reports_hourly bypass_reason: "Tested with daily granularity." @@ -79,24 +63,8 @@ acceptance_tests: - config_path: "secrets/prod_config_with_day_granularity.json" expect_records: path: "integration_tests/expected_records2.jsonl" - extra_fields: no exact_order: no - extra_records: yes timeout_seconds: 1200 - ignored_fields: - ads: - - name: "profile_image_url" - bypass_reason: "Volatile URLs" - creative_assets_images: - - name: "image_url" - bypass_reason: "Volatile URL params" - creative_assets_videos: - - name: "preview_url" - bypass_reason: "Volatile URL params" - - name: "video_cover_url" - bypass_reason: "Volatile URL params" - - name: "preview_url_expire_time" - bypass_reason: "Changes over time" empty_streams: - name: ad_groups bypass_reason: "This stream is tested on the new style config." 
diff --git a/airbyte-integrations/connectors/source-tmdb/acceptance-test-config.yml b/airbyte-integrations/connectors/source-tmdb/acceptance-test-config.yml index f0fe7d7f755d..f408a7e6ad9e 100644 --- a/airbyte-integrations/connectors/source-tmdb/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-tmdb/acceptance-test-config.yml @@ -22,9 +22,7 @@ acceptance_tests: # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file # expect_records: # path: "integration_tests/expected_records.jsonl" - # extra_fields: no # exact_order: no - # extra_records: yes incremental: bypass_reason: "This connector does not implement incremental sync" # TODO uncomment this block this block if your connector implements incremental sync: diff --git a/airbyte-integrations/connectors/source-todoist/acceptance-test-config.yml b/airbyte-integrations/connectors/source-todoist/acceptance-test-config.yml index 25c0dc8dc1d4..0fa80e25e911 100644 --- a/airbyte-integrations/connectors/source-todoist/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-todoist/acceptance-test-config.yml @@ -22,9 +22,7 @@ acceptance_tests: # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file # expect_records: # path: "integration_tests/expected_records.jsonl" - # extra_fields: no # exact_order: no - # extra_records: yes incremental: bypass_reason: "This connector does not implement incremental sync" # TODO uncomment this block this block if your connector implements incremental sync: diff --git a/airbyte-integrations/connectors/source-toggl/acceptance-test-config.yml b/airbyte-integrations/connectors/source-toggl/acceptance-test-config.yml index 402bace4f222..9146d116b46c 100644 --- a/airbyte-integrations/connectors/source-toggl/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-toggl/acceptance-test-config.yml @@ -22,9 +22,7 @@ acceptance_tests: # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file # expect_records: # path: "integration_tests/expected_records.jsonl" - # extra_fields: no # exact_order: no - # extra_records: yes incremental: bypass_reason: "This connector does not implement incremental sync" # TODO uncomment this block this block if your connector implements incremental sync: diff --git a/airbyte-integrations/connectors/source-trello/acceptance-test-config.yml b/airbyte-integrations/connectors/source-trello/acceptance-test-config.yml index 7be14f26455a..11c1c49b7f68 100644 --- a/airbyte-integrations/connectors/source-trello/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-trello/acceptance-test-config.yml @@ -24,9 +24,7 @@ acceptance_tests: - config_path: "secrets/config.json" expect_records: path: "integration_tests/expected_records.jsonl" - extra_fields: no exact_order: no - extra_records: yes fail_on_extra_columns: false incremental: tests: diff --git a/airbyte-integrations/connectors/source-twilio/acceptance-test-config.yml b/airbyte-integrations/connectors/source-twilio/acceptance-test-config.yml index adbf72dc66c2..636c50d08724 100644 --- a/airbyte-integrations/connectors/source-twilio/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-twilio/acceptance-test-config.yml @@ -18,12 +18,6 @@ acceptance_tests: - config_path: "secrets/config.json" expect_records: path: 
"integration_tests/expected_records.jsonl" - ignored_fields: - transcriptions: - - name: "date_created" - bypass_reason: "Floating data" - - name: "date_updated" - bypass_reason: "Floating data" empty_streams: # TODO: SAT should be able to exclude expected records validation on the stream > field level # to avoid exposures such as `auth_token` diff --git a/airbyte-integrations/connectors/source-vantage/acceptance-test-config.yml b/airbyte-integrations/connectors/source-vantage/acceptance-test-config.yml index 9e4edd372fd6..a98aeb5fe6fd 100644 --- a/airbyte-integrations/connectors/source-vantage/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-vantage/acceptance-test-config.yml @@ -24,9 +24,7 @@ acceptance_tests: # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file # expect_records: # path: "integration_tests/expected_records.jsonl" - # extra_fields: no # exact_order: no - # extra_records: yes incremental: bypass_reason: "This connector does not implement incremental sync" # TODO uncomment this block this block if your connector implements incremental sync: diff --git a/airbyte-integrations/connectors/source-waiteraid/acceptance-test-config.yml b/airbyte-integrations/connectors/source-waiteraid/acceptance-test-config.yml index 036671600da7..cba38ecbd6cb 100644 --- a/airbyte-integrations/connectors/source-waiteraid/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-waiteraid/acceptance-test-config.yml @@ -18,9 +18,7 @@ tests: # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file # expect_records: # path: "integration_tests/expected_records.jsonl" - # extra_fields: no # exact_order: no - # extra_records: yes #incremental: # TODO if your connector does not implement incremental sync, remove this block # - config_path: "secrets/config.json" # configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-wikipedia-pageviews/acceptance-test-config.yml b/airbyte-integrations/connectors/source-wikipedia-pageviews/acceptance-test-config.yml index f74a18abdd88..7a02582a9e98 100755 --- a/airbyte-integrations/connectors/source-wikipedia-pageviews/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-wikipedia-pageviews/acceptance-test-config.yml @@ -18,9 +18,7 @@ tests: # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file # expect_records: # path: "integration_tests/expected_records.jsonl" - # extra_fields: no # exact_order: no - # extra_records: yes # incremental: # TODO if your connector does not implement incremental sync, remove this block # - config_path: "secrets/config.json" # configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-wrike/acceptance-test-config.yml b/airbyte-integrations/connectors/source-wrike/acceptance-test-config.yml index 05e94454ef6a..7dbf39f61f52 100644 --- a/airbyte-integrations/connectors/source-wrike/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-wrike/acceptance-test-config.yml @@ -26,9 +26,7 @@ acceptance_tests: # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file # expect_records: # path: "integration_tests/expected_records.jsonl" - # 
extra_fields: no # exact_order: no - # extra_records: yes incremental: bypass_reason: "This connector does not implement incremental sync" # TODO uncomment this block this block if your connector implements incremental sync: diff --git a/airbyte-integrations/connectors/source-xero/acceptance-test-config.yml b/airbyte-integrations/connectors/source-xero/acceptance-test-config.yml index c9e52f62abbf..a73c006ddf51 100644 --- a/airbyte-integrations/connectors/source-xero/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-xero/acceptance-test-config.yml @@ -24,9 +24,7 @@ acceptance_tests: - config_path: "secrets/config.json" expect_records: path: "integration_tests/expected_records.jsonl" - extra_fields: no exact_order: no - extra_records: yes empty_streams: - name: "bank_transfers" bypass_reason: "Empty stream, further investigation is required" @@ -43,15 +41,6 @@ acceptance_tests: - name: "tracking_categories" bypass_reason: "Empty stream, further investigation is required" fail_on_extra_columns: false - ignored_fields: - accounts: - - name: "TaxType" - bypass_reason: "empty may change from empty string to none" - items: - - name: "SalesDetails" - bypass_reason: "empty may change from empty string to none" - - name: "PurchaseDetails" - bypass_reason: "empty may change from empty string to none" incremental: tests: - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-yandex-metrica/acceptance-test-config.yml b/airbyte-integrations/connectors/source-yandex-metrica/acceptance-test-config.yml index a0aa317a1a8d..702a45619651 100644 --- a/airbyte-integrations/connectors/source-yandex-metrica/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-yandex-metrica/acceptance-test-config.yml @@ -16,9 +16,7 @@ tests: configured_catalog_path: "integration_tests/configured_catalog.json" expect_records: path: "integration_tests/expected_records.jsonl" - extra_fields: no exact_order: no - extra_records: yes timeout_seconds: 3600 full_refresh: - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-yotpo/acceptance-test-config.yml b/airbyte-integrations/connectors/source-yotpo/acceptance-test-config.yml index c06e358431a7..bfcda5337791 100644 --- a/airbyte-integrations/connectors/source-yotpo/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-yotpo/acceptance-test-config.yml @@ -29,9 +29,7 @@ acceptance_tests: bypass_reason: "Sandbox account cannot seed the endpoint" expect_records: path: "integration_tests/expected_records.jsonl" - extra_fields: no exact_order: no - extra_records: yes incremental: tests: - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-younium/acceptance-test-config.yml b/airbyte-integrations/connectors/source-younium/acceptance-test-config.yml index a8a1f5ebcfb4..99312727f37e 100644 --- a/airbyte-integrations/connectors/source-younium/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-younium/acceptance-test-config.yml @@ -22,9 +22,7 @@ acceptance_tests: # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file # expect_records: # path: "integration_tests/expected_records.jsonl" - # extra_fields: no # exact_order: no - # extra_records: yes incremental: bypass_reason: "This connector does not implement incremental sync" # TODO uncomment this block this block if your connector implements incremental sync: diff --git 
a/airbyte-integrations/connectors/source-zapier-supported-storage/acceptance-test-config.yml b/airbyte-integrations/connectors/source-zapier-supported-storage/acceptance-test-config.yml index b49b2366ae3e..81b739e57080 100644 --- a/airbyte-integrations/connectors/source-zapier-supported-storage/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-zapier-supported-storage/acceptance-test-config.yml @@ -18,9 +18,7 @@ tests: # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file # expect_records: # path: "integration_tests/expected_records.jsonl" - # extra_fields: no # exact_order: no - # extra_records: yes # incremental: # TODO if your connector does not implement incremental sync, remove this block # - config_path: "secrets/config.json" # configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-zendesk-support/acceptance-test-config.yml b/airbyte-integrations/connectors/source-zendesk-support/acceptance-test-config.yml index fb956e46e31c..8a549caf39ad 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-zendesk-support/acceptance-test-config.yml @@ -28,9 +28,7 @@ acceptance_tests: timeout_seconds: 2400 expect_records: path: "integration_tests/expected_records.jsonl" - extra_fields: no exact_order: no - extra_records: yes empty_streams: - name: "post_comments" bypass_reason: "not available in current subscription plan" diff --git a/airbyte-integrations/connectors/source-zendesk-talk/acceptance-test-config.yml b/airbyte-integrations/connectors/source-zendesk-talk/acceptance-test-config.yml index 29f7464fc66c..05b42a969d3e 100644 --- a/airbyte-integrations/connectors/source-zendesk-talk/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-zendesk-talk/acceptance-test-config.yml @@ -28,19 +28,6 @@ acceptance_tests: - config_path: "secrets/config.json" expect_records: path: "integration_tests/expected_records.jsonl" - ignored_fields: - greetings: - - name: audio_url - bypass_reason: In url present auto generated hash - account_overview: - - name: current_timestamp - bypass_reason: Depend on current time - agents_overview: - - name: current_timestamp - bypass_reason: Depend on current time - current_queue_activity: - - name: current_timestamp - bypass_reason: Depend on current time fail_on_extra_columns: false incremental: tests: diff --git a/airbyte-integrations/connectors/source-zenloop/acceptance-test-config.yml b/airbyte-integrations/connectors/source-zenloop/acceptance-test-config.yml index de6f12368cf2..4c74e29d80ac 100644 --- a/airbyte-integrations/connectors/source-zenloop/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-zenloop/acceptance-test-config.yml @@ -16,9 +16,7 @@ tests: configured_catalog_path: "integration_tests/configured_catalog.json" expect_records: path: "integration_tests/expected_records.jsonl" - extra_fields: no exact_order: no - extra_records: yes timeout_seconds: 2400 incremental: - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-zoho-crm/acceptance-test-config.yml b/airbyte-integrations/connectors/source-zoho-crm/acceptance-test-config.yml index 9da7069eec0f..3d5a4aca53ed 100644 --- a/airbyte-integrations/connectors/source-zoho-crm/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-zoho-crm/acceptance-test-config.yml @@ 
-20,13 +20,7 @@ acceptance_tests: - config_path: "secrets/config.json" expect_records: path: "integration_tests/expected_records.jsonl" - extra_fields: no exact_order: no - extra_records: yes - ignored_fields: - incremental_contacts_zoho_crm_stream: - - name: Contact_Auto_Number - bypass_reason: "depend on changing data" empty_streams: - name: incremental_notes_zoho_crm_stream bypass_reason: "no data" From f0f7a9813b46a29e345274212d5781c4ded1430c Mon Sep 17 00:00:00 2001 From: Edward Gao Date: Wed, 28 Feb 2024 14:41:18 -0800 Subject: [PATCH 029/172] DV2 TyperDeduper: Extract migrations to separate method (#35376) Signed-off-by: Gireesh Sreepathi Co-authored-by: Edward Gao --- airbyte-cdk/java/airbyte-cdk/README.md | 1 + .../src/main/resources/version.properties | 2 +- .../jdbc/JdbcBufferedConsumerFactory.java | 3 +- .../staging/GeneralStagingFunctions.java | 8 +- .../typing_deduping/DefaultTyperDeduper.java | 23 +++--- .../NoOpTyperDeduperWithV1V2Migrations.java | 41 ++-------- .../typing_deduping/NoopTyperDeduper.java | 12 ++- .../typing_deduping/TyperDeduper.java | 40 +++++++++- .../typing_deduping/TyperDeduperUtil.kt | 79 ++++++++++++++++--- .../DefaultTyperDeduperTest.java | 34 +++++--- 10 files changed, 169 insertions(+), 74 deletions(-) diff --git a/airbyte-cdk/java/airbyte-cdk/README.md b/airbyte-cdk/java/airbyte-cdk/README.md index f05d010ea701..ed366673ecff 100644 --- a/airbyte-cdk/java/airbyte-cdk/README.md +++ b/airbyte-cdk/java/airbyte-cdk/README.md @@ -166,6 +166,7 @@ MavenLocal debugging steps: | Version | Date | Pull Request | Subject | |:--------|:-----------|:-----------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 0.23.7 | 2024-02-28 | [\#35376](https://github.com/airbytehq/airbyte/pull/35376) | DV2 TyperDeduper: Extract migrations to separate method | | 0.23.6 | 2024-02-26 | [\#35647](https://github.com/airbytehq/airbyte/pull/35647) | Add a getNamespace into TestDataHolder | | 0.23.5 | 2024-02-26 | [\#35512](https://github.com/airbytehq/airbyte/pull/35512) | Remove @DisplayName from all CDK tests. | | 0.23.4 | 2024-02-26 | [\#35507](https://github.com/airbytehq/airbyte/pull/35507) | Add more logs into TestDatabase. 
| diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties index 8aef173dc819..c40a8721d426 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties @@ -1 +1 @@ -version=0.23.6 +version=0.23.7 diff --git a/airbyte-cdk/java/airbyte-cdk/db-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/JdbcBufferedConsumerFactory.java b/airbyte-cdk/java/airbyte-cdk/db-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/JdbcBufferedConsumerFactory.java index d0d488c71284..8fd513423b0a 100644 --- a/airbyte-cdk/java/airbyte-cdk/db-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/JdbcBufferedConsumerFactory.java +++ b/airbyte-cdk/java/airbyte-cdk/db-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/JdbcBufferedConsumerFactory.java @@ -161,7 +161,7 @@ private static OnStartFunction onStartFunction(final JdbcDatabase database, final Collection writeConfigs, final TyperDeduper typerDeduper) { return () -> { - typerDeduper.prepareTables(); + typerDeduper.prepareSchemasAndRunMigrations(); LOGGER.info("Preparing raw tables in destination started for {} streams", writeConfigs.size()); final List queryList = new ArrayList<>(); for (final WriteConfig writeConfig : writeConfigs) { @@ -181,6 +181,7 @@ private static OnStartFunction onStartFunction(final JdbcDatabase database, } sqlOperations.executeTransaction(database, queryList); LOGGER.info("Preparing raw tables in destination completed."); + typerDeduper.prepareFinalTables(); }; } diff --git a/airbyte-cdk/java/airbyte-cdk/db-destinations/src/main/java/io/airbyte/cdk/integrations/destination/staging/GeneralStagingFunctions.java b/airbyte-cdk/java/airbyte-cdk/db-destinations/src/main/java/io/airbyte/cdk/integrations/destination/staging/GeneralStagingFunctions.java index 0eef0c5343bf..ef88ca3743c9 100644 --- a/airbyte-cdk/java/airbyte-cdk/db-destinations/src/main/java/io/airbyte/cdk/integrations/destination/staging/GeneralStagingFunctions.java +++ b/airbyte-cdk/java/airbyte-cdk/db-destinations/src/main/java/io/airbyte/cdk/integrations/destination/staging/GeneralStagingFunctions.java @@ -39,7 +39,10 @@ public static OnStartFunction onStartFunction(final JdbcDatabase database, final TyperDeduper typerDeduper) { return () -> { log.info("Preparing raw tables in destination started for {} streams", writeConfigs.size()); - typerDeduper.prepareTables(); + + typerDeduper.prepareSchemasAndRunMigrations(); + + // Create raw tables final List queryList = new ArrayList<>(); for (final WriteConfig writeConfig : writeConfigs) { final String schema = writeConfig.getOutputSchemaName(); @@ -69,6 +72,9 @@ public static OnStartFunction onStartFunction(final JdbcDatabase database, log.info("Preparing staging area in destination completed for schema {} stream {}", schema, stream); } + + typerDeduper.prepareFinalTables(); + log.info("Executing finalization of tables."); stagingOperations.executeTransaction(database, queryList); }; diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DefaultTyperDeduper.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DefaultTyperDeduper.java index ec49be79cb57..2dbd9f1e8498 100644 --- 
a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DefaultTyperDeduper.java +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DefaultTyperDeduper.java @@ -8,7 +8,6 @@ import static io.airbyte.cdk.integrations.util.ConnectorExceptionUtil.getResultsOrLogAndThrowFirst; import static io.airbyte.integrations.base.destination.typing_deduping.FutureUtils.*; import static io.airbyte.integrations.base.destination.typing_deduping.FutureUtils.reduceExceptions; -import static io.airbyte.integrations.base.destination.typing_deduping.TyperDeduperUtilKt.prepareAllSchemas; import static java.util.Collections.singleton; import io.airbyte.cdk.integrations.destination.StreamSyncSummary; @@ -43,7 +42,7 @@ *
<p>
 * In a typical sync, destinations should call the methods:
 * <ol>
- * <li>{@link #prepareTables()} once at the start of the sync</li>
+ * <li>{@link #prepareFinalTables()} once at the start of the sync</li>
 * <li>{@link #typeAndDedupe(String, String, boolean)} as needed throughout the sync</li>
 * <li>{@link #commitFinalTables()} once at the end of the sync</li>
 * </ol>
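A minimal sketch of the call ordering this list implies, matching how GeneralStagingFunctions.onStartFunction and JdbcBufferedConsumerFactory are rewired elsewhere in this patch; the onStart/duringSync/onClose hooks, the stream names, and the createRawTables() helper are hypothetical stand-ins for the destination-specific pieces:

    import io.airbyte.integrations.base.destination.typing_deduping.TyperDeduper;

    class SyncLifecycleSketch {

      void onStart(final TyperDeduper typerDeduper) throws Exception {
        // Set up schemas and run V1 -> V2 raw-table migrations first...
        typerDeduper.prepareSchemasAndRunMigrations();
        // ...then create the raw tables (destination-specific DDL,
        // e.g. the queryList built in onStartFunction)...
        createRawTables();
        // ...and only then prepare the final tables, since this step may run a
        // soft reset, which requires the raw tables to already exist.
        typerDeduper.prepareFinalTables();
      }

      void duringSync(final TyperDeduper typerDeduper) throws Exception {
        // As raw records are flushed for a stream (names are hypothetical):
        typerDeduper.typeAndDedupe("some_namespace", "some_stream", false);
      }

      void onClose(final TyperDeduper typerDeduper) throws Exception {
        // Swap any temporary final tables into place at the end of the sync.
        typerDeduper.commitFinalTables();
      }

      private void createRawTables() {
        // hypothetical: destination-specific raw-table creation
      }
    }

Splitting the old prepareTables() this way is what lets the raw-table DDL run between the migrations and the final-table preparation — exactly the ordering both onStart implementations above now enforce.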
@@ -104,27 +103,23 @@ public DefaultTyperDeduper(final SqlGenerator sqlGenerator, this(sqlGenerator, destinationHandler, parsedCatalog, v1V2Migrator, new NoopV2TableMigrator()); } - private void prepareSchemas(final ParsedCatalog parsedCatalog) throws Exception { - prepareAllSchemas(parsedCatalog, sqlGenerator, destinationHandler); + @Override + public void prepareSchemasAndRunMigrations() { + // Technically kind of weird to call this here, but it's the best place we have. + // Ideally, we'd create just airbyte_internal here, and defer creating the final table schemas + // until prepareFinalTables... but it doesn't really matter. + TyperDeduperUtil.prepareSchemas(sqlGenerator, destinationHandler, parsedCatalog); + TyperDeduperUtil.executeRawTableMigrations(executorService, sqlGenerator, destinationHandler, v1V2Migrator, v2TableMigrator, parsedCatalog); } @Override - public void prepareTables() throws Exception { + public void prepareFinalTables() throws Exception { if (overwriteStreamsWithTmpTable != null) { throw new IllegalStateException("Tables were already prepared."); } overwriteStreamsWithTmpTable = ConcurrentHashMap.newKeySet(); LOGGER.info("Preparing tables"); - // This is intentionally not done in parallel to avoid rate limits in some destinations. - prepareSchemas(parsedCatalog); - - // TODO: Either the migrations run the soft reset and create v2 tables or the actual prepare tables. - // unify the logic with current state of raw tables & final tables. This is done first before gather - // initial state to avoid recreating final tables later again. - final List> runMigrationsResult = - CompletableFutures.allOf(parsedCatalog.streams().stream().map(this::runMigrationsAsync).toList()).toCompletableFuture().join(); - getResultsOrLogAndThrowFirst("The following exceptions were thrown attempting to run migrations:\n", runMigrationsResult); final List initialStates = destinationHandler.gatherInitialState(parsedCatalog.streams()); final List> prepareTablesFutureResult = CompletableFutures.allOf( initialStates.stream().map(this::prepareTablesFuture).toList()).toCompletableFuture().join(); diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoOpTyperDeduperWithV1V2Migrations.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoOpTyperDeduperWithV1V2Migrations.java index f76bd2e07019..1d06b9a49b61 100644 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoOpTyperDeduperWithV1V2Migrations.java +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoOpTyperDeduperWithV1V2Migrations.java @@ -6,22 +6,14 @@ import static io.airbyte.cdk.integrations.base.IntegrationRunner.TYPE_AND_DEDUPE_THREAD_NAME; import static io.airbyte.integrations.base.destination.typing_deduping.FutureUtils.getCountOfTypeAndDedupeThreads; -import static io.airbyte.integrations.base.destination.typing_deduping.FutureUtils.reduceExceptions; -import static io.airbyte.integrations.base.destination.typing_deduping.TyperDeduperUtilKt.prepareAllSchemas; import io.airbyte.cdk.integrations.destination.StreamSyncSummary; import io.airbyte.protocol.models.v0.StreamDescriptor; -import java.util.HashSet; import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.concurrent.CompletableFuture; import 
java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.locks.Lock; -import kotlin.NotImplementedError; import lombok.extern.slf4j.Slf4j; -import org.apache.commons.lang3.NotImplementedException; import org.apache.commons.lang3.concurrent.BasicThreadFactory; /** @@ -54,31 +46,14 @@ public NoOpTyperDeduperWithV1V2Migrations(final SqlGenerator sqlGenerator, } @Override - public void prepareTables() throws Exception { - try { - log.info("Ensuring schemas exist for prepareTables with V1V2 migrations"); - prepareAllSchemas(parsedCatalog, sqlGenerator, destinationHandler); - final Set>> prepareTablesTasks = new HashSet<>(); - for (final StreamConfig stream : parsedCatalog.streams()) { - prepareTablesTasks.add(CompletableFuture.supplyAsync(() -> { - // Migrate the Raw Tables if this is the first v2 sync after a v1 sync - try { - log.info("Migrating V1->V2 for stream {}", stream.id()); - v1V2Migrator.migrateIfNecessary(sqlGenerator, destinationHandler, stream); - log.info("Migrating V2 legacy for stream {}", stream.id()); - v2TableMigrator.migrateIfNecessary(stream); - return Optional.empty(); - } catch (final Exception e) { - return Optional.of(e); - } - }, executorService)); - } - CompletableFuture.allOf(prepareTablesTasks.toArray(CompletableFuture[]::new)).join(); - reduceExceptions(prepareTablesTasks, "The following exceptions were thrown attempting to prepare tables:\n"); - } catch (NotImplementedError | NotImplementedException e) { - log.warn( - "Could not prepare schemas or tables because this is not implemented for this destination, this should not be required for this destination to succeed"); - } + public void prepareSchemasAndRunMigrations() { + TyperDeduperUtil.prepareSchemas(sqlGenerator, destinationHandler, parsedCatalog); + TyperDeduperUtil.executeRawTableMigrations(executorService, sqlGenerator, destinationHandler, v1V2Migrator, v2TableMigrator, parsedCatalog); + } + + @Override + public void prepareFinalTables() { + log.info("Skipping prepareFinalTables"); } @Override diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoopTyperDeduper.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoopTyperDeduper.java index af8529e3d2b2..6a312a72b515 100644 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoopTyperDeduper.java +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoopTyperDeduper.java @@ -11,10 +11,20 @@ import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.Lock; +/** + * This class should be used while upgrading a destination from V1 to V2. V2 destinations should use + * {@link NoOpTyperDeduperWithV1V2Migrations} for disabling T+D, because it correctly handles + * various migration operations. 
+ */ public class NoopTyperDeduper implements TyperDeduper { @Override - public void prepareTables() { + public void prepareSchemasAndRunMigrations() throws Exception { + + } + + @Override + public void prepareFinalTables() { } diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TyperDeduper.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TyperDeduper.java index 263c9a11742c..37d34643b720 100644 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TyperDeduper.java +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TyperDeduper.java @@ -9,15 +9,53 @@ import java.util.Map; import java.util.concurrent.locks.Lock; +/* + * This class wants to do three separate things, but not all of them actually happen here right now: + * * A migration runner, which handles any changes in raw tables (#prepareSchemasAndRawTables) * A + * raw table creator, which creates any missing raw tables (currently handled in e.g. + * GeneralStagingFunctions.onStartFunction, BigQueryStagingConsumerFactory.onStartFunction, etc.) * + * A T+D runner, which manages the final tables (#prepareFinalTables, #typeAndDedupe, etc.) + * + * These would be injectable to the relevant locations, so that we can have: * DV2 destinations with + * T+D enabled (i.e. all three objects instantiated for real) * DV2 destinations with T+D disabled + * (i.e. noop T+D runner but the other two objects for real) * DV1 destinations (i.e. all three + * objects as noop) + * + * Even more ideally, we'd create an instance per stream, instead of having one instance for the + * entire sync. This would massively simplify all the state contained in our implementations - see + * DefaultTyperDeduper's pile of Sets and Maps. + * + * Unfortunately, it's just a pain to inject these objects to everywhere they need to be, and we'd + * need to refactor part of the async framework on top of that. There's an obvious overlap with the + * async framework's onStart function... which we should deal with eventually. + */ public interface TyperDeduper { + /** + * Does two things: Set up the schemas for the sync (both airbyte_internal and final table schemas), + * and execute any raw table migrations. These migrations might include: Upgrading v1 raw tables to + * v2, adding a column to the raw tables, etc. In general, this method shouldn't actually create the + * raw tables; the only exception is in the V1 -> V2 migration. + *
<p>
+ * This method should be called BEFORE creating raw tables, because the V1V2 migration might create + * the raw tables. + *
<p>
+ * This method may affect the behavior of {@link #prepareFinalTables()}. For example, modifying a + * raw table may require us to run a soft reset. However, we should defer that soft reset until + * {@link #prepareFinalTables()}. + */ + void prepareSchemasAndRunMigrations() throws Exception; + /** * Create the tables that T+D will write to during the sync. In OVERWRITE mode, these might not be * the true final tables. Specifically, other than an initial sync (i.e. table does not exist, or is * empty) we write to a temporary final table, and swap it into the true final table at the end of * the sync. This is to prevent user downtime during a sync. + *
<p>
+ * This method should be called AFTER creating the raw tables, because it may run a soft reset + * (which requires the raw tables to exist). */ - void prepareTables() throws Exception; + void prepareFinalTables() throws Exception; /** * Suggest that we execute typing and deduping for a single stream (i.e. fetch new raw records into diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TyperDeduperUtil.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TyperDeduperUtil.kt index 8f56b1a81acb..59d829cb79b9 100644 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TyperDeduperUtil.kt +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TyperDeduperUtil.kt @@ -1,15 +1,72 @@ package io.airbyte.integrations.base.destination.typing_deduping +import com.google.common.collect.Streams +import io.airbyte.cdk.integrations.util.ConnectorExceptionUtil.getResultsOrLogAndThrowFirst +import io.airbyte.commons.concurrency.CompletableFutures +import java.util.* +import java.util.concurrent.CompletableFuture +import java.util.concurrent.CompletionStage +import java.util.concurrent.ExecutorService -/** - * Extracts all the "raw" and "final" schemas identified in the [parsedCatalog] and ensures they - * exist in the Destination Database. - */ -fun prepareAllSchemas(parsedCatalog: ParsedCatalog, sqlGenerator: SqlGenerator, destinationHandler: DestinationHandler) { - val rawSchema = parsedCatalog.streams.mapNotNull { it.id.rawNamespace } - val finalSchema = parsedCatalog.streams.mapNotNull { it.id.finalNamespace } - val createAllSchemasSql = rawSchema.union(finalSchema) - .map { sqlGenerator.createSchema(it) } - .toList() - destinationHandler.execute(Sql.concat(createAllSchemasSql)) + +class TyperDeduperUtil { + companion object { + + @JvmStatic + fun executeRawTableMigrations( + executorService: ExecutorService, + sqlGenerator: SqlGenerator, + destinationHandler: DestinationHandler, + v1V2Migrator: DestinationV1V2Migrator, + v2TableMigrator: V2TableMigrator, + parsedCatalog: ParsedCatalog + ) { + // TODO: Either the migrations run the soft reset and create v2 tables or the actual prepare tables. + // unify the logic + // with current state of raw tables & final tables. This is done first before gather initial state + // to avoid recreating + // final tables later again. + val runMigrationsResult = + CompletableFutures.allOf(parsedCatalog.streams().stream() + .map { streamConfig -> runMigrationsAsync(executorService, sqlGenerator, destinationHandler, v1V2Migrator, v2TableMigrator, streamConfig) } + .toList()).toCompletableFuture().join() + getResultsOrLogAndThrowFirst("The following exceptions were thrown attempting to run migrations:\n", runMigrationsResult) + } + + /** + * Extracts all the "raw" and "final" schemas identified in the [parsedCatalog] and ensures they + * exist in the Destination Database. 
+ */ + @JvmStatic + fun prepareSchemas( + sqlGenerator: SqlGenerator, + destinationHandler: DestinationHandler, + parsedCatalog: ParsedCatalog) { + val rawSchema = parsedCatalog.streams.stream().map { it.id.rawNamespace } + val finalSchema = parsedCatalog.streams.stream().map { it.id.finalNamespace } + val createAllSchemasSql = Streams.concat(rawSchema, finalSchema) + .filter(Objects::nonNull) + .distinct() + .map(sqlGenerator::createSchema) + .toList() + destinationHandler.execute(Sql.concat(createAllSchemasSql)) + } + + private fun runMigrationsAsync( + executorService: ExecutorService, + sqlGenerator: SqlGenerator, + destinationHandler: DestinationHandler, + v1V2Migrator: DestinationV1V2Migrator, + v2TableMigrator: V2TableMigrator, + streamConfig: StreamConfig): CompletionStage { + return CompletableFuture.runAsync({ + try { + v1V2Migrator.migrateIfNecessary(sqlGenerator, destinationHandler, streamConfig) + v2TableMigrator.migrateIfNecessary(streamConfig) + } catch (e: java.lang.Exception) { + throw RuntimeException(e) + } + }, executorService) + } + } } diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/DefaultTyperDeduperTest.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/DefaultTyperDeduperTest.java index 916c0235722d..65f2c127f26e 100644 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/DefaultTyperDeduperTest.java +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/DefaultTyperDeduperTest.java @@ -92,10 +92,11 @@ void setup() throws Exception { @Test void emptyDestination() throws Exception { initialStates.forEach(initialState -> when(initialState.isFinalTablePresent()).thenReturn(false)); - // when(destinationHandler.findExistingTable(any())).thenReturn(Optional.empty()); - typerDeduper.prepareTables(); + typerDeduper.prepareSchemasAndRunMigrations(); verify(destinationHandler).execute(separately("CREATE SCHEMA overwrite_ns", "CREATE SCHEMA append_ns", "CREATE SCHEMA dedup_ns")); + + typerDeduper.prepareFinalTables(); verify(destinationHandler).execute(Sql.of("CREATE TABLE overwrite_ns.overwrite_stream")); verify(destinationHandler).execute(Sql.of("CREATE TABLE append_ns.append_stream")); verify(destinationHandler).execute(Sql.of("CREATE TABLE dedup_ns.dedup_stream")); @@ -126,8 +127,11 @@ void existingEmptyTable() throws Exception { when(initialState.isFinalTableEmpty()).thenReturn(true); when(initialState.isSchemaMismatch()).thenReturn(true); }); - typerDeduper.prepareTables(); + + typerDeduper.prepareSchemasAndRunMigrations(); verify(destinationHandler).execute(separately("CREATE SCHEMA overwrite_ns", "CREATE SCHEMA append_ns", "CREATE SCHEMA dedup_ns")); + + typerDeduper.prepareFinalTables(); verify(destinationHandler).execute(Sql.of("CREATE TABLE overwrite_ns.overwrite_stream_airbyte_tmp")); verify(destinationHandler).execute(Sql.of("PREPARE append_ns.append_stream FOR SOFT RESET")); verify(destinationHandler).execute(Sql.of("UPDATE TABLE append_ns.append_stream_ab_soft_reset WITHOUT SAFER CASTING")); @@ -161,12 +165,14 @@ void existingEmptyTableMatchingSchema() throws Exception { initialStates.forEach(initialState -> { when(initialState.isFinalTablePresent()).thenReturn(true); when(initialState.isFinalTableEmpty()).thenReturn(true); - 
when(initialState.isSchemaMismatch()).thenReturn(true); + when(initialState.isSchemaMismatch()).thenReturn(false); }); - typerDeduper.prepareTables(); + typerDeduper.prepareSchemasAndRunMigrations(); verify(destinationHandler).execute(separately("CREATE SCHEMA overwrite_ns", "CREATE SCHEMA append_ns", "CREATE SCHEMA dedup_ns")); clearInvocations(destinationHandler); + + typerDeduper.prepareFinalTables(); verify(destinationHandler, never()).execute(any()); } @@ -183,8 +189,10 @@ void existingNonemptyTable() throws Exception { when(initialState.initialRawTableState()).thenReturn(new InitialRawTableState(true, Optional.of(Instant.parse("2023-01-01T12:34:56Z")))); }); - typerDeduper.prepareTables(); + typerDeduper.prepareSchemasAndRunMigrations(); verify(destinationHandler).execute(separately("CREATE SCHEMA overwrite_ns", "CREATE SCHEMA append_ns", "CREATE SCHEMA dedup_ns")); + + typerDeduper.prepareFinalTables(); // NB: We only create a tmp table for the overwrite stream, and do _not_ soft reset the existing // overwrite stream's table. @@ -228,10 +236,12 @@ void existingNonemptyTableMatchingSchema() throws Exception { when(initialState.initialRawTableState()).thenReturn(new InitialRawTableState(true, Optional.of(Instant.now()))); }); - typerDeduper.prepareTables(); + typerDeduper.prepareSchemasAndRunMigrations(); + verify(destinationHandler).execute(separately("CREATE SCHEMA overwrite_ns", "CREATE SCHEMA append_ns", "CREATE SCHEMA dedup_ns")); + + typerDeduper.prepareFinalTables(); // NB: We only create one tmp table here. // Also, we need to alter the existing _real_ table, not the tmp table! - verify(destinationHandler).execute(separately("CREATE SCHEMA overwrite_ns", "CREATE SCHEMA append_ns", "CREATE SCHEMA dedup_ns")); verify(destinationHandler).execute(Sql.of("CREATE TABLE overwrite_ns.overwrite_stream_airbyte_tmp")); verifyNoMoreInteractions(ignoreStubs(destinationHandler)); } @@ -247,7 +257,7 @@ void nonexistentStream() { void failedSetup() throws Exception { doThrow(new RuntimeException("foo")).when(destinationHandler).execute(any()); - assertThrows(Exception.class, () -> typerDeduper.prepareTables()); + assertThrows(Exception.class, () -> typerDeduper.prepareFinalTables()); clearInvocations(destinationHandler); typerDeduper.typeAndDedupe("dedup_ns", "dedup_stream", false); @@ -263,7 +273,8 @@ void failedSetup() throws Exception { @Test void noUnprocessedRecords() throws Exception { initialStates.forEach(initialState -> when(initialState.initialRawTableState()).thenReturn(new InitialRawTableState(false, Optional.empty()))); - typerDeduper.prepareTables(); + + typerDeduper.prepareFinalTables(); clearInvocations(destinationHandler); typerDeduper.typeAndDedupe(Map.of( @@ -286,7 +297,8 @@ void noUnprocessedRecords() throws Exception { void unprocessedRecords() throws Exception { initialStates.forEach(initialState -> when(initialState.initialRawTableState()) .thenReturn(new InitialRawTableState(true, Optional.of(Instant.parse("2023-01-23T12:34:56Z"))))); - typerDeduper.prepareTables(); + + typerDeduper.prepareFinalTables(); clearInvocations(destinationHandler); typerDeduper.typeAndDedupe(Map.of( From 41834f6eb7a04bfd0111f56fb28743e7e9919681 Mon Sep 17 00:00:00 2001 From: Bindi Pankhudi Date: Wed, 28 Feb 2024 17:43:05 -0800 Subject: [PATCH 030/172] Docs: adding pyairbyte to left nav bar (#35719) Co-authored-by: bindipankhudi Co-authored-by: Aaron ("AJ") Steers --- docusaurus/sidebars.js | 5 +++++ docusaurus/src/components/HeaderDecoration.jsx | 8 +++----- 
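The patch above splits the old single-phase `prepareTables()` into two explicit phases: `prepareSchemasAndRunMigrations()` (create all raw/final schemas, then run the per-stream raw-table migrations concurrently) followed by `prepareFinalTables()`, and the test changes pin down that ordering. As a rough illustration of the same orchestration pattern, here is a minimal Python sketch; it is not the CDK's actual Kotlin/Java API, and names such as `prepare_schemas` and `migrate_if_necessary` are hypothetical:

```python
# Hypothetical sketch of the two-phase prepare flow; illustrative only.
import logging
from concurrent.futures import ThreadPoolExecutor

logger = logging.getLogger(__name__)


def prepare_schemas(sql_generator, destination_handler, parsed_catalog):
    # Collect each distinct raw/final namespace once (mirroring the
    # Streams.concat(...).distinct() chain above) and create them in one batch.
    schemas = {s.raw_namespace for s in parsed_catalog.streams} | {
        s.final_namespace for s in parsed_catalog.streams
    }
    destination_handler.execute(
        [sql_generator.create_schema(schema) for schema in schemas if schema]
    )


def execute_raw_table_migrations(executor: ThreadPoolExecutor, migrators, parsed_catalog):
    # One migration task per stream, run concurrently; wait for all of them,
    # log every failure, and re-raise the first (as getResultsOrLogAndThrowFirst does).
    futures = [
        executor.submit(lambda cfg=stream: [m.migrate_if_necessary(cfg) for m in migrators])
        for stream in parsed_catalog.streams
    ]
    errors = [f.exception() for f in futures if f.exception() is not None]
    for err in errors:
        logger.error("Exception thrown attempting to run migrations: %s", err)
    if errors:
        raise errors[0]
```

The ordering matters: the raw-table migrations must finish before the initial destination state is gathered, which is why `prepareFinalTables()` is a separate call that the caller makes only after `prepareSchemasAndRunMigrations()` returns.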
docusaurus/src/remark/docsHeaderDecoration.js | 3 ++- 3 files changed, 10 insertions(+), 6 deletions(-) diff --git a/docusaurus/sidebars.js b/docusaurus/sidebars.js index 138790ac2deb..a5cd413ee494 100644 --- a/docusaurus/sidebars.js +++ b/docusaurus/sidebars.js @@ -479,6 +479,11 @@ module.exports = { "cloud/managing-airbyte-cloud/manage-connection-state", ], }, + { + type: "doc", + label: "Using PyAirbyte", + id: "using-airbyte/pyairbyte/getting-started", + }, { type: "category", label: "Workspace Management", diff --git a/docusaurus/src/components/HeaderDecoration.jsx b/docusaurus/src/components/HeaderDecoration.jsx index 875396216ba3..490eb1b104ed 100644 --- a/docusaurus/src/components/HeaderDecoration.jsx +++ b/docusaurus/src/components/HeaderDecoration.jsx @@ -52,11 +52,9 @@ export const HeaderDecoration = ({ {isOss ? CHECK_ICON : CROSS_ICON} Airbyte OSS - {isPypiPublished && ( - - {CHECK_ICON} airbyte_lib - - )} + + {isPypiPublished ? CHECK_ICON : CROSS_ICON} PyAirbyte +

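The JSX change above replaces the hard-coded `airbyte_lib` badge with a PyAirbyte badge that is shown or crossed out based on real registry data; the remark-plugin hunk that follows is what supplies that value via `isPypiConnector(registryEntry)`. Roughly, the data flow looks like this — a hedged Python sketch of JavaScript logic, where the `remoteRegistries.pypi.enabled` shape is an assumption about the registry entry rather than a documented contract:

```python
# Illustrative only: the real logic lives in docusaurus/src/remark/docsHeaderDecoration.js.
def is_pypi_connector(registry_entry: dict) -> bool:
    # Assumed shape: a registry entry advertises its PyPI publication under
    # remoteRegistries.pypi.enabled.
    return bool(registry_entry.get("remoteRegistries", {}).get("pypi", {}).get("enabled"))


def header_attributes(registry_entry: dict) -> dict:
    # Mirrors the attributes handed to HeaderDecoration: isPypiPublished now
    # reflects registry data instead of a hard-coded False.
    return {
        "isOss": registry_entry["is_oss"],
        "isCloud": registry_entry["is_cloud"],
        "isPypiPublished": is_pypi_connector(registry_entry),
    }
```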
diff --git a/docusaurus/src/remark/docsHeaderDecoration.js b/docusaurus/src/remark/docsHeaderDecoration.js index 615286fd798c..4f2923d7538e 100644 --- a/docusaurus/src/remark/docsHeaderDecoration.js +++ b/docusaurus/src/remark/docsHeaderDecoration.js @@ -1,5 +1,6 @@ const visit = require("unist-util-visit").visit; const { isDocsPage, getRegistryEntry } = require("./utils"); +const { isPypiConnector } = require("../connector_registry"); const toAttributes = (props) => Object.entries(props).map(([key, value]) => ({ @@ -31,7 +32,7 @@ const plugin = () => { node.attributes = toAttributes({ isOss: registryEntry.is_oss, isCloud: registryEntry.is_cloud, - isPypiPublished: false, + isPypiPublished: isPypiConnector(registryEntry), supportLevel: registryEntry.supportLevel_oss, dockerImageTag: registryEntry.dockerImageTag_oss, iconUrl: registryEntry.iconUrl_oss, From deeb436cab906d702fe0a45ad8d53b3baa70dd1a Mon Sep 17 00:00:00 2001 From: Artem Inzhyyants <36314070+artem1205@users.noreply.github.com> Date: Thu, 29 Feb 2024 09:35:21 +0100 Subject: [PATCH 031/172] Source Google Ads: handle 2-Step Verification exception (#35709) Signed-off-by: Artem Inzhyyants --- .../connectors/source-google-ads/metadata.yaml | 2 +- airbyte-integrations/connectors/source-google-ads/poetry.lock | 4 ++-- .../connectors/source-google-ads/pyproject.toml | 3 +-- .../source-google-ads/source_google_ads/google_ads.py | 2 +- .../connectors/source-google-ads/source_google_ads/source.py | 1 - .../connectors/source-google-ads/source_google_ads/utils.py | 3 +++ docs/integrations/sources/google-ads.md | 1 + 7 files changed, 9 insertions(+), 7 deletions(-) diff --git a/airbyte-integrations/connectors/source-google-ads/metadata.yaml b/airbyte-integrations/connectors/source-google-ads/metadata.yaml index ed883a5080af..8d1582887623 100644 --- a/airbyte-integrations/connectors/source-google-ads/metadata.yaml +++ b/airbyte-integrations/connectors/source-google-ads/metadata.yaml @@ -11,7 +11,7 @@ data: connectorSubtype: api connectorType: source definitionId: 253487c0-2246-43ba-a21f-5116b20a2c50 - dockerImageTag: 3.3.4 + dockerImageTag: 3.3.5 dockerRepository: airbyte/source-google-ads documentationUrl: https://docs.airbyte.com/integrations/sources/google-ads githubIssueLabel: source-google-ads diff --git a/airbyte-integrations/connectors/source-google-ads/poetry.lock b/airbyte-integrations/connectors/source-google-ads/poetry.lock index f6e98089cef2..eefcf8333c3f 100644 --- a/airbyte-integrations/connectors/source-google-ads/poetry.lock +++ b/airbyte-integrations/connectors/source-google-ads/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. 
[[package]] name = "airbyte-cdk" @@ -1342,4 +1342,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "995b656ad5991d34f517389e1e26276ebf1052e6fc4af5f3955b6a5d9c7cd2ef" +content-hash = "85de59ee85d7cb6a28483c561e60f5fd6315a509a920e34639a1e3199f8ef4cd" diff --git a/airbyte-integrations/connectors/source-google-ads/pyproject.toml b/airbyte-integrations/connectors/source-google-ads/pyproject.toml index 0535b4fa62d9..f46539d35eeb 100644 --- a/airbyte-integrations/connectors/source-google-ads/pyproject.toml +++ b/airbyte-integrations/connectors/source-google-ads/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "3.3.4" +version = "3.3.5" name = "source-google-ads" description = "Source implementation for Google Ads." authors = [ "Airbyte ",] @@ -19,7 +19,6 @@ include = "source_google_ads" python = "^3.9,<3.12" google-ads = "==22.1.0" protobuf = "==4.25.2" -pendulum = "==2.1.2" airbyte-cdk = "==0.58.8" [tool.poetry.scripts] diff --git a/airbyte-integrations/connectors/source-google-ads/source_google_ads/google_ads.py b/airbyte-integrations/connectors/source-google-ads/source_google_ads/google_ads.py index 09ebafcd9267..c34833154ce6 100644 --- a/airbyte-integrations/connectors/source-google-ads/source_google_ads/google_ads.py +++ b/airbyte-integrations/connectors/source-google-ads/source_google_ads/google_ads.py @@ -5,7 +5,7 @@ import logging from enum import Enum -from typing import Any, Iterable, Iterator, List, Mapping, MutableMapping, Optional +from typing import Any, Iterable, Iterator, List, Mapping, MutableMapping import backoff from airbyte_cdk.models import FailureType diff --git a/airbyte-integrations/connectors/source-google-ads/source_google_ads/source.py b/airbyte-integrations/connectors/source-google-ads/source_google_ads/source.py index 2402cd18adbe..a51fe215868a 100644 --- a/airbyte-integrations/connectors/source-google-ads/source_google_ads/source.py +++ b/airbyte-integrations/connectors/source-google-ads/source_google_ads/source.py @@ -40,7 +40,6 @@ GeographicView, KeywordView, Label, - ServiceAccounts, ShoppingPerformanceView, TopicView, UserInterest, diff --git a/airbyte-integrations/connectors/source-google-ads/source_google_ads/utils.py b/airbyte-integrations/connectors/source-google-ads/source_google_ads/utils.py index 3085343c9278..ab54a6ee116c 100644 --- a/airbyte-integrations/connectors/source-google-ads/source_google_ads/utils.py +++ b/airbyte-integrations/connectors/source-google-ads/source_google_ads/utils.py @@ -83,6 +83,9 @@ def traced_exception(ga_exception: Union[GoogleAdsException, Unauthenticated], c f"Ensure the customer is linked to your manager account or check your permissions to access this customer account." ) + elif is_error_type(authentication_error, AuthenticationErrorEnum.AuthenticationError.TWO_STEP_VERIFICATION_NOT_ENROLLED): + message = "An account administrator changed this account's authentication settings. To access this Google Ads account, enable 2-Step Verification in your Google account at https://www.google.com/landing/2step" + # If the error is encountered in the internally used class `ServiceAccounts`, an exception is raised. # For other classes, the error is logged and skipped to prevent sync failure. 
See: https://github.com/airbytehq/airbyte/issues/12486
        elif is_error_type(authorization_error, AuthorizationErrorEnum.AuthorizationError.CUSTOMER_NOT_ENABLED):
diff --git a/docs/integrations/sources/google-ads.md b/docs/integrations/sources/google-ads.md
index 703474a83533..e7acc051bb90 100644
--- a/docs/integrations/sources/google-ads.md
+++ b/docs/integrations/sources/google-ads.md
@@ -280,6 +280,7 @@ Due to a limitation in the Google Ads API which does not allow getting performan

 | Version | Date | Pull Request | Subject |
 |:---------|:-----------|:---------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------|
+| `3.3.5` | 2024-02-28 | [35709](https://github.com/airbytehq/airbyte/pull/35709) | Handle 2-Step Verification exception as config error |
 | `3.3.4` | 2024-02-21 | [35493](https://github.com/airbytehq/airbyte/pull/35493) | Rolling back the patch 3.3.3 made for `user_interest` stream |
 | `3.3.3` | 2024-02-14 | [35280](https://github.com/airbytehq/airbyte/pull/35280) | Temporary patch that disables some fields to avoid 500 error when syncing `user_interest` stream |
 | `3.3.2` | 2024-02-12 | [35158](https://github.com/airbytehq/airbyte/pull/35158) | Manage dependencies with Poetry. |
From edcc2fac4c18fb24f7553c176f145c61e3a4a327 Mon Sep 17 00:00:00 2001
From: Anton Karpets
Date: Thu, 29 Feb 2024 11:35:32 +0200
Subject: [PATCH 032/172] =?UTF-8?q?=F0=9F=9A=A8=F0=9F=9A=A8=F0=9F=90=9BSou?=
 =?UTF-8?q?rce=20Amazon=20Seller=20Partner:=20update=20schema=20for=20`GET?=
 =?UTF-8?q?=5FFBA=5FSTORAGE=5FFEE=5FCHARGES=5FDATA`=20stream=20(#35439)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../source-amazon-seller-partner/README.md    |   90 +-
 .../metadata.yaml                             |   13 +-
 .../source-amazon-seller-partner/poetry.lock  | 1222 +++++++++++++++++
 .../pyproject.toml                            |   31 +
 .../requirements.txt                          |    1 -
 .../source-amazon-seller-partner/setup.py     |   40 -
 .../GET_FBA_STORAGE_FEE_CHARGES_DATA.json     |   50 +-
 .../amazon-seller-partner-migrations.md       |   29 +
 .../sources/amazon-seller-partner.md          |    1 +
 9 files changed, 1359 insertions(+), 118 deletions(-)
 create mode 100644 airbyte-integrations/connectors/source-amazon-seller-partner/poetry.lock
 create mode 100644 airbyte-integrations/connectors/source-amazon-seller-partner/pyproject.toml
 delete mode 100644 airbyte-integrations/connectors/source-amazon-seller-partner/requirements.txt
 delete mode 100644 airbyte-integrations/connectors/source-amazon-seller-partner/setup.py

diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/README.md b/airbyte-integrations/connectors/source-amazon-seller-partner/README.md
index fc2ba61fd156..178a3bbca314 100644
--- a/airbyte-integrations/connectors/source-amazon-seller-partner/README.md
+++ b/airbyte-integrations/connectors/source-amazon-seller-partner/README.md
@@ -1,4 +1,5 @@
-# Amazon Seller-Partner Source
+# Amazon Seller Partner Source
+
 This is the repository for the Amazon Seller-Partner source connector, written in Python.
 For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/amazon-seller-partner).
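Before the Amazon Seller Partner README continues below, note how the Google Ads `utils.py` hunk above works: `traced_exception` inspects the error details of a `GoogleAdsException` and rewrites known authentication/authorization failures into actionable, user-facing config errors. A simplified Python sketch of that dispatch — enum values abbreviated to strings and the surrounding `AirbyteTracedException` plumbing omitted, so this is a pattern illustration rather than the connector's actual code:

```python
# Simplified sketch of the error-translation pattern in source_google_ads/utils.py.
def is_error_type(error, expected_code) -> bool:
    # The real helper compares protobuf error codes on the exception's error
    # details; plain strings stand in for the enum values here.
    return error is not None and error.error_code == expected_code


def config_error_message(authentication_error, authorization_error):
    """Map known auth failures to user-facing messages; None means 'not handled here'."""
    if is_error_type(authentication_error, "TWO_STEP_VERIFICATION_NOT_ENROLLED"):
        return (
            "An account administrator changed this account's authentication settings. "
            "To access this Google Ads account, enable 2-Step Verification at "
            "https://www.google.com/landing/2step"
        )
    # Other branches (e.g. CUSTOMER_NOT_ENABLED) are logged and skipped for most
    # streams instead of failing the sync; see airbytehq/airbyte#12486.
    return None
```

Treating these as config errors, rather than opaque system errors, is what lets the platform surface the remediation step directly to the user.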
@@ -6,61 +7,49 @@ For information about how to use this connector within Airbyte, see [the documen ## Local development ### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** +* Python (~=3.9) +* Poetry (~=1.7) - installation instructions [here](https://python-poetry.org/docs/#installation) -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt +### Installing the connector +From this connector directory, run: +```bash +poetry install --with dev ``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. -#### Create credentials -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/amazon-seller-partner) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_amazon_seller-partner/integration_tests/spec.json` file. -Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. +### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/amazon-seller-partner) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_amazon_seller_partner/spec.json` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. See `integration_tests/sample_config.json` for a sample config file. -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source amazon-seller-partner test creds` -and place them into `secrets/config.json`. ### Locally running the connector ``` -python main.py spec -python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +poetry run source-amazon-seller-partner spec +poetry run source-amazon-seller-partner check --config secrets/config.json +poetry run source-amazon-seller-partner discover --config secrets/config.json +poetry run source-amazon-seller-partner read --config secrets/config.json --catalog integration_tests/configured_catalog.json ``` -### Locally running the connector docker image - +### Running unit tests +To run unit tests locally, from the connector directory run: +``` +poetry run pytest unit_tests +``` -#### Build -**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** +### Building the docker image +1. 
Install [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md)
+2. Run the following command to build the docker image:
```bash
airbyte-ci connectors --name=source-amazon-seller-partner build
```

-An image will be built with the tag `airbyte/source-amazon-seller-partner:dev`.
+An image will be available on your host with the tag `airbyte/source-amazon-seller-partner:dev`.

-**Via `docker build`:**
-```bash
-docker build -t airbyte/source-amazon-seller-partner:dev .
-```
-#### Run
+### Running as a docker container
Then run any of the connector commands as follows:
```
docker run --rm airbyte/source-amazon-seller-partner:dev spec
@@ -69,29 +58,34 @@ docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-amazon-seller-partner:
docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-amazon-seller-partner:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
```

-## Testing
+### Running our CI test suite
You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md):
```bash
airbyte-ci connectors --name=source-amazon-seller-partner test
```

### Customizing Acceptance Tests
-Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information.
+Customize the `acceptance-test-config.yml` file to configure acceptance tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information.
If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py.

-## Dependency Management
-All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
-We split dependencies between two groups, dependencies that are:
-* required for your connector to work need to go to `MAIN_REQUIREMENTS` list.
-* required for the testing need to go to `TEST_REQUIREMENTS` list
+### Dependency Management
+All of your dependencies should be managed via Poetry.
+To add a new dependency, run:
+```bash
+poetry add <package-name>
+```
+
+Please commit the changes to `pyproject.toml` and `poetry.lock` files.

-### Publishing a new version of the connector
+## Publishing a new version of the connector
You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what?
1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-amazon-seller-partner test`
-2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors).
+2. Bump the connector version (please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors)):
+   - bump the `dockerImageTag` value in `metadata.yaml`
+   - bump the `version` value in `pyproject.toml`
3.
Make sure the `metadata.yaml` content is up to date.
-4. Make the connector documentation and its changelog is up to date (`docs/integrations/sources/amazon-seller-partner.md`).
+4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/sources/amazon-seller-partner.md`).
5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention).
6. Pat yourself on the back for being an awesome contributor.
7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
-
+8. Once your PR is merged, the new version of the connector will be automatically published to Docker Hub and our connector registry.
\ No newline at end of file
diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/metadata.yaml b/airbyte-integrations/connectors/source-amazon-seller-partner/metadata.yaml
index ffbc4a51e268..0dc1a4afd26e 100644
--- a/airbyte-integrations/connectors/source-amazon-seller-partner/metadata.yaml
+++ b/airbyte-integrations/connectors/source-amazon-seller-partner/metadata.yaml
@@ -15,7 +15,7 @@ data:
   connectorSubtype: api
   connectorType: source
   definitionId: e55879a8-0ef8-4557-abcf-ab34c53ec460
-  dockerImageTag: 3.5.0
+  dockerImageTag: 4.0.0
   dockerRepository: airbyte/source-amazon-seller-partner
   documentationUrl: https://docs.airbyte.com/integrations/sources/amazon-seller-partner
   githubIssueLabel: source-amazon-seller-partner
@@ -45,11 +45,16 @@ data:
         upgradeDeadline: "2023-12-11"
       3.0.0:
         message:
-          Streams 'GET_FLAT_FILE_ALL_ORDERS_DATA_BY_ORDER_DATE_GENERAL' and 'GET_FLAT_FILE_ALL_ORDERS_DATA_BY_LAST_UPDATE_GENERAL' now have updated schemas.
-          Streams 'GET_AMAZON_FULFILLED_SHIPMENTS_DATA_GENERAL', 'GET_LEDGER_DETAIL_VIEW_DATA', 'GET_MERCHANTS_LISTINGS_FYP_REPORT',
-          'GET_STRANDED_INVENTORY_UI_DATA', and 'GET_V2_SETTLEMENT_REPORT_DATA_FLAT_FILE' now have date-time formatted fields.
+          Streams `GET_FLAT_FILE_ALL_ORDERS_DATA_BY_ORDER_DATE_GENERAL` and `GET_FLAT_FILE_ALL_ORDERS_DATA_BY_LAST_UPDATE_GENERAL` now have updated schemas.
+          Streams `GET_AMAZON_FULFILLED_SHIPMENTS_DATA_GENERAL`, `GET_LEDGER_DETAIL_VIEW_DATA`, `GET_MERCHANTS_LISTINGS_FYP_REPORT`,
+          `GET_STRANDED_INVENTORY_UI_DATA`, and `GET_V2_SETTLEMENT_REPORT_DATA_FLAT_FILE` now have date-time formatted fields.
           Users will need to refresh the source schemas and reset these streams after upgrading.
         upgradeDeadline: "2024-01-12"
+      4.0.0:
+        message:
+          Stream `GET_FBA_STORAGE_FEE_CHARGES_DATA` schema has been updated to match Amazon Seller Partner.
+          Users will need to refresh the source schema and reset this stream after upgrading.
+        upgradeDeadline: "2024-03-11"
   supportLevel: community
   tags:
     - language:python
diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/poetry.lock b/airbyte-integrations/connectors/source-amazon-seller-partner/poetry.lock
new file mode 100644
index 000000000000..bd41472c15b0
--- /dev/null
+++ b/airbyte-integrations/connectors/source-amazon-seller-partner/poetry.lock
@@ -0,0 +1,1222 @@
+# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
+
+[[package]]
+name = "airbyte-cdk"
+version = "0.63.2"
+description = "A framework for writing Airbyte Connectors."
+optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte-cdk-0.63.2.tar.gz", hash = "sha256:b2edc160f560352a816f3a266b5dfa6dfe37868add1e3a0a2628eb19ba771ed1"}, + {file = "airbyte_cdk-0.63.2-py3-none-any.whl", hash = "sha256:8698cb94514f35577123520954503cb2da407423af109dffd03644ba8b0093cd"}, +] + +[package.dependencies] +airbyte-protocol-models = "0.5.1" +backoff = "*" +cachetools = "*" +Deprecated = ">=1.2,<2.0" +dpath = ">=2.0.1,<2.1.0" +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<1.0" +jsonschema = ">=3.2.0,<3.3.0" +pendulum = "<3.0.0" +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +python-dateutil = "*" +PyYAML = ">=6.0.1" +requests = "*" +requests-cache = "*" +wcmatch = "8.4" + +[package.extras] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.5.1" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "atomicwrites" +version = "1.4.1" +description = "Atomic file writes." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.2" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, + {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash 
= "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = 
"charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "dateparser" +version = "1.2.0" +description = "Date parsing library designed to parse dates from HTML pages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dateparser-1.2.0-py2.py3-none-any.whl", hash = "sha256:0b21ad96534e562920a0083e97fd45fa959882d4162acc358705144520a35830"}, + {file = "dateparser-1.2.0.tar.gz", hash = "sha256:7975b43a4222283e0ae15be7b4999d08c9a70e2d378ac87385b1ccf2cffbbb30"}, +] + +[package.dependencies] +python-dateutil = "*" +pytz = "*" +regex = "<2019.02.19 || >2019.02.19,<2021.8.27 || >2021.8.27" +tzlocal = "*" + +[package.extras] +calendars = ["convertdate", "hijri-converter"] +fasttext = ["fasttext"] +langdetect = ["langdetect"] + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "freezegun" +version = "1.2.2" +description = "Let your Python tests travel through time" +optional = false +python-versions = ">=3.6" +files = [ + {file = "freezegun-1.2.2-py3-none-any.whl", hash = "sha256:ea1b963b993cb9ea195adbd893a48d573fda951b0da64f60883d7e988b606c9f"}, + {file = "freezegun-1.2.2.tar.gz", hash = "sha256:cd22d1ba06941384410cd967d8a99d5ae2442f57dfafeff2fda5de8dc5c05446"}, +] + +[package.dependencies] +python-dateutil = ">=2.7" + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." 
+optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonref" +version = "0.3.0" +description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +optional = false +python-versions = ">=3.3,<4.0" +files = [ + {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, + {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"},
+    {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"},
+]
+
+[[package]]
+name = "packaging"
+version = "23.2"
+description = "Core utilities for Python packages"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
+    {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
+]
+
+[[package]]
+name = "pendulum"
+version = "2.1.2"
+description = "Python datetimes made easy"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+files = [
+    {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"},
+    {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"},
+    {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"},
+    {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"},
+    {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"},
+    {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"},
+    {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"},
+    {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"},
+    {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"},
+    {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"},
+    {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"},
+    {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"},
+    {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"},
+    {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"},
+    {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"},
+    {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"},
+    {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"},
+    {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"},
+    {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"},
+    {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"},
+    {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"},
+]
+
+[package.dependencies]
+python-dateutil = ">=2.6,<3.0"
+pytzdata = ">=2020.1"
+
+[[package]]
+name = "platformdirs"
+version = "4.2.0"
+description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"},
+    {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"},
+]
+
+[package.extras]
+docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"]
+
+[[package]]
+name = "pluggy"
+version = "1.4.0"
+description = "plugin and hook calling mechanisms for python"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"},
+    {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"},
+]
+
+[package.extras]
+dev = ["pre-commit", "tox"]
+testing = ["pytest", "pytest-benchmark"]
+
+[[package]]
+name = "py"
+version = "1.11.0"
+description = "library with cross-python path, ini-parsing, io, code, log facilities"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+files = [
+    {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
+    {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
+]
+
+[[package]]
+name = "pydantic"
+version = "1.10.14"
+description = "Data validation and settings management using python type hints"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"},
+    {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"},
+    {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"},
+    {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"},
+    {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"},
+    {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"},
+    {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"},
+    {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"},
+    {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"},
+    {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"},
+    {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"},
+    {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"},
+    {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"},
+    {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"},
+    {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"},
+    {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"},
+    {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"},
+    {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"},
+    {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"},
+    {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"},
+    {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"},
+    {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"},
+    {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"},
+    {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"},
+    {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"},
+    {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"},
+    {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"},
+    {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"},
+    {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"},
+    {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"},
+    {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"},
+    {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"},
+    {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"},
+    {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"},
+    {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"},
+    {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"},
+]
+
+[package.dependencies]
+typing-extensions = ">=4.2.0"
+
+[package.extras]
+dotenv = ["python-dotenv (>=0.10.4)"]
+email = ["email-validator (>=1.0.3)"]
+
+[[package]]
+name = "pyrate-limiter"
+version = "3.1.1"
+description = "Python Rate-Limiter using Leaky-Bucket Algorithm"
+optional = false
+python-versions = ">=3.8,<4.0"
+files = [
+    {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"},
+    {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"},
+]
+
+[package.extras]
+all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"]
+docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"]
+
+[[package]]
+name = "pyrsistent"
+version = "0.20.0"
+description = "Persistent/Functional/Immutable data structures"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"},
+    {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"},
+    {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"},
+    {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"},
+    {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"},
+    {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"},
+    {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"},
+    {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"},
+    {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"},
+    {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"},
+    {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"},
+    {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"},
+    {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"},
+    {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"},
+    {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"},
+    {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"},
+    {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"},
+    {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"},
+    {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"},
+    {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"},
+    {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"},
+    {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"},
+    {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"},
+    {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"},
+    {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"},
+    {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"},
+    {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"},
+    {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"},
+    {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"},
+    {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"},
+    {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"},
+    {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"},
+]
+
+[[package]]
+name = "pytest"
+version = "6.2.5"
+description = "pytest: simple powerful testing with Python"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"},
+    {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"},
+]
+
+[package.dependencies]
+atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""}
+attrs = ">=19.2.0"
+colorama = {version = "*", markers = "sys_platform == \"win32\""}
+iniconfig = "*"
+packaging = "*"
+pluggy = ">=0.12,<2.0"
+py = ">=1.8.2"
+toml = "*"
+
+[package.extras]
+testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"]
+
+[[package]]
+name = "pytest-mock"
+version = "3.12.0"
+description = "Thin-wrapper around the mock package for easier use with pytest"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"},
+    {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"},
+]
+
+[package.dependencies]
+pytest = ">=5.0"
+
+[package.extras]
+dev = ["pre-commit", "pytest-asyncio", "tox"]
+
+[[package]]
+name = "python-dateutil"
+version = "2.8.2"
+description = "Extensions to the standard Python datetime module"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
+files = [
+    {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
+    {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
+]
+
+[package.dependencies]
+six = ">=1.5"
+
+[[package]]
+name = "pytz"
+version = "2024.1"
+description = "World timezone definitions, modern and historical"
+optional = false
+python-versions = "*"
+files = [
+    {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"},
+    {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"},
+]
+
+[[package]]
+name = "pytzdata"
+version = "2020.1"
+description = "The Olson timezone database for Python."
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+    {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"},
+    {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"},
+]
+
+[[package]]
+name = "pyyaml"
+version = "6.0.1"
+description = "YAML parser and emitter for Python"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"},
+    {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"},
+    {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
+    {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
+    {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
+    {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
+    {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
+    {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
+    {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
+    {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"},
+    {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
+    {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
+    {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
+    {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
+    {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
+    {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
+    {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
+    {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
+    {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
+    {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
+    {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
+    {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
+    {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
+    {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
+    {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
+    {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
+    {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"},
+    {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"},
+    {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"},
+    {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"},
+    {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"},
+    {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"},
+    {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"},
+    {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"},
+    {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"},
+    {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"},
+    {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
+    {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
+    {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
+    {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
+    {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
+    {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
+    {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
+    {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"},
+    {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
+    {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
+    {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
+    {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
+    {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
+    {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
+    {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
+]
+
+[[package]]
+name = "regex"
+version = "2023.12.25"
+description = "Alternative regular expression module, to replace re."
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"},
+    {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"},
+    {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"},
+    {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"},
+    {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"},
+    {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"},
+    {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"},
+    {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"},
+    {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"},
+    {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"},
+    {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"},
+    {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"},
+    {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"},
+    {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"},
+    {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"},
+    {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"},
+    {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"},
+    {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"},
+    {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"},
+    {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"},
+    {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"},
+    {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"},
+    {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"},
+    {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"},
+    {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"},
+    {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"},
+    {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"},
+    {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"},
+    {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"},
+    {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"},
+    {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"},
+    {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"},
+    {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"},
+    {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"},
+    {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"},
+    {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"},
+    {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"},
+    {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"},
+    {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"},
+    {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"},
+    {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"},
+    {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"},
+    {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"},
+    {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"},
+    {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"},
+    {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"},
+    {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"},
+    {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"},
+    {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"},
+    {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"},
+    {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"},
+    {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"},
+    {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"},
+    {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"},
+    {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"},
+    {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"},
+    {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"},
+    {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"},
+    {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"},
+    {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"},
+    {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"},
+    {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"},
+    {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"},
+    {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"},
+    {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"},
+    {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"},
+    {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"},
+    {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"},
+    {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"},
+    {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"},
+    {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"},
+    {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"},
+    {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"},
+    {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"},
+    {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"},
+    {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"},
+    {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"},
+    {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"},
+    {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"},
+    {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"},
+    {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"},
+    {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"},
+    {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"},
+    {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"},
+    {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"},
+    {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"},
+    {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"},
+    {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"},
+    {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"},
+    {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"},
+    {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"},
+    {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"},
+    {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"},
+]
+
+[[package]]
+name = "requests"
+version = "2.31.0"
+description = "Python HTTP for Humans."
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
+    {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
+]
+
+[package.dependencies]
+certifi = ">=2017.4.17"
+charset-normalizer = ">=2,<4"
+idna = ">=2.5,<4"
+urllib3 = ">=1.21.1,<3"
+
+[package.extras]
+socks = ["PySocks (>=1.5.6,!=1.5.7)"]
+use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
+
+[[package]]
+name = "requests-cache"
+version = "1.2.0"
+description = "A persistent cache for python requests"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"},
+    {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"},
+]
+
+[package.dependencies]
+attrs = ">=21.2"
+cattrs = ">=22.2"
+platformdirs = ">=2.5"
+requests = ">=2.22"
+url-normalize = ">=1.4"
+urllib3 = ">=1.25.5"
+
+[package.extras]
+all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"]
+bson = ["bson (>=0.5)"]
+docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"]
+dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"]
+json = ["ujson (>=5.4)"]
+mongodb = ["pymongo (>=3)"]
+redis = ["redis (>=3)"]
+security = ["itsdangerous (>=2.0)"]
+yaml = ["pyyaml (>=6.0.1)"]
+
+[[package]]
+name = "requests-mock"
+version = "1.9.3"
+description = "Mock out responses from the requests package"
+optional = false
+python-versions = "*"
+files = [
+    {file = "requests-mock-1.9.3.tar.gz", hash = "sha256:8d72abe54546c1fc9696fa1516672f1031d72a55a1d66c85184f972a24ba0eba"},
+    {file = "requests_mock-1.9.3-py2.py3-none-any.whl", hash = "sha256:0a2d38a117c08bb78939ec163522976ad59a6b7fdd82b709e23bb98004a44970"},
+]
+
+[package.dependencies]
+requests = ">=2.3,<3"
+six = "*"
+
+[package.extras]
+fixture = ["fixtures"]
+test = ["fixtures", "mock", "purl", "pytest", "sphinx", "testrepository (>=0.0.18)", "testtools"]
+
+[[package]]
+name = "setuptools"
+version = "69.1.0"
+description = "Easily download, build, install, upgrade, and uninstall Python packages"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"},
+    {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"},
+]
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
+testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
+testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
+
+[[package]]
+name = "six"
+version = "1.16.0"
+description = "Python 2 and 3 compatibility utilities"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+files = [
+    {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
+    {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
+]
+
+[[package]]
+name = "toml"
+version = "0.10.2"
+description = "Python Library for Tom's Obvious, Minimal Language"
+optional = false
+python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
+files = [
+    {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
+    {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
+]
+
+[[package]]
+name = "typing-extensions"
+version = "4.9.0"
+description = "Backported and Experimental Type Hints for Python 3.8+"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"},
+    {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"},
+]
+
+[[package]]
+name = "tzdata"
+version = "2024.1"
+description = "Provider of IANA time zone data"
+optional = false
+python-versions = ">=2"
+files = [
+    {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"},
+    {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"},
+]
+
+[[package]]
+name = "tzlocal"
+version = "5.2"
+description = "tzinfo object for the local timezone"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8"},
+    {file = "tzlocal-5.2.tar.gz", hash = "sha256:8d399205578f1a9342816409cc1e46a93ebd5755e39ea2d85334bea911bf0e6e"},
+]
+
+[package.dependencies]
+tzdata = {version = "*", markers = "platform_system == \"Windows\""}
+
+[package.extras]
+devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"]
+
+[[package]]
+name = "url-normalize"
+version = "1.4.3"
+description = "URL normalization for Python"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
+files = [
+    {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"},
+    {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"},
+]
+
+[package.dependencies]
+six = "*"
+
+[[package]]
+name = "urllib3"
+version = "2.2.1"
+description = "HTTP library with thread-safe connection pooling, file post, and more."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"},
+    {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"},
+]
+
+[package.extras]
+brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
+h2 = ["h2 (>=4,<5)"]
+socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
+zstd = ["zstandard (>=0.18.0)"]
+
+[[package]]
+name = "wcmatch"
+version = "8.4"
+description = "Wildcard/glob file name matcher."
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"},
+    {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"},
+]
+
+[package.dependencies]
+bracex = ">=2.1.1"
+
+[[package]]
+name = "wrapt"
+version = "1.16.0"
+description = "Module for decorators, wrappers and monkey patching."
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"},
+    {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"},
+    {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"},
+    {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"},
+    {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"},
+    {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"},
+    {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"},
+    {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"},
+    {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"},
+    {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"},
+    {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"},
+    {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"},
+    {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"},
+    {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"},
+    {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"},
+    {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"},
+    {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"},
+    {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"},
+    {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"},
+    {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"},
+    {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"},
+    {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"},
+    {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"},
+    {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"},
+    {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"},
+    {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"},
+    {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"},
+    {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"},
+    {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"},
+    {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"},
+    {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"},
+    {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"},
+    {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"},
+    {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"},
+    {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"},
+    {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"},
+    {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"},
+    {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"},
+    {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"},
+    {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"},
+    {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"},
+    {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"},
+    {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"},
+    {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"},
+    {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"},
+    {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"},
+    {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"},
+    {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"},
+    {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"},
+    {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"},
+    {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"},
+    {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"},
+    {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"},
+    {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"},
+    {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"},
+    {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"},
+    {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"},
+    {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"},
+    {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"},
+    {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"},
+    {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"},
+    {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"},
+    {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"},
+    {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"},
+    {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"},
+    {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"},
+    {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"},
+    {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"},
+    {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"},
+    {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"},
+]
+
+[[package]]
+name = "xmltodict"
+version = "0.13.0"
+description = "Makes working with XML feel like you are working with JSON"
+optional = false
+python-versions = ">=3.4"
+files = [
+    {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"},
+    {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"},
+]
+
+[metadata]
+lock-version = "2.0"
+python-versions = "^3.9,<3.12"
+content-hash = "d3855ad9303c1e3d5d8d2a3eb082e9a4aa11c293d0c11fddd3e9aaa986afeb4a"
diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/pyproject.toml b/airbyte-integrations/connectors/source-amazon-seller-partner/pyproject.toml
new file mode 100644
index 000000000000..467e46d1c195
--- /dev/null
+++ b/airbyte-integrations/connectors/source-amazon-seller-partner/pyproject.toml
@@ -0,0 +1,31 @@
+[build-system]
+requires = ["poetry-core>=1.0.0"]
+build-backend = "poetry.core.masonry.api"
+
+[tool.poetry]
+version = "4.0.0"
+name = "source-amazon-seller-partner"
+description = "Source implementation for Amazon Seller Partner."
+authors = ["Airbyte <contact@airbyte.io>"]
+license = "MIT"
+readme = "README.md"
+documentation = "https://docs.airbyte.com/integrations/sources/amazon-seller-partner"
+homepage = "https://airbyte.com"
+repository = "https://github.com/airbytehq/airbyte"
+[[tool.poetry.packages]]
+include = "source_amazon_seller_partner"
+
+[tool.poetry.dependencies]
+python = "^3.9,<3.12"
+airbyte-cdk = "*"
+xmltodict = "~=0.12"
+dateparser = "==1.2.0"
+
+[tool.poetry.scripts]
+source-amazon-seller-partner = "source_amazon_seller_partner.run:run"
+
+[tool.poetry.group.dev.dependencies]
+pytest-mock = "^3.6"
+freezegun = "==1.2.2"
+pytest = "~=6.1"
+requests-mock = "~=1.9.3"
diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/requirements.txt b/airbyte-integrations/connectors/source-amazon-seller-partner/requirements.txt
deleted file mode 100644
index ecf975e2fa63..000000000000
--- a/airbyte-integrations/connectors/source-amazon-seller-partner/requirements.txt
+++ /dev/null
@@ -1 +0,0 @@
--e .
\ No newline at end of file diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/setup.py b/airbyte-integrations/connectors/source-amazon-seller-partner/setup.py deleted file mode 100644 index e75c0a55146b..000000000000 --- a/airbyte-integrations/connectors/source-amazon-seller-partner/setup.py +++ /dev/null @@ -1,40 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = ["airbyte-cdk", "xmltodict~=0.12", "dateparser==1.2.0"] - -TEST_REQUIREMENTS = ["requests-mock~=1.9.3", "pytest~=6.1", "pytest-mock", "freezegun==1.2.2"] - -setup( - entry_points={ - "console_scripts": [ - "source-amazon-seller-partner=source_amazon_seller_partner.run:run", - ], - }, - name="source_amazon_seller_partner", - description="Source implementation for Amazon Seller Partner.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={ - "": [ - # Include yaml files in the package (if any) - "*.yml", - "*.yaml", - # Include all json files in the package, up to 4 levels deep - "*.json", - "*/*.json", - "*/*/*.json", - "*/*/*/*.json", - "*/*/*/*/*.json", - ] - }, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/schemas/GET_FBA_STORAGE_FEE_CHARGES_DATA.json b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/schemas/GET_FBA_STORAGE_FEE_CHARGES_DATA.json index 33515d7bf222..ab989b6d2a7b 100644 --- a/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/schemas/GET_FBA_STORAGE_FEE_CHARGES_DATA.json +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/schemas/GET_FBA_STORAGE_FEE_CHARGES_DATA.json @@ -10,73 +10,73 @@ "fnsku": { "type": ["null", "string"] }, - "product-name": { + "product_name": { "type": ["null", "string"] }, - "fulfillment-center": { + "fulfillment_center": { "type": ["null", "string"] }, - "country-code": { + "country_code": { "type": ["null", "string"] }, - "longest-side": { + "longest_side": { "type": ["null", "string"] }, - "median-side": { + "median_side": { "type": ["null", "string"] }, - "shortest-side": { + "shortest_side": { "type": ["null", "string"] }, - "measurement-units": { + "measurement_units": { "type": ["null", "string"] }, "weight": { "type": ["null", "string"] }, - "weight-units": { + "weight_units": { "type": ["null", "string"] }, - "item-volume": { + "item_volume": { "type": ["null", "string"] }, - "volume-units": { + "volume_units": { "type": ["null", "string"] }, - "product-size-tier": { + "product_size_tier": { "type": ["null", "string"] }, - "average-quantity-on-hand": { + "average_quantity_on_hand": { "type": ["null", "string"] }, - "average-quantity-pending-removal": { + "average_quantity_pending_removal": { "type": ["null", "string"] }, - "estimated-total-item-volume": { + "estimated_total_item_volume": { "type": ["null", "string"] }, - "month-of-charge": { + "month_of_charge": { "type": ["null", "string"] }, - "storage-rate": { + "storage_rate": { "type": ["null", "string"] }, - "estimated-monthly-storage-fee": { + "estimated_monthly_storage_fee": { "type": ["null", "string"] }, "currency": { "type": ["null", "string"] }, - "average-quantity-customer-orders": { + "average_quantity_customer_orders": { "type": ["null", "number"] }, - "base-rate": { + "base_rate": 
{ "type": ["null", "number"] }, - "breakdown-incentive-fee-amount": { + "breakdown_incentive_fee_amount": { "type": ["null", "string"] }, - "dangerous-goods-storage-type": { + "dangerous_goods_storage_type": { "type": ["null", "string"] }, "eligible_for_inventory_discount": { @@ -85,16 +85,16 @@ "qualifies_for_inventory_discount": { "type": ["null", "string"] }, - "storage-utilization-ratio": { + "storage_utilization_ratio": { "type": ["null", "number"] }, - "storage-utilization-ratio-units": { + "storage_utilization_ratio_units": { "type": ["null", "string"] }, - "total-incentive-fee-amount": { + "total_incentive_fee_amount": { "type": ["null", "number"] }, - "utilization-surcharge-rate": { + "utilization_surcharge_rate": { "type": ["null", "number"] }, "dataEndTime": { diff --git a/docs/integrations/sources/amazon-seller-partner-migrations.md b/docs/integrations/sources/amazon-seller-partner-migrations.md index 5dc3da1be61a..8cb9deade9b2 100644 --- a/docs/integrations/sources/amazon-seller-partner-migrations.md +++ b/docs/integrations/sources/amazon-seller-partner-migrations.md @@ -1,5 +1,34 @@ # Amazon Seller Partner Migration Guide +## Upgrading to 4.0.0 + +Stream `GET_FBA_STORAGE_FEE_CHARGES_DATA` now has updated schema, which matches Amazon Seller Partner [docs](https://developer-docs.amazon.com/sp-api/docs/fba-inventory-reports-attributes#get_fba_storage_fee_charges_data). + +Users will need to refresh the source schema and reset this stream after upgrading. + +### Refresh affected schemas and reset data + +1. Select **Connections** in the main navbar. + 1. Select the connection(s) affected by the update. +2. Select the **Replication** tab. + 1. Select **Refresh source schema**. + 2. Select **OK**. +```note +Any detected schema changes will be listed for your review. +``` +3. Select **Save changes** at the bottom of the page. + 1. Ensure the **Reset affected streams** option is checked. +```note +Depending on destination type you may not be prompted to reset your data. +``` +4. Select **Save connection**. +```note +This will reset the data in your destination and initiate a fresh sync. +``` + +For more information on resetting your data in Airbyte, see [this page](https://docs.airbyte.com/operator-guides/reset). + + ## Upgrading to 3.0.0 Streams `GET_FLAT_FILE_ALL_ORDERS_DATA_BY_ORDER_DATE_GENERAL` and `GET_FLAT_FILE_ALL_ORDERS_DATA_BY_LAST_UPDATE_GENERAL` now have updated schemas. diff --git a/docs/integrations/sources/amazon-seller-partner.md b/docs/integrations/sources/amazon-seller-partner.md index ed6daa83ca62..40f11a5e7d12 100644 --- a/docs/integrations/sources/amazon-seller-partner.md +++ b/docs/integrations/sources/amazon-seller-partner.md @@ -168,6 +168,7 @@ Information about rate limits you may find [here](https://developer-docs.amazon. | Version | Date | Pull Request | Subject | |:---------|:-----------|:------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `4.0.0` | 2024-02-23 | [\#35439](https://github.com/airbytehq/airbyte/pull/35439) | Update schema for the `GET_FBA_STORAGE_FEE_CHARGES_DATA` stream | | `3.5.0` | 2024-02-09 | [\#35331](https://github.com/airbytehq/airbyte/pull/35331) | Fix check for Vendor accounts. 
Add failed report result message | | `3.4.0` | 2024-02-15 | [\#35273](https://github.com/airbytehq/airbyte/pull/35273) | Add `VendorOrders` stream | | `3.3.2` | 2024-02-13 | [\#33996](https://github.com/airbytehq/airbyte/pull/33996) | Add integration tests | From d0017dab9f2c7847a23527acd8a2e23e576fc0f3 Mon Sep 17 00:00:00 2001 From: Anton Karpets Date: Thu, 29 Feb 2024 18:10:13 +0200 Subject: [PATCH 033/172] =?UTF-8?q?=F0=9F=8F=A5Source=20Google=20Sheets:?= =?UTF-8?q?=20add=20logic=20to=20emit=20stream=20statuses=20(#35722)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../source-google-sheets/metadata.yaml | 2 +- .../source-google-sheets/pyproject.toml | 2 +- .../source_google_sheets/source.py | 10 +++- .../unit_tests/test_stream.py | 49 ++++++++++++++++++- docs/integrations/sources/google-sheets.md | 5 +- 5 files changed, 60 insertions(+), 8 deletions(-) diff --git a/airbyte-integrations/connectors/source-google-sheets/metadata.yaml b/airbyte-integrations/connectors/source-google-sheets/metadata.yaml index cc4c1f2a0388..6124d5ca3efd 100644 --- a/airbyte-integrations/connectors/source-google-sheets/metadata.yaml +++ b/airbyte-integrations/connectors/source-google-sheets/metadata.yaml @@ -10,7 +10,7 @@ data: connectorSubtype: file connectorType: source definitionId: 71607ba1-c0ac-4799-8049-7f4b90dd50f7 - dockerImageTag: 0.3.16 + dockerImageTag: 0.3.17 dockerRepository: airbyte/source-google-sheets documentationUrl: https://docs.airbyte.com/integrations/sources/google-sheets githubIssueLabel: source-google-sheets diff --git a/airbyte-integrations/connectors/source-google-sheets/pyproject.toml b/airbyte-integrations/connectors/source-google-sheets/pyproject.toml index b3e27e7ba6dd..4629c02cf92e 100644 --- a/airbyte-integrations/connectors/source-google-sheets/pyproject.toml +++ b/airbyte-integrations/connectors/source-google-sheets/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "0.3.16" +version = "0.3.17" name = "source-google-sheets" description = "Source implementation for Google Sheets." 
authors = [ "Airbyte ",] diff --git a/airbyte-integrations/connectors/source-google-sheets/source_google_sheets/source.py b/airbyte-integrations/connectors/source-google-sheets/source_google_sheets/source.py index 8b7e8faa303e..1c98ffa6a563 100644 --- a/airbyte-integrations/connectors/source-google-sheets/source_google_sheets/source.py +++ b/airbyte-integrations/connectors/source-google-sheets/source_google_sheets/source.py @@ -14,12 +14,14 @@ AirbyteConnectionStatus, AirbyteMessage, AirbyteStateMessage, + AirbyteStreamStatus, ConfiguredAirbyteCatalog, Status, Type, ) from airbyte_cdk.sources.source import Source from airbyte_cdk.utils import AirbyteTracedException +from airbyte_cdk.utils.stream_status_utils import as_airbyte_message from apiclient import errors from google.auth import exceptions as google_exceptions from requests.status_codes import codes as status_codes @@ -145,11 +147,11 @@ def _read( logger: AirbyteLogger, config: json, catalog: ConfiguredAirbyteCatalog, - state: Union[List[AirbyteStateMessage], MutableMapping[str, Any]] = None, ) -> Generator[AirbyteMessage, None, None]: client = GoogleSheetsClient(self.get_credentials(config)) sheet_to_column_name = Helpers.parse_sheet_and_column_names_from_catalog(catalog) + stream_name_to_stream = {stream.stream.name: stream for stream in catalog.streams} spreadsheet_id = Helpers.get_spreadsheet_id(config["spreadsheet_id"]) logger.info(f"Starting syncing spreadsheet {spreadsheet_id}") @@ -162,6 +164,8 @@ def _read( logger.info(f"Row counts: {sheet_row_counts}") for sheet in sheet_to_column_index_to_name.keys(): logger.info(f"Syncing sheet {sheet}") + stream = stream_name_to_stream.get(sheet) + yield as_airbyte_message(stream, AirbyteStreamStatus.STARTED) # We revalidate the sheet here to avoid errors in case the sheet was changed after the sync started is_valid, reason = Helpers.check_sheet_is_valid(client, spreadsheet_id, sheet) if is_valid: @@ -191,11 +195,13 @@ def _read( if len(row_values) == 0: break + yield as_airbyte_message(stream, AirbyteStreamStatus.RUNNING) for row in row_values: if not Helpers.is_row_empty(row) and Helpers.row_contains_relevant_data(row, column_index_to_name.keys()): yield AirbyteMessage( type=Type.RECORD, record=Helpers.row_data_to_record_message(sheet, row, column_index_to_name) ) + yield as_airbyte_message(stream, AirbyteStreamStatus.COMPLETE) else: logger.info(f"Skipping syncing sheet {sheet}: {reason}") @@ -208,7 +214,7 @@ def read( ) -> Generator[AirbyteMessage, None, None]: spreadsheet_id = Helpers.get_spreadsheet_id(config["spreadsheet_id"]) try: - yield from self._read(logger, config, catalog, state) + yield from self._read(logger, config, catalog) except errors.HttpError as e: error_description = exception_description_by_status_code(e.status_code, spreadsheet_id) diff --git a/airbyte-integrations/connectors/source-google-sheets/unit_tests/test_stream.py b/airbyte-integrations/connectors/source-google-sheets/unit_tests/test_stream.py index 20e8d5f862bc..c838cb7ccb24 100644 --- a/airbyte-integrations/connectors/source-google-sheets/unit_tests/test_stream.py +++ b/airbyte-integrations/connectors/source-google-sheets/unit_tests/test_stream.py @@ -8,6 +8,7 @@ import requests from airbyte_cdk.models.airbyte_protocol import ( AirbyteStream, + AirbyteStreamStatus, ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, DestinationSyncMode, @@ -18,7 +19,7 @@ from source_google_sheets import SourceGoogleSheets from source_google_sheets.client import GoogleSheetsClient from source_google_sheets.helpers import 
SCOPES, Helpers -from source_google_sheets.models import CellData, GridData, RowData, Sheet, SheetProperties, Spreadsheet +from source_google_sheets.models import CellData, GridData, RowData, Sheet, SheetProperties, Spreadsheet, SpreadsheetValues, ValueRange def set_http_error_for_google_sheets_client(mocker, resp): @@ -304,7 +305,7 @@ def test_read_expected_data_on_1_sheet(invalid_config, mocker, caplog): assert "Unexpected return result: Sheet soccer_team was expected to contain data on exactly 1 sheet." in str(e.value) -def test_read_emply_sheet(invalid_config, mocker, caplog): +def test_read_empty_sheet(invalid_config, mocker, caplog): source = SourceGoogleSheets() mocker.patch.object(GoogleSheetsClient, "__init__", lambda s, credentials, scopes=SCOPES: None) sheet1 = "soccer_team" @@ -347,3 +348,47 @@ def test_read_emply_sheet(invalid_config, mocker, caplog): records = list(source.read(logger=logging.getLogger("airbyte"), catalog=catalog, config=invalid_config)) assert records == [] assert "The sheet soccer_team (ID invalid_spreadsheet_id) is empty!" in caplog.text + + +def test_when_read_then_status_messages_emitted(mocker, invalid_config): + source = SourceGoogleSheets() + spreadsheet_id = "invalid_spreadsheet_id" + sheet_name = "sheet_1" + mocker.patch.object( + GoogleSheetsClient, + "get", + return_value=Spreadsheet( + spreadsheetId=spreadsheet_id, + sheets=[ + Sheet( + data=[GridData(rowData=[RowData(values=[CellData(formattedValue="ID")])])], + properties=SheetProperties(title=sheet_name, gridProperties={"rowCount": 2}) + ), + ], + ), + ) + + mocker.patch.object( + GoogleSheetsClient, + "get_values", + return_value=SpreadsheetValues(spreadsheetId=spreadsheet_id, valueRanges=[ValueRange(values=[["1"]])]), + ) + + sheet_schema = {"properties": {"ID": {"type": "string"}}} + catalog = ConfiguredAirbyteCatalog( + streams=[ + ConfiguredAirbyteStream( + stream=AirbyteStream(name=sheet_name, json_schema=sheet_schema, supported_sync_modes=["full_refresh"]), + sync_mode=SyncMode.full_refresh, + destination_sync_mode=DestinationSyncMode.overwrite, + ), + ] + ) + + records = list(source.read(logger=logging.getLogger("airbyte"), config=invalid_config, catalog=catalog)) + + # stream started, stream running, 1 record, stream completed + assert len(records) == 4 + assert records[0].trace.stream_status.status == AirbyteStreamStatus.STARTED + assert records[1].trace.stream_status.status == AirbyteStreamStatus.RUNNING + assert records[3].trace.stream_status.status == AirbyteStreamStatus.COMPLETE diff --git a/docs/integrations/sources/google-sheets.md b/docs/integrations/sources/google-sheets.md index 1d07a81ce708..650d8bcdacdc 100644 --- a/docs/integrations/sources/google-sheets.md +++ b/docs/integrations/sources/google-sheets.md @@ -142,7 +142,7 @@ Airbyte batches requests to the API in order to efficiently pull data and respec ### Troubleshooting * If your sheet is completely empty (no header rows) or deleted, Airbyte will not delete the table in the destination. If this happens, the sync logs will contain a message saying the sheet has been skipped when syncing the full spreadsheet. -* Connector setup will fail if the speadsheet is not a Google Sheets file. If the file was saved or imported as another file type the setup could fail. +* Connector setup will fail if the spreadsheet is not a Google Sheets file. If the file was saved or imported as another file type the setup could fail. 
* Check out common troubleshooting issues for the Google Sheets source connector on our [Airbyte Forum](https://github.com/airbytehq/airbyte/discussions).
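To make the stream-status change in this Google Sheets patch easier to follow: the connector now brackets each sheet's records with trace messages built by the `as_airbyte_message` helper it imports from `airbyte_cdk.utils.stream_status_utils`. Below is a minimal sketch of the pattern, not the connector's actual `_read` method: the `read_records` callable is a hypothetical placeholder, and the real code yields `RUNNING` before each page of row values rather than only once.

```python
from typing import Callable, Iterable, Iterator

from airbyte_cdk.models import AirbyteMessage, AirbyteStreamStatus, ConfiguredAirbyteStream
from airbyte_cdk.utils.stream_status_utils import as_airbyte_message


def read_with_statuses(
    stream: ConfiguredAirbyteStream,
    read_records: Callable[[ConfiguredAirbyteStream], Iterable[AirbyteMessage]],
) -> Iterator[AirbyteMessage]:
    # STARTED is emitted before any data is requested for the stream.
    yield as_airbyte_message(stream, AirbyteStreamStatus.STARTED)
    running_emitted = False
    for record in read_records(stream):
        if not running_emitted:
            # RUNNING signals that records have started flowing.
            yield as_airbyte_message(stream, AirbyteStreamStatus.RUNNING)
            running_emitted = True
        yield record
    # COMPLETE marks a successful end of the stream's sync.
    yield as_airbyte_message(stream, AirbyteStreamStatus.COMPLETE)
```

This ordering is why the new unit test above expects four messages for a one-record sheet: STARTED, RUNNING, the record, then COMPLETE.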
@@ -150,7 +150,8 @@ Airbyte batches requests to the API in order to efficiently pull data and respec ## Changelog | Version | Date | Pull Request | Subject | -| ------- | ---------- | -------------------------------------------------------- | --------------------------------------------------------------------------------- | +|---------|------------|----------------------------------------------------------|-----------------------------------------------------------------------------------| +| 0.3.17 | 2024-02-29 | [35722](https://github.com/airbytehq/airbyte/pull/35722) | Add logic to emit stream statuses | | 0.3.16 | 2024-02-12 | [35136](https://github.com/airbytehq/airbyte/pull/35136) | Fix license in `pyproject.toml`. | | 0.3.15 | 2024-02-07 | [34944](https://github.com/airbytehq/airbyte/pull/34944) | Manage dependencies with Poetry. | | 0.3.14 | 2024-01-23 | [34437](https://github.com/airbytehq/airbyte/pull/34437) | Fix header cells filtering | From b7ae6c487582027e7554734abed358caf425d8bc Mon Sep 17 00:00:00 2001 From: Anton Karpets Date: Thu, 29 Feb 2024 19:23:56 +0200 Subject: [PATCH 034/172] =?UTF-8?q?=F0=9F=8F=A5Source=20Google=20Search=20?= =?UTF-8?q?Console:=20fix=20expected=20records=20(#35723)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../integration_tests/abnormal_state.json | 26 +++++++-------- .../integration_tests/expected_records.jsonl | 32 +++++++++---------- .../metadata.yaml | 3 +- .../unit_tests/unit_test.py | 4 +-- 4 files changed, 31 insertions(+), 34 deletions(-) diff --git a/airbyte-integrations/connectors/source-google-search-console/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-google-search-console/integration_tests/abnormal_state.json index 883a3f90e368..872a103443f7 100755 --- a/airbyte-integrations/connectors/source-google-search-console/integration_tests/abnormal_state.json +++ b/airbyte-integrations/connectors/source-google-search-console/integration_tests/abnormal_state.json @@ -3,7 +3,7 @@ "type": "STREAM", "stream": { "stream_state": { - "https://airbyte.io/": { + "sc-domain:airbyte.io": { "web": { "date": "2050-08-28" }, "news": { "date": "2050-08-28" }, "image": { "date": "2050-08-28" }, @@ -19,7 +19,7 @@ "type": "STREAM", "stream": { "stream_state": { - "https://airbyte.io/": { + "sc-domain:airbyte.io": { "web": { "date": "2050-08-28" }, "news": { "date": "2050-08-28" }, "image": { "date": "2050-08-28" }, @@ -35,7 +35,7 @@ "type": "STREAM", "stream": { "stream_state": { - "https://airbyte.io/": { + "sc-domain:airbyte.io": { "web": { "date": "2050-08-28" }, "news": { "date": "2050-08-28" }, "image": { "date": "2050-08-28" }, @@ -50,7 +50,7 @@ "type": "STREAM", "stream": { "stream_state": { - "https://airbyte.io/": { + "sc-domain:airbyte.io": { "web": { "date": "2050-08-28" }, "news": { "date": "2050-08-28" }, "image": { "date": "2050-08-28" }, @@ -66,7 +66,7 @@ "type": "STREAM", "stream": { "stream_state": { - "https://airbyte.io/": { + "sc-domain:airbyte.io": { "web": { "date": "2050-08-28" }, "news": { "date": "2050-08-28" }, "image": { "date": "2050-08-28" }, @@ -80,7 +80,7 @@ "type": "STREAM", "stream": { "stream_state": { - "https://airbyte.io/": { + "sc-domain:airbyte.io": { "web": { "date": "2050-08-28" }, "news": { "date": "2050-08-28" }, "image": { "date": "2050-08-28" }, @@ -94,7 +94,7 @@ "type": "STREAM", "stream": { "stream_state": { - "https://airbyte.io/": { + "sc-domain:airbyte.io": { "web": { "date": "2050-08-28" }, "news": { "date": "2050-08-28" }, 
"image": { "date": "2050-08-28" }, @@ -108,7 +108,7 @@ "type": "STREAM", "stream": { "stream_state": { - "https://airbyte.io/": { + "sc-domain:airbyte.io": { "web": { "date": "2050-08-28" }, "news": { "date": "2050-08-28" }, "image": { "date": "2050-08-28" }, @@ -123,7 +123,7 @@ "type": "STREAM", "stream": { "stream_state": { - "https://airbyte.io/": { + "sc-domain:airbyte.io": { "web": { "date": "2050-08-28" }, "news": { "date": "2050-08-28" }, "image": { "date": "2050-08-28" }, @@ -139,7 +139,7 @@ "type": "STREAM", "stream": { "stream_state": { - "https://airbyte.io/": { + "sc-domain:airbyte.io": { "web": { "date": "2050-08-28" }, "news": { "date": "2050-08-28" }, "image": { "date": "2050-08-28" }, @@ -155,7 +155,7 @@ "type": "STREAM", "stream": { "stream_state": { - "https://airbyte.io/": { + "sc-domain:airbyte.io": { "web": { "date": "2050-08-28" }, "news": { "date": "2050-08-28" }, "image": { "date": "2050-08-28" }, @@ -169,7 +169,7 @@ "type": "STREAM", "stream": { "stream_state": { - "https://airbyte.io/": { + "sc-domain:airbyte.io": { "web": { "date": "2050-08-28" }, "news": { "date": "2050-08-28" }, "image": { "date": "2050-08-28" }, @@ -184,7 +184,7 @@ "type": "STREAM", "stream": { "stream_state": { - "https://airbyte.io/": { + "sc-domain:airbyte.io": { "web": { "date": "2050-08-28" }, "news": { "date": "2050-08-28" }, "image": { "date": "2050-08-28" }, diff --git a/airbyte-integrations/connectors/source-google-search-console/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-google-search-console/integration_tests/expected_records.jsonl index 4b75a033c505..fc30ccbbccd9 100644 --- a/airbyte-integrations/connectors/source-google-search-console/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-google-search-console/integration_tests/expected_records.jsonl @@ -1,19 +1,17 @@ -{"stream": "sites", "data": {"siteUrl": "https://airbyte.io/", "permissionLevel": "siteOwner"}, "emitted_at": 1677799184974} -{"stream": "sites", "data": {"siteUrl": "https://airbyte.io/", "permissionLevel": "siteOwner"}, "emitted_at": 1677799185060} +{"stream": "sites", "data": {"siteUrl": "sc-domain:airbyte.io", "permissionLevel": "siteFullUser"}, "emitted_at": 1709211825229} {"stream": "sitemaps", "data": {"path": "https://airbyte.io/sitemap.xml", "lastSubmitted": "2021-09-10T23:02:22.258Z", "isPending": false, "isSitemapsIndex": false, "type": "sitemap", "lastDownloaded": "2023-03-02T03:42:19.607Z", "warnings": "0", "errors": "0", "contents": [{"type": "web", "submitted": "5165", "indexed": "0"}]}, "emitted_at": 1677799185696} {"stream": "sitemaps", "data": {"path": "https://airbyte.io/sitemap.xml", "lastSubmitted": "2021-09-10T23:02:22.258Z", "isPending": false, "isSitemapsIndex": false, "type": "sitemap", "lastDownloaded": "2023-03-02T03:42:19.607Z", "warnings": "0", "errors": "0", "contents": [{"type": "web", "submitted": "5165", "indexed": "0"}]}, "emitted_at": 1677799186044} -{"stream": "search_analytics_by_date", "data": {"clicks": 749, "impressions": 39606, "ctr": 0.01891127606928243, "position": 26.441978488107864, "site_url": "https://airbyte.io/", "search_type": "web", "date": "2021-10-19"}, "emitted_at": 1677799193858} -{"stream": "search_analytics_by_date", "data": {"clicks": 888, "impressions": 38797, "ctr": 0.02288836765729309, "position": 26.447173750547723, "site_url": "https://airbyte.io/", "search_type": "web", "date": "2021-10-20"}, "emitted_at": 1677799194291} -{"stream": "search_analytics_by_country", "data": {"clicks": 
209, "impressions": 5736, "ctr": 0.03643654114365411, "position": 29.37726638772664, "site_url": "https://airbyte.io/", "search_type": "web", "date": "2021-10-19", "country": "usa"}, "emitted_at": 1677799200001} -{"stream": "search_analytics_by_country", "data": {"clicks": 64, "impressions": 8803, "ctr": 0.007270248778825401, "position": 14.991366579575145, "site_url": "https://airbyte.io/", "search_type": "web", "date": "2021-10-19", "country": "ind"}, "emitted_at": 1677799200002} -{"stream": "search_analytics_by_device", "data": {"clicks": 655, "impressions": 29038, "ctr": 0.02255664990701839, "position": 30.284523727529443, "site_url": "https://airbyte.io/", "search_type": "web", "date": "2021-10-19", "device": "DESKTOP"}, "emitted_at": 1677799206886} -{"stream": "search_analytics_by_device", "data": {"clicks": 93, "impressions": 10412, "ctr": 0.008932001536688437, "position": 15.865155589704187, "site_url": "https://airbyte.io/", "search_type": "web", "date": "2021-10-19", "device": "MOBILE"}, "emitted_at": 1677799206887} -{"stream": "search_analytics_by_page", "data": {"clicks": 349, "impressions": 3087, "ctr": 0.11305474570780694, "position": 32.505992873339814, "site_url": "https://airbyte.io/", "search_type": "web", "date": "2021-10-19", "page": "https://airbyte.io/"}, "emitted_at": 1677799212505} -{"stream": "search_analytics_by_page", "data": {"clicks": 65, "impressions": 691, "ctr": 0.09406657018813314, "position": 18.070911722141823, "site_url": "https://airbyte.io/", "search_type": "web", "date": "2021-10-19", "page": "https://airbyte.io/recipes/migrate-from-mysql-to-postgresql"}, "emitted_at": 1677799212505} -{"stream": "search_analytics_by_query", "data": {"clicks": 271, "impressions": 428, "ctr": 0.633177570093458, "position": 1, "site_url": "https://airbyte.io/", "search_type": "web", "date": "2021-10-19", "query": "airbyte"}, "emitted_at": 1677799217633} -{"stream": "search_analytics_by_query", "data": {"clicks": 9, "impressions": 13, "ctr": 0.6923076923076923, "position": 1, "site_url": "https://airbyte.io/", "search_type": "web", "date": "2021-10-19", "query": "airbite"}, "emitted_at": 1677799217634} -{"stream": "search_analytics_all_fields", "data": {"clicks": 79, "impressions": 134, "ctr": 0.5895522388059702, "position": 1, "site_url": "https://airbyte.io/", "search_type": "web", "date": "2021-10-19", "country": "usa", "device": "DESKTOP", "page": "https://airbyte.io/", "query": "airbyte"}, "emitted_at": 1677799225648} -{"stream": "search_analytics_all_fields", "data": {"clicks": 18, "impressions": 33, "ctr": 0.5454545454545454, "position": 1, "site_url": "https://airbyte.io/", "search_type": "web", "date": "2021-10-19", "country": "fra", "device": "DESKTOP", "page": "https://airbyte.io/", "query": "airbyte"}, "emitted_at": 1677799225649} -{"stream": "custom_dimensions", "data": {"clicks": 169, "impressions": 4869, "ctr": 0.03470938591086466, "position": 29.429657013760526, "site_url": "https://airbyte.io/", "search_type": "web", "date": "2021-10-19", "country": "usa", "device": "DESKTOP"}, "emitted_at": 1677799232181} -{"stream": "custom_dimensions", "data": {"clicks": 56, "impressions": 2729, "ctr": 0.020520337119824112, "position": 33.29351410773177, "site_url": "https://airbyte.io/", "search_type": "web", "date": "2021-10-19", "country": "ind", "device": "DESKTOP"}, "emitted_at": 1677799232181} -{"stream": "search_analytics_keyword_page_report", "data": {"clicks": 1, "impressions": 1, "ctr": 1, "position": 4, "site_url": "https://airbyte.io/", "search_type": 
"web", "date": "2022-02-14", "country": "nld", "device": "DESKTOP", "query": "airbyte s3 destination", "page": "https://airbyte.io/connections/Progress-to-S3"}, "emitted_at": 1688032519825} +{"stream": "search_analytics_by_date", "data": {"clicks": 664, "impressions": 14606, "ctr": 0.045460769546761606, "position": 17.294262631795153, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-17"}, "emitted_at": 1709212437957} +{"stream": "search_analytics_by_date", "data": {"clicks": 650, "impressions": 15009, "ctr": 0.043307348923978944, "position": 17.77133719768139, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-18"}, "emitted_at": 1709212438576} +{"stream": "search_analytics_by_country", "data": {"clicks": 117, "impressions": 3207, "ctr": 0.03648269410664172, "position": 18.13376987839102, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-17", "country": "usa"}, "emitted_at": 1709212703008} +{"stream": "search_analytics_by_country", "data": {"clicks": 87, "impressions": 756, "ctr": 0.11507936507936507, "position": 9.941798941798941, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-17", "country": "fra"}, "emitted_at": 1709212703011} +{"stream": "search_analytics_by_device", "data": {"clicks": 637, "impressions": 13493, "ctr": 0.04720966427036241, "position": 16.320166012006226, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-17", "device": "DESKTOP"}, "emitted_at": 1709213135112} +{"stream": "search_analytics_by_device", "data": {"clicks": 27, "impressions": 1097, "ctr": 0.024612579762989972, "position": 29.275296262534184, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-17", "device": "MOBILE"}, "emitted_at": 1709213135114} +{"stream": "search_analytics_by_page", "data": {"clicks": 13, "impressions": 210, "ctr": 0.06190476190476191, "position": 8.395238095238096, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-17", "page": "https://discuss.airbyte.io/t/kafka-connection-fails/723"}, "emitted_at": 1709213297272} +{"stream": "search_analytics_by_page", "data": {"clicks": 12, "impressions": 118, "ctr": 0.1016949152542373, "position": 9.23728813559322, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-17", "page": "https://discuss.airbyte.io/t/error-io-grpc-statusruntimeexception-deadline-exceeded-deadline-exceeded-after/254"}, "emitted_at": 1709213297273} +{"stream": "search_analytics_by_query", "data": {"clicks": 5, "impressions": 5, "ctr": 1, "position": 1, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-17", "query": "airbyte login"}, "emitted_at": 1709213455011} +{"stream": "search_analytics_by_query", "data": {"clicks": 4, "impressions": 17, "ctr": 0.23529411764705882, "position": 3, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-17", "query": "kafka \"(id: -1 rack: null) disconnected\""}, "emitted_at": 1709213455013} +{"stream": "search_analytics_all_fields", "data": {"clicks": 2, "impressions": 8, "ctr": 0.25, "position": 3, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-17", "country": "usa", "device": "DESKTOP", "page": "https://discuss.airbyte.io/t/kafka-connection-fails/723", "query": "kafka \"(id: -1 rack: null) disconnected\""}, "emitted_at": 1709213754017} +{"stream": "search_analytics_all_fields", "data": {"clicks": 1, "impressions": 2, "ctr": 0.5, "position": 2, "site_url": 
"sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-17", "country": "alb", "device": "DESKTOP", "page": "https://discuss.airbyte.io/c/issues/11", "query": "airbyte issues"}, "emitted_at": 1709213754019} +{"stream": "custom_dimensions", "data": {"clicks": 116, "impressions": 2996, "ctr": 0.03871829105473965, "position": 17.209946595460615, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-17", "country": "usa", "device": "DESKTOP"}, "emitted_at": 1709213938241} +{"stream": "custom_dimensions", "data": {"clicks": 85, "impressions": 692, "ctr": 0.12283236994219653, "position": 9.426300578034683, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-17", "country": "fra", "device": "DESKTOP"}, "emitted_at": 1709213938244} diff --git a/airbyte-integrations/connectors/source-google-search-console/metadata.yaml b/airbyte-integrations/connectors/source-google-search-console/metadata.yaml index 5328b530325e..4bec12112e05 100644 --- a/airbyte-integrations/connectors/source-google-search-console/metadata.yaml +++ b/airbyte-integrations/connectors/source-google-search-console/metadata.yaml @@ -19,8 +19,7 @@ data: name: Google Search Console remoteRegistries: pypi: - enabled: false - # TODO: Set enabled=true after `airbyte-lib-validate-source` is passing. + enabled: true packageName: airbyte-source-google-search-console registries: cloud: diff --git a/airbyte-integrations/connectors/source-google-search-console/unit_tests/unit_test.py b/airbyte-integrations/connectors/source-google-search-console/unit_tests/unit_test.py index 84a6ed75f91e..83905385b18a 100755 --- a/airbyte-integrations/connectors/source-google-search-console/unit_tests/unit_test.py +++ b/airbyte-integrations/connectors/source-google-search-console/unit_tests/unit_test.py @@ -231,14 +231,14 @@ def test_check_connection(config_gen, config, mocker, requests_mock): lazy_fixture("config"), ( False, - "UnauthorizedOauthError('Unable to connect with privided OAuth credentials. The `access token` or `refresh token` is expired. Please re-authrenticate using valid account credenials.')", + "UnauthorizedOauthError('Unable to connect with provided OAuth credentials. The `access token` or `refresh token` is expired. Please re-authrenticate using valid account credenials.')", ), ), ( lazy_fixture("service_account_config"), ( False, - "UnauthorizedServiceAccountError('Unable to connect with privided Service Account credentials. Make sure the `sevice account crdentials` povided is valid.')", + "UnauthorizedServiceAccountError('Unable to connect with provided Service Account credentials. 
Make sure the `sevice account credentials` provided are valid.')", ), ), ], From 62ba6c9e2a46c9ea582402f948373935635dbb0a Mon Sep 17 00:00:00 2001 From: Augustin Date: Thu, 29 Feb 2024 18:55:00 +0100 Subject: [PATCH 035/172] update connector generators to use poetry (#35411) --- .../source-configuration-based/README.md.hbs | 122 ++++++------- .../metadata.yaml.hbs | 3 +- .../pyproject.toml.hbs | 27 +++ .../requirements.txt.hbs | 1 - .../source-configuration-based/setup.py.hbs | 30 ---- .../{ => src}/main.py.hbs | 0 .../source_{{snakeCase name}}/__init__.py.hbs | 0 .../manifest.yaml.hbs | 0 .../src}/source_{{snakeCase name}}/run.py.hbs | 0 .../schemas/TODO.md.hbs | 0 .../schemas/customers.json | 0 .../schemas/employees.json | 0 .../source_{{snakeCase name}}/source.py.hbs | 0 .../source-python-http-api/README.md.hbs | 165 ++++++------------ .../source-python-http-api/pyproject.toml.hbs | 27 +++ .../requirements.txt.hbs | 1 - .../source-python-http-api/setup.py.hbs | 35 ---- .../{ => src}/main.py.hbs | 0 .../source_{{snakeCase name}}/__init__.py.hbs | 0 .../src}/source_{{snakeCase name}}/run.py.hbs | 0 .../source_{{snakeCase name}}/schemas/TODO.md | 0 .../schemas/customers.json | 0 .../schemas/employees.json | 0 .../source_{{snakeCase name}}/source.py.hbs | 0 .../source_{{snakeCase name}}/spec.yaml.hbs | 0 .../source-python/README.md.hbs | 162 ++++++----------- .../source-python/pyproject.toml.hbs | 27 +++ .../source-python/requirements.txt.hbs | 2 - .../source-python/setup.py.hbs | 35 ---- .../source-python/{ => src}/main.py.hbs | 0 .../source_{{snakeCase name}}/__init__.py.hbs | 0 .../src/source_{{snakeCase name}}/run.py.hbs | 13 ++ .../source_{{snakeCase name}}/source.py.hbs | 0 .../source_{{snakeCase name}}/spec.yaml.hbs | 0 .../metadata.yaml | 4 +- .../source-scaffold-source-http/README.md | 165 ++++++------------ .../source-scaffold-source-http/metadata.yaml | 10 +- .../pyproject.toml | 27 +++ .../requirements.txt | 1 - .../source-scaffold-source-http/setup.py | 47 ----- .../{ => src}/main.py | 0 .../source_scaffold_source_http/__init__.py | 0 .../source_scaffold_source_http/run.py | 3 +- .../schemas/TODO.md | 0 .../schemas/customers.json | 0 .../schemas/employees.json | 0 .../source_scaffold_source_http/source.py | 0 .../source_scaffold_source_http/spec.yaml | 0 .../source-scaffold-source-python/README.md | 162 ++++++----------- .../metadata.yaml | 2 +- .../pyproject.toml | 27 +++ .../requirements.txt | 2 - .../source-scaffold-source-python/setup.py | 47 ----- .../{ => src}/main.py | 0 .../source_scaffold_source_python/__init__.py | 0 .../source_scaffold_source_python/run.py | 3 +- .../source_scaffold_source_python/source.py | 0 .../source_scaffold_source_python/spec.yaml | 0 58 files changed, 419 insertions(+), 731 deletions(-) create mode 100644 airbyte-integrations/connector-templates/source-configuration-based/pyproject.toml.hbs delete mode 100644 airbyte-integrations/connector-templates/source-configuration-based/requirements.txt.hbs delete mode 100644 airbyte-integrations/connector-templates/source-configuration-based/setup.py.hbs rename airbyte-integrations/connector-templates/source-configuration-based/{ => src}/main.py.hbs (100%) rename airbyte-integrations/connector-templates/source-configuration-based/{ => src}/source_{{snakeCase name}}/__init__.py.hbs (100%) rename airbyte-integrations/connector-templates/source-configuration-based/{ => src}/source_{{snakeCase name}}/manifest.yaml.hbs (100%) rename airbyte-integrations/connector-templates/{source-python-http-api => 
source-configuration-based/src}/source_{{snakeCase name}}/run.py.hbs (100%) rename airbyte-integrations/connector-templates/source-configuration-based/{ => src}/source_{{snakeCase name}}/schemas/TODO.md.hbs (100%) rename airbyte-integrations/connector-templates/source-configuration-based/{ => src}/source_{{snakeCase name}}/schemas/customers.json (100%) rename airbyte-integrations/connector-templates/source-configuration-based/{ => src}/source_{{snakeCase name}}/schemas/employees.json (100%) rename airbyte-integrations/connector-templates/source-configuration-based/{ => src}/source_{{snakeCase name}}/source.py.hbs (100%) create mode 100644 airbyte-integrations/connector-templates/source-python-http-api/pyproject.toml.hbs delete mode 100644 airbyte-integrations/connector-templates/source-python-http-api/requirements.txt.hbs delete mode 100644 airbyte-integrations/connector-templates/source-python-http-api/setup.py.hbs rename airbyte-integrations/connector-templates/source-python-http-api/{ => src}/main.py.hbs (100%) rename airbyte-integrations/connector-templates/source-python-http-api/{ => src}/source_{{snakeCase name}}/__init__.py.hbs (100%) rename airbyte-integrations/connector-templates/{source-python => source-python-http-api/src}/source_{{snakeCase name}}/run.py.hbs (100%) rename airbyte-integrations/connector-templates/source-python-http-api/{ => src}/source_{{snakeCase name}}/schemas/TODO.md (100%) rename airbyte-integrations/connector-templates/source-python-http-api/{ => src}/source_{{snakeCase name}}/schemas/customers.json (100%) rename airbyte-integrations/connector-templates/source-python-http-api/{ => src}/source_{{snakeCase name}}/schemas/employees.json (100%) rename airbyte-integrations/connector-templates/source-python-http-api/{ => src}/source_{{snakeCase name}}/source.py.hbs (100%) rename airbyte-integrations/connector-templates/source-python-http-api/{ => src}/source_{{snakeCase name}}/spec.yaml.hbs (100%) create mode 100644 airbyte-integrations/connector-templates/source-python/pyproject.toml.hbs delete mode 100644 airbyte-integrations/connector-templates/source-python/requirements.txt.hbs delete mode 100644 airbyte-integrations/connector-templates/source-python/setup.py.hbs rename airbyte-integrations/connector-templates/source-python/{ => src}/main.py.hbs (100%) rename airbyte-integrations/connector-templates/source-python/{ => src}/source_{{snakeCase name}}/__init__.py.hbs (100%) create mode 100644 airbyte-integrations/connector-templates/source-python/src/source_{{snakeCase name}}/run.py.hbs rename airbyte-integrations/connector-templates/source-python/{ => src}/source_{{snakeCase name}}/source.py.hbs (100%) rename airbyte-integrations/connector-templates/source-python/{ => src}/source_{{snakeCase name}}/spec.yaml.hbs (100%) create mode 100644 airbyte-integrations/connectors/source-scaffold-source-http/pyproject.toml delete mode 100644 airbyte-integrations/connectors/source-scaffold-source-http/requirements.txt delete mode 100644 airbyte-integrations/connectors/source-scaffold-source-http/setup.py rename airbyte-integrations/connectors/source-scaffold-source-http/{ => src}/main.py (100%) rename airbyte-integrations/connectors/source-scaffold-source-http/{ => src}/source_scaffold_source_http/__init__.py (100%) rename airbyte-integrations/connectors/source-scaffold-source-http/{ => src}/source_scaffold_source_http/run.py (76%) rename airbyte-integrations/connectors/source-scaffold-source-http/{ => src}/source_scaffold_source_http/schemas/TODO.md (100%) rename 
airbyte-integrations/connectors/source-scaffold-source-http/{ => src}/source_scaffold_source_http/schemas/customers.json (100%) rename airbyte-integrations/connectors/source-scaffold-source-http/{ => src}/source_scaffold_source_http/schemas/employees.json (100%) rename airbyte-integrations/connectors/source-scaffold-source-http/{ => src}/source_scaffold_source_http/source.py (100%) rename airbyte-integrations/connectors/source-scaffold-source-http/{ => src}/source_scaffold_source_http/spec.yaml (100%) create mode 100644 airbyte-integrations/connectors/source-scaffold-source-python/pyproject.toml delete mode 100644 airbyte-integrations/connectors/source-scaffold-source-python/requirements.txt delete mode 100644 airbyte-integrations/connectors/source-scaffold-source-python/setup.py rename airbyte-integrations/connectors/source-scaffold-source-python/{ => src}/main.py (100%) rename airbyte-integrations/connectors/source-scaffold-source-python/{ => src}/source_scaffold_source_python/__init__.py (100%) rename airbyte-integrations/connectors/source-scaffold-source-python/{ => src}/source_scaffold_source_python/run.py (74%) rename airbyte-integrations/connectors/source-scaffold-source-python/{ => src}/source_scaffold_source_python/source.py (100%) rename airbyte-integrations/connectors/source-scaffold-source-python/{ => src}/source_scaffold_source_python/spec.yaml (100%) diff --git a/airbyte-integrations/connector-templates/source-configuration-based/README.md.hbs b/airbyte-integrations/connector-templates/source-configuration-based/README.md.hbs index ebfac13705c4..be2fdeb3183b 100644 --- a/airbyte-integrations/connector-templates/source-configuration-based/README.md.hbs +++ b/airbyte-integrations/connector-templates/source-configuration-based/README.md.hbs @@ -5,81 +5,59 @@ For information about how to use this connector within Airbyte, see [the documen ## Local development +### Prerequisites -#### Create credentials -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/{{dashCase name}}) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_{{snakeCase name}}/spec.yaml` file. -Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. +* Python (`^3.9`) +* Poetry (`^1.7`) - installation instructions [here](https://python-poetry.org/docs/#installation) -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source {{dashCase name}} test creds` -and place them into `secrets/config.json`. -### Locally running the connector docker image -#### Use `airbyte-ci` to build your connector -The Airbyte way of building this connector is to use our `airbyte-ci` tool. -You can follow install instructions [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md#L1). -Then running the following command will build your connector: +### Installing the connector +From this connector directory, run: ```bash -airbyte-ci connectors --name source-{{dashCase name}} build +poetry install --with dev ``` -Once the command is done, you will find your connector image in your local docker registry: `airbyte/source-{{dashCase name}}:dev`. 
- -##### Customizing our build process -When contributing on our connector you might need to customize the build process to add a system dependency or set an env var. -You can customize our build process by adding a `build_customization.py` module to your connector. -This module should contain a `pre_connector_install` and `post_connector_install` async function that will mutate the base image and the connector container respectively. -It will be imported at runtime by our build process and the functions will be called if they exist. -Here is an example of a `build_customization.py` module: -```python -from __future__ import annotations -from typing import TYPE_CHECKING +### Create credentials -if TYPE_CHECKING: - # Feel free to check the dagger documentation for more information on the Container object and its methods. - # https://dagger-io.readthedocs.io/en/sdk-python-v0.6.4/ - from dagger import Container +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/{{dashCase name}}) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `src/source_{{snakeCase name}}/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `sample_files/sample_config.json` for a sample config file. -async def pre_connector_install(base_image_container: Container) -> Container: - return await base_image_container.with_env_variable("MY_PRE_BUILD_ENV_VAR", "my_pre_build_env_var_value") +### Locally running the connector -async def post_connector_install(connector_container: Container) -> Container: - return await connector_container.with_env_variable("MY_POST_BUILD_ENV_VAR", "my_post_build_env_var_value") +``` +poetry run source-{{dashCase name}} spec +poetry run source-{{dashCase name}} check --config secrets/config.json +poetry run source-{{dashCase name}} discover --config secrets/config.json +poetry run source-{{dashCase name}} read --config secrets/config.json --catalog sample_files/configured_catalog.json ``` -#### Build your own connector image -This connector is built using our dynamic built process in `airbyte-ci`. -The base image used to build it is defined within the metadata.yaml file under the `connectorBuildOptions`. -The build logic is defined using [Dagger](https://dagger.io/) [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/pipelines/builds/python_connectors.py). -It does not rely on a Dockerfile. +### Running tests -If you would like to patch our connector and build your own a simple approach would be to: +To run tests locally, from the connector directory run: -1. Create your own Dockerfile based on the latest version of the connector image. -```Dockerfile -FROM airbyte/source-{{dashCase name}}:latest +``` +poetry run pytest tests +``` -COPY . ./airbyte/integration_code -RUN pip install ./airbyte/integration_code +### Building the docker image -# The entrypoint and default env vars are already set in the base image -# ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -# ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] +1. Install [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) +2. 
Run the following command to build the docker image: +```bash airbyte-ci connectors --name=source-{{dashCase name}} build ``` -Please use this as an example. This is not optimized. -2. Build your image: -```bash -docker build -t airbyte/source-{{dashCase name}}:dev . -# Running the spec command against your patched connector -docker run airbyte/source-{{dashCase name}}:dev spec +An image will be available on your host with the tag `airbyte/source-{{dashCase name}}:dev`. + + +### Running as a docker container -#### Run Then run any of the connector commands as follows: ``` docker run --rm airbyte/source-{{dashCase name}}:dev spec @@ -87,29 +65,39 @@ docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-{{dashCase name}}:dev check --config /secrets/config.json docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-{{dashCase name}}:dev discover --config /secrets/config.json docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-{{dashCase name}}:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json ``` -## Testing -### Acceptance Tests -Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. +### Running our CI test suite + +You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): +```bash +airbyte-ci connectors --name=source-{{dashCase name}} test +``` + +### Customizing acceptance tests + +Customize the `acceptance-test-config.yml` file to configure acceptance tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py. -Please run acceptance tests via [airbyte-ci](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md#connectors-test-command): + +### Dependency Management + +All of your dependencies should be managed via Poetry. +To add a new dependency, run: ```bash -airbyte-ci connectors --name source-{{dashCase name}} test +poetry add <package-name> ``` -## Dependency Management -All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. -We split dependencies between two groups, dependencies that are: -* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. -* required for the testing need to go to `TEST_REQUIREMENTS` list +Please commit the changes to `pyproject.toml` and `poetry.lock` files. + +## Publishing a new version of the connector -### Publishing a new version of the connector You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? 1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-{{dashCase name}} test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). +2.
Bump the connector version (please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors)): + - bump the `dockerImageTag` value in in `metadata.yaml` + - bump the `version` value in `pyproject.toml` 3. Make sure the `metadata.yaml` content is up to date. -4. Make the connector documentation and its changelog is up to date (`docs/integrations/sources/{{dashCase name}}.md`). +4. Make sure the connector documentation and its changelog is up to date (`docs/integrations/sources/{{dashCase name}}.md`). 5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). 6. Pat yourself on the back for being an awesome contributor. 7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. - +8. Once your PR is merged, the new version of the connector will be automatically published to Docker Hub and our connector registry. \ No newline at end of file diff --git a/airbyte-integrations/connector-templates/source-configuration-based/metadata.yaml.hbs b/airbyte-integrations/connector-templates/source-configuration-based/metadata.yaml.hbs index 5ce3f8817b95..abb927715e3a 100644 --- a/airbyte-integrations/connector-templates/source-configuration-based/metadata.yaml.hbs +++ b/airbyte-integrations/connector-templates/source-configuration-based/metadata.yaml.hbs @@ -30,5 +30,6 @@ data: supportLevel: community documentationUrl: https://docs.airbyte.com/integrations/sources/{{dashCase name}} tags: - - language:low-code + - language:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connector-templates/source-configuration-based/pyproject.toml.hbs b/airbyte-integrations/connector-templates/source-configuration-based/pyproject.toml.hbs new file mode 100644 index 000000000000..40bbebd8c090 --- /dev/null +++ b/airbyte-integrations/connector-templates/source-configuration-based/pyproject.toml.hbs @@ -0,0 +1,27 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +version = "0.1.0" +name = "source-{{dashCase name}}" +description = "Source implementation for {{dashCase name}}." +authors = [ "Airbyte ",] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/sources/{{dashCase name}}" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" +packages = [ { include = "source_{{snakeCase name}}", from="src"}, {include = "main.py", from = "src"} ] + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +airbyte-cdk = "^0" + +[tool.poetry.scripts] +source-{{dashCase name}} = "source_{{snakeCase name}}.run:run" + +[tool.poetry.group.dev.dependencies] +requests-mock = "^1.9.3" +pytest-mock = "^3.6.1" +pytest = "^6.1" diff --git a/airbyte-integrations/connector-templates/source-configuration-based/requirements.txt.hbs b/airbyte-integrations/connector-templates/source-configuration-based/requirements.txt.hbs deleted file mode 100644 index d6e1198b1ab1..000000000000 --- a/airbyte-integrations/connector-templates/source-configuration-based/requirements.txt.hbs +++ /dev/null @@ -1 +0,0 @@ --e . 
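For reference, the `[tool.poetry.scripts]` entry above wires the `source-{{dashCase name}}` command to a `run` callable in `src/source_{{snakeCase name}}/run.py`. A minimal sketch of that module, mirroring the `run.py.hbs` template that appears later in this patch (the handlebars placeholders are filled in by the generator), looks like:

```python
import sys

from airbyte_cdk.entrypoint import launch

from .source import Source{{properCase name}}


def run():
    # Instantiate the source and hand the CLI arguments
    # (spec / check / discover / read) to the Airbyte CDK entrypoint.
    source = Source{{properCase name}}()
    launch(source, sys.argv[1:])
```

Once the package is installed, this callable is what backs the `poetry run source-{{dashCase name}} ...` commands shown in the README above.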
diff --git a/airbyte-integrations/connector-templates/source-configuration-based/setup.py.hbs b/airbyte-integrations/connector-templates/source-configuration-based/setup.py.hbs deleted file mode 100644 index 38d921d99b11..000000000000 --- a/airbyte-integrations/connector-templates/source-configuration-based/setup.py.hbs +++ /dev/null @@ -1,30 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk~=0.1", -] - -TEST_REQUIREMENTS = [ - "requests-mock~=1.9.3", - "pytest~=6.2", - "pytest-mock~=3.6.1", - "connector-acceptance-test", -] - -setup( - name="source_{{snakeCase name}}", - description="Source implementation for {{capitalCase name}}.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connector-templates/source-configuration-based/main.py.hbs b/airbyte-integrations/connector-templates/source-configuration-based/src/main.py.hbs similarity index 100% rename from airbyte-integrations/connector-templates/source-configuration-based/main.py.hbs rename to airbyte-integrations/connector-templates/source-configuration-based/src/main.py.hbs diff --git a/airbyte-integrations/connector-templates/source-configuration-based/source_{{snakeCase name}}/__init__.py.hbs b/airbyte-integrations/connector-templates/source-configuration-based/src/source_{{snakeCase name}}/__init__.py.hbs similarity index 100% rename from airbyte-integrations/connector-templates/source-configuration-based/source_{{snakeCase name}}/__init__.py.hbs rename to airbyte-integrations/connector-templates/source-configuration-based/src/source_{{snakeCase name}}/__init__.py.hbs diff --git a/airbyte-integrations/connector-templates/source-configuration-based/source_{{snakeCase name}}/manifest.yaml.hbs b/airbyte-integrations/connector-templates/source-configuration-based/src/source_{{snakeCase name}}/manifest.yaml.hbs similarity index 100% rename from airbyte-integrations/connector-templates/source-configuration-based/source_{{snakeCase name}}/manifest.yaml.hbs rename to airbyte-integrations/connector-templates/source-configuration-based/src/source_{{snakeCase name}}/manifest.yaml.hbs diff --git a/airbyte-integrations/connector-templates/source-python-http-api/source_{{snakeCase name}}/run.py.hbs b/airbyte-integrations/connector-templates/source-configuration-based/src/source_{{snakeCase name}}/run.py.hbs similarity index 100% rename from airbyte-integrations/connector-templates/source-python-http-api/source_{{snakeCase name}}/run.py.hbs rename to airbyte-integrations/connector-templates/source-configuration-based/src/source_{{snakeCase name}}/run.py.hbs diff --git a/airbyte-integrations/connector-templates/source-configuration-based/source_{{snakeCase name}}/schemas/TODO.md.hbs b/airbyte-integrations/connector-templates/source-configuration-based/src/source_{{snakeCase name}}/schemas/TODO.md.hbs similarity index 100% rename from airbyte-integrations/connector-templates/source-configuration-based/source_{{snakeCase name}}/schemas/TODO.md.hbs rename to airbyte-integrations/connector-templates/source-configuration-based/src/source_{{snakeCase name}}/schemas/TODO.md.hbs diff --git a/airbyte-integrations/connector-templates/source-configuration-based/source_{{snakeCase 
name}}/schemas/customers.json b/airbyte-integrations/connector-templates/source-configuration-based/src/source_{{snakeCase name}}/schemas/customers.json similarity index 100% rename from airbyte-integrations/connector-templates/source-configuration-based/source_{{snakeCase name}}/schemas/customers.json rename to airbyte-integrations/connector-templates/source-configuration-based/src/source_{{snakeCase name}}/schemas/customers.json diff --git a/airbyte-integrations/connector-templates/source-configuration-based/source_{{snakeCase name}}/schemas/employees.json b/airbyte-integrations/connector-templates/source-configuration-based/src/source_{{snakeCase name}}/schemas/employees.json similarity index 100% rename from airbyte-integrations/connector-templates/source-configuration-based/source_{{snakeCase name}}/schemas/employees.json rename to airbyte-integrations/connector-templates/source-configuration-based/src/source_{{snakeCase name}}/schemas/employees.json diff --git a/airbyte-integrations/connector-templates/source-configuration-based/source_{{snakeCase name}}/source.py.hbs b/airbyte-integrations/connector-templates/source-configuration-based/src/source_{{snakeCase name}}/source.py.hbs similarity index 100% rename from airbyte-integrations/connector-templates/source-configuration-based/source_{{snakeCase name}}/source.py.hbs rename to airbyte-integrations/connector-templates/source-configuration-based/src/source_{{snakeCase name}}/source.py.hbs diff --git a/airbyte-integrations/connector-templates/source-python-http-api/README.md.hbs b/airbyte-integrations/connector-templates/source-python-http-api/README.md.hbs index 56e84e01802c..28e5231c1cd1 100644 --- a/airbyte-integrations/connector-templates/source-python-http-api/README.md.hbs +++ b/airbyte-integrations/connector-templates/source-python-http-api/README.md.hbs @@ -6,113 +6,58 @@ For information about how to use this connector within Airbyte, see [the documen ## Local development ### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** -#### Minimum Python version required `= 3.9.0` +* Python (`^3.9`) +* Poetry (`^1.7`) - installation instructions [here](https://python-poetry.org/docs/#installation) -#### Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt -pip install '.[tests]' + +### Installing the connector + +From this connector directory, run: +```bash +poetry install --with dev ``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. -#### Create credentials +### Create credentials + **If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/{{dashCase name}}) -to generate the necessary credentials. 
Then create a file `secrets/config.json` conforming to the `source_{{snakeCase name}}/spec.yaml` file. +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `src/source_{{snakeCase name}}/spec.yaml` file. Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. +See `sample_files/sample_config.json` for a sample config file. -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source {{dashCase name}} test creds` -and place them into `secrets/config.json`. ### Locally running the connector + ``` -python main.py spec -python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +poetry run source-{{dashCase name}} spec +poetry run source-{{dashCase name}} check --config secrets/config.json +poetry run source-{{dashCase name}} discover --config secrets/config.json +poetry run source-{{dashCase name}} read --config secrets/config.json --catalog sample_files/configured_catalog.json ``` -### Locally running the connector docker image +### Running tests -#### Use `airbyte-ci` to build your connector -The Airbyte way of building this connector is to use our `airbyte-ci` tool. -You can follow install instructions [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md#L1). -Then running the following command will build your connector: +To run tests locally, from the connector directory run: -```bash -airbyte-ci connectors --name source-{{dashCase name}} build ``` -Once the command is done, you will find your connector image in your local docker registry: `airbyte/source-{{dashCase name}}:dev`. - -##### Customizing our build process -When contributing on our connector you might need to customize the build process to add a system dependency or set an env var. -You can customize our build process by adding a `build_customization.py` module to your connector. -This module should contain a `pre_connector_install` and `post_connector_install` async function that will mutate the base image and the connector container respectively. -It will be imported at runtime by our build process and the functions will be called if they exist. - -Here is an example of a `build_customization.py` module: -```python -from __future__ import annotations - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - # Feel free to check the dagger documentation for more information on the Container object and its methods. - # https://dagger-io.readthedocs.io/en/sdk-python-v0.6.4/ - from dagger import Container - - -async def pre_connector_install(base_image_container: Container) -> Container: - return await base_image_container.with_env_variable("MY_PRE_BUILD_ENV_VAR", "my_pre_build_env_var_value") - -async def post_connector_install(connector_container: Container) -> Container: - return await connector_container.with_env_variable("MY_POST_BUILD_ENV_VAR", "my_post_build_env_var_value") +poetry run pytest tests ``` -#### Build your own connector image -This connector is built using our dynamic built process in `airbyte-ci`. -The base image used to build it is defined within the metadata.yaml file under the `connectorBuildOptions`. 
-The build logic is defined using [Dagger](https://dagger.io/) [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/pipelines/builds/python_connectors.py).
-It does not rely on a Dockerfile.
+### Building the docker image
-If you would like to patch our connector and build your own a simple approach would be to:
-
-1. Create your own Dockerfile based on the latest version of the connector image.
-```Dockerfile
-FROM airbyte/source-{{dashCase name}}:latest
+1. Install [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md)
+2. Run the following command to build the docker image:
+```bash
+airbyte-ci connectors --name=source-{{dashCase name}} build
+```
-COPY . ./airbyte/integration_code
-RUN pip install ./airbyte/integration_code
+An image will be available on your host with the tag `airbyte/source-{{dashCase name}}:dev`.
-# The entrypoint and default env vars are already set in the base image
-# ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py"
-# ENTRYPOINT ["python", "/airbyte/integration_code/main.py"]
-```
-Please use this as an example. This is not optimized.
-2. Build your image:
-```bash
-docker build -t airbyte/source-{{dashCase name}}:dev .
-# Running the spec command against your patched connector
-docker run airbyte/source-{{dashCase name}}:dev spec
-````
+### Running as a docker container
-#### Run
 Then run any of the connector commands as follows:
 ```
 docker run --rm airbyte/source-{{dashCase name}}:dev spec
@@ -120,47 +65,41 @@ docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-{{dashCase name}}:dev check --config /secrets/config.json
 docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-{{dashCase name}}:dev discover --config /secrets/config.json
 docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-{{dashCase name}}:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
 ```
-## Testing
-Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named.
-First install test dependencies into your virtual environment:
-```
-pip install .[tests]
-```
-### Unit Tests
-To run unit tests locally, from the connector directory run:
-```
-python -m pytest unit_tests
-```
-### Integration Tests
-There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector).
-#### Custom Integration tests
-Place custom tests inside `integration_tests/` folder, then, from the connector root, run
-```
-python -m pytest integration_tests
+### Running our CI test suite
+
+You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md):
+
+```bash
+airbyte-ci connectors --name=source-{{dashCase name}} test
 ```
-### Acceptance Tests
-Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information.
+### Customizing Acceptance Tests
+
+Customize `acceptance-test-config.yml` file to configure acceptance tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information.
 If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py.
-Please run acceptance tests via [airbyte-ci](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md#connectors-test-command):
+
+### Dependency Management
+
+All of your dependencies should be managed via Poetry.
+To add a new dependency, run:
+
 ```bash
-airbyte-ci connectors --name source-{{dashCase name}} test
+poetry add <package-name>
 ```
-## Dependency Management
-All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
-We split dependencies between two groups, dependencies that are:
-* required for your connector to work need to go to `MAIN_REQUIREMENTS` list.
-* required for the testing need to go to `TEST_REQUIREMENTS` list
+Please commit the changes to `pyproject.toml` and `poetry.lock` files.
+
+## Publishing a new version of the connector
-### Publishing a new version of the connector
 You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what?
 1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-{{dashCase name}} test`
-2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors).
+2. Bump the connector version (please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors)):
+   - bump the `dockerImageTag` value in `metadata.yaml`
+   - bump the `version` value in `pyproject.toml`
 3. Make sure the `metadata.yaml` content is up to date.
-4. Make the connector documentation and its changelog is up to date (`docs/integrations/sources/{{dashCase name}}.md`).
+4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/sources/{{dashCase name}}.md`).
 5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention).
 6. Pat yourself on the back for being an awesome contributor.
 7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
-
+8. Once your PR is merged, the new version of the connector will be automatically published to Docker Hub and our connector registry.
\ No newline at end of file
diff --git a/airbyte-integrations/connector-templates/source-python-http-api/pyproject.toml.hbs b/airbyte-integrations/connector-templates/source-python-http-api/pyproject.toml.hbs
new file mode 100644
index 000000000000..40bbebd8c090
--- /dev/null
+++ b/airbyte-integrations/connector-templates/source-python-http-api/pyproject.toml.hbs
@@ -0,0 +1,27 @@
+[build-system]
+requires = [ "poetry-core>=1.0.0",]
+build-backend = "poetry.core.masonry.api"
+
+[tool.poetry]
+version = "0.1.0"
+name = "source-{{dashCase name}}"
+description = "Source implementation for {{dashCase name}}."
+authors = [ "Airbyte <contact@airbyte.io>",]
+license = "MIT"
+readme = "README.md"
+documentation = "https://docs.airbyte.com/integrations/sources/{{dashCase name}}"
+homepage = "https://airbyte.com"
+repository = "https://github.com/airbytehq/airbyte"
+packages = [ { include = "source_{{snakeCase name}}", from="src"}, {include = "main.py", from = "src"} ]
+
+[tool.poetry.dependencies]
+python = "^3.9,<3.12"
+airbyte-cdk = "^0"
+
+[tool.poetry.scripts]
+source-{{dashCase name}} = "source_{{snakeCase name}}.run:run"
+
+[tool.poetry.group.dev.dependencies]
+requests-mock = "^1.9.3"
+pytest-mock = "^3.6.1"
+pytest = "^6.1"
diff --git a/airbyte-integrations/connector-templates/source-python-http-api/requirements.txt.hbs b/airbyte-integrations/connector-templates/source-python-http-api/requirements.txt.hbs
deleted file mode 100644
index d6e1198b1ab1..000000000000
--- a/airbyte-integrations/connector-templates/source-python-http-api/requirements.txt.hbs
+++ /dev/null
@@ -1 +0,0 @@
--e .
diff --git a/airbyte-integrations/connector-templates/source-python-http-api/setup.py.hbs b/airbyte-integrations/connector-templates/source-python-http-api/setup.py.hbs
deleted file mode 100644
index 8f3eebe3cef1..000000000000
--- a/airbyte-integrations/connector-templates/source-python-http-api/setup.py.hbs
+++ /dev/null
@@ -1,35 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
-
-
-from setuptools import find_packages, setup
-
-MAIN_REQUIREMENTS = [
-    "airbyte-cdk~=0.2",
-]
-
-TEST_REQUIREMENTS = [
-    "requests-mock~=1.9.3",
-    "pytest~=6.2",
-    "pytest-mock~=3.6.1",
-    "connector-acceptance-test",
-]
-
-setup(
-    name="source_{{snakeCase name}}",
-    description="Source implementation for {{capitalCase name}}.",
-    author="Airbyte",
-    author_email="contact@airbyte.io",
-    packages=find_packages(),
-    install_requires=MAIN_REQUIREMENTS,
-    package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]},
-    extras_require={
-        "tests": TEST_REQUIREMENTS,
-    },
-    entry_points={
-        "console_scripts": [
-            "source-{{dashCase name}}=source_{{snakeCase name}}.run:run",
-        ],
-    },
-)
diff --git a/airbyte-integrations/connector-templates/source-python-http-api/main.py.hbs b/airbyte-integrations/connector-templates/source-python-http-api/src/main.py.hbs
similarity index 100%
rename from airbyte-integrations/connector-templates/source-python-http-api/main.py.hbs
rename to airbyte-integrations/connector-templates/source-python-http-api/src/main.py.hbs
diff --git a/airbyte-integrations/connector-templates/source-python-http-api/source_{{snakeCase name}}/__init__.py.hbs b/airbyte-integrations/connector-templates/source-python-http-api/src/source_{{snakeCase name}}/__init__.py.hbs
similarity index 100%
rename from airbyte-integrations/connector-templates/source-python-http-api/source_{{snakeCase name}}/__init__.py.hbs
rename to airbyte-integrations/connector-templates/source-python-http-api/src/source_{{snakeCase name}}/__init__.py.hbs
diff --git a/airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/run.py.hbs b/airbyte-integrations/connector-templates/source-python-http-api/src/source_{{snakeCase name}}/run.py.hbs
similarity index 100%
rename from airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/run.py.hbs
rename to airbyte-integrations/connector-templates/source-python-http-api/src/source_{{snakeCase name}}/run.py.hbs
diff --git a/airbyte-integrations/connector-templates/source-python-http-api/source_{{snakeCase name}}/schemas/TODO.md
b/airbyte-integrations/connector-templates/source-python-http-api/src/source_{{snakeCase name}}/schemas/TODO.md similarity index 100% rename from airbyte-integrations/connector-templates/source-python-http-api/source_{{snakeCase name}}/schemas/TODO.md rename to airbyte-integrations/connector-templates/source-python-http-api/src/source_{{snakeCase name}}/schemas/TODO.md diff --git a/airbyte-integrations/connector-templates/source-python-http-api/source_{{snakeCase name}}/schemas/customers.json b/airbyte-integrations/connector-templates/source-python-http-api/src/source_{{snakeCase name}}/schemas/customers.json similarity index 100% rename from airbyte-integrations/connector-templates/source-python-http-api/source_{{snakeCase name}}/schemas/customers.json rename to airbyte-integrations/connector-templates/source-python-http-api/src/source_{{snakeCase name}}/schemas/customers.json diff --git a/airbyte-integrations/connector-templates/source-python-http-api/source_{{snakeCase name}}/schemas/employees.json b/airbyte-integrations/connector-templates/source-python-http-api/src/source_{{snakeCase name}}/schemas/employees.json similarity index 100% rename from airbyte-integrations/connector-templates/source-python-http-api/source_{{snakeCase name}}/schemas/employees.json rename to airbyte-integrations/connector-templates/source-python-http-api/src/source_{{snakeCase name}}/schemas/employees.json diff --git a/airbyte-integrations/connector-templates/source-python-http-api/source_{{snakeCase name}}/source.py.hbs b/airbyte-integrations/connector-templates/source-python-http-api/src/source_{{snakeCase name}}/source.py.hbs similarity index 100% rename from airbyte-integrations/connector-templates/source-python-http-api/source_{{snakeCase name}}/source.py.hbs rename to airbyte-integrations/connector-templates/source-python-http-api/src/source_{{snakeCase name}}/source.py.hbs diff --git a/airbyte-integrations/connector-templates/source-python-http-api/source_{{snakeCase name}}/spec.yaml.hbs b/airbyte-integrations/connector-templates/source-python-http-api/src/source_{{snakeCase name}}/spec.yaml.hbs similarity index 100% rename from airbyte-integrations/connector-templates/source-python-http-api/source_{{snakeCase name}}/spec.yaml.hbs rename to airbyte-integrations/connector-templates/source-python-http-api/src/source_{{snakeCase name}}/spec.yaml.hbs diff --git a/airbyte-integrations/connector-templates/source-python/README.md.hbs b/airbyte-integrations/connector-templates/source-python/README.md.hbs index 56e84e01802c..919b24fa5f35 100644 --- a/airbyte-integrations/connector-templates/source-python/README.md.hbs +++ b/airbyte-integrations/connector-templates/source-python/README.md.hbs @@ -6,113 +6,58 @@ For information about how to use this connector within Airbyte, see [the documen ## Local development ### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** -#### Minimum Python version required `= 3.9.0` +* Python (`^3.9`) +* Poetry (`^1.7`) - installation instructions [here](https://python-poetry.org/docs/#installation) -#### Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. 
To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt -pip install '.[tests]' + +### Installing the connector + +From this connector directory, run: +```bash +poetry install --with dev ``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. -#### Create credentials +### Create credentials + **If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/{{dashCase name}}) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_{{snakeCase name}}/spec.yaml` file. +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `src/source_{{snakeCase name}}/spec.yaml` file. Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. +See `sample_files/sample_config.json` for a sample config file. -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source {{dashCase name}} test creds` -and place them into `secrets/config.json`. ### Locally running the connector + ``` -python main.py spec -python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +poetry run source-{{dashCase name}} spec +poetry run source-{{dashCase name}} check --config secrets/config.json +poetry run source-{{dashCase name}} discover --config secrets/config.json +poetry run source-{{dashCase name}} read --config secrets/config.json --catalog sample_files/configured_catalog.json ``` -### Locally running the connector docker image +### Running tests -#### Use `airbyte-ci` to build your connector -The Airbyte way of building this connector is to use our `airbyte-ci` tool. -You can follow install instructions [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md#L1). -Then running the following command will build your connector: +To run tests locally, from the connector directory run: -```bash -airbyte-ci connectors --name source-{{dashCase name}} build ``` -Once the command is done, you will find your connector image in your local docker registry: `airbyte/source-{{dashCase name}}:dev`. - -##### Customizing our build process -When contributing on our connector you might need to customize the build process to add a system dependency or set an env var. -You can customize our build process by adding a `build_customization.py` module to your connector. -This module should contain a `pre_connector_install` and `post_connector_install` async function that will mutate the base image and the connector container respectively. -It will be imported at runtime by our build process and the functions will be called if they exist. 
- -Here is an example of a `build_customization.py` module: -```python -from __future__ import annotations - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - # Feel free to check the dagger documentation for more information on the Container object and its methods. - # https://dagger-io.readthedocs.io/en/sdk-python-v0.6.4/ - from dagger import Container - - -async def pre_connector_install(base_image_container: Container) -> Container: - return await base_image_container.with_env_variable("MY_PRE_BUILD_ENV_VAR", "my_pre_build_env_var_value") - -async def post_connector_install(connector_container: Container) -> Container: - return await connector_container.with_env_variable("MY_POST_BUILD_ENV_VAR", "my_post_build_env_var_value") +poetry run pytest tests ``` -#### Build your own connector image -This connector is built using our dynamic built process in `airbyte-ci`. -The base image used to build it is defined within the metadata.yaml file under the `connectorBuildOptions`. -The build logic is defined using [Dagger](https://dagger.io/) [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/pipelines/builds/python_connectors.py). -It does not rely on a Dockerfile. +### Building the docker image -If you would like to patch our connector and build your own a simple approach would be to: - -1. Create your own Dockerfile based on the latest version of the connector image. -```Dockerfile -FROM airbyte/source-{{dashCase name}}:latest +1. Install [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) +2. Run the following command to build the docker image: +```bash +airbyte-ci connectors --name=source-{{dashCase name}} build +``` -COPY . ./airbyte/integration_code -RUN pip install ./airbyte/integration_code +An image will be available on your host with the tag `airbyte/source-{{dashCase name}}:dev`. -# The entrypoint and default env vars are already set in the base image -# ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -# ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -``` -Please use this as an example. This is not optimized. -2. Build your image: -```bash -docker build -t airbyte/source-{{dashCase name}}:dev . -# Running the spec command against your patched connector -docker run airbyte/source-{{dashCase name}}:dev spec -```` +### Running as a docker container -#### Run Then run any of the connector commands as follows: ``` docker run --rm airbyte/source-{{dashCase name}}:dev spec @@ -120,47 +65,38 @@ docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-{{dashCase name}}:dev docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-{{dashCase name}}:dev discover --config /secrets/config.json docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-{{dashCase name}}:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json ``` -## Testing -Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. 
-First install test dependencies into your virtual environment:
-```
-pip install .[tests]
-```
-### Unit Tests
-To run unit tests locally, from the connector directory run:
-```
-python -m pytest unit_tests
-```
-### Integration Tests
-There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector).
-#### Custom Integration tests
-Place custom tests inside `integration_tests/` folder, then, from the connector root, run
-```
-python -m pytest integration_tests
+### Running our CI test suite
+You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md):
+```bash
+airbyte-ci connectors --name=source-{{dashCase name}} test
 ```
-### Acceptance Tests
-Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information.
+### Customizing Acceptance Tests
+
+Customize `acceptance-test-config.yml` file to configure acceptance tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information.
 If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py.
-Please run acceptance tests via [airbyte-ci](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md#connectors-test-command):
+
+### Dependency Management
+
+All of your dependencies should be managed via Poetry.
+To add a new dependency, run:
 ```bash
-airbyte-ci connectors --name source-{{dashCase name}} test
+poetry add <package-name>
 ```
-## Dependency Management
-All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
-We split dependencies between two groups, dependencies that are:
-* required for your connector to work need to go to `MAIN_REQUIREMENTS` list.
-* required for the testing need to go to `TEST_REQUIREMENTS` list
+Please commit the changes to `pyproject.toml` and `poetry.lock` files.
+
+## Publishing a new version of the connector
-### Publishing a new version of the connector
 You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what?
 1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-{{dashCase name}} test`
-2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors).
+2. Bump the connector version (please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors)):
+   - bump the `dockerImageTag` value in `metadata.yaml`
+   - bump the `version` value in `pyproject.toml`
 3. Make sure the `metadata.yaml` content is up to date.
-4. Make the connector documentation and its changelog is up to date (`docs/integrations/sources/{{dashCase name}}.md`).
+4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/sources/{{dashCase name}}.md`).
 5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention).
 6. Pat yourself on the back for being an awesome contributor.
 7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
-
+8. Once your PR is merged, the new version of the connector will be automatically published to Docker Hub and our connector registry.
\ No newline at end of file
diff --git a/airbyte-integrations/connector-templates/source-python/pyproject.toml.hbs b/airbyte-integrations/connector-templates/source-python/pyproject.toml.hbs
new file mode 100644
index 000000000000..40bbebd8c090
--- /dev/null
+++ b/airbyte-integrations/connector-templates/source-python/pyproject.toml.hbs
@@ -0,0 +1,27 @@
+[build-system]
+requires = [ "poetry-core>=1.0.0",]
+build-backend = "poetry.core.masonry.api"
+
+[tool.poetry]
+version = "0.1.0"
+name = "source-{{dashCase name}}"
+description = "Source implementation for {{dashCase name}}."
+authors = [ "Airbyte <contact@airbyte.io>",]
+license = "MIT"
+readme = "README.md"
+documentation = "https://docs.airbyte.com/integrations/sources/{{dashCase name}}"
+homepage = "https://airbyte.com"
+repository = "https://github.com/airbytehq/airbyte"
+packages = [ { include = "source_{{snakeCase name}}", from="src"}, {include = "main.py", from = "src"} ]
+
+[tool.poetry.dependencies]
+python = "^3.9,<3.12"
+airbyte-cdk = "^0"
+
+[tool.poetry.scripts]
+source-{{dashCase name}} = "source_{{snakeCase name}}.run:run"
+
+[tool.poetry.group.dev.dependencies]
+requests-mock = "^1.9.3"
+pytest-mock = "^3.6.1"
+pytest = "^6.1"
diff --git a/airbyte-integrations/connector-templates/source-python/requirements.txt.hbs b/airbyte-integrations/connector-templates/source-python/requirements.txt.hbs
deleted file mode 100644
index 7b9114ed5867..000000000000
--- a/airbyte-integrations/connector-templates/source-python/requirements.txt.hbs
+++ /dev/null
@@ -1,2 +0,0 @@
-# This file is autogenerated -- only edit if you know what you are doing. Use setup.py for declaring dependencies.
--e .
diff --git a/airbyte-integrations/connector-templates/source-python/setup.py.hbs b/airbyte-integrations/connector-templates/source-python/setup.py.hbs
deleted file mode 100644
index b16123258acb..000000000000
--- a/airbyte-integrations/connector-templates/source-python/setup.py.hbs
+++ /dev/null
@@ -1,35 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk~=0.2", -] - -TEST_REQUIREMENTS = [ - "requests-mock~=1.9.3", - "pytest-mock~=3.6.1", - "pytest~=6.2", - "connector-acceptance-test", -] - -setup( - name="source_{{snakeCase name}}", - description="Source implementation for {{capitalCase name}}.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json", "*.yaml"]}, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, - entry_points={ - "console_scripts": [ - "source-{{dashCase name}}=source_{{snakeCase name}}.run:run", - ], - }, -) diff --git a/airbyte-integrations/connector-templates/source-python/main.py.hbs b/airbyte-integrations/connector-templates/source-python/src/main.py.hbs similarity index 100% rename from airbyte-integrations/connector-templates/source-python/main.py.hbs rename to airbyte-integrations/connector-templates/source-python/src/main.py.hbs diff --git a/airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/__init__.py.hbs b/airbyte-integrations/connector-templates/source-python/src/source_{{snakeCase name}}/__init__.py.hbs similarity index 100% rename from airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/__init__.py.hbs rename to airbyte-integrations/connector-templates/source-python/src/source_{{snakeCase name}}/__init__.py.hbs diff --git a/airbyte-integrations/connector-templates/source-python/src/source_{{snakeCase name}}/run.py.hbs b/airbyte-integrations/connector-templates/source-python/src/source_{{snakeCase name}}/run.py.hbs new file mode 100644 index 000000000000..25c9400301f9 --- /dev/null +++ b/airbyte-integrations/connector-templates/source-python/src/source_{{snakeCase name}}/run.py.hbs @@ -0,0 +1,13 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from .source import Source{{properCase name}} + +def run(): + source = Source{{properCase name}}() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/source.py.hbs b/airbyte-integrations/connector-templates/source-python/src/source_{{snakeCase name}}/source.py.hbs similarity index 100% rename from airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/source.py.hbs rename to airbyte-integrations/connector-templates/source-python/src/source_{{snakeCase name}}/source.py.hbs diff --git a/airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/spec.yaml.hbs b/airbyte-integrations/connector-templates/source-python/src/source_{{snakeCase name}}/spec.yaml.hbs similarity index 100% rename from airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/spec.yaml.hbs rename to airbyte-integrations/connector-templates/source-python/src/source_{{snakeCase name}}/spec.yaml.hbs diff --git a/airbyte-integrations/connectors/destination-scaffold-destination-python/metadata.yaml b/airbyte-integrations/connectors/destination-scaffold-destination-python/metadata.yaml index 6665273ac1a7..066301f0cd77 100644 --- a/airbyte-integrations/connectors/destination-scaffold-destination-python/metadata.yaml +++ b/airbyte-integrations/connectors/destination-scaffold-destination-python/metadata.yaml @@ -4,7 +4,7 @@ data: - TODO # Please change to the hostname of the source. 
registries: oss: - enabled: false + enabled: true cloud: enabled: false connectorBuildOptions: @@ -14,7 +14,7 @@ data: baseImage: docker.io/airbyte/python-connector-base:1.0.0@sha256:dd17e347fbda94f7c3abff539be298a65af2d7fc27a307d89297df1081a45c27 connectorSubtype: database connectorType: destination - definitionId: 1c342214-aad1-4344-8ee8-92c8c7e91c07 + definitionId: FAKE-UUID-0000-0000-000000000000 dockerImageTag: 0.1.0 dockerRepository: airbyte/destination-scaffold-destination-python githubIssueLabel: destination-scaffold-destination-python diff --git a/airbyte-integrations/connectors/source-scaffold-source-http/README.md b/airbyte-integrations/connectors/source-scaffold-source-http/README.md index e0724d544bcc..2a71d4287325 100644 --- a/airbyte-integrations/connectors/source-scaffold-source-http/README.md +++ b/airbyte-integrations/connectors/source-scaffold-source-http/README.md @@ -6,113 +6,58 @@ For information about how to use this connector within Airbyte, see [the documen ## Local development ### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** -#### Minimum Python version required `= 3.9.0` +* Python (`^3.9`) +* Poetry (`^1.7`) - installation instructions [here](https://python-poetry.org/docs/#installation) -#### Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt -pip install '.[tests]' + +### Installing the connector + +From this connector directory, run: +```bash +poetry install --with dev ``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. -#### Create credentials +### Create credentials + **If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/scaffold-source-http) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_scaffold_source_http/spec.yaml` file. +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `src/source_scaffold_source_http/spec.yaml` file. Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. +See `sample_files/sample_config.json` for a sample config file. -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source scaffold-source-http test creds` -and place them into `secrets/config.json`. 
### Locally running the connector + ``` -python main.py spec -python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +poetry run source-scaffold-source-http spec +poetry run source-scaffold-source-http check --config secrets/config.json +poetry run source-scaffold-source-http discover --config secrets/config.json +poetry run source-scaffold-source-http read --config secrets/config.json --catalog sample_files/configured_catalog.json ``` -### Locally running the connector docker image +### Running tests -#### Use `airbyte-ci` to build your connector -The Airbyte way of building this connector is to use our `airbyte-ci` tool. -You can follow install instructions [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md#L1). -Then running the following command will build your connector: +To run tests locally, from the connector directory run: -```bash -airbyte-ci connectors --name source-scaffold-source-http build ``` -Once the command is done, you will find your connector image in your local docker registry: `airbyte/source-scaffold-source-http:dev`. - -##### Customizing our build process -When contributing on our connector you might need to customize the build process to add a system dependency or set an env var. -You can customize our build process by adding a `build_customization.py` module to your connector. -This module should contain a `pre_connector_install` and `post_connector_install` async function that will mutate the base image and the connector container respectively. -It will be imported at runtime by our build process and the functions will be called if they exist. - -Here is an example of a `build_customization.py` module: -```python -from __future__ import annotations - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - # Feel free to check the dagger documentation for more information on the Container object and its methods. - # https://dagger-io.readthedocs.io/en/sdk-python-v0.6.4/ - from dagger import Container - - -async def pre_connector_install(base_image_container: Container) -> Container: - return await base_image_container.with_env_variable("MY_PRE_BUILD_ENV_VAR", "my_pre_build_env_var_value") - -async def post_connector_install(connector_container: Container) -> Container: - return await connector_container.with_env_variable("MY_POST_BUILD_ENV_VAR", "my_post_build_env_var_value") +poetry run pytest tests ``` -#### Build your own connector image -This connector is built using our dynamic built process in `airbyte-ci`. -The base image used to build it is defined within the metadata.yaml file under the `connectorBuildOptions`. -The build logic is defined using [Dagger](https://dagger.io/) [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/pipelines/builds/python_connectors.py). -It does not rely on a Dockerfile. +### Building the docker image -If you would like to patch our connector and build your own a simple approach would be to: - -1. Create your own Dockerfile based on the latest version of the connector image. -```Dockerfile -FROM airbyte/source-scaffold-source-http:latest +1. Install [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) +2. Run the following command to build the docker image: +```bash +airbyte-ci connectors --name=source-scaffold-source-http build +``` -COPY . 
./airbyte/integration_code -RUN pip install ./airbyte/integration_code +An image will be available on your host with the tag `airbyte/source-scaffold-source-http:dev`. -# The entrypoint and default env vars are already set in the base image -# ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -# ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -``` -Please use this as an example. This is not optimized. -2. Build your image: -```bash -docker build -t airbyte/source-scaffold-source-http:dev . -# Running the spec command against your patched connector -docker run airbyte/source-scaffold-source-http:dev spec -```` +### Running as a docker container -#### Run Then run any of the connector commands as follows: ``` docker run --rm airbyte/source-scaffold-source-http:dev spec @@ -120,47 +65,41 @@ docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-scaffold-source-http:d docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-scaffold-source-http:dev discover --config /secrets/config.json docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-scaffold-source-http:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json ``` -## Testing -Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. -First install test dependencies into your virtual environment: -``` -pip install .[tests] -``` -### Unit Tests -To run unit tests locally, from the connector directory run: -``` -python -m pytest unit_tests -``` -### Integration Tests -There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector). -#### Custom Integration tests -Place custom tests inside `integration_tests/` folder, then, from the connector root, run -``` -python -m pytest integration_tests +### Running our CI test suite + +You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): + +```bash +airbyte-ci connectors --name=source-scaffold-source-http test ``` -### Acceptance Tests -Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. +### Customizing acceptance Tests + +Customize `acceptance-test-config.yml` file to configure acceptance tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. -Please run acceptance tests via [airbyte-ci](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md#connectors-test-command): + +### Dependency Management + +All of your dependencies should be managed via Poetry. +To add a new dependency, run: + ```bash -airbyte-ci connectors --name source-scaffold-source-http test +poetry add ``` -## Dependency Management -All of your dependencies should go in `setup.py`, NOT `requirements.txt`. 
The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. -We split dependencies between two groups, dependencies that are: -* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. -* required for the testing need to go to `TEST_REQUIREMENTS` list +Please commit the changes to `pyproject.toml` and `poetry.lock` files. + +## Publishing a new version of the connector -### Publishing a new version of the connector You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? 1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-scaffold-source-http test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). +2. Bump the connector version (please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors)): + - bump the `dockerImageTag` value in in `metadata.yaml` + - bump the `version` value in `pyproject.toml` 3. Make sure the `metadata.yaml` content is up to date. -4. Make the connector documentation and its changelog is up to date (`docs/integrations/sources/scaffold-source-http.md`). +4. Make sure the connector documentation and its changelog is up to date (`docs/integrations/sources/scaffold-source-http.md`). 5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). 6. Pat yourself on the back for being an awesome contributor. 7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. - +8. Once your PR is merged, the new version of the connector will be automatically published to Docker Hub and our connector registry. \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-scaffold-source-http/metadata.yaml b/airbyte-integrations/connectors/source-scaffold-source-http/metadata.yaml index c56a4810bde9..2370fb40dbaf 100644 --- a/airbyte-integrations/connectors/source-scaffold-source-http/metadata.yaml +++ b/airbyte-integrations/connectors/source-scaffold-source-http/metadata.yaml @@ -2,15 +2,15 @@ data: allowedHosts: hosts: - TODO # Please change to the hostname of the source. - remoteRegistries: - pypi: - enabled: true - packageName: airbyte-source-scaffold-source-http registries: oss: - enabled: false + enabled: true cloud: enabled: false + remoteRegistries: + pypi: + enabled: true + packageName: airbyte-source-scaffold-source-http connectorBuildOptions: # Please update to the latest version of the connector base image. 
# https://hub.docker.com/r/airbyte/python-connector-base diff --git a/airbyte-integrations/connectors/source-scaffold-source-http/pyproject.toml b/airbyte-integrations/connectors/source-scaffold-source-http/pyproject.toml new file mode 100644 index 000000000000..96f1d51ff7e2 --- /dev/null +++ b/airbyte-integrations/connectors/source-scaffold-source-http/pyproject.toml @@ -0,0 +1,27 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +version = "0.1.0" +name = "source-scaffold-source-http" +description = "Source implementation for scaffold-source-http." +authors = [ "Airbyte <contact@airbyte.io>",] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/sources/scaffold-source-http" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" +packages = [ { include = "source_scaffold_source_http", from="src"}, {include = "main.py", from = "src"} ] + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +airbyte-cdk = "^0" + +[tool.poetry.scripts] +source-scaffold-source-http = "source_scaffold_source_http.run:run" + +[tool.poetry.group.dev.dependencies] +requests-mock = "^1.9.3" +pytest-mock = "^3.6.1" +pytest = "^6.1" diff --git a/airbyte-integrations/connectors/source-scaffold-source-http/requirements.txt b/airbyte-integrations/connectors/source-scaffold-source-http/requirements.txt deleted file mode 100644 index d6e1198b1ab1..000000000000 --- a/airbyte-integrations/connectors/source-scaffold-source-http/requirements.txt +++ /dev/null @@ -1 +0,0 @@ --e . diff --git a/airbyte-integrations/connectors/source-scaffold-source-http/setup.py b/airbyte-integrations/connectors/source-scaffold-source-http/setup.py deleted file mode 100644 index a7a496b52161..000000000000 --- a/airbyte-integrations/connectors/source-scaffold-source-http/setup.py +++ /dev/null @@ -1,47 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk~=0.2", -] - -TEST_REQUIREMENTS = [ - "requests-mock~=1.9.3", - "pytest~=6.2", - "pytest-mock~=3.6.1", - "connector-acceptance-test", -] - -setup( - entry_points={ - "console_scripts": [ - "source-scaffold-source-http=source_scaffold_source_http.run:run", - ], - }, - name="source_scaffold_source_http", - description="Source implementation for Scaffold Source Http.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={ - "": [ - # Include yaml files in the package (if any) - "*.yml", - "*.yaml", - # Include all json files in the package, up to 4 levels deep - "*.json", - "*/*.json", - "*/*/*.json", - "*/*/*/*.json", - "*/*/*/*/*.json", - ] - }, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/source-scaffold-source-http/main.py b/airbyte-integrations/connectors/source-scaffold-source-http/src/main.py similarity index 100% rename from airbyte-integrations/connectors/source-scaffold-source-http/main.py rename to airbyte-integrations/connectors/source-scaffold-source-http/src/main.py diff --git a/airbyte-integrations/connectors/source-scaffold-source-http/source_scaffold_source_http/__init__.py b/airbyte-integrations/connectors/source-scaffold-source-http/src/source_scaffold_source_http/__init__.py similarity index 100% rename from airbyte-integrations/connectors/source-scaffold-source-http/source_scaffold_source_http/__init__.py rename to airbyte-integrations/connectors/source-scaffold-source-http/src/source_scaffold_source_http/__init__.py diff --git a/airbyte-integrations/connectors/source-scaffold-source-http/source_scaffold_source_http/run.py b/airbyte-integrations/connectors/source-scaffold-source-http/src/source_scaffold_source_http/run.py similarity index 76% rename from airbyte-integrations/connectors/source-scaffold-source-http/source_scaffold_source_http/run.py rename to airbyte-integrations/connectors/source-scaffold-source-http/src/source_scaffold_source_http/run.py index 94b4f015f312..6e87d985813e 100644 --- a/airbyte-integrations/connectors/source-scaffold-source-http/source_scaffold_source_http/run.py +++ b/airbyte-integrations/connectors/source-scaffold-source-http/src/source_scaffold_source_http/run.py @@ -6,7 +6,8 @@ import sys from airbyte_cdk.entrypoint import launch -from source_scaffold_source_http import SourceScaffoldSourceHttp + +from .source import SourceScaffoldSourceHttp def run(): diff --git a/airbyte-integrations/connectors/source-scaffold-source-http/source_scaffold_source_http/schemas/TODO.md b/airbyte-integrations/connectors/source-scaffold-source-http/src/source_scaffold_source_http/schemas/TODO.md similarity index 100% rename from airbyte-integrations/connectors/source-scaffold-source-http/source_scaffold_source_http/schemas/TODO.md rename to airbyte-integrations/connectors/source-scaffold-source-http/src/source_scaffold_source_http/schemas/TODO.md diff --git a/airbyte-integrations/connectors/source-scaffold-source-http/source_scaffold_source_http/schemas/customers.json b/airbyte-integrations/connectors/source-scaffold-source-http/src/source_scaffold_source_http/schemas/customers.json similarity index 100% rename from airbyte-integrations/connectors/source-scaffold-source-http/source_scaffold_source_http/schemas/customers.json rename to 
airbyte-integrations/connectors/source-scaffold-source-http/src/source_scaffold_source_http/schemas/customers.json diff --git a/airbyte-integrations/connectors/source-scaffold-source-http/source_scaffold_source_http/schemas/employees.json b/airbyte-integrations/connectors/source-scaffold-source-http/src/source_scaffold_source_http/schemas/employees.json similarity index 100% rename from airbyte-integrations/connectors/source-scaffold-source-http/source_scaffold_source_http/schemas/employees.json rename to airbyte-integrations/connectors/source-scaffold-source-http/src/source_scaffold_source_http/schemas/employees.json diff --git a/airbyte-integrations/connectors/source-scaffold-source-http/source_scaffold_source_http/source.py b/airbyte-integrations/connectors/source-scaffold-source-http/src/source_scaffold_source_http/source.py similarity index 100% rename from airbyte-integrations/connectors/source-scaffold-source-http/source_scaffold_source_http/source.py rename to airbyte-integrations/connectors/source-scaffold-source-http/src/source_scaffold_source_http/source.py diff --git a/airbyte-integrations/connectors/source-scaffold-source-http/source_scaffold_source_http/spec.yaml b/airbyte-integrations/connectors/source-scaffold-source-http/src/source_scaffold_source_http/spec.yaml similarity index 100% rename from airbyte-integrations/connectors/source-scaffold-source-http/source_scaffold_source_http/spec.yaml rename to airbyte-integrations/connectors/source-scaffold-source-http/src/source_scaffold_source_http/spec.yaml diff --git a/airbyte-integrations/connectors/source-scaffold-source-python/README.md b/airbyte-integrations/connectors/source-scaffold-source-python/README.md index 054cbe3741e9..1f11ec7cdaa4 100644 --- a/airbyte-integrations/connectors/source-scaffold-source-python/README.md +++ b/airbyte-integrations/connectors/source-scaffold-source-python/README.md @@ -6,113 +6,58 @@ For information about how to use this connector within Airbyte, see [the documen ## Local development ### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** -#### Minimum Python version required `= 3.9.0` +* Python (`^3.9`) +* Poetry (`^1.7`) - installation instructions [here](https://python-poetry.org/docs/#installation) -#### Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt -pip install '.[tests]' + +### Installing the connector + +From this connector directory, run: +```bash +poetry install --with dev ``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. 
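For context on the Poetry-based layout introduced above: the `poetry run source-scaffold-source-python ...` commands shown under "Locally running the connector" below work because `pyproject.toml` registers a `[tool.poetry.scripts]` entry pointing at `source_scaffold_source_python.run:run`. A minimal sketch of that entrypoint follows; the diff later in this patch shows only the import change, so the `launch(...)` body is an assumption based on the standard `airbyte_cdk` entrypoint, not code taken from this patch:

```python
# src/source_scaffold_source_python/run.py -- minimal sketch; the launch() call
# body is assumed from the standard airbyte_cdk entrypoint, not from this diff.
import sys

from airbyte_cdk.entrypoint import launch

from .source import SourceScaffoldSourcePython


def run():
    # launch() parses the spec/check/discover/read CLI arguments and drives the source.
    source = SourceScaffoldSourcePython()
    launch(source, sys.argv[1:])
```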
-#### Create credentials +### Create credentials + **If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/scaffold-source-python) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_scaffold_source_python/spec.yaml` file. +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `src/source_scaffold_source_python/spec.yaml` file. Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. +See `sample_files/sample_config.json` for a sample config file. -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source scaffold-source-python test creds` -and place them into `secrets/config.json`. ### Locally running the connector + ``` -python main.py spec -python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +poetry run source-scaffold-source-python spec +poetry run source-scaffold-source-python check --config secrets/config.json +poetry run source-scaffold-source-python discover --config secrets/config.json +poetry run source-scaffold-source-python read --config secrets/config.json --catalog sample_files/configured_catalog.json ``` -### Locally running the connector docker image +### Running tests -#### Use `airbyte-ci` to build your connector -The Airbyte way of building this connector is to use our `airbyte-ci` tool. -You can follow install instructions [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md#L1). -Then running the following command will build your connector: +To run tests locally, from the connector directory run: -```bash -airbyte-ci connectors --name source-scaffold-source-python build ``` -Once the command is done, you will find your connector image in your local docker registry: `airbyte/source-scaffold-source-python:dev`. - -##### Customizing our build process -When contributing on our connector you might need to customize the build process to add a system dependency or set an env var. -You can customize our build process by adding a `build_customization.py` module to your connector. -This module should contain a `pre_connector_install` and `post_connector_install` async function that will mutate the base image and the connector container respectively. -It will be imported at runtime by our build process and the functions will be called if they exist. - -Here is an example of a `build_customization.py` module: -```python -from __future__ import annotations - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - # Feel free to check the dagger documentation for more information on the Container object and its methods. 
- # https://dagger-io.readthedocs.io/en/sdk-python-v0.6.4/ - from dagger import Container - - -async def pre_connector_install(base_image_container: Container) -> Container: - return await base_image_container.with_env_variable("MY_PRE_BUILD_ENV_VAR", "my_pre_build_env_var_value") - -async def post_connector_install(connector_container: Container) -> Container: - return await connector_container.with_env_variable("MY_POST_BUILD_ENV_VAR", "my_post_build_env_var_value") +poetry run pytest tests ``` -#### Build your own connector image -This connector is built using our dynamic built process in `airbyte-ci`. -The base image used to build it is defined within the metadata.yaml file under the `connectorBuildOptions`. -The build logic is defined using [Dagger](https://dagger.io/) [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/pipelines/builds/python_connectors.py). -It does not rely on a Dockerfile. +### Building the docker image -If you would like to patch our connector and build your own a simple approach would be to: - -1. Create your own Dockerfile based on the latest version of the connector image. -```Dockerfile -FROM airbyte/source-scaffold-source-python:latest +1. Install [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) +2. Run the following command to build the docker image: +```bash +airbyte-ci connectors --name=source-scaffold-source-python build +``` -COPY . ./airbyte/integration_code -RUN pip install ./airbyte/integration_code +An image will be available on your host with the tag `airbyte/source-scaffold-source-python:dev`. -# The entrypoint and default env vars are already set in the base image -# ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -# ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -``` -Please use this as an example. This is not optimized. -2. Build your image: -```bash -docker build -t airbyte/source-scaffold-source-python:dev . -# Running the spec command against your patched connector -docker run airbyte/source-scaffold-source-python:dev spec -```` +### Running as a docker container -#### Run Then run any of the connector commands as follows: ``` docker run --rm airbyte/source-scaffold-source-python:dev spec @@ -120,47 +65,38 @@ docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-scaffold-source-python docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-scaffold-source-python:dev discover --config /secrets/config.json docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-scaffold-source-python:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json ``` -## Testing -Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. -First install test dependencies into your virtual environment: -``` -pip install .[tests] -``` -### Unit Tests -To run unit tests locally, from the connector directory run: -``` -python -m pytest unit_tests -``` -### Integration Tests -There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector). 
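As an illustration of the `poetry run pytest tests` command above, a hypothetical minimal unit test might look like the following; the file name, the empty config, and the assumption that the scaffold's stub `check_connection` still returns `(True, None)` are all illustrative, not taken from this patch:

```python
# tests/test_source.py -- hypothetical sketch; assumes the scaffold's stub
# check_connection returns (True, None) until you implement real checks.
from unittest.mock import MagicMock

from source_scaffold_source_python.source import SourceScaffoldSourcePython


def test_check_connection_stub():
    source = SourceScaffoldSourcePython()
    # The config shape is connector-specific; an empty dict is enough for the stub.
    ok, error = source.check_connection(MagicMock(), {})
    assert ok is True
    assert error is None
```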
-#### Custom Integration tests -Place custom tests inside `integration_tests/` folder, then, from the connector root, run -``` -python -m pytest integration_tests +### Running our CI test suite +You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): +```bash +airbyte-ci connectors --name=source-scaffold-source-python test ``` -### Acceptance Tests -Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. +### Customizing Acceptance Tests + +Customize the `acceptance-test-config.yml` file to configure acceptance tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`. -Please run acceptance tests via [airbyte-ci](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md#connectors-test-command): + +### Dependency Management + +All of your dependencies should be managed via Poetry. +To add a new dependency, run: ```bash -airbyte-ci connectors --name source-scaffold-source-python test +poetry add <package-name> ``` -## Dependency Management -All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. -We split dependencies between two groups, dependencies that are: -* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. -* required for the testing need to go to `TEST_REQUIREMENTS` list +Please commit the changes to `pyproject.toml` and `poetry.lock` files. + +## Publishing a new version of the connector -### Publishing a new version of the connector You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? 1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-scaffold-source-python test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). +2. Bump the connector version (please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors)): + - bump the `dockerImageTag` value in `metadata.yaml` + - bump the `version` value in `pyproject.toml` 3. Make sure the `metadata.yaml` content is up to date. -4. Make the connector documentation and its changelog is up to date (`docs/integrations/sources/scaffold-source-python.md`). +4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/sources/scaffold-source-python.md`). 5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). 6. Pat yourself on the back for being an awesome contributor. 7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. - +8.
Once your PR is merged, the new version of the connector will be automatically published to Docker Hub and our connector registry. \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-scaffold-source-python/metadata.yaml b/airbyte-integrations/connectors/source-scaffold-source-python/metadata.yaml index 24ab5f9c09c1..fff878b0cfda 100644 --- a/airbyte-integrations/connectors/source-scaffold-source-python/metadata.yaml +++ b/airbyte-integrations/connectors/source-scaffold-source-python/metadata.yaml @@ -8,7 +8,7 @@ data: packageName: airbyte-source-scaffold-source-python registries: oss: - enabled: false + enabled: true cloud: enabled: false connectorBuildOptions: diff --git a/airbyte-integrations/connectors/source-scaffold-source-python/pyproject.toml b/airbyte-integrations/connectors/source-scaffold-source-python/pyproject.toml new file mode 100644 index 000000000000..c08991345cd7 --- /dev/null +++ b/airbyte-integrations/connectors/source-scaffold-source-python/pyproject.toml @@ -0,0 +1,27 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +version = "0.1.0" +name = "source-scaffold-source-python" +description = "Source implementation for scaffold-source-python." +authors = [ "Airbyte <contact@airbyte.io>",] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/sources/scaffold-source-python" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" +packages = [ { include = "source_scaffold_source_python", from="src"}, {include = "main.py", from = "src"} ] + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +airbyte-cdk = "^0" + +[tool.poetry.scripts] +source-scaffold-source-python = "source_scaffold_source_python.run:run" + +[tool.poetry.group.dev.dependencies] +requests-mock = "^1.9.3" +pytest-mock = "^3.6.1" +pytest = "^6.1" diff --git a/airbyte-integrations/connectors/source-scaffold-source-python/requirements.txt b/airbyte-integrations/connectors/source-scaffold-source-python/requirements.txt deleted file mode 100644 index 7b9114ed5867..000000000000 --- a/airbyte-integrations/connectors/source-scaffold-source-python/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -# This file is autogenerated -- only edit if you know what you are doing. Use setup.py for declaring dependencies. --e . diff --git a/airbyte-integrations/connectors/source-scaffold-source-python/setup.py b/airbyte-integrations/connectors/source-scaffold-source-python/setup.py deleted file mode 100644 index ebeda07f6998..000000000000 --- a/airbyte-integrations/connectors/source-scaffold-source-python/setup.py +++ /dev/null @@ -1,47 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk~=0.2", -] - -TEST_REQUIREMENTS = [ - "requests-mock~=1.9.3", - "pytest-mock~=3.6.1", - "pytest~=6.2", - "connector-acceptance-test", -] - -setup( - entry_points={ - "console_scripts": [ - "source-scaffold-source-python=source_scaffold_source_python.run:run", - ], - }, - name="source_scaffold_source_python", - description="Source implementation for Scaffold Source Python.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={ - "": [ - # Include yaml files in the package (if any) - "*.yml", - "*.yaml", - # Include all json files in the package, up to 4 levels deep - "*.json", - "*/*.json", - "*/*/*.json", - "*/*/*/*.json", - "*/*/*/*/*.json", - ] - }, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/source-scaffold-source-python/main.py b/airbyte-integrations/connectors/source-scaffold-source-python/src/main.py similarity index 100% rename from airbyte-integrations/connectors/source-scaffold-source-python/main.py rename to airbyte-integrations/connectors/source-scaffold-source-python/src/main.py diff --git a/airbyte-integrations/connectors/source-scaffold-source-python/source_scaffold_source_python/__init__.py b/airbyte-integrations/connectors/source-scaffold-source-python/src/source_scaffold_source_python/__init__.py similarity index 100% rename from airbyte-integrations/connectors/source-scaffold-source-python/source_scaffold_source_python/__init__.py rename to airbyte-integrations/connectors/source-scaffold-source-python/src/source_scaffold_source_python/__init__.py diff --git a/airbyte-integrations/connectors/source-scaffold-source-python/source_scaffold_source_python/run.py b/airbyte-integrations/connectors/source-scaffold-source-python/src/source_scaffold_source_python/run.py similarity index 74% rename from airbyte-integrations/connectors/source-scaffold-source-python/source_scaffold_source_python/run.py rename to airbyte-integrations/connectors/source-scaffold-source-python/src/source_scaffold_source_python/run.py index 6bb55fc68e7d..70967c0159ff 100644 --- a/airbyte-integrations/connectors/source-scaffold-source-python/source_scaffold_source_python/run.py +++ b/airbyte-integrations/connectors/source-scaffold-source-python/src/source_scaffold_source_python/run.py @@ -6,7 +6,8 @@ import sys from airbyte_cdk.entrypoint import launch -from source_scaffold_source_python import SourceScaffoldSourcePython + +from .source import SourceScaffoldSourcePython def run(): diff --git a/airbyte-integrations/connectors/source-scaffold-source-python/source_scaffold_source_python/source.py b/airbyte-integrations/connectors/source-scaffold-source-python/src/source_scaffold_source_python/source.py similarity index 100% rename from airbyte-integrations/connectors/source-scaffold-source-python/source_scaffold_source_python/source.py rename to airbyte-integrations/connectors/source-scaffold-source-python/src/source_scaffold_source_python/source.py diff --git a/airbyte-integrations/connectors/source-scaffold-source-python/source_scaffold_source_python/spec.yaml b/airbyte-integrations/connectors/source-scaffold-source-python/src/source_scaffold_source_python/spec.yaml similarity index 100% rename from airbyte-integrations/connectors/source-scaffold-source-python/source_scaffold_source_python/spec.yaml rename to 
airbyte-integrations/connectors/source-scaffold-source-python/src/source_scaffold_source_python/spec.yaml From 2637e97e23d80949a3b70f34908df6edd6b49646 Mon Sep 17 00:00:00 2001 From: Xiaohan Song Date: Thu, 29 Feb 2024 10:30:25 -0800 Subject: [PATCH 036/172] [source-mysql] merge state manager (#35529) --- airbyte-cdk/java/airbyte-cdk/README.md | 1 + .../src/main/resources/version.properties | 2 +- .../state/SourceStateIterator.java | 29 +++++-- .../state/SourceStateIteratorManager.java | 37 -------- .../state/SourceStateMessageProducer.java | 45 ++++++++++ .../state/StateEmitFrequency.java | 9 ++ .../state/SourceStateIteratorTest.java | 27 +++--- .../connectors/source-mysql/build.gradle | 2 +- .../connectors/source-mysql/metadata.yaml | 2 +- .../MySqlInitialLoadGlobalStateManager.java | 18 ++-- .../initialsync/MySqlInitialLoadHandler.java | 15 ++-- .../MySqlInitialLoadStateManager.java | 69 +++++++++++---- .../MySqlInitialLoadStreamStateManager.java | 29 ++++--- .../MySqlInitialSyncStateIteratorManager.java | 84 ------------------- .../mysql/MySqlJdbcSourceAcceptanceTest.java | 1 + docs/integrations/sources/mysql.md | 9 +- 16 files changed, 187 insertions(+), 192 deletions(-) delete mode 100644 airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateIteratorManager.java create mode 100644 airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateMessageProducer.java create mode 100644 airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/StateEmitFrequency.java delete mode 100644 airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialSyncStateIteratorManager.java diff --git a/airbyte-cdk/java/airbyte-cdk/README.md b/airbyte-cdk/java/airbyte-cdk/README.md index ed366673ecff..538848b65e3f 100644 --- a/airbyte-cdk/java/airbyte-cdk/README.md +++ b/airbyte-cdk/java/airbyte-cdk/README.md @@ -166,6 +166,7 @@ MavenLocal debugging steps: | Version | Date | Pull Request | Subject | |:--------|:-----------|:-----------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 0.23.8 | 2024-02-28 | [\#35529](https://github.com/airbytehq/airbyte/pull/35529) | Refactor on state iterators | | 0.23.7 | 2024-02-28 | [\#35376](https://github.com/airbytehq/airbyte/pull/35376) | Add a getNamespace into TestDataHolder | | 0.23.6 | 2024-02-26 | [\#35647](https://github.com/airbytehq/airbyte/pull/35647) | Add a getNamespace into TestDataHolder | | 0.23.5 | 2024-02-26 | [\#35512](https://github.com/airbytehq/airbyte/pull/35512) | Remove @DisplayName from all CDK tests. 
| diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties index c40a8721d426..8a67e26e76c8 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties @@ -1 +1 @@ -version=0.23.7 +version=0.23.8 diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateIterator.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateIterator.java index 5166ae2898ae..c2d5ff8a7ba5 100644 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateIterator.java +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateIterator.java @@ -9,7 +9,10 @@ import io.airbyte.protocol.models.v0.AirbyteMessage.Type; import io.airbyte.protocol.models.v0.AirbyteStateMessage; import io.airbyte.protocol.models.v0.AirbyteStateStats; +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; +import java.time.Duration; import java.time.Instant; +import java.time.OffsetDateTime; import java.util.Iterator; import javax.annotation.CheckForNull; import org.slf4j.Logger; @@ -19,16 +22,22 @@ public class SourceStateIterator extends AbstractIterator imp private static final Logger LOGGER = LoggerFactory.getLogger(SourceStateIterator.class); private final Iterator messageIterator; + private final ConfiguredAirbyteStream stream; + private final StateEmitFrequency stateEmitFrequency; private boolean hasEmittedFinalState = false; private long recordCount = 0L; private Instant lastCheckpoint = Instant.now(); - private final SourceStateIteratorManager sourceStateIteratorManager; + private final SourceStateMessageProducer sourceStateMessageProducer; public SourceStateIterator(final Iterator messageIterator, - final SourceStateIteratorManager sourceStateIteratorManager) { + final ConfiguredAirbyteStream stream, + final SourceStateMessageProducer sourceStateMessageProducer, + final StateEmitFrequency stateEmitFrequency) { this.messageIterator = messageIterator; - this.sourceStateIteratorManager = sourceStateIteratorManager; + this.stream = stream; + this.sourceStateMessageProducer = sourceStateMessageProducer; + this.stateEmitFrequency = stateEmitFrequency; } @CheckForNull @@ -45,8 +54,8 @@ protected AirbyteMessage computeNext() { throw new RuntimeException(ex); } if (iteratorHasNextValue) { - if (sourceStateIteratorManager.shouldEmitStateMessage(recordCount, lastCheckpoint)) { - final AirbyteStateMessage stateMessage = sourceStateIteratorManager.generateStateMessageAtCheckpoint(); + if (shouldEmitStateMessage() && sourceStateMessageProducer.shouldEmitStateMessage(stream)) { + final AirbyteStateMessage stateMessage = sourceStateMessageProducer.generateStateMessageAtCheckpoint(stream); stateMessage.withSourceStats(new AirbyteStateStats().withRecordCount((double) recordCount)); recordCount = 0L; @@ -58,7 +67,7 @@ protected AirbyteMessage computeNext() { // Use try-catch to catch Exception that could occur when connection to the database fails try { final T message = messageIterator.next(); - final AirbyteMessage processedMessage = sourceStateIteratorManager.processRecordMessage(message); + final AirbyteMessage processedMessage = 
sourceStateMessageProducer.processRecordMessage(stream, message); recordCount++; return processedMessage; } catch (final Exception e) { @@ -66,7 +75,7 @@ protected AirbyteMessage computeNext() { } } else if (!hasEmittedFinalState) { hasEmittedFinalState = true; - final AirbyteStateMessage finalStateMessageForStream = sourceStateIteratorManager.createFinalStateMessage(); + final AirbyteStateMessage finalStateMessageForStream = sourceStateMessageProducer.createFinalStateMessage(stream); finalStateMessageForStream.withSourceStats(new AirbyteStateStats().withRecordCount((double) recordCount)); recordCount = 0L; return new AirbyteMessage() @@ -77,4 +86,10 @@ protected AirbyteMessage computeNext() { } } + private boolean shouldEmitStateMessage() { + return (recordCount >= stateEmitFrequency.syncCheckpointRecords() + || Duration.between(lastCheckpoint, OffsetDateTime.now()).compareTo(stateEmitFrequency.syncCheckpointDuration()) > 0); + + } + } diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateIteratorManager.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateIteratorManager.java deleted file mode 100644 index a76b0256be2f..000000000000 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateIteratorManager.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.cdk.integrations.source.relationaldb.state; - -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import java.time.Instant; - -public interface SourceStateIteratorManager { - - /** - * Returns a state message that should be emitted at checkpoint. - */ - AirbyteStateMessage generateStateMessageAtCheckpoint(); - - /** - * For the incoming record message, this method defines how the connector will consume it. - */ - AirbyteMessage processRecordMessage(final T message); - - /** - * At the end of the iteration, this method will be called and it will generate the final state - * message. - * - * @return - */ - AirbyteStateMessage createFinalStateMessage(); - - /** - * Determines if the iterator has reached checkpoint or not, based on the time and number of record - * messages it has been processed since the last checkpoint. - */ - boolean shouldEmitStateMessage(final long recordCount, final Instant lastCheckpoint); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateMessageProducer.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateMessageProducer.java new file mode 100644 index 000000000000..c4d95b2b1fbb --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateMessageProducer.java @@ -0,0 +1,45 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.source.relationaldb.state; + +import io.airbyte.protocol.models.v0.AirbyteMessage; +import io.airbyte.protocol.models.v0.AirbyteStateMessage; +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; + +/** + * To be used with SourceStateIterator. SourceStateIterator will iterate over the records and + * generate state messages when needed. 
This interface defines how those state messages would be + generated, and how the incoming record messages will be processed. + * + * @param <T> + */ +public interface SourceStateMessageProducer { + + /** + * Returns a state message that should be emitted at checkpoint. + */ + AirbyteStateMessage generateStateMessageAtCheckpoint(final ConfiguredAirbyteStream stream); + + /** + * For the incoming record message, this method defines how the connector will consume it. + */ + AirbyteMessage processRecordMessage(final ConfiguredAirbyteStream stream, final T message); + + /** + * At the end of the iteration, this method will be called and it will generate the final state + * message. + * + * @return + */ + AirbyteStateMessage createFinalStateMessage(final ConfiguredAirbyteStream stream); + + /** + * Determines if the iterator has reached a checkpoint or not, per the connector's definition. By default + * the iterator will check if the number of records processed is greater than the checkpoint interval, or if + * more than syncCheckpointDuration has passed since the last state message. + */ + boolean shouldEmitStateMessage(final ConfiguredAirbyteStream stream); + +} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/StateEmitFrequency.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/StateEmitFrequency.java new file mode 100644 index 000000000000..ee1eef34c421 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/state/StateEmitFrequency.java @@ -0,0 +1,9 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.integrations.source.relationaldb.state; + +import java.time.Duration; + +public record StateEmitFrequency(long syncCheckpointRecords, Duration syncCheckpointDuration) {} diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateIteratorTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateIteratorTest.java index 34560be119d9..626cd52545a4 100644 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateIteratorTest.java +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/test/java/io/airbyte/cdk/integrations/source/relationaldb/state/SourceStateIteratorTest.java @@ -7,7 +7,6 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThrows; import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.atLeastOnce; import static org.mockito.Mockito.doReturn; @@ -20,36 +19,40 @@ import io.airbyte.protocol.models.v0.AirbyteRecordMessage; import io.airbyte.protocol.models.v0.AirbyteStateMessage; import io.airbyte.protocol.models.v0.AirbyteStateStats; +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; +import java.time.Duration; import java.util.Iterator; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; public class SourceStateIteratorTest { - SourceStateIteratorManager mockProcessor; + SourceStateMessageProducer mockProducer; Iterator messageIterator; + ConfiguredAirbyteStream stream; SourceStateIterator sourceStateIterator; @BeforeEach void setup() { - mockProcessor =
mock(SourceStateIteratorManager.class); + mockProducer = mock(SourceStateMessageProducer.class); + stream = mock(ConfiguredAirbyteStream.class); messageIterator = mock(Iterator.class); - sourceStateIterator = new SourceStateIterator(messageIterator, mockProcessor); + StateEmitFrequency stateEmitFrequency = new StateEmitFrequency(1L, Duration.ofSeconds(100L)); + sourceStateIterator = new SourceStateIterator(messageIterator, stream, mockProducer, stateEmitFrequency); } // Provides a way to generate a record message and will verify corresponding spied functions have // been called. void processRecordMessage() { doReturn(true).when(messageIterator).hasNext(); - doReturn(false).when(mockProcessor).shouldEmitStateMessage(anyLong(), any()); + doReturn(false).when(mockProducer).shouldEmitStateMessage(eq(stream)); AirbyteMessage message = new AirbyteMessage().withType(Type.RECORD).withRecord(new AirbyteRecordMessage()); - doReturn(message).when(mockProcessor).processRecordMessage(any()); + doReturn(message).when(mockProducer).processRecordMessage(eq(stream), any()); doReturn(message).when(messageIterator).next(); assertEquals(message, sourceStateIterator.computeNext()); - verify(mockProcessor, atLeastOnce()).processRecordMessage(message); - verify(mockProcessor, atLeastOnce()).shouldEmitStateMessage(eq(0L), any()); + verify(mockProducer, atLeastOnce()).processRecordMessage(eq(stream), eq(message)); } @Test @@ -60,9 +63,9 @@ void testShouldProcessRecordMessage() { @Test void testShouldEmitStateMessage() { processRecordMessage(); - doReturn(true).when(mockProcessor).shouldEmitStateMessage(anyLong(), any()); + doReturn(true).when(mockProducer).shouldEmitStateMessage(eq(stream)); final AirbyteStateMessage stateMessage = new AirbyteStateMessage(); - doReturn(stateMessage).when(mockProcessor).generateStateMessageAtCheckpoint(); + doReturn(stateMessage).when(mockProducer).generateStateMessageAtCheckpoint(stream); AirbyteMessage expectedMessage = new AirbyteMessage().withType(Type.STATE).withState(stateMessage); expectedMessage.getState().withSourceStats(new AirbyteStateStats().withRecordCount(1.0)); assertEquals(expectedMessage, sourceStateIterator.computeNext()); @@ -74,7 +77,7 @@ void testShouldEmitFinalStateMessage() { processRecordMessage(); doReturn(false).when(messageIterator).hasNext(); final AirbyteStateMessage stateMessage = new AirbyteStateMessage(); - doReturn(stateMessage).when(mockProcessor).createFinalStateMessage(); + doReturn(stateMessage).when(mockProducer).createFinalStateMessage(stream); AirbyteMessage expectedMessage = new AirbyteMessage().withType(Type.STATE).withState(stateMessage); expectedMessage.getState().withSourceStats(new AirbyteStateStats().withRecordCount(2.0)); assertEquals(expectedMessage, sourceStateIterator.computeNext()); @@ -84,7 +87,7 @@ void testShouldEmitFinalStateMessage() { void testShouldSendEndOfData() { processRecordMessage(); doReturn(false).when(messageIterator).hasNext(); - doReturn(new AirbyteStateMessage()).when(mockProcessor).createFinalStateMessage(); + doReturn(new AirbyteStateMessage()).when(mockProducer).createFinalStateMessage(stream); sourceStateIterator.computeNext(); // After sending the final state, if iterator was called again, we will return null. 
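To summarize the refactor above: `SourceStateIteratorManager` is split so that `SourceStateIterator` now owns the decision of *when* to checkpoint (records-or-duration, carried by the new `StateEmitFrequency` record), while `SourceStateMessageProducer` owns *what* each state message contains for a given stream. The following is a minimal Python transliteration of that contract; the names mirror the Java types, and this sketch is illustrative only, not code from the patch:

```python
# Illustrative Python transliteration of the new Java contract: the iterator decides
# *when* to checkpoint, the producer decides *what* state to emit for the stream.
from dataclasses import dataclass
from datetime import datetime, timedelta


@dataclass(frozen=True)
class StateEmitFrequency:
    sync_checkpoint_records: int
    sync_checkpoint_duration: timedelta


def iterate_with_state(messages, stream, producer, frequency: StateEmitFrequency):
    record_count = 0
    last_checkpoint = datetime.now()
    for message in messages:
        # Mirrors SourceStateIterator.shouldEmitStateMessage(): record count OR
        # elapsed time, combined with the producer's own per-stream veto.
        due = (record_count >= frequency.sync_checkpoint_records
               or datetime.now() - last_checkpoint > frequency.sync_checkpoint_duration)
        if due and producer.should_emit_state_message(stream):
            yield producer.generate_state_message_at_checkpoint(stream)
            record_count = 0
            last_checkpoint = datetime.now()
        yield producer.process_record_message(stream, message)
        record_count += 1
    # After the source is exhausted, emit the stream's final state exactly once.
    yield producer.create_final_state_message(stream)
```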
diff --git a/airbyte-integrations/connectors/source-mysql/build.gradle b/airbyte-integrations/connectors/source-mysql/build.gradle index c0f1e6ef5114..88b501de2204 100644 --- a/airbyte-integrations/connectors/source-mysql/build.gradle +++ b/airbyte-integrations/connectors/source-mysql/build.gradle @@ -6,7 +6,7 @@ plugins { } airbyteJavaConnector { - cdkVersionRequired = '0.23.1' + cdkVersionRequired = '0.23.8' features = ['db-sources'] useLocalCdk = false } diff --git a/airbyte-integrations/connectors/source-mysql/metadata.yaml b/airbyte-integrations/connectors/source-mysql/metadata.yaml index c3072a70cac7..044fd28e7d8e 100644 --- a/airbyte-integrations/connectors/source-mysql/metadata.yaml +++ b/airbyte-integrations/connectors/source-mysql/metadata.yaml @@ -9,7 +9,7 @@ data: connectorSubtype: database connectorType: source definitionId: 435bb9a5-7887-4809-aa58-28c27df0d7ad - dockerImageTag: 3.3.12 + dockerImageTag: 3.3.13 dockerRepository: airbyte/source-mysql documentationUrl: https://docs.airbyte.com/integrations/sources/mysql githubIssueLabel: source-mysql diff --git a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialLoadGlobalStateManager.java b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialLoadGlobalStateManager.java index e810d860e4c8..cff715ba6d33 100644 --- a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialLoadGlobalStateManager.java +++ b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialLoadGlobalStateManager.java @@ -17,6 +17,7 @@ import io.airbyte.protocol.models.v0.AirbyteStateMessage.AirbyteStateType; import io.airbyte.protocol.models.v0.AirbyteStreamState; import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; import io.airbyte.protocol.models.v0.StreamDescriptor; import io.airbyte.protocol.models.v0.SyncMode; import java.util.ArrayList; @@ -27,11 +28,8 @@ import java.util.Objects; import java.util.Set; -public class MySqlInitialLoadGlobalStateManager implements MySqlInitialLoadStateManager { +public class MySqlInitialLoadGlobalStateManager extends MySqlInitialLoadStateManager { - private final Map pairToPrimaryKeyLoadStatus; - // Map of pair to the primary key info (field name & data type) associated with it. - private final Map pairToPrimaryKeyInfo; private final CdcState cdcState; // Only one global state is emitted, which is fanned out into many entries in the DB by platform. 
As @@ -63,14 +61,17 @@ private static Set initStreamsCompletedSnapshot( } @Override - public AirbyteStateMessage createIntermediateStateMessage(final AirbyteStreamNameNamespacePair pair, final PrimaryKeyLoadStatus pkLoadStatus) { + public AirbyteStateMessage generateStateMessageAtCheckpoint(final ConfiguredAirbyteStream airbyteStream) { final List streamStates = new ArrayList<>(); streamsThatHaveCompletedSnapshot.forEach(stream -> { final DbStreamState state = getFinalState(stream); streamStates.add(getAirbyteStreamState(stream, Jsons.jsonNode(state))); }); - streamStates.add(getAirbyteStreamState(pair, (Jsons.jsonNode(pkLoadStatus)))); + AirbyteStreamNameNamespacePair pair = + new AirbyteStreamNameNamespacePair(airbyteStream.getStream().getName(), airbyteStream.getStream().getNamespace()); + var pkStatus = getPrimaryKeyLoadStatus(pair); + streamStates.add(getAirbyteStreamState(pair, (Jsons.jsonNode(pkStatus)))); final AirbyteGlobalState globalState = new AirbyteGlobalState(); globalState.setSharedState(Jsons.jsonNode(cdcState)); globalState.setStreamStates(streamStates); @@ -86,8 +87,9 @@ public void updatePrimaryKeyLoadState(final AirbyteStreamNameNamespacePair pair, } @Override - public AirbyteStateMessage createFinalStateMessage(final AirbyteStreamNameNamespacePair pair, - final JsonNode streamStateForIncrementalRun) { + public AirbyteStateMessage createFinalStateMessage(final ConfiguredAirbyteStream airbyteStream) { + AirbyteStreamNameNamespacePair pair = + new AirbyteStreamNameNamespacePair(airbyteStream.getStream().getName(), airbyteStream.getStream().getNamespace()); streamsThatHaveCompletedSnapshot.add(pair); final List streamStates = new ArrayList<>(); streamsThatHaveCompletedSnapshot.forEach(stream -> { diff --git a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialLoadHandler.java b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialLoadHandler.java index 236c65659f82..6b03ff28128f 100644 --- a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialLoadHandler.java +++ b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialLoadHandler.java @@ -15,7 +15,7 @@ import io.airbyte.cdk.integrations.source.relationaldb.DbSourceDiscoverUtil; import io.airbyte.cdk.integrations.source.relationaldb.TableInfo; import io.airbyte.cdk.integrations.source.relationaldb.state.SourceStateIterator; -import io.airbyte.cdk.integrations.source.relationaldb.state.SourceStateIteratorManager; +import io.airbyte.cdk.integrations.source.relationaldb.state.StateEmitFrequency; import io.airbyte.commons.stream.AirbyteStreamUtils; import io.airbyte.commons.util.AutoCloseableIterator; import io.airbyte.commons.util.AutoCloseableIterators; @@ -115,7 +115,7 @@ public List> getIncrementalIterators( calculateChunkSize(tableSizeInfoMap.get(pair), pair), isCompositePrimaryKey(airbyteStream)); final AutoCloseableIterator recordIterator = getRecordIterator(queryStream, streamName, namespace, emittedAt.toEpochMilli()); - final AutoCloseableIterator recordAndMessageIterator = augmentWithState(recordIterator, pair); + final AutoCloseableIterator recordAndMessageIterator = augmentWithState(recordIterator, airbyteStream, pair); iteratorList.add(augmentWithLogs(recordAndMessageIterator, pair, streamName)); @@ -174,12 +174,10 @@ private AutoCloseableIterator 
augmentWithLogs(final AutoCloseabl } private AutoCloseableIterator augmentWithState(final AutoCloseableIterator recordIterator, + final ConfiguredAirbyteStream airbyteStream, final AirbyteStreamNameNamespacePair pair) { final PrimaryKeyLoadStatus currentPkLoadStatus = initialLoadStateManager.getPrimaryKeyLoadStatus(pair); - final JsonNode incrementalState = - (currentPkLoadStatus == null || currentPkLoadStatus.getIncrementalState() == null) ? streamStateForIncrementalRunSupplier.apply(pair) - : currentPkLoadStatus.getIncrementalState(); final Duration syncCheckpointDuration = config.get(SYNC_CHECKPOINT_DURATION_PROPERTY) != null ? Duration.ofSeconds(config.get(SYNC_CHECKPOINT_DURATION_PROPERTY).asLong()) @@ -187,12 +185,11 @@ private AutoCloseableIterator augmentWithState(final AutoCloseab final Long syncCheckpointRecords = config.get(SYNC_CHECKPOINT_RECORDS_PROPERTY) != null ? config.get(SYNC_CHECKPOINT_RECORDS_PROPERTY).asLong() : DebeziumIteratorConstants.SYNC_CHECKPOINT_RECORDS; - final SourceStateIteratorManager processor = - new MySqlInitialSyncStateIteratorManager(pair, initialLoadStateManager, incrementalState, - syncCheckpointDuration, syncCheckpointRecords); + initialLoadStateManager.setStreamStateForIncrementalRunSupplier(streamStateForIncrementalRunSupplier); return AutoCloseableIterators.transformIterator( - r -> new SourceStateIterator<>(r, processor), + r -> new SourceStateIterator<>(r, airbyteStream, initialLoadStateManager, + new StateEmitFrequency(syncCheckpointRecords, syncCheckpointDuration)), recordIterator, pair); } diff --git a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialLoadStateManager.java b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialLoadStateManager.java index be5cec573294..28d99b0bdf98 100644 --- a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialLoadStateManager.java +++ b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialLoadStateManager.java @@ -5,39 +5,76 @@ package io.airbyte.integrations.source.mysql.initialsync; import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.cdk.integrations.source.relationaldb.state.SourceStateMessageProducer; import io.airbyte.integrations.source.mysql.initialsync.MySqlInitialReadUtil.PrimaryKeyInfo; +import io.airbyte.integrations.source.mysql.internal.models.InternalModels.StateType; import io.airbyte.integrations.source.mysql.internal.models.PrimaryKeyLoadStatus; import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; +import io.airbyte.protocol.models.v0.AirbyteMessage; +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; import java.util.HashMap; import java.util.Map; +import java.util.Objects; +import java.util.function.Function; -public interface MySqlInitialLoadStateManager { +public abstract class MySqlInitialLoadStateManager implements SourceStateMessageProducer { - long MYSQL_STATUS_VERSION = 2; - String STATE_TYPE_KEY = "state_type"; - String PRIMARY_KEY_STATE_TYPE = "primary_key"; + public static final long MYSQL_STATUS_VERSION = 2; + public static final String STATE_TYPE_KEY = "state_type"; + public static final String PRIMARY_KEY_STATE_TYPE = "primary_key"; - // Returns an intermediate state message for the initial sync. 
- AirbyteStateMessage createIntermediateStateMessage(final AirbyteStreamNameNamespacePair pair, final PrimaryKeyLoadStatus pkLoadStatus); + protected Function streamStateForIncrementalRunSupplier; - // Updates the {@link PrimaryKeyLoadStatus} for the state associated with the given pair - void updatePrimaryKeyLoadState(final AirbyteStreamNameNamespacePair pair, final PrimaryKeyLoadStatus pkLoadStatus); + protected Map pairToPrimaryKeyLoadStatus; + + // Map of pair to the primary key info (field name & data type) associated with it. + protected Map pairToPrimaryKeyInfo; + + void setStreamStateForIncrementalRunSupplier(final Function streamStateForIncrementalRunSupplier) { + this.streamStateForIncrementalRunSupplier = streamStateForIncrementalRunSupplier; + } - // Returns the final state message for the initial sync. - AirbyteStateMessage createFinalStateMessage(final AirbyteStreamNameNamespacePair pair, - final JsonNode streamStateForIncrementalRun); + // Updates the {@link PrimaryKeyLoadStatus} for the state associated with the given pair + public abstract void updatePrimaryKeyLoadState(final AirbyteStreamNameNamespacePair pair, final PrimaryKeyLoadStatus pkLoadStatus); // Returns the previous state emitted, represented as a {@link PrimaryKeyLoadStatus} associated with // the stream. - PrimaryKeyLoadStatus getPrimaryKeyLoadStatus(final AirbyteStreamNameNamespacePair pair); + public abstract PrimaryKeyLoadStatus getPrimaryKeyLoadStatus(final AirbyteStreamNameNamespacePair pair); // Returns the current {@PrimaryKeyInfo}, associated with the stream. This includes the data type & // the column name associated with the stream. - PrimaryKeyInfo getPrimaryKeyInfo(final AirbyteStreamNameNamespacePair pair); + public abstract PrimaryKeyInfo getPrimaryKeyInfo(final AirbyteStreamNameNamespacePair pair); + + protected JsonNode getIncrementalState(final AirbyteStreamNameNamespacePair pair) { + final PrimaryKeyLoadStatus currentPkLoadStatus = getPrimaryKeyLoadStatus(pair); + return (currentPkLoadStatus == null || currentPkLoadStatus.getIncrementalState() == null) ? 
streamStateForIncrementalRunSupplier.apply(pair) + : currentPkLoadStatus.getIncrementalState(); + } + + @Override + public AirbyteMessage processRecordMessage(final ConfiguredAirbyteStream stream, final AirbyteMessage message) { + if (Objects.nonNull(message)) { + final AirbyteStreamNameNamespacePair pair = new AirbyteStreamNameNamespacePair(stream.getStream().getName(), stream.getStream().getNamespace()); + final String pkFieldName = this.getPrimaryKeyInfo(pair).pkFieldName(); + final String lastPk = message.getRecord().getData().get(pkFieldName).asText(); + final PrimaryKeyLoadStatus pkStatus = new PrimaryKeyLoadStatus() + .withVersion(MYSQL_STATUS_VERSION) + .withStateType(StateType.PRIMARY_KEY) + .withPkName(pkFieldName) + .withPkVal(lastPk) + .withIncrementalState(getIncrementalState(pair)); + this.updatePrimaryKeyLoadState(pair, pkStatus); + } + return message; + } + + @Override + public boolean shouldEmitStateMessage(final ConfiguredAirbyteStream stream) { + return true; + } - static Map initPairToPrimaryKeyLoadStatusMap( - final Map pairToPkStatus) { + public static Map initPairToPrimaryKeyLoadStatusMap( + final Map pairToPkStatus) { final Map map = new HashMap<>(); pairToPkStatus.forEach((pair, pkStatus) -> { final AirbyteStreamNameNamespacePair updatedPair = new AirbyteStreamNameNamespacePair(pair.getName(), pair.getNamespace()); diff --git a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialLoadStreamStateManager.java b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialLoadStreamStateManager.java index 88859e2ecb84..4f7445646fbd 100644 --- a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialLoadStreamStateManager.java +++ b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialLoadStreamStateManager.java @@ -9,10 +9,12 @@ import io.airbyte.integrations.source.mysql.initialsync.MySqlInitialReadUtil.InitialLoadStreams; import io.airbyte.integrations.source.mysql.initialsync.MySqlInitialReadUtil.PrimaryKeyInfo; import io.airbyte.integrations.source.mysql.internal.models.PrimaryKeyLoadStatus; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.v0.AirbyteStateMessage; import io.airbyte.protocol.models.v0.AirbyteStateMessage.AirbyteStateType; import io.airbyte.protocol.models.v0.AirbyteStreamState; import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; import io.airbyte.protocol.models.v0.StreamDescriptor; import java.util.Map; import java.util.Objects; @@ -24,12 +26,7 @@ * keys to the stream state when they're going through the iterator Once we have verified that * expanding StreamStateManager itself to include this functionality, this class will be removed */ -public class MySqlInitialLoadStreamStateManager implements MySqlInitialLoadStateManager { - - private final Map pairToPrimaryKeyLoadStatus; - - // Map of pair to the primary key info (field name & data type) associated with it. 
- private final Map pairToPrimaryKeyInfo; +public class MySqlInitialLoadStreamStateManager extends MySqlInitialLoadStateManager { private static final Logger LOGGER = LoggerFactory.getLogger(MySqlInitialLoadStreamStateManager.class); @@ -40,6 +37,12 @@ public MySqlInitialLoadStreamStateManager(final ConfiguredAirbyteCatalog catalog this.pairToPrimaryKeyLoadStatus = MySqlInitialLoadStateManager.initPairToPrimaryKeyLoadStatusMap(initialLoadStreams.pairToInitialLoadStatus()); } + /** + * Updates the {@link PrimaryKeyLoadStatus} tracked for the given stream pair. + */ + @Override public void updatePrimaryKeyLoadState(final io.airbyte.protocol.models.AirbyteStreamNameNamespacePair pair, final PrimaryKeyLoadStatus pkLoadStatus) { @@ -47,12 +50,13 @@ public void updatePrimaryKeyLoadState(final io.airbyte.protocol.models.AirbyteSt } @Override - public AirbyteStateMessage createFinalStateMessage(final io.airbyte.protocol.models.AirbyteStreamNameNamespacePair pair, - final JsonNode streamStateForIncrementalRun) { + public AirbyteStateMessage createFinalStateMessage(final ConfiguredAirbyteStream stream) { + AirbyteStreamNameNamespacePair pair = new AirbyteStreamNameNamespacePair(stream.getStream().getName(), stream.getStream().getNamespace()); + final JsonNode incrementalState = getIncrementalState(pair); return new AirbyteStateMessage() .withType(AirbyteStateType.STREAM) - .withStream(getAirbyteStreamState(pair, (streamStateForIncrementalRun))); + .withStream(getAirbyteStreamState(pair, incrementalState)); } @Override @@ -66,11 +70,12 @@ public PrimaryKeyLoadStatus getPrimaryKeyLoadStatus(final io.airbyte.protocol.mo } @Override - public AirbyteStateMessage createIntermediateStateMessage(final io.airbyte.protocol.models.AirbyteStreamNameNamespacePair pair, - final PrimaryKeyLoadStatus pkLoadStatus) { + public AirbyteStateMessage generateStateMessageAtCheckpoint(final ConfiguredAirbyteStream stream) { + AirbyteStreamNameNamespacePair pair = new AirbyteStreamNameNamespacePair(stream.getStream().getName(), stream.getStream().getNamespace()); + var pkStatus = getPrimaryKeyLoadStatus(pair); return new AirbyteStateMessage() .withType(AirbyteStateType.STREAM) - .withStream(getAirbyteStreamState(pair, Jsons.jsonNode(pkLoadStatus))); + .withStream(getAirbyteStreamState(pair, Jsons.jsonNode(pkStatus))); } private AirbyteStreamState getAirbyteStreamState(final io.airbyte.protocol.models.AirbyteStreamNameNamespacePair pair, final JsonNode stateData) { diff --git a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialSyncStateIteratorManager.java b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialSyncStateIteratorManager.java deleted file mode 100644 index f7722e1844da..000000000000 --- a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialSyncStateIteratorManager.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */ - -package io.airbyte.integrations.source.mysql.initialsync; - -import static io.airbyte.integrations.source.mysql.initialsync.MySqlInitialLoadStateManager.MYSQL_STATUS_VERSION; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.source.relationaldb.state.SourceStateIteratorManager; -import io.airbyte.integrations.source.mysql.internal.models.InternalModels.StateType; -import io.airbyte.integrations.source.mysql.internal.models.PrimaryKeyLoadStatus; -import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import java.time.Duration; -import java.time.Instant; -import java.time.OffsetDateTime; -import java.util.Objects; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class MySqlInitialSyncStateIteratorManager implements SourceStateIteratorManager { - - private static final Logger LOGGER = LoggerFactory.getLogger(MySqlInitialSyncStateIteratorManager.class); - - private final AirbyteStreamNameNamespacePair pair; - private PrimaryKeyLoadStatus pkStatus; - private final JsonNode streamStateForIncrementalRun; - private final MySqlInitialLoadStateManager stateManager; - private final Duration syncCheckpointDuration; - private final Long syncCheckpointRecords; - private final String pkFieldName; - - public MySqlInitialSyncStateIteratorManager( - final AirbyteStreamNameNamespacePair pair, - final MySqlInitialLoadStateManager stateManager, - final JsonNode streamStateForIncrementalRun, - final Duration checkpointDuration, - final Long checkpointRecords) { - this.pair = pair; - this.stateManager = stateManager; - this.streamStateForIncrementalRun = streamStateForIncrementalRun; - this.syncCheckpointDuration = checkpointDuration; - this.syncCheckpointRecords = checkpointRecords; - this.pkFieldName = stateManager.getPrimaryKeyInfo(pair).pkFieldName(); - this.pkStatus = stateManager.getPrimaryKeyLoadStatus(pair); - } - - @Override - public AirbyteStateMessage generateStateMessageAtCheckpoint() { - LOGGER.info("Emitting initial sync pk state for stream {}, state is {}", pair, pkStatus); - return stateManager.createIntermediateStateMessage(pair, pkStatus); - } - - @Override - public AirbyteMessage processRecordMessage(final AirbyteMessage message) { - if (Objects.nonNull(message)) { - final String lastPk = message.getRecord().getData().get(pkFieldName).asText(); - pkStatus = new PrimaryKeyLoadStatus() - .withVersion(MYSQL_STATUS_VERSION) - .withStateType(StateType.PRIMARY_KEY) - .withPkName(pkFieldName) - .withPkVal(lastPk) - .withIncrementalState(streamStateForIncrementalRun); - stateManager.updatePrimaryKeyLoadState(pair, pkStatus); - } - return message; - } - - @Override - public AirbyteStateMessage createFinalStateMessage() { - final AirbyteStateMessage finalStateMessage = stateManager.createFinalStateMessage(pair, streamStateForIncrementalRun); - LOGGER.info("Finished initial sync of stream {}, Emitting final state, state is {}", pair, finalStateMessage); - return finalStateMessage; - } - - @Override - public boolean shouldEmitStateMessage(long recordCount, Instant lastCheckpoint) { - return (recordCount >= syncCheckpointRecords || Duration.between(lastCheckpoint, OffsetDateTime.now()).compareTo(syncCheckpointDuration) > 0) - && Objects.nonNull(pkStatus); - } - -} diff --git a/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/MySqlJdbcSourceAcceptanceTest.java 
b/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/MySqlJdbcSourceAcceptanceTest.java index d6597cd2b023..79a27260480e 100644 --- a/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/MySqlJdbcSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/MySqlJdbcSourceAcceptanceTest.java @@ -198,6 +198,7 @@ protected void testReadMultipleTablesIncrementally() throws Exception { // Extract only state messages for each stream final List streamOneStateMessagesFromFirstSync = extractStateMessage(messagesFromFirstSync, streamOneName); final List streamTwoStateMessagesFromFirstSync = extractStateMessage(messagesFromFirstSync, streamTwoName); + // Extract the incremental states of each stream's first and second state message final List streamOneIncrementalStatesFromFirstSync = List.of(streamOneStateMessagesFromFirstSync.get(0).getStream().getStreamState().get("incremental_state"), diff --git a/docs/integrations/sources/mysql.md b/docs/integrations/sources/mysql.md index f8d7de3fed39..89caf6e38a71 100644 --- a/docs/integrations/sources/mysql.md +++ b/docs/integrations/sources/mysql.md @@ -223,11 +223,12 @@ Any database or table encoding combination of charset and collation is supported | Version | Date | Pull Request | Subject | |:--------|:-----------|:-----------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------| -| 3.3.12 | 2024-02-27 | [35675](https://github.com/airbytehq/airbyte/pull/35675) | Fix invalid cdc error message. | +| 3.3.13 | 2024-02-29 | [35529](https://github.com/airbytehq/airbyte/pull/35529) | Refactor state iterator messages. | +| 3.3.12 | 2024-02-27 | [35675](https://github.com/airbytehq/airbyte/pull/35675) | Fix invalid cdc error message. | | 3.3.11 | 2024-02-23 | [35527](https://github.com/airbytehq/airbyte/pull/35527) | Adopt 0.23.1 and shutdown timeouts. | -| 3.3.10 | 2024-02-22 | [35569](https://github.com/airbytehq/airbyte/pull/35569) | Fix logging bug. | -| 3.3.9 | 2024-02-21 | [35525](https://github.com/airbytehq/airbyte/pull/35338) | Adopt 0.21.4 and reduce cdc state compression threshold to 1MB. | -| 3.3.8 | 2024-02-20 | [35338](https://github.com/airbytehq/airbyte/pull/35338) | Add config to throw an error on invalid CDC position. | +| 3.3.10 | 2024-02-22 | [35569](https://github.com/airbytehq/airbyte/pull/35569) | Fix logging bug. | +| 3.3.9 | 2024-02-21 | [35525](https://github.com/airbytehq/airbyte/pull/35338) | Adopt 0.21.4 and reduce cdc state compression threshold to 1MB. | +| 3.3.8 | 2024-02-20 | [35338](https://github.com/airbytehq/airbyte/pull/35338) | Add config to throw an error on invalid CDC position. | | 3.3.7 | 2024-02-13 | [35036](https://github.com/airbytehq/airbyte/pull/34751) | Emit analytics message for invalid CDC cursor. | | 3.3.6 | 2024-02-13 | [34869](https://github.com/airbytehq/airbyte/pull/34573) | Don't emit state in SourceStateIterator when there is an underlying stream failure. 
| | 3.3.5 | 2024-02-12 | [34580](https://github.com/airbytehq/airbyte/pull/34580) | Support special chars in db name | From 1ee2d8a442f4bc091b20c5385c3aa7b0ab63a773 Mon Sep 17 00:00:00 2001 From: Justin Chau Date: Thu, 29 Feb 2024 11:38:02 -0800 Subject: [PATCH 037/172] Update getting-started.mdx (#35730) --- docs/using-airbyte/pyairbyte/getting-started.mdx | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/using-airbyte/pyairbyte/getting-started.mdx b/docs/using-airbyte/pyairbyte/getting-started.mdx index 86d0d13adff9..d8ece1301b4a 100644 --- a/docs/using-airbyte/pyairbyte/getting-started.mdx +++ b/docs/using-airbyte/pyairbyte/getting-started.mdx @@ -4,6 +4,8 @@ import AirbyteLibConnectors from '@site/src/components/AirbyteLibConnectors'; PyAirbyte is a library that provides a set of utilities to use Airbyte connectors in Python. It is meant to be used in situations where setting up an Airbyte server or cloud account is not possible or desirable, for example in a Jupyter notebook or when iterating on early prototypes on a developer's workstation. +You can also check out this [YouTube video](https://youtu.be/tUTE-csnwCI) on how to get started with PyAirbyte! + ## Installation ```bash From 68feeec8626989d9e95a6616c48598d49bae143c Mon Sep 17 00:00:00 2001 From: Augustin Date: Thu, 29 Feb 2024 21:58:06 +0100 Subject: [PATCH 038/172] connectors-test: 6H workflow timeout (#35734) --- .github/workflows/connectors_tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/connectors_tests.yml b/.github/workflows/connectors_tests.yml index 8ee70eef8528..5c10ebdbef8d 100644 --- a/.github/workflows/connectors_tests.yml +++ b/.github/workflows/connectors_tests.yml @@ -63,7 +63,7 @@ jobs: if: needs.changes.outputs.connectors == 'true' name: Connectors CI runs-on: connector-test-large - timeout-minutes: 1440 # 24 hours + timeout-minutes: 360 # 6 hours steps: - name: Checkout Airbyte uses: actions/checkout@v3 From 61e0b3fbf234af3154299960d53a2e6b934acfff Mon Sep 17 00:00:00 2001 From: Stephane Geneix <147216312+stephane-airbyte@users.noreply.github.com> Date: Thu, 29 Feb 2024 13:30:31 -0800 Subject: [PATCH 039/172] improve airbyte-cdk (#35720) Various small changes related to tests and MSSQL: the JDBC connection string for MSSQL was wrong. The LoggingInvocationInterceptor was printing faulty log lines. The SshBastionContainer should be a container like any other, relying upon the ContainerFactory; as a result, the ContainerFactory should be able to return any type of container, not just JdbcDatabaseContainer. In addition, we shouldn't be passing strings to the ContainerFactory as container modifiers, but a named lambda, so no introspection is needed anymore (the old string-based API is kept for backward compatibility). The CdcSourceTest doesn't check DB invariants before starting a source; we need a test function that can be overridden by the implementations of CdcSourceTest.
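For illustration, a minimal sketch of the new named-modifier style (the PostgreSQL image, the withNetwork modifier, and the factory class are hypothetical; NamedContainerModifierImpl and the shared(...) overload are the ones added in this patch):

    // Assumed: some ContainerFactory implementation parameterized on PostgreSQLContainer<?>.
    final var factory = new PostgreSQLContainerFactory();

    // Old, string-based style (kept for backward compatibility): the factory
    // method named "withNetwork" is looked up via reflection.
    // PostgreSQLContainer<?> c = factory.shared("postgres:13-alpine", "withNetwork");

    // New style: the modifier is an explicit named lambda, so no introspection is needed.
    final Consumer<PostgreSQLContainer<?>> withNetwork = c -> c.withNetwork(Network.SHARED);
    final PostgreSQLContainer<?> container = factory.shared("postgres:13-alpine",
        new NamedContainerModifierImpl<>("withNetwork", withNetwork));

(Consumer comes from java.util.function, Network from org.testcontainers.containers.)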
--- airbyte-cdk/java/airbyte-cdk/README.md | 3 +- .../cdk/db/factory/DatabaseDriver.java | 2 +- .../src/main/resources/version.properties | 2 +- .../LoggingInvocationInterceptor.java | 5 +- .../base/ssh/SshBastionContainer.java | 31 ++++- .../cdk/testutils/ContainerFactory.java | 127 +++++++++++++----- .../integrations/debezium/CdcSourceTest.java | 30 +++-- 7 files changed, 149 insertions(+), 51 deletions(-) diff --git a/airbyte-cdk/java/airbyte-cdk/README.md b/airbyte-cdk/java/airbyte-cdk/README.md index 538848b65e3f..8d941ed946f2 100644 --- a/airbyte-cdk/java/airbyte-cdk/README.md +++ b/airbyte-cdk/java/airbyte-cdk/README.md @@ -166,8 +166,9 @@ MavenLocal debugging steps: | Version | Date | Pull Request | Subject | |:--------|:-----------|:-----------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 0.23.9 | 2024-03-01 | [\#35720](https://github.com/airbytehq/airbyte/pull/35720) | Various improvements for tests and TestDataHolder | | 0.23.8 | 2024-02-28 | [\#35529](https://github.com/airbytehq/airbyte/pull/35529) | Refactor on state iterators | -| 0.23.7 | 2024-02-28 | [\#35376](https://github.com/airbytehq/airbyte/pull/35376) | Add a getNamespace into TestDataHolder | +| 0.23.7 | 2024-02-28 | [\#35376](https://github.com/airbytehq/airbyte/pull/35376) | Extract TyperDeduper migrations to a separate method | | 0.23.6 | 2024-02-26 | [\#35647](https://github.com/airbytehq/airbyte/pull/35647) | Add a getNamespace into TestDataHolder | | 0.23.5 | 2024-02-26 | [\#35512](https://github.com/airbytehq/airbyte/pull/35512) | Remove @DisplayName from all CDK tests. | | 0.23.4 | 2024-02-26 | [\#35507](https://github.com/airbytehq/airbyte/pull/35507) | Add more logs into TestDatabase.
| diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/factory/DatabaseDriver.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/factory/DatabaseDriver.java index 27e7750e1847..39bdfdfc0aa7 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/factory/DatabaseDriver.java +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/db/factory/DatabaseDriver.java @@ -14,7 +14,7 @@ public enum DatabaseDriver { DB2("com.ibm.db2.jcc.DB2Driver", "jdbc:db2://%s:%d/%s"), STARBURST("io.trino.jdbc.TrinoDriver", "jdbc:trino://%s:%s/%s?SSL=true&source=airbyte"), MARIADB("org.mariadb.jdbc.Driver", "jdbc:mariadb://%s:%d/%s"), - MSSQLSERVER("com.microsoft.sqlserver.jdbc.SQLServerDriver", "jdbc:sqlserver://%s:%d/%s"), + MSSQLSERVER("com.microsoft.sqlserver.jdbc.SQLServerDriver", "jdbc:sqlserver://%s:%d;databaseName=%s"), MYSQL("com.mysql.cj.jdbc.Driver", "jdbc:mysql://%s:%d/%s"), ORACLE("oracle.jdbc.OracleDriver", "jdbc:oracle:thin:@%s:%d/%s"), VERTICA("com.vertica.jdbc.Driver", "jdbc:vertica://%s:%d/%s"), diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties index 8a67e26e76c8..586cdf0b8210 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties @@ -1 +1 @@ -version=0.23.8 +version=0.23.9 diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/extensions/LoggingInvocationInterceptor.java b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/extensions/LoggingInvocationInterceptor.java index 8957a5928d83..a0a2d31640f9 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/extensions/LoggingInvocationInterceptor.java +++ b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/extensions/LoggingInvocationInterceptor.java @@ -59,8 +59,9 @@ public Object invoke(Object proxy, Method method, Object[] args) throws Throwabl logLineSuffix = "instance creation for %s".formatted(invocationContext.getTargetClass()); } else if (methodMatcher.matches()) { String interceptedEvent = methodMatcher.group(1); - logLineSuffix = "execution of @%s method %s.%s".formatted(invocationContext.getExecutable().getDeclaringClass().getSimpleName(), - interceptedEvent, invocationContext.getExecutable().getName()); + logLineSuffix = "execution of @%s method %s.%s".formatted(interceptedEvent, + invocationContext.getExecutable().getDeclaringClass().getSimpleName(), + invocationContext.getExecutable().getName()); } else { logLineSuffix = "execution of unknown intercepted call %s".formatted(methodName); } diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/integrations/base/ssh/SshBastionContainer.java b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/integrations/base/ssh/SshBastionContainer.java index 20c395d2e720..1770dca4905e 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/integrations/base/ssh/SshBastionContainer.java +++ b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/integrations/base/ssh/SshBastionContainer.java @@ -10,29 +10,50 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; import io.airbyte.cdk.integrations.util.HostPortResolver; +import io.airbyte.cdk.testutils.ContainerFactory; import 
io.airbyte.commons.json.Jsons; import java.io.IOException; import java.util.List; import java.util.Objects; +import java.util.function.Consumer; import org.apache.commons.lang3.tuple.ImmutablePair; import org.testcontainers.containers.Container; import org.testcontainers.containers.GenericContainer; import org.testcontainers.containers.JdbcDatabaseContainer; import org.testcontainers.containers.Network; import org.testcontainers.images.builder.ImageFromDockerfile; +import org.testcontainers.utility.DockerImageName; public class SshBastionContainer implements AutoCloseable { + public static class SshBastionContainerFactory extends ContainerFactory> { + + @Override + protected GenericContainer createNewContainer(DockerImageName imageName) { + var container = new GenericContainer(new ImageFromDockerfile("bastion-test") + .withFileFromClasspath("Dockerfile", "bastion/Dockerfile")) + .withExposedPorts(22); + return container; + } + + public GenericContainer exclusive(final Network network) { + Consumer> imageModifier = c -> { + c.withNetwork(network); + }; + var container = super.exclusive("bastion-test", new NamedContainerModifierImpl<>("withNetwork", imageModifier)); + return container; + } + + } + + private static final SshBastionContainerFactory factory = new SshBastionContainerFactory(); + private static final String SSH_USER = "sshuser"; private static final String SSH_PASSWORD = "secret"; private GenericContainer bastion; public void initAndStartBastion(final Network network) { - bastion = new GenericContainer( - new ImageFromDockerfile("bastion-test") - .withFileFromClasspath("Dockerfile", "bastion/Dockerfile")) - .withNetwork(network) - .withExposedPorts(22); + bastion = factory.exclusive(network); bastion.start(); } diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/ContainerFactory.java b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/ContainerFactory.java index 6c0b8e40e89f..6e89dc7e2f2f 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/ContainerFactory.java +++ b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/ContainerFactory.java @@ -4,6 +4,7 @@ package io.airbyte.cdk.testutils; +import com.google.common.collect.Lists; import io.airbyte.commons.logging.LoggingHelper; import io.airbyte.commons.logging.MdcScope; import java.lang.reflect.InvocationTargetException; @@ -13,13 +14,13 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Consumer; import java.util.function.Supplier; import java.util.stream.Stream; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.testcontainers.containers.GenericContainer; -import org.testcontainers.containers.JdbcDatabaseContainer; import org.testcontainers.containers.output.OutputFrame; import org.testcontainers.containers.output.Slf4jLogConsumer; import org.testcontainers.utility.DockerImageName; @@ -28,11 +29,13 @@ * ContainerFactory is the companion to {@link TestDatabase} and provides it with suitable * testcontainer instances. 
*/ -public abstract class ContainerFactory> { +public abstract class ContainerFactory> { static private final Logger LOGGER = LoggerFactory.getLogger(ContainerFactory.class); - private record ContainerKey(Class clazz, DockerImageName imageName, List methods) {}; + private record ContainerKey> (Class clazz, + DockerImageName imageName, + List> methods) {}; private static class ContainerOrException { @@ -67,12 +70,13 @@ GenericContainer container() { } - private static final ConcurrentMap SHARED_CONTAINERS = new ConcurrentHashMap<>(); + private final ConcurrentMap, ContainerOrException> SHARED_CONTAINERS = new ConcurrentHashMap<>(); private static final AtomicInteger containerId = new AtomicInteger(0); - private static final MdcScope.Builder getTestContainerLogMdcBuilder(DockerImageName imageName, List methods) { + private final MdcScope.Builder getTestContainerLogMdcBuilder(DockerImageName imageName, + List> containerModifiers) { return new MdcScope.Builder() - .setLogPrefix("testcontainer %s (%s[%s]):".formatted(containerId.incrementAndGet(), imageName, StringUtils.join(methods, ","))) + .setLogPrefix("testcontainer %s (%s[%s]):".formatted(containerId.incrementAndGet(), imageName, StringUtils.join(containerModifiers, ","))) .setPrefixColor(LoggingHelper.Color.RED_BACKGROUND); } @@ -84,10 +88,25 @@ private static final MdcScope.Builder getTestContainerLogMdcBuilder(DockerImageN /** * Returns a shared instance of the testcontainer. + * + * @Deprecated use shared(String, NamedContainerModifier) instead */ - @SuppressWarnings("unchecked") + @Deprecated public final C shared(String imageName, String... methods) { - final var containerKey = new ContainerKey(getClass(), DockerImageName.parse(imageName), Stream.of(methods).toList()); + return shared(imageName, + Stream.of(methods).map(n -> new NamedContainerModifierImpl(n, resolveModifierByName(n))).toList()); + } + + public final C shared(String imageName, NamedContainerModifier... namedContainerModifiers) { + return shared(imageName, List.of(namedContainerModifiers)); + } + + public final C shared(String imageName) { + return shared(imageName, new ArrayList<>()); + } + + public final C shared(String imageName, List> namedContainerModifiers) { + final ContainerKey containerKey = new ContainerKey<>(getClass(), DockerImageName.parse(imageName), namedContainerModifiers); // We deliberately avoid creating the container itself eagerly during the evaluation of the map // value. // Container creation can be exceedingly slow. @@ -100,41 +119,83 @@ public final C shared(String imageName, String... methods) { /** * Returns an exclusive instance of the testcontainer. + * + * @Deprecated use exclusive(String, NamedContainerModifier) instead */ @SuppressWarnings("unchecked") + @Deprecated public final C exclusive(String imageName, String... methods) { - return (C) createAndStartContainer(DockerImageName.parse(imageName), Stream.of(methods).toList()); + return exclusive(imageName, + (NamedContainerModifier) Stream.of(methods).map(n -> new NamedContainerModifierImpl(n, resolveModifierByName(n))).toList()); + } + + public final C exclusive(String imageName) { + return exclusive(imageName, new ArrayList<>()); + } + + public final C exclusive(String imageName, NamedContainerModifier... 
namedContainerModifiers) { + return exclusive(imageName, List.of(namedContainerModifiers)); + } + + public final C exclusive(String imageName, List> namedContainerModifiers) { + return (C) createAndStartContainer(DockerImageName.parse(imageName), namedContainerModifiers); + } + + public interface NamedContainerModifier> { + + String name(); + + Consumer modifier(); + } - private GenericContainer createAndStartContainer(DockerImageName imageName, List methodNames) { - LOGGER.info("Creating new shared container based on {} with {}.", imageName, methodNames); - try { - GenericContainer container = createNewContainer(imageName); - final var methods = new ArrayList(); - for (String methodName : methodNames) { - methods.add(getClass().getMethod(methodName, container.getClass())); + public record NamedContainerModifierImpl> (String name, Consumer method) implements NamedContainerModifier { + + public String name() { + return name; + } + + public Consumer modifier() { + return method; + } + + } + + private Consumer resolveModifierByName(String methodName) { + final ContainerFactory self = this; + Consumer resolvedMethod = c -> { + try { + Class containerClass = c.getClass(); + Method method = self.getClass().getMethod(methodName, containerClass); + method.invoke(self, c); + } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) { + throw new RuntimeException(e); } - final var logConsumer = new Slf4jLogConsumer(LOGGER) { + }; + return resolvedMethod; + } - public void accept(OutputFrame frame) { - if (frame.getUtf8StringWithoutLineEnding().trim().length() > 0) { - super.accept(frame); - } - } + private C createAndStartContainer(DockerImageName imageName, List> namedContainerModifiers) { + LOGGER.info("Creating new container based on {} with {}.", imageName, Lists.transform(namedContainerModifiers, c -> c.name())); + C container = createNewContainer(imageName); + final var logConsumer = new Slf4jLogConsumer(LOGGER) { - }; - getTestContainerLogMdcBuilder(imageName, methodNames).produceMappings(logConsumer::withMdc); - container.withLogConsumer(logConsumer); - for (Method method : methods) { - LOGGER.info("Calling {} in {} on new shared container based on {}.", - method.getName(), getClass().getName(), imageName); - method.invoke(this, container); + public void accept(OutputFrame frame) { + if (frame.getUtf8StringWithoutLineEnding().trim().length() > 0) { + super.accept(frame); + } } - container.start(); - return container; - } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException e) { - throw new RuntimeException(e); + + }; + getTestContainerLogMdcBuilder(imageName, namedContainerModifiers).produceMappings(logConsumer::withMdc); + container.withLogConsumer(logConsumer); + for (NamedContainerModifier resolvedNamedContainerModifier : namedContainerModifiers) { + LOGGER.info("Calling {} in {} on new container based on {}.", + resolvedNamedContainerModifier.name(), getClass().getName(), imageName); + resolvedNamedContainerModifier.modifier().accept(container); } + container.start(); + return container; } } diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/debezium/CdcSourceTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/debezium/CdcSourceTest.java index f69c2e380260..91638d2982f6 100644 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/debezium/CdcSourceTest.java +++ 
b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/debezium/CdcSourceTest.java @@ -359,6 +359,7 @@ void testDelete() throws Exception { assertExpectedStateMessages(stateMessages1); deleteMessageOnIdCol(MODELS_STREAM_NAME, COL_ID, 11); + waitForCdcRecords(modelsSchema(), MODELS_STREAM_NAME, 1); final JsonNode state = Jsons.jsonNode(Collections.singletonList(stateMessages1.get(stateMessages1.size() - 1))); final AutoCloseableIterator read2 = source() @@ -388,6 +389,7 @@ void testUpdate() throws Exception { assertExpectedStateMessages(stateMessages1); updateCommand(MODELS_STREAM_NAME, COL_MODEL, updatedModel, COL_ID, 11); + waitForCdcRecords(modelsSchema(), MODELS_STREAM_NAME, 1); final JsonNode state = Jsons.jsonNode(Collections.singletonList(stateMessages1.get(stateMessages1.size() - 1))); final AutoCloseableIterator read2 = source() @@ -408,7 +410,9 @@ void testUpdate() throws Exception { // Verify that when data is inserted into the database while a sync is happening and after the first // sync, it all gets replicated. protected void testRecordsProducedDuringAndAfterSync() throws Exception { - + int recordsCreatedBeforeTestCount = MODEL_RECORDS.size(); + int expectedRecords = recordsCreatedBeforeTestCount; + int expectedRecordsInCdc = 0; final int recordsToCreate = 20; // first batch of records. 20 created here and 6 created in setup method. for (int recordsCreated = 0; recordsCreated < recordsToCreate; recordsCreated++) { @@ -417,7 +421,10 @@ protected void testRecordsProducedDuringAndAfterSync() throws Exception { .of(COL_ID, 100 + recordsCreated, COL_MAKE_ID, 1, COL_MODEL, "F-" + recordsCreated)); writeModelRecord(record); + expectedRecords++; + expectedRecordsInCdc++; } + waitForCdcRecords(modelsSchema(), MODELS_STREAM_NAME, expectedRecordsInCdc); final AutoCloseableIterator firstBatchIterator = source() .read(config(), getConfiguredCatalog(), null); @@ -427,7 +434,7 @@ protected void testRecordsProducedDuringAndAfterSync() throws Exception { assertExpectedStateMessagesForRecordsProducedDuringAndAfterSync(stateAfterFirstBatch); final Set recordsFromFirstBatch = extractRecordMessages( dataFromFirstBatch); - assertEquals((MODEL_RECORDS.size() + recordsToCreate), recordsFromFirstBatch.size()); + assertEquals(expectedRecords, recordsFromFirstBatch.size()); // second batch of records again 20 being created for (int recordsCreated = 0; recordsCreated < recordsToCreate; recordsCreated++) { @@ -436,7 +443,10 @@ protected void testRecordsProducedDuringAndAfterSync() throws Exception { .of(COL_ID, 200 + recordsCreated, COL_MAKE_ID, 1, COL_MODEL, "F-" + recordsCreated)); writeModelRecord(record); + expectedRecords++; + expectedRecordsInCdc++; } + waitForCdcRecords(modelsSchema(), MODELS_STREAM_NAME, expectedRecordsInCdc); final JsonNode state = Jsons.jsonNode(Collections.singletonList(stateAfterFirstBatch.get(stateAfterFirstBatch.size() - 1))); final AutoCloseableIterator secondBatchIterator = source() @@ -459,10 +469,9 @@ protected void testRecordsProducedDuringAndAfterSync() throws Exception { final Set recordsFromSecondBatchWithoutDuplicates = removeDuplicates( recordsFromSecondBatch); - final int recordsCreatedBeforeTestCount = MODEL_RECORDS.size(); assertTrue(recordsCreatedBeforeTestCount < recordsFromFirstBatchWithoutDuplicates.size(), "Expected first sync to include records created while the test was running."); - assertEquals((recordsToCreate * 2) + recordsCreatedBeforeTestCount, + assertEquals(expectedRecords, 
recordsFromFirstBatchWithoutDuplicates.size() + recordsFromSecondBatchWithoutDuplicates .size()); } @@ -527,6 +536,7 @@ void testCdcAndFullRefreshInSameSync() throws Exception { final JsonNode puntoRecord = Jsons .jsonNode(ImmutableMap.of(COL_ID, 100, COL_MAKE_ID, 3, COL_MODEL, "Punto")); writeModelRecord(puntoRecord); + waitForCdcRecords(modelsSchema(), MODELS_STREAM_NAME, 1); final JsonNode state = Jsons.jsonNode(Collections.singletonList(stateMessages1.get(stateMessages1.size() - 1))); final AutoCloseableIterator read2 = source() @@ -547,9 +557,10 @@ void testCdcAndFullRefreshInSameSync() throws Exception { @Test // When no records exist, no records are returned. - void testNoData() throws Exception { + public void testNoData() throws Exception { deleteCommand(MODELS_STREAM_NAME); + waitForCdcRecords(modelsSchema(), MODELS_STREAM_NAME, MODEL_RECORDS.size()); final AutoCloseableIterator read = source().read(config(), getConfiguredCatalog(), null); final List actualRecords = AutoCloseableIterators.toListAndClose(read); @@ -565,7 +576,7 @@ protected void assertExpectedStateMessagesForNoData(final List read1 = source() .read(config(), getConfiguredCatalog(), null); final List actualRecords1 = AutoCloseableIterators.toListAndClose(read1); @@ -584,13 +595,13 @@ void testNoDataOnSecondSync() throws Exception { } @Test - void testCheck() throws Exception { + public void testCheck() throws Exception { final AirbyteConnectionStatus status = source().check(config()); assertEquals(status.getStatus(), AirbyteConnectionStatus.Status.SUCCEEDED); } @Test - void testDiscover() throws Exception { + public void testDiscover() throws Exception { final AirbyteCatalog expectedCatalog = expectedCatalogForDiscover(); final AirbyteCatalog actualCatalog = source().discover(config()); @@ -823,4 +834,7 @@ protected AirbyteCatalog expectedCatalogForDiscover() { return expectedCatalog; } + protected void waitForCdcRecords(String schemaName, String tableName, int recordCount) + throws Exception {} + } From 9f281138fa686ad287a575c55b3f4056e17fab26 Mon Sep 17 00:00:00 2001 From: Anton Karpets Date: Fri, 1 Mar 2024 12:18:28 +0200 Subject: [PATCH 040/172] =?UTF-8?q?=F0=9F=8F=A5Source=20Google=20Search=20?= =?UTF-8?q?Console:=20fix=20expected=20records=20(#35742)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../acceptance-test-config.yml | 2 +- .../integration_tests/expected_records.jsonl | 28 +++++++++---------- 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/airbyte-integrations/connectors/source-google-search-console/acceptance-test-config.yml b/airbyte-integrations/connectors/source-google-search-console/acceptance-test-config.yml index d661153c7768..aaf7caeb5c8e 100755 --- a/airbyte-integrations/connectors/source-google-search-console/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-google-search-console/acceptance-test-config.yml @@ -26,7 +26,7 @@ acceptance_tests: - config_path: "secrets/config.json" expect_records: path: "integration_tests/expected_records.jsonl" - exact_order: yes + exact_order: no timeout_seconds: 3600 empty_streams: - name: search_analytics_page_report diff --git a/airbyte-integrations/connectors/source-google-search-console/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-google-search-console/integration_tests/expected_records.jsonl index fc30ccbbccd9..1b96b467d4bb 100644 --- 
a/airbyte-integrations/connectors/source-google-search-console/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-google-search-console/integration_tests/expected_records.jsonl @@ -1,17 +1,17 @@ {"stream": "sites", "data": {"siteUrl": "sc-domain:airbyte.io", "permissionLevel": "siteFullUser"}, "emitted_at": 1709211825229} {"stream": "sitemaps", "data": {"path": "https://airbyte.io/sitemap.xml", "lastSubmitted": "2021-09-10T23:02:22.258Z", "isPending": false, "isSitemapsIndex": false, "type": "sitemap", "lastDownloaded": "2023-03-02T03:42:19.607Z", "warnings": "0", "errors": "0", "contents": [{"type": "web", "submitted": "5165", "indexed": "0"}]}, "emitted_at": 1677799185696} {"stream": "sitemaps", "data": {"path": "https://airbyte.io/sitemap.xml", "lastSubmitted": "2021-09-10T23:02:22.258Z", "isPending": false, "isSitemapsIndex": false, "type": "sitemap", "lastDownloaded": "2023-03-02T03:42:19.607Z", "warnings": "0", "errors": "0", "contents": [{"type": "web", "submitted": "5165", "indexed": "0"}]}, "emitted_at": 1677799186044} -{"stream": "search_analytics_by_date", "data": {"clicks": 664, "impressions": 14606, "ctr": 0.045460769546761606, "position": 17.294262631795153, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-17"}, "emitted_at": 1709212437957} -{"stream": "search_analytics_by_date", "data": {"clicks": 650, "impressions": 15009, "ctr": 0.043307348923978944, "position": 17.77133719768139, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-18"}, "emitted_at": 1709212438576} -{"stream": "search_analytics_by_country", "data": {"clicks": 117, "impressions": 3207, "ctr": 0.03648269410664172, "position": 18.13376987839102, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-17", "country": "usa"}, "emitted_at": 1709212703008} -{"stream": "search_analytics_by_country", "data": {"clicks": 87, "impressions": 756, "ctr": 0.11507936507936507, "position": 9.941798941798941, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-17", "country": "fra"}, "emitted_at": 1709212703011} -{"stream": "search_analytics_by_device", "data": {"clicks": 637, "impressions": 13493, "ctr": 0.04720966427036241, "position": 16.320166012006226, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-17", "device": "DESKTOP"}, "emitted_at": 1709213135112} -{"stream": "search_analytics_by_device", "data": {"clicks": 27, "impressions": 1097, "ctr": 0.024612579762989972, "position": 29.275296262534184, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-17", "device": "MOBILE"}, "emitted_at": 1709213135114} -{"stream": "search_analytics_by_page", "data": {"clicks": 13, "impressions": 210, "ctr": 0.06190476190476191, "position": 8.395238095238096, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-17", "page": "https://discuss.airbyte.io/t/kafka-connection-fails/723"}, "emitted_at": 1709213297272} -{"stream": "search_analytics_by_page", "data": {"clicks": 12, "impressions": 118, "ctr": 0.1016949152542373, "position": 9.23728813559322, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-17", "page": "https://discuss.airbyte.io/t/error-io-grpc-statusruntimeexception-deadline-exceeded-deadline-exceeded-after/254"}, "emitted_at": 1709213297273} -{"stream": "search_analytics_by_query", "data": {"clicks": 5, "impressions": 5, "ctr": 1, "position": 1, "site_url": "sc-domain:airbyte.io", "search_type": "web", 
"date": "2022-10-17", "query": "airbyte login"}, "emitted_at": 1709213455011} -{"stream": "search_analytics_by_query", "data": {"clicks": 4, "impressions": 17, "ctr": 0.23529411764705882, "position": 3, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-17", "query": "kafka \"(id: -1 rack: null) disconnected\""}, "emitted_at": 1709213455013} -{"stream": "search_analytics_all_fields", "data": {"clicks": 2, "impressions": 8, "ctr": 0.25, "position": 3, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-17", "country": "usa", "device": "DESKTOP", "page": "https://discuss.airbyte.io/t/kafka-connection-fails/723", "query": "kafka \"(id: -1 rack: null) disconnected\""}, "emitted_at": 1709213754017} -{"stream": "search_analytics_all_fields", "data": {"clicks": 1, "impressions": 2, "ctr": 0.5, "position": 2, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-17", "country": "alb", "device": "DESKTOP", "page": "https://discuss.airbyte.io/c/issues/11", "query": "airbyte issues"}, "emitted_at": 1709213754019} -{"stream": "custom_dimensions", "data": {"clicks": 116, "impressions": 2996, "ctr": 0.03871829105473965, "position": 17.209946595460615, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-17", "country": "usa", "device": "DESKTOP"}, "emitted_at": 1709213938241} -{"stream": "custom_dimensions", "data": {"clicks": 85, "impressions": 692, "ctr": 0.12283236994219653, "position": 9.426300578034683, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-17", "country": "fra", "device": "DESKTOP"}, "emitted_at": 1709213938244} +{"stream": "search_analytics_by_date", "data": {"clicks": 160, "impressions": 6097, "ctr": 0.026242414302115796, "position": 27.335410857798916, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-22"}, "emitted_at": 1709284338937} +{"stream": "search_analytics_by_date", "data": {"clicks": 227, "impressions": 7309, "ctr": 0.031057600218908195, "position": 25.308523737857435, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-23"}, "emitted_at": 1709284338938} +{"stream": "search_analytics_by_country", "data": {"clicks": 102, "impressions": 3190, "ctr": 0.03197492163009404, "position": 18.926018808777428, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-20", "country": "usa"}, "emitted_at": 1709284488094} +{"stream": "search_analytics_by_country", "data": {"clicks": 85, "impressions": 1270, "ctr": 0.06692913385826772, "position": 15.401574803149606, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-20", "country": "ind"}, "emitted_at": 1709284488095} +{"stream": "search_analytics_by_device", "data": {"clicks": 576, "impressions": 13543, "ctr": 0.04253119692830244, "position": 17.01343867680721, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-20", "device": "DESKTOP"}, "emitted_at": 1709284601472} +{"stream": "search_analytics_by_device", "data": {"clicks": 43, "impressions": 1213, "ctr": 0.03544929925803792, "position": 27.912613355317394, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-20", "device": "MOBILE"}, "emitted_at": 1709284601472} +{"stream": "search_analytics_by_page", "data": {"clicks": 13, "impressions": 176, "ctr": 0.07386363636363637, "position": 7.5227272727272725, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-20", "page": 
"https://discuss.airbyte.io/t/error-io-grpc-statusruntimeexception-deadline-exceeded-deadline-exceeded-after/254"}, "emitted_at": 1709284712124} +{"stream": "search_analytics_by_page", "data": {"clicks": 13, "impressions": 28, "ctr": 0.4642857142857143, "position": 6.678571428571429, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-20", "page": "https://discuss.airbyte.io/t/user-management-oauth-authentication/1287"}, "emitted_at": 1709284712124} +{"stream": "search_analytics_by_query", "data": {"clicks": 5, "impressions": 5, "ctr": 1, "position": 1, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-20", "query": "airbyte discourse"}, "emitted_at": 1709284850261} +{"stream": "search_analytics_by_query", "data": {"clicks": 4, "impressions": 36, "ctr": 0.1111111111111111, "position": 5.027777777777778, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-19", "query": "airbyte connectors"}, "emitted_at": 1709284850262} +{"stream": "search_analytics_all_fields", "data": {"clicks": 2, "impressions": 18, "ctr": 0.1111111111111111, "position": 4.944444444444445, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-19", "country": "usa", "device": "DESKTOP", "page": "https://demo.airbyte.io/", "query": "airbyte connectors"}, "emitted_at": 1709284987285} +{"stream": "search_analytics_all_fields", "data": {"clicks": 2, "impressions": 2, "ctr": 1, "position": 3.5, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-20", "country": "swe", "device": "MOBILE", "page": "https://discuss.airbyte.io/t/advice-for-custom-destination-connector-for-reverse-etl/678", "query": "airbyte reverse etl"}, "emitted_at": 1709284987285} +{"stream": "custom_dimensions", "data": {"clicks": 91, "impressions": 2925, "ctr": 0.03111111111111111, "position": 18.23145299145299, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-20", "country": "usa", "device": "DESKTOP"}, "emitted_at": 1709285094714} +{"stream": "custom_dimensions", "data": {"clicks": 81, "impressions": 1155, "ctr": 0.07012987012987013, "position": 14.841558441558442, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-20", "country": "ind", "device": "DESKTOP"}, "emitted_at": 1709285094714} From bcf32a2d29c7dc601aa0faa00a7e66653ed3bcbe Mon Sep 17 00:00:00 2001 From: Daryna Ishchenko <80129833+darynaishchenko@users.noreply.github.com> Date: Fri, 1 Mar 2024 12:34:29 +0200 Subject: [PATCH 041/172] :sparkles: Source Bing Ads: Stream budget and product dimension performance report (#35201) --- .../acceptance-test-config.yml | 16 ++ .../integration_tests/configured_catalog.json | 50 ++++ .../integration_tests/expected_records.jsonl | 1 + .../expected_records_no_start_date.jsonl | 1 + .../connectors/source-bing-ads/metadata.yaml | 2 +- .../connectors/source-bing-ads/pyproject.toml | 2 +- .../source_bing_ads/bulk_streams.py | 9 + .../source_bing_ads/report_streams.py | 39 +++ .../source_bing_ads/schemas/budget.json | 41 +++ .../product_dimension_performance_report.json | 259 +++++++++++++++++ ...t_dimension_performance_report_hourly.json | 260 ++++++++++++++++++ .../source-bing-ads/source_bing_ads/source.py | 17 +- .../unit_tests/integrations/base_test.py | 60 ++++ .../unit_tests/integrations/client_builder.py | 31 +++ .../unit_tests/integrations/config_builder.py | 44 +++ .../integrations/suds_response_mock.py | 146 ++++++++++ .../integrations/test_budget_stream.py | 54 ++++ 
.../integrations/test_bulk_stream.py | 28 ++ ...st_product_dimension_performance_report.py | 43 +++ .../integrations/test_report_stream.py | 112 ++++++++ .../resource/http/response/oauth.json | 5 + .../unit_tests/resource/response/budget.csv | 3 + .../resource/response/budget_empty.csv | 0 .../response/budget_with_cursor_value.csv | 10 + .../resource/response/budget_with_state.csv | 10 + ...uct_dimension_performance_report_daily.csv | 9 + ...n_performance_report_daily_incremental.csv | 9 + ...ct_dimension_performance_report_hourly.csv | 9 + ..._performance_report_hourly_incremental.csv | 9 + ...t_dimension_performance_report_monthly.csv | 9 + ...performance_report_monthly_incremental.csv | 9 + ...ct_dimension_performance_report_weekly.csv | 9 + ..._performance_report_weekly_incremental.csv | 9 + .../resource/state/budget_state.json | 5 + ...ension_performance_report_daily_state.json | 5 + ...nsion_performance_report_hourly_state.json | 5 + ...sion_performance_report_monthly_state.json | 5 + ...nsion_performance_report_weekly_state.json | 5 + .../unit_tests/test_bulk_streams.py | 8 + .../source-bing-ads/unit_tests/test_source.py | 2 +- docs/integrations/sources/bing-ads.md | 8 +- 41 files changed, 1353 insertions(+), 5 deletions(-) create mode 100644 airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/budget.json create mode 100644 airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/product_dimension_performance_report.json create mode 100644 airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/product_dimension_performance_report_hourly.json create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/base_test.py create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/client_builder.py create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/config_builder.py create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/suds_response_mock.py create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_budget_stream.py create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_bulk_stream.py create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_product_dimension_performance_report.py create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_report_stream.py create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/http/response/oauth.json create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/budget.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/budget_empty.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/budget_with_cursor_value.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/budget_with_state.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_daily.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_daily_incremental.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_hourly.csv 
create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_hourly_incremental.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_monthly.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_monthly_incremental.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_weekly.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_weekly_incremental.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/budget_state.json create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_dimension_performance_report_daily_state.json create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_dimension_performance_report_hourly_state.json create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_dimension_performance_report_monthly_state.json create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_dimension_performance_report_weekly_state.json diff --git a/airbyte-integrations/connectors/source-bing-ads/acceptance-test-config.yml b/airbyte-integrations/connectors/source-bing-ads/acceptance-test-config.yml index 7728fca85880..8a89d4511f33 100644 --- a/airbyte-integrations/connectors/source-bing-ads/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-bing-ads/acceptance-test-config.yml @@ -25,6 +25,14 @@ acceptance_tests: expect_records: path: "integration_tests/expected_records.jsonl" empty_streams: + - name: product_dimension_performance_report_hourly + bypass_reason: "Test Account doesn't have Merchant Center configured to add Products, testing in integration test" + - name: product_dimension_performance_report_daily + bypass_reason: "Test Account doesn't have Merchant Center configured to add Products, testing in integration test" + - name: product_dimension_performance_report_weekly + bypass_reason: "Test Account doesn't have Merchant Center configured to add Products, testing in integration test" + - name: product_dimension_performance_report_monthly + bypass_reason: "Test Account doesn't have Merchant Center configured to add Products, testing in integration test" - name: account_performance_report_hourly bypass_reason: "Hourly reports are disabled, because sync is too long" - name: ad_group_performance_report_hourly @@ -94,6 +102,14 @@ acceptance_tests: expect_records: path: "integration_tests/expected_records_no_start_date.jsonl" empty_streams: + - name: product_dimension_performance_report_hourly + bypass_reason: "Test Account doesn't have Merchant Center configured to add Products, testing in integration test" + - name: product_dimension_performance_report_daily + bypass_reason: "Test Account doesn't have Merchant Center configured to add Products, testing in integration test" + - name: product_dimension_performance_report_weekly + bypass_reason: "Test Account doesn't have Merchant Center configured to add Products, testing in integration test" + - name: product_dimension_performance_report_monthly + bypass_reason: "Test Account doesn't have Merchant Center configured 
to add Products, testing in integration test" - name: app_install_ads bypass_reason: "Can not populate; new campaign with link to app needed; feature is not available yet" - name: app_install_ad_labels diff --git a/airbyte-integrations/connectors/source-bing-ads/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-bing-ads/integration_tests/configured_catalog.json index cdf122f10f7b..83f21c6ff1c4 100644 --- a/airbyte-integrations/connectors/source-bing-ads/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-bing-ads/integration_tests/configured_catalog.json @@ -18,6 +18,16 @@ "sync_mode": "full_refresh", "destination_sync_mode": "append" }, + { + "stream": { + "name": "budget", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "cursor_field": ["Modified Time"], + "destination_sync_mode": "append" + }, { "stream": { "name": "campaigns", @@ -585,6 +595,46 @@ "sync_mode": "incremental", "cursor_field": ["TimePeriod"], "destination_sync_mode": "append" + }, + { + "stream": { + "name": "product_dimension_performance_report_hourly", + "json_schema": {}, + "supported_sync_modes": ["incremental", "full_refresh"] + }, + "sync_mode": "incremental", + "cursor_field": ["TimePeriod"], + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "product_dimension_performance_report_daily", + "json_schema": {}, + "supported_sync_modes": ["incremental", "full_refresh"] + }, + "sync_mode": "incremental", + "cursor_field": ["TimePeriod"], + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "product_dimension_performance_report_weekly", + "json_schema": {}, + "supported_sync_modes": ["incremental", "full_refresh"] + }, + "sync_mode": "incremental", + "cursor_field": ["TimePeriod"], + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "product_dimension_performance_report_monthly", + "json_schema": {}, + "supported_sync_modes": ["incremental", "full_refresh"] + }, + "sync_mode": "incremental", + "cursor_field": ["TimePeriod"], + "destination_sync_mode": "append" } ] } diff --git a/airbyte-integrations/connectors/source-bing-ads/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-bing-ads/integration_tests/expected_records.jsonl index ba3379edc241..2da8a26461d3 100644 --- a/airbyte-integrations/connectors/source-bing-ads/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-bing-ads/integration_tests/expected_records.jsonl @@ -27,3 +27,4 @@ {"stream":"user_location_performance_report_weekly","data":{"AccountName":"Airbyte","AccountNumber":"F149MJ18","AccountId":180519267,"TimePeriod":"2023-12-17","CampaignName":"Airbyte test","CampaignId":531016227,"AdGroupName":"keywords","AdGroupId":1356799861840328,"Country":"United Arab Emirates","State":"Dubai","MetroArea":null,"CurrencyCode":"USD","AdDistribution":"Audience","Impressions":1,"Clicks":0,"Ctr":0.0,"AverageCpc":0.0,"Spend":0.0,"AveragePosition":0.0,"ProximityTargetLocation":null,"Radius":0,"Language":"English","City":"Dubai","QueryIntentCountry":"United Arab Emirates","QueryIntentState":null,"QueryIntentCity":null,"QueryIntentDMA":null,"BidMatchType":"Broad","DeliveredMatchType":"Exact","Network":"Audience","TopVsOther":"Audience 
network","DeviceType":"Smartphone","DeviceOS":"Android","Assists":0,"Conversions":0,"ConversionRate":null,"Revenue":0.0,"ReturnOnAdSpend":null,"CostPerConversion":null,"CostPerAssist":null,"RevenuePerConversion":null,"RevenuePerAssist":null,"County":null,"PostalCode":null,"QueryIntentCounty":null,"QueryIntentPostalCode":null,"LocationId":154645,"QueryIntentLocationId":218,"AllConversions":0,"AllRevenue":0.0,"AllConversionRate":null,"AllCostPerConversion":null,"AllReturnOnAdSpend":null,"AllRevenuePerConversion":null,"ViewThroughConversions":0,"Goal":null,"GoalType":null,"AbsoluteTopImpressionRatePercent":0.0,"TopImpressionRatePercent":0.0,"AverageCpm":0.0,"ConversionsQualified":0.0,"AllConversionsQualified":0.0,"ViewThroughConversionsQualified":null,"Neighborhood":null,"QueryIntentNeighborhood":null,"ViewThroughRevenue":0.0,"CampaignType":"Search & content","AssetGroupId":null,"AssetGroupName":null},"emitted_at":1704833830043} {"stream":"account_impression_performance_report_daily","data":{"AccountName":"Airbyte","AccountNumber":"F149MJ18","AccountId":180519267,"TimePeriod":"2023-12-18","CurrencyCode":"USD","AdDistribution":"Search","Impressions":22,"Clicks":0,"Ctr":0.0,"AverageCpc":0.0,"Spend":0.0,"AveragePosition":0.0,"Conversions":0,"ConversionRate":null,"CostPerConversion":null,"LowQualityClicks":0,"LowQualityClicksPercent":null,"LowQualityImpressions":6,"LowQualityImpressionsPercent":21.43,"LowQualityConversions":0,"LowQualityConversionRate":null,"DeviceType":"Computer","ImpressionSharePercent":34.92,"ImpressionLostToBudgetPercent":1.59,"ImpressionLostToRankAggPercent":63.49,"PhoneImpressions":0,"PhoneCalls":0,"Ptr":null,"Network":"Syndicated search partners","Assists":0,"Revenue":0.0,"ReturnOnAdSpend":null,"CostPerAssist":null,"RevenuePerConversion":null,"RevenuePerAssist":null,"AccountStatus":"Active","LowQualityGeneralClicks":0,"LowQualitySophisticatedClicks":0,"ExactMatchImpressionSharePercent":5.26,"ClickSharePercent":null,"AbsoluteTopImpressionSharePercent":10.2,"TopImpressionShareLostToRankPercent":68.0,"TopImpressionShareLostToBudgetPercent":0.0,"AbsoluteTopImpressionShareLostToRankPercent":89.8,"AbsoluteTopImpressionShareLostToBudgetPercent":0.0,"TopImpressionSharePercent":32.0,"AbsoluteTopImpressionRatePercent":22.73,"TopImpressionRatePercent":72.73,"AllConversions":0,"AllRevenue":0.0,"AllConversionRate":null,"AllCostPerConversion":null,"AllReturnOnAdSpend":null,"AllRevenuePerConversion":null,"ViewThroughConversions":0,"AudienceImpressionSharePercent":null,"AudienceImpressionLostToRankPercent":null,"AudienceImpressionLostToBudgetPercent":null,"AverageCpm":0.0,"ConversionsQualified":0.0,"LowQualityConversionsQualified":0.0,"AllConversionsQualified":0.0,"ViewThroughConversionsQualified":null,"ViewThroughRevenue":0.0,"VideoViews":0,"ViewThroughRate":0.0,"AverageCPV":null,"VideoViewsAt25Percent":0,"VideoViewsAt50Percent":0,"VideoViewsAt75Percent":0,"CompletedVideoViews":0,"VideoCompletionRate":0.0,"TotalWatchTimeInMS":0,"AverageWatchTimePerVideoView":null,"AverageWatchTimePerImpression":0.0,"Sales":0,"CostPerSale":null,"RevenuePerSale":null,"Installs":0,"CostPerInstall":null,"RevenuePerInstall":null},"emitted_at":1704833886551} 
{"stream":"account_impression_performance_report_weekly","data":{"AccountName":"Airbyte","AccountNumber":"F149MJ18","AccountId":180519267,"TimePeriod":"2023-12-17","CurrencyCode":"USD","AdDistribution":"Search","Impressions":639,"Clicks":14,"Ctr":2.19,"AverageCpc":0.12,"Spend":1.74,"AveragePosition":0.0,"Conversions":0,"ConversionRate":0.0,"CostPerConversion":null,"LowQualityClicks":6,"LowQualityClicksPercent":30.0,"LowQualityImpressions":53,"LowQualityImpressionsPercent":7.66,"LowQualityConversions":0,"LowQualityConversionRate":0.0,"DeviceType":"Computer","ImpressionSharePercent":13.57,"ImpressionLostToBudgetPercent":17.96,"ImpressionLostToRankAggPercent":68.47,"PhoneImpressions":0,"PhoneCalls":0,"Ptr":null,"Network":"Syndicated search partners","Assists":0,"Revenue":0.0,"ReturnOnAdSpend":0.0,"CostPerAssist":null,"RevenuePerConversion":null,"RevenuePerAssist":null,"AccountStatus":"Active","LowQualityGeneralClicks":0,"LowQualitySophisticatedClicks":6,"ExactMatchImpressionSharePercent":17.65,"ClickSharePercent":1.28,"AbsoluteTopImpressionSharePercent":3.2,"TopImpressionShareLostToRankPercent":74.15,"TopImpressionShareLostToBudgetPercent":18.25,"AbsoluteTopImpressionShareLostToRankPercent":78.51,"AbsoluteTopImpressionShareLostToBudgetPercent":18.29,"TopImpressionSharePercent":7.6,"AbsoluteTopImpressionRatePercent":22.69,"TopImpressionRatePercent":53.99,"AllConversions":0,"AllRevenue":0.0,"AllConversionRate":0.0,"AllCostPerConversion":null,"AllReturnOnAdSpend":0.0,"AllRevenuePerConversion":null,"ViewThroughConversions":0,"AudienceImpressionSharePercent":null,"AudienceImpressionLostToRankPercent":null,"AudienceImpressionLostToBudgetPercent":null,"AverageCpm":2.72,"ConversionsQualified":0.0,"LowQualityConversionsQualified":0.0,"AllConversionsQualified":0.0,"ViewThroughConversionsQualified":null,"ViewThroughRevenue":0.0,"VideoViews":0,"ViewThroughRate":0.0,"AverageCPV":null,"VideoViewsAt25Percent":0,"VideoViewsAt50Percent":0,"VideoViewsAt75Percent":0,"CompletedVideoViews":0,"VideoCompletionRate":0.0,"TotalWatchTimeInMS":0,"AverageWatchTimePerVideoView":null,"AverageWatchTimePerImpression":0.0,"Sales":0,"CostPerSale":null,"RevenuePerSale":null,"Installs":0,"CostPerInstall":null,"RevenuePerInstall":null},"emitted_at":1704833908003} +{"stream": "budget", "data": {"Type": "Budget", "Status": "Active", "Id": 10239202868095, "Parent Id": 180519267, "Client Id": null, "Modified Time": "2024-02-28T17:52:08.900+00:00", "Budget Id": null, "Budget Name": "Test Shared Budget", "Budget": 2.0, "Budget Type": "DailyBudgetStandard", "Account Id": 180519267}, "emitted_at": 1709228203331} diff --git a/airbyte-integrations/connectors/source-bing-ads/integration_tests/expected_records_no_start_date.jsonl b/airbyte-integrations/connectors/source-bing-ads/integration_tests/expected_records_no_start_date.jsonl index d9be9afa0293..6547e46fa055 100644 --- a/airbyte-integrations/connectors/source-bing-ads/integration_tests/expected_records_no_start_date.jsonl +++ b/airbyte-integrations/connectors/source-bing-ads/integration_tests/expected_records_no_start_date.jsonl @@ -3,3 +3,4 @@ {"stream": "ad_group_labels", "data": {"Status": "Active", "Id": 10239203506495, "Parent Id": 1350201453189474, "Campaign": null, "Ad Group": null, "Client Id": null, "Modified Time": "2023-04-27T18:00:14.970+00:00", "Account Id": 180278106}, "emitted_at": 1701982478843} {"stream": "labels", "data": {"Status": "Active", "Id": 10239203506496, "Client Id": null, "Modified Time": "2023-04-27T17:16:53.430+00:00", "Description": null, "Label": 
"campaign label 2", "Color": "#D8558B", "Account Id": 180278106}, "emitted_at": 1701982532098} {"stream": "campaign_labels", "data": {"Status": "Active", "Id": 10239203506495, "Parent Id": 413732450, "Campaign": null, "Client Id": null, "Modified Time": "2023-04-27T17:57:21.497+00:00", "Account Id": 180278106}, "emitted_at": 1701982600348} +{"stream": "budget", "data": {"Type": "Budget", "Status": "Active", "Id": 10239202868095, "Parent Id": 180519267, "Client Id": null, "Modified Time": "2024-02-28T17:52:08.900+00:00", "Budget Id": null, "Budget Name": "Test Shared Budget", "Budget": 2.0, "Budget Type": "DailyBudgetStandard", "Account Id": 180519267}, "emitted_at": 1709228203331} diff --git a/airbyte-integrations/connectors/source-bing-ads/metadata.yaml b/airbyte-integrations/connectors/source-bing-ads/metadata.yaml index d0bf4b329ece..8d4a1a9bb186 100644 --- a/airbyte-integrations/connectors/source-bing-ads/metadata.yaml +++ b/airbyte-integrations/connectors/source-bing-ads/metadata.yaml @@ -16,7 +16,7 @@ data: connectorSubtype: api connectorType: source definitionId: 47f25999-dd5e-4636-8c39-e7cea2453331 - dockerImageTag: 2.1.4 + dockerImageTag: 2.2.0 dockerRepository: airbyte/source-bing-ads documentationUrl: https://docs.airbyte.com/integrations/sources/bing-ads githubIssueLabel: source-bing-ads diff --git a/airbyte-integrations/connectors/source-bing-ads/pyproject.toml b/airbyte-integrations/connectors/source-bing-ads/pyproject.toml index 17bb4f851c66..043177bac838 100644 --- a/airbyte-integrations/connectors/source-bing-ads/pyproject.toml +++ b/airbyte-integrations/connectors/source-bing-ads/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "2.1.4" +version = "2.2.0" name = "source-bing-ads" description = "Source implementation for Bing Ads." 
authors = [ "Airbyte ",] diff --git a/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/bulk_streams.py b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/bulk_streams.py index 440b0a3607a0..159aba01bf79 100644 --- a/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/bulk_streams.py +++ b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/bulk_streams.py @@ -196,3 +196,12 @@ class AdGroupLabels(BingAdsBulkStream): data_scope = ["EntityData"] download_entities = ["AdGroupLabels"] + + +class Budget(BingAdsBulkStream): + """ + https://learn.microsoft.com/en-us/advertising/bulk-service/budget?view=bingads-13&viewFallbackFrom=bingads-13 + """ + + data_scope = ["EntityData"] + download_entities = ["Budgets"] diff --git a/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/report_streams.py b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/report_streams.py index e1b98a4ec17c..f6e84836c099 100644 --- a/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/report_streams.py +++ b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/report_streams.py @@ -670,6 +670,45 @@ class UserLocationPerformanceReportMonthly(UserLocationPerformanceReport): report_aggregation = "Monthly" +class ProductDimensionPerformanceReport(BingAdsReportingServicePerformanceStream, ABC): + """ + https://learn.microsoft.com/en-us/advertising/reporting-service/productdimensionperformancereportrequest?view=bingads-13 + """ + + report_name: str = "ProductDimensionPerformanceReport" + report_schema_name = "product_dimension_performance_report" + primary_key = None + + @property + def report_columns(self) -> Iterable[str]: + """AccountId is not in reporting columns for this report""" + properties = list(self.get_json_schema().get("properties", {}).keys()) + properties.remove("AccountId") + return properties + + def transform(self, record: MutableMapping[str, Any], stream_slice: Mapping[str, Any], **kwargs) -> MutableMapping[str, Any]: + record = super().transform(record, stream_slice) + record["AccountId"] = stream_slice["account_id"] + return record + + +class ProductDimensionPerformanceReportHourly(HourlyReportTransformerMixin, ProductDimensionPerformanceReport): + report_aggregation = "Hourly" + report_schema_name = "product_dimension_performance_report_hourly" + + +class ProductDimensionPerformanceReportDaily(ProductDimensionPerformanceReport): + report_aggregation = "Daily" + + +class ProductDimensionPerformanceReportWeekly(ProductDimensionPerformanceReport): + report_aggregation = "Weekly" + + +class ProductDimensionPerformanceReportMonthly(ProductDimensionPerformanceReport): + report_aggregation = "Monthly" + + class CustomReport(BingAdsReportingServicePerformanceStream, ABC): transformer: TypeTransformer = TypeTransformer(TransformConfig.DefaultSchemaNormalization) custom_report_columns = [] diff --git a/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/budget.json b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/budget.json new file mode 100644 index 000000000000..a6af3367098b --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/budget.json @@ -0,0 +1,41 @@ +{ + "$schema": "https://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "Account Id": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "Id": { + "type": ["null", 
"integer"] + }, + "Parent Id": { + "type": ["null", "integer"] + }, + "Client Id": { + "type": ["null", "integer"] + }, + "Modified Time": { + "type": ["null", "string"], + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" + }, + "Budget Id": { + "type": ["null", "integer"] + }, + "Budget Name": { + "type": ["null", "string"] + }, + "Budget": { + "type": ["null", "number"] + }, + "Budget Type": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/product_dimension_performance_report.json b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/product_dimension_performance_report.json new file mode 100644 index 000000000000..c4acd7baea44 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/product_dimension_performance_report.json @@ -0,0 +1,259 @@ +{ + "$schema": "https://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "AccountId": { + "type": ["null", "integer"] + }, + "TimePeriod": { + "type": ["null", "string"], + "format": "date" + }, + "AccountName": { + "type": ["null", "string"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "AdGroupName": { + "type": ["null", "string"] + }, + "AdGroupId": { + "type": ["null", "integer"] + }, + "CampaignStatus": { + "type": ["null", "string"] + }, + "AccountStatus": { + "type": ["null", "string"] + }, + "AdGroupStatus": { + "type": ["null", "string"] + }, + "Network": { + "type": ["null", "string"] + }, + "AdId": { + "type": ["null", "integer"] + }, + "CampaignId": { + "type": ["null", "integer"] + }, + "CampaignName": { + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "DeviceType": { + "type": ["null", "string"] + }, + "Language": { + "type": ["null", "string"] + }, + "MerchantProductId": { + "type": ["null", "string"] + }, + "Title": { + "type": ["null", "string"] + }, + "Condition": { + "type": ["null", "string"] + }, + "Brand": { + "type": ["null", "string"] + }, + "Price": { + "type": ["null", "number"] + }, + "Impressions": { + "type": ["null", "integer"] + }, + "Clicks": { + "type": ["null", "integer"] + }, + "Ctr": { + "type": ["null", "number"] + }, + "AverageCpc": { + "type": ["null", "number"] + }, + "Spend": { + "type": ["null", "number"] + }, + "Conversions": { + "type": ["null", "integer"] + }, + "ConversionRate": { + "type": ["null", "number"] + }, + "Revenue": { + "type": ["null", "number"] + }, + "RevenuePerConversion": { + "type": ["null", "number"] + }, + "SellerName": { + "type": ["null", "string"] + }, + "OfferLanguage": { + "type": ["null", "string"] + }, + "CountryOfSale": { + "type": ["null", "string"] + }, + "AdStatus": { + "type": ["null", "string"] + }, + "AdDistribution": { + "type": ["null", "string"] + }, + "ClickTypeId": { + "type": ["null", "string"] + }, + "TotalClicksOnAdElements": { + "type": ["null", "number"] + }, + "ClickType": { + "type": ["null", "string"] + }, + "ReturnOnAdSpend": { + "type": ["null", "number"] + }, + "BidStrategyType": { + "type": ["null", "string"] + }, + "LocalStoreCode": { + "type": ["null", "string"] + }, + "StoreId": { + "type": ["null", "string"] + }, + "AssistedClicks": { + "type": ["null", "string"] + }, + "AssistedConversions": { + "type": ["null", "string"] + }, + "AllConversions": { + "type": ["null", "integer"] + }, + "AllRevenue": { + "type": ["null", "number"] + }, + "AllConversionRate": { + "type": ["null", "number"] + }, + "AllCostPerConversion": { 
+ "type": ["null", "number"] + }, + "AllReturnOnAdSpend": { + "type": ["null", "number"] + }, + "AllRevenuePerConversion": { + "type": ["null", "number"] + }, + "CostPerConversion": { + "type": ["null", "number"] + }, + "ViewThroughConversions": { + "type": ["null", "integer"] + }, + "Goal": { + "type": ["null", "string"] + }, + "GoalType": { + "type": ["null", "string"] + }, + "ProductBought": { + "type": ["null", "string"] + }, + "QuantityBought": { + "type": ["null", "string"] + }, + "AverageCpm": { + "type": ["null", "number"] + }, + "ConversionsQualified": { + "type": ["null", "number"] + }, + "AssistedConversionsQualified": { + "type": ["null", "string"] + }, + "ViewThroughConversionsQualified": { + "type": ["null", "number"] + }, + "ProductBoughtTitle": { + "type": ["null", "string"] + }, + "GTIN": { + "type": ["null", "string"] + }, + "MPN": { + "type": ["null", "string"] + }, + "ViewThroughRevenue": { + "type": ["null", "number"] + }, + "Sales": { + "type": ["null", "integer"] + }, + "CostPerSale": { + "type": ["null", "number"] + }, + "RevenuePerSale": { + "type": ["null", "number"] + }, + "Installs": { + "type": ["null", "integer"] + }, + "CostPerInstall": { + "type": ["null", "number"] + }, + "RevenuePerInstall": { + "type": ["null", "number"] + }, + "CampaignType": { + "type": ["null", "string"] + }, + "AssetGroupId": { + "type": ["null", "string"] + }, + "AssetGroupName": { + "type": ["null", "string"] + }, + "AssetGroupStatus": { + "type": ["null", "string"] + }, + "CustomLabel0": { + "type": ["null", "string"] + }, + "CustomLabel1": { + "type": ["null", "string"] + }, + "CustomLabel2": { + "type": ["null", "string"] + }, + "CustomLabel3": { + "type": ["null", "string"] + }, + "CustomLabel4": { + "type": ["null", "string"] + }, + "ProductType1": { + "type": ["null", "string"] + }, + "ProductType2": { + "type": ["null", "string"] + }, + "ProductType3": { + "type": ["null", "string"] + }, + "ProductType4": { + "type": ["null", "string"] + }, + "ProductType5": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/product_dimension_performance_report_hourly.json b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/product_dimension_performance_report_hourly.json new file mode 100644 index 000000000000..81fd12773d99 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/product_dimension_performance_report_hourly.json @@ -0,0 +1,260 @@ +{ + "$schema": "https://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "AccountId": { + "type": ["null", "integer"] + }, + "TimePeriod": { + "type": ["null", "string"], + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" + }, + "AccountName": { + "type": ["null", "string"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "AdGroupName": { + "type": ["null", "string"] + }, + "AdGroupId": { + "type": ["null", "integer"] + }, + "CampaignStatus": { + "type": ["null", "string"] + }, + "AccountStatus": { + "type": ["null", "string"] + }, + "AdGroupStatus": { + "type": ["null", "string"] + }, + "Network": { + "type": ["null", "string"] + }, + "AdId": { + "type": ["null", "integer"] + }, + "CampaignId": { + "type": ["null", "integer"] + }, + "CampaignName": { + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "DeviceType": { + "type": ["null", "string"] + }, + "Language": { + "type": ["null", "string"] + }, + "MerchantProductId": { 
+ "type": ["null", "string"] + }, + "Title": { + "type": ["null", "string"] + }, + "Condition": { + "type": ["null", "string"] + }, + "Brand": { + "type": ["null", "string"] + }, + "Price": { + "type": ["null", "number"] + }, + "Impressions": { + "type": ["null", "integer"] + }, + "Clicks": { + "type": ["null", "integer"] + }, + "Ctr": { + "type": ["null", "number"] + }, + "AverageCpc": { + "type": ["null", "number"] + }, + "Spend": { + "type": ["null", "number"] + }, + "Conversions": { + "type": ["null", "integer"] + }, + "ConversionRate": { + "type": ["null", "number"] + }, + "Revenue": { + "type": ["null", "number"] + }, + "RevenuePerConversion": { + "type": ["null", "number"] + }, + "SellerName": { + "type": ["null", "string"] + }, + "OfferLanguage": { + "type": ["null", "string"] + }, + "CountryOfSale": { + "type": ["null", "string"] + }, + "AdStatus": { + "type": ["null", "string"] + }, + "AdDistribution": { + "type": ["null", "string"] + }, + "ClickTypeId": { + "type": ["null", "string"] + }, + "TotalClicksOnAdElements": { + "type": ["null", "number"] + }, + "ClickType": { + "type": ["null", "string"] + }, + "ReturnOnAdSpend": { + "type": ["null", "number"] + }, + "BidStrategyType": { + "type": ["null", "string"] + }, + "LocalStoreCode": { + "type": ["null", "string"] + }, + "StoreId": { + "type": ["null", "string"] + }, + "AssistedClicks": { + "type": ["null", "string"] + }, + "AssistedConversions": { + "type": ["null", "string"] + }, + "AllConversions": { + "type": ["null", "integer"] + }, + "AllRevenue": { + "type": ["null", "number"] + }, + "AllConversionRate": { + "type": ["null", "number"] + }, + "AllCostPerConversion": { + "type": ["null", "number"] + }, + "AllReturnOnAdSpend": { + "type": ["null", "number"] + }, + "AllRevenuePerConversion": { + "type": ["null", "number"] + }, + "CostPerConversion": { + "type": ["null", "number"] + }, + "ViewThroughConversions": { + "type": ["null", "integer"] + }, + "Goal": { + "type": ["null", "string"] + }, + "GoalType": { + "type": ["null", "string"] + }, + "ProductBought": { + "type": ["null", "string"] + }, + "QuantityBought": { + "type": ["null", "string"] + }, + "AverageCpm": { + "type": ["null", "number"] + }, + "ConversionsQualified": { + "type": ["null", "number"] + }, + "AssistedConversionsQualified": { + "type": ["null", "string"] + }, + "ViewThroughConversionsQualified": { + "type": ["null", "number"] + }, + "ProductBoughtTitle": { + "type": ["null", "string"] + }, + "GTIN": { + "type": ["null", "string"] + }, + "MPN": { + "type": ["null", "string"] + }, + "ViewThroughRevenue": { + "type": ["null", "number"] + }, + "Sales": { + "type": ["null", "integer"] + }, + "CostPerSale": { + "type": ["null", "number"] + }, + "RevenuePerSale": { + "type": ["null", "number"] + }, + "Installs": { + "type": ["null", "integer"] + }, + "CostPerInstall": { + "type": ["null", "number"] + }, + "RevenuePerInstall": { + "type": ["null", "number"] + }, + "CampaignType": { + "type": ["null", "string"] + }, + "AssetGroupId": { + "type": ["null", "string"] + }, + "AssetGroupName": { + "type": ["null", "string"] + }, + "AssetGroupStatus": { + "type": ["null", "string"] + }, + "CustomLabel0": { + "type": ["null", "string"] + }, + "CustomLabel1": { + "type": ["null", "string"] + }, + "CustomLabel2": { + "type": ["null", "string"] + }, + "CustomLabel3": { + "type": ["null", "string"] + }, + "CustomLabel4": { + "type": ["null", "string"] + }, + "ProductType1": { + "type": ["null", "string"] + }, + "ProductType2": { + "type": ["null", "string"] + }, + 
"ProductType3": { + "type": ["null", "string"] + }, + "ProductType4": { + "type": ["null", "string"] + }, + "ProductType5": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/source.py b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/source.py index 37c2b9bc5d2b..7323a89c46c5 100644 --- a/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/source.py +++ b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/source.py @@ -10,7 +10,16 @@ from airbyte_cdk.sources.streams import Stream from airbyte_cdk.utils import AirbyteTracedException from source_bing_ads.base_streams import Accounts, AdGroups, Ads, Campaigns -from source_bing_ads.bulk_streams import AdGroupLabels, AppInstallAdLabels, AppInstallAds, CampaignLabels, KeywordLabels, Keywords, Labels +from source_bing_ads.bulk_streams import ( + AdGroupLabels, + AppInstallAdLabels, + AppInstallAds, + Budget, + CampaignLabels, + KeywordLabels, + Keywords, + Labels, +) from source_bing_ads.client import Client from source_bing_ads.report_streams import ( # noqa: F401 AccountImpressionPerformanceReportDaily, @@ -56,6 +65,10 @@ KeywordPerformanceReportHourly, KeywordPerformanceReportMonthly, KeywordPerformanceReportWeekly, + ProductDimensionPerformanceReportDaily, + ProductDimensionPerformanceReportHourly, + ProductDimensionPerformanceReportMonthly, + ProductDimensionPerformanceReportWeekly, SearchQueryPerformanceReportDaily, SearchQueryPerformanceReportHourly, SearchQueryPerformanceReportMonthly, @@ -131,6 +144,7 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: AppInstallAds(client, config), AppInstallAdLabels(client, config), Ads(client, config), + Budget(client, config), Campaigns(client, config), BudgetSummaryReport(client, config), Labels(client, config), @@ -150,6 +164,7 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: "CampaignPerformanceReport", "CampaignImpressionPerformanceReport", "GeographicPerformanceReport", + "ProductDimensionPerformanceReport", "SearchQueryPerformanceReport", "UserLocationPerformanceReport", ) diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/base_test.py b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/base_test.py new file mode 100644 index 000000000000..6554e3684009 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/base_test.py @@ -0,0 +1,60 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+import json +from pathlib import Path +from typing import Any, Dict, Optional, Tuple, Union +from unittest import TestCase +from unittest.mock import MagicMock, patch + +from airbyte_cdk.models import SyncMode +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker +from airbyte_cdk.test.state_builder import StateBuilder +from airbyte_protocol.models import AirbyteStateMessage +from bingads.v13.bulk import BulkServiceManager +from bingads.v13.reporting.reporting_service_manager import ReportingServiceManager +from client_builder import build_request, response_with_status +from config_builder import ConfigBuilder +from source_bing_ads.source import SourceBingAds +from suds.transport.https import HttpAuthenticated +from suds_response_mock import mock_http_authenticated_send + + +class BaseTest(TestCase): + + @property + def service_manager(self) -> Union[ReportingServiceManager, BulkServiceManager]: + pass + + def _download_file(self, file: Optional[str] = None) -> Path: + pass + + @property + def _config(self) -> dict[str, Any]: + return ConfigBuilder().build() + + def _state(self, file: str, stream_name: str) -> list[AirbyteStateMessage]: + state_file = Path(__file__).parent.parent / f"resource/state/{file}.json" + with open(state_file, "r") as f: + state = json.loads(f.read()) + return StateBuilder().with_stream_state(stream_name, state).build() + + def auth_client(self, http_mocker: HttpMocker) -> None: + http_mocker.post( + request=build_request(self._config), + responses=response_with_status("oauth", 200) + ) + + def read_stream( + self, + stream_name: str, + sync_mode: SyncMode, + config: Dict[str, Any], + stream_data_file: str = None, + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False, + ) -> Tuple[EntrypointOutput, MagicMock]: + with patch.object(HttpAuthenticated, "send", mock_http_authenticated_send): + with patch.object(self.service_manager, "download_file", return_value=self._download_file(stream_data_file)) as service_call_mock: + catalog = CatalogBuilder().with_stream(stream_name, sync_mode).build() + return read(SourceBingAds(), config, catalog, state, expecting_exception), service_call_mock diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/client_builder.py b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/client_builder.py new file mode 100644 index 000000000000..5efab804e556 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/client_builder.py @@ -0,0 +1,31 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
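+# Helpers for mocking the OAuth token exchange: build_request reproduces the
+# refresh-token POST to login.microsoftonline.com, and response_with_status
+# serves a canned JSON template as the reply.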
+
+import json
+from typing import Any, Dict
+
+from airbyte_cdk.test.mock_http import HttpRequest, HttpResponse
+from airbyte_cdk.test.mock_http.response_builder import find_template
+
+
+def response_with_status(resource: str, status_code: int) -> HttpResponse:
+    return HttpResponse(json.dumps(find_template(resource, __file__)), status_code)
+
+
+def build_request(config: Dict[str, Any]) -> HttpRequest:
+    body = (
+        f"client_id={config['client_id']}"
+        f"&client_secret={config['client_secret']}"
+        "&grant_type=refresh_token"
+        f"&refresh_token={config['refresh_token']}"
+        "&environment=production&scope=https%3A%2F%2Fads.microsoft.com%2Fmsads.manage+offline_access&oauth_scope=msads.manage"
+        f"&tenant={config['tenant_id']}"
+    )
+
+    return HttpRequest(
+        url="https://login.microsoftonline.com/common/oauth2/v2.0/token",
+        query_params={},
+        body=body,
+        headers={
+            "Content-Type": "application/x-www-form-urlencoded"
+        },
+    )
diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/config_builder.py b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/config_builder.py
new file mode 100644
index 000000000000..a31eaff68e6f
--- /dev/null
+++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/config_builder.py
@@ -0,0 +1,44 @@
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+
+import datetime
+from typing import Any, Dict, List, Optional
+
+from airbyte_cdk.test.mock_http.response_builder import find_template
+
+TENANT_ID = "common"
+DEVELOPER_TOKEN = "test-token"
+REFRESH_TOKEN = "test-refresh-token"
+CLIENT_ID = "test-client-id"
+CLIENT_SECRET = "test-client-secret"
+LOOKBACK_WINDOW = 0
+
+
+class ConfigBuilder:
+    def __init__(self) -> None:
+        oauth_fixture: Dict[str, Any] = find_template("oauth", __file__)
+        self._access_token: str = oauth_fixture["access_token"]
+        self._client_id: str = CLIENT_ID
+        self._client_secret: str = CLIENT_SECRET
+        self._refresh_token: str = REFRESH_TOKEN
+        self._developer_token: str = DEVELOPER_TOKEN
+        self._tenant_id: str = TENANT_ID
+        self._report_start_date: Optional[str] = None
+        self._lookback_window: int = LOOKBACK_WINDOW
+
+    def with_reports_start_date(self, start_date: str) -> "ConfigBuilder":
+        self._report_start_date = start_date
+        return self
+
+    def build(self) -> Dict[str, Any]:
+        config = {
+            "tenant_id": self._tenant_id,
+            "developer_token": self._developer_token,
+            "refresh_token": self._refresh_token,
+            "client_id": self._client_id,
+            "client_secret": self._client_secret,
+            "lookback_window": self._lookback_window,
+        }
+        if self._report_start_date:
+            config["reports_start_date"] = self._report_start_date
+        return config
diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/suds_response_mock.py b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/suds_response_mock.py
new file mode 100644
index 000000000000..224a810a402a
--- /dev/null
+++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/suds_response_mock.py
@@ -0,0 +1,146 @@
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
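+# Canned SOAP replies for the patched suds transport, dispatched on the
+# SOAPAction header: GetUser and SearchAccounts are the only calls the tests
+# expect; anything else raises to surface unmocked requests.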
+ +from suds.transport import Reply, Request +from suds.transport.https import HttpAuthenticated + +SEARCH_ACCOUNTS_RESPONSE = b""" + + 6f0a329e-4cb4-4c79-9c08-2dfe601ba05a + + + + + + + 251186883 + USD + ClearFinancialStatus + 180535609 + English + 0 + 2023-08-11T08:24:26.603 + DEMO-ACCOUNT + F149W3B6 + 251186883 + + + 138225488 + Pause + AAAAAH10c1A= + Santiago + 2 + + + + + + + + San Francisco + US + 149694999 + 350 29th avenue + + + + 94121 + CA + + Daxtarity Inc. + + Inactive + + Expert + + + + + +""" + +GET_USER_RESPONSE = b""" + + 762354725472 + + + + + + + City + USD + 12345678 + Test Line + Test Line + Test Line + Test Line + 0671 + State + 12327485 + Test + + 50005 + 7365 + test@mail.com + test + 73456-343 + 83563 + 1232346573 + 736537 + 2645 + 45353 + + 234627 + 276342574 + Title Job + 234722342 + 2024-01-01T01:01:10.327 + 827462346 + + Name First + Name Last + Test + + test + test + test? + test + 2736452 + test + + + key + value + + + token + + + + 8324628 + 726542 + + 180535609 + + + 180535609 + + http://link + + + + + +""" + + +def mock_http_authenticated_send(transport: HttpAuthenticated, request: Request) -> Reply: + if request.headers.get('SOAPAction').decode() == '"GetUser"': + return Reply(code=200, headers={}, message=GET_USER_RESPONSE) + + if request.headers.get('SOAPAction').decode() == '"SearchAccounts"': + return Reply(code=200, headers={}, message=SEARCH_ACCOUNTS_RESPONSE) + + raise Exception(f"Unexpected SOAPAction provided for mock SOAP client: {request.headers.get('SOAPAction').decode()}") diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_budget_stream.py b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_budget_stream.py new file mode 100644 index 000000000000..5f7bd4802613 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_budget_stream.py @@ -0,0 +1,54 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
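+# Integration tests for the new Budget bulk stream, reading CSV fixtures from
+# resource/response through the patched bulk service manager.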
+import pendulum
+from airbyte_cdk.models import SyncMode
+from airbyte_cdk.test.mock_http import HttpMocker
+from freezegun import freeze_time
+from test_bulk_stream import TestBulkStream
+
+
+class TestBudgetStream(TestBulkStream):
+    stream_name = "budget"
+    account_id = "180535609"
+    cursor_field = "Modified Time"
+
+    @HttpMocker()
+    def test_return_records_from_given_csv_file(self, http_mocker: HttpMocker):
+        self.auth_client(http_mocker)
+        output, _ = self.read_stream(self.stream_name, SyncMode.full_refresh, self._config, "budget")
+        assert len(output.records) == 1
+
+    @HttpMocker()
+    def test_return_logged_info_for_empty_csv_file(self, http_mocker: HttpMocker):
+        self.auth_client(http_mocker)
+        output, _ = self.read_stream(self.stream_name, SyncMode.full_refresh, self._config, "budget_empty")
+        assert len(output.records) == 0
+        assert len(output.logs) == 10
+
+    @HttpMocker()
+    def test_transform_records(self, http_mocker: HttpMocker):
+        self.auth_client(http_mocker)
+        output, _ = self.read_stream(self.stream_name, SyncMode.full_refresh, self._config, "budget")
+        assert output.records
+        for record in output.records:
+            assert "Account Id" in record.record.data.keys()
+            assert isinstance(record.record.data["Account Id"], int)
+
+    @HttpMocker()
+    def test_incremental_read_cursor_value_matches_value_from_most_recent_record(self, http_mocker: HttpMocker):
+        self.auth_client(http_mocker)
+        output, _ = self.read_stream(self.stream_name, SyncMode.incremental, self._config, "budget_with_cursor_value")
+        assert len(output.records) == 8
+        assert output.most_recent_state.get(self.stream_name, {}).get(self.account_id, {}) == {self.cursor_field: "2024-01-01T12:54:12.028+00:00"}
+
+    @HttpMocker()
+    @freeze_time("2024-02-26")  # mock the current time, since bulk stream data is only available for the last 30 days
+    def test_incremental_read_with_state(self, http_mocker: HttpMocker):
+        state = self._state("budget_state", self.stream_name)
+        self.auth_client(http_mocker)
+        output, service_call_mock = self.read_stream(self.stream_name, SyncMode.incremental, self._config, "budget_with_state", state)
+        assert len(output.records) == 8
+        assert output.most_recent_state.get(self.stream_name, {}).get(self.account_id, {}) == {self.cursor_field: "2024-01-30T12:54:12.028+00:00"}
+
+        previous_state = state[0].stream.stream_state.dict()
+        # call_args.args[0] is the DownloadParams object passed to the bulk service manager
+        assert service_call_mock.call_args.args[0].last_sync_time_in_utc == pendulum.parse(previous_state[self.account_id][self.cursor_field])
diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_bulk_stream.py b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_bulk_stream.py
new file mode 100644
index 000000000000..9435295921b2
--- /dev/null
+++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_bulk_stream.py
@@ -0,0 +1,28 @@
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+
+from pathlib import Path
+from typing import Optional
+
+from base_test import BaseTest
+from bingads.v13.bulk.bulk_service_manager import BulkServiceManager
+
+
+class TestBulkStream(BaseTest):
+
+    @property
+    def service_manager(self) -> BulkServiceManager:
+        return BulkServiceManager
+
+    def _download_file(self, file: Optional[str] = None) -> Path:
+        """
+        Returns the path to a temporary copy of the downloaded data that is used in the read.
+        The base file should be named {file_name}.csv in the resource/response folder.
+ """ + if file: + path_to_tmp_file = Path(__file__).parent.parent / f"resource/response/{file}_tmp.csv" + path_to_file_base = Path(__file__).parent.parent / f"resource/response/{file}.csv" + with open(path_to_file_base, "r") as f1, open(path_to_tmp_file, "w") as f2: + for line in f1: + f2.write(line) + return path_to_tmp_file + return Path(__file__).parent.parent / "resource/response/non-existing-file.csv" diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_product_dimension_performance_report.py b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_product_dimension_performance_report.py new file mode 100644 index 000000000000..89968b69c67b --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_product_dimension_performance_report.py @@ -0,0 +1,43 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from test_report_stream import TestSuiteReportStream + + +class TestProductDimensionPerformanceReportDailyStream(TestSuiteReportStream): + stream_name = "product_dimension_performance_report_daily" + report_file = "product_dimension_performance_report_daily" + records_number = 8 + state_file = "product_dimension_performance_report_daily_state" + incremental_report_file = "product_dimension_performance_report_daily_incremental" + first_read_state = {"product_dimension_performance_report_daily": {"180535609": {"TimePeriod": "2023-12-17"}}} + second_read_state = {"product_dimension_performance_report_daily": {"180535609": {"TimePeriod": "2023-12-25"}}} + + +class TestProductDimensionPerformanceReportHourlyStream(TestSuiteReportStream): + stream_name = "product_dimension_performance_report_hourly" + report_file = "product_dimension_performance_report_hourly" + records_number = 8 + state_file = "product_dimension_performance_report_hourly_state" + incremental_report_file = "product_dimension_performance_report_hourly_incremental" + first_read_state = {"product_dimension_performance_report_hourly": {"180535609": {"TimePeriod": "2023-11-11T01:00:00+00:00"}}} + second_read_state = {"product_dimension_performance_report_hourly": {"180535609": {"TimePeriod": "2023-11-12T01:00:00+00:00"}}} + + +class TestProductDimensionPerformanceReportWeeklyStream(TestSuiteReportStream): + stream_name = "product_dimension_performance_report_weekly" + report_file = "product_dimension_performance_report_weekly" + records_number = 8 + state_file = "product_dimension_performance_report_weekly_state" + incremental_report_file = "product_dimension_performance_report_weekly_incremental" + first_read_state = {"product_dimension_performance_report_weekly": {"180535609": {"TimePeriod": "2023-12-17"}}} + second_read_state = {"product_dimension_performance_report_weekly": {"180535609": {"TimePeriod": "2023-12-25"}}} + + +class TestProductDimensionPerformanceReportMonthlyStream(TestSuiteReportStream): + stream_name = "product_dimension_performance_report_monthly" + report_file = "product_dimension_performance_report_monthly" + records_number = 8 + state_file = "product_dimension_performance_report_monthly_state" + incremental_report_file = "product_dimension_performance_report_monthly_incremental" + first_read_state = {"product_dimension_performance_report_monthly": {"180535609": {"TimePeriod": "2023-12-01"}}} + second_read_state = {"product_dimension_performance_report_monthly": {"180535609": {"TimePeriod": "2024-01-01"}}} diff --git 
a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_report_stream.py b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_report_stream.py
new file mode 100644
index 000000000000..0678ec45aed8
--- /dev/null
+++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_report_stream.py
@@ -0,0 +1,112 @@
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+
+from pathlib import Path
+from typing import Any, Optional
+
+import pendulum
+from airbyte_cdk.models import SyncMode
+from airbyte_cdk.test.mock_http import HttpMocker
+from base_test import BaseTest
+from bingads.v13.reporting.reporting_service_manager import ReportingServiceManager
+from config_builder import ConfigBuilder
+
+
+class TestReportStream(BaseTest):
+    start_date = "2024-01-01"
+
+    @property
+    def service_manager(self) -> ReportingServiceManager:
+        return ReportingServiceManager
+
+    @property
+    def _config(self) -> dict[str, Any]:
+        return ConfigBuilder().with_reports_start_date(self.start_date).build()
+
+    def _download_file(self, file: Optional[str] = None) -> Path:
+        """
+        Returns the path to the CSV file used as the downloaded report data in the read.
+        The base file should be named {file_name}.csv in the resource/response folder.
+        """
+        if file:
+            path_to_file = Path(__file__).parent.parent / f"resource/response/{file}.csv"
+            return path_to_file
+        return Path(__file__).parent.parent / "resource/response/non-existing-file.csv"
+
+
+class TestSuiteReportStream(TestReportStream):
+    stream_name: Optional[str] = None
+    report_file: str
+    records_number: int
+    state_file: str
+    incremental_report_file: str
+    first_read_state: dict
+    second_read_state: dict
+    transform_field: str = "AccountId"
+    account_id: str = "180535609"
+    cursor_field = "TimePeriod"
+
+    def setUp(self):
+        if not self.stream_name:
+            self.skipTest("Skipping TestSuiteReportStream")
+
+    @HttpMocker()
+    def test_return_records_from_given_csv_file(self, http_mocker: HttpMocker):
+        self.auth_client(http_mocker)
+        output, _ = self.read_stream(
+            self.stream_name,
+            SyncMode.full_refresh,
+            self._config,
+            self.report_file
+        )
+        assert len(output.records) == self.records_number
+
+    @HttpMocker()
+    def test_transform_records_from_given_csv_file(self, http_mocker: HttpMocker):
+        self.auth_client(http_mocker)
+        output, _ = self.read_stream(
+            self.stream_name,
+            SyncMode.full_refresh,
+            self._config,
+            self.report_file
+        )
+
+        assert len(output.records) == self.records_number
+        for record in output.records:
+            assert self.transform_field in record.record.data.keys()
+
+    @HttpMocker()
+    def test_incremental_read_returns_records(self, http_mocker: HttpMocker):
+        self.auth_client(http_mocker)
+        output, _ = self.read_stream(
+            self.stream_name,
+            SyncMode.incremental,
+            self._config,
+            self.report_file
+        )
+        assert len(output.records) == self.records_number
+        assert output.most_recent_state == self.first_read_state
+
+    @HttpMocker()
+    def test_incremental_read_with_state_returns_records(self, http_mocker: HttpMocker):
+        state = self._state(self.state_file, self.stream_name)
+        self.auth_client(http_mocker)
+        output, service_call_mock = self.read_stream(
+            self.stream_name,
+            SyncMode.incremental,
+            self._config,
+            self.incremental_report_file,
+            state
+        )
+        assert len(output.records) == self.records_number
+
+        actual_cursor = output.most_recent_state.get(self.stream_name).get(self.account_id)
+        expected_cursor = self.second_read_state.get(self.stream_name).get(self.account_id)
+        assert actual_cursor ==
expected_cursor + + provided_state = state[0].stream.stream_state.dict()[self.account_id][self.cursor_field] + # gets ReportDownloadParams object + request_start_date = service_call_mock.call_args.args[0].report_request.Time.CustomDateRangeStart + year = request_start_date.Year + month = request_start_date.Month + day = request_start_date.Day + assert pendulum.DateTime(year, month, day, tzinfo=pendulum.UTC) == pendulum.parse(provided_state) diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/http/response/oauth.json b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/http/response/oauth.json new file mode 100644 index 000000000000..217f1d0f12da --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/http/response/oauth.json @@ -0,0 +1,5 @@ +{ + "access_token": "access_token", + "expires_in": 111111, + "refresh_token": "refresh_token" +} diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/budget.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/budget.csv new file mode 100644 index 000000000000..764d241d6722 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/budget.csv @@ -0,0 +1,3 @@ +Type,Status,Id,Parent Id,Client Id,Modified Time,Budget Id,Budget Name,Budget,Budget Type,Name +Format Version,,,,,,,,,,6.0 +Budget,Active,-20,0,23645271,,,My Shared Budget,50,DailyBudgetStandard, \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/budget_empty.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/budget_empty.csv new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/budget_with_cursor_value.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/budget_with_cursor_value.csv new file mode 100644 index 000000000000..cad3c6249d2c --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/budget_with_cursor_value.csv @@ -0,0 +1,10 @@ +Type,Status,Id,Parent Id,Client Id,Modified Time,Budget Id,Budget Name,Budget,Budget Type,Name +Format Version,,,,,,,,,,6.0 +Budget,Active,-20,0,23645271,01/01/2024 12:12:12.02837,,My Shared Budget,50,DailyBudgetStandard, +Budget,Active,-20,0,23645271,01/01/2024 12:13:12.02837,,My Shared Budget,50,DailyBudgetStandard, +Budget,Active,-20,0,23645271,01/01/2024 12:14:12.02837,,My Shared Budget,50,DailyBudgetStandard, +Budget,Active,-20,0,23645271,01/01/2024 12:15:12.02837,,My Shared Budget,50,DailyBudgetStandard, +Budget,Active,-20,0,23645271,01/01/2024 12:17:12.02837,,My Shared Budget,50,DailyBudgetStandard, +Budget,Active,-20,0,23645271,01/01/2024 12:23:12.02837,,My Shared Budget,50,DailyBudgetStandard, +Budget,Active,-20,0,23645271,01/01/2024 12:43:12.02837,,My Shared Budget,50,DailyBudgetStandard, +Budget,Active,-20,0,23645271,01/01/2024 12:54:12.02837,,My Shared Budget,50,DailyBudgetStandard, \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/budget_with_state.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/budget_with_state.csv new file mode 100644 index 000000000000..6dcbbf10b211 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/budget_with_state.csv @@ -0,0 +1,10 @@ 
+Type,Status,Id,Parent Id,Client Id,Modified Time,Budget Id,Budget Name,Budget,Budget Type,Name +Format Version,,,,,,,,,,6.0 +Budget,Active,-20,0,23645271,01/30/2024 12:12:12.02837,,My Shared Budget,50,DailyBudgetStandard, +Budget,Active,-20,0,23645271,01/30/2024 12:13:12.02837,,My Shared Budget,50,DailyBudgetStandard, +Budget,Active,-20,0,23645271,01/30/2024 12:14:12.02837,,My Shared Budget,50,DailyBudgetStandard, +Budget,Active,-20,0,23645271,01/30/2024 12:15:12.02837,,My Shared Budget,50,DailyBudgetStandard, +Budget,Active,-20,0,23645271,01/30/2024 12:17:12.02837,,My Shared Budget,50,DailyBudgetStandard, +Budget,Active,-20,0,23645271,01/30/2024 12:23:12.02837,,My Shared Budget,50,DailyBudgetStandard, +Budget,Active,-20,0,23645271,01/30/2024 12:43:12.02837,,My Shared Budget,50,DailyBudgetStandard, +Budget,Active,-20,0,23645271,01/30/2024 12:54:12.02837,,My Shared Budget,50,DailyBudgetStandard, \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_daily.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_daily.csv new file mode 100644 index 000000000000..727529543895 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_daily.csv @@ -0,0 +1,9 @@ +TimePeriod,AccountName,AccountNumber,AdGroupName,AdGroupId,CampaignStatus,AccountStatus,AdGroupStatus,Network,AdId,CampaignId,CampaignName,CurrencyCode,DeviceType,Language,MerchantProductId,Title,Condition,Brand,Price,Impressions,Clicks,Ctr,AverageCpc,Spend,Conversions,ConversionRate,Revenue,RevenuePerConversion,SellerName,OfferLanguage,CountryOfSale,AdStatus,AdDistribution,ClickTypeId,TotalClicksOnAdElements,ClickType,ReturnOnAdSpend,BidStrategyType,LocalStoreCode,StoreId,AssistedClicks,AssistedConversions,AllConversions,AllRevenue,AllConversionRate,AllCostPerConversion,AllReturnOnAdSpend,AllRevenuePerConversion,CostPerConversion,ViewThroughConversions,Goal,GoalType,ProductBought,QuantityBought,AverageCpm,ConversionsQualified,AssistedConversionsQualified,ViewThroughConversionsQualified,ProductBoughtTitle,GTIN,MPN,ViewThroughRevenue,Sales,CostPerSale,RevenuePerSale,Installs,CostPerInstall,RevenuePerInstall,CampaignType,AssetGroupId,AssetGroupName,AssetGroupStatus,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-17,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-17,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 
+2023-12-17,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-17,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-17,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-17,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-17,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-17,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_daily_incremental.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_daily_incremental.csv new file mode 100644 index 000000000000..ea0830b65172 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_daily_incremental.csv @@ -0,0 +1,9 @@ 
+TimePeriod,AccountName,AccountNumber,AdGroupName,AdGroupId,CampaignStatus,AccountStatus,AdGroupStatus,Network,AdId,CampaignId,CampaignName,CurrencyCode,DeviceType,Language,MerchantProductId,Title,Condition,Brand,Price,Impressions,Clicks,Ctr,AverageCpc,Spend,Conversions,ConversionRate,Revenue,RevenuePerConversion,SellerName,OfferLanguage,CountryOfSale,AdStatus,AdDistribution,ClickTypeId,TotalClicksOnAdElements,ClickType,ReturnOnAdSpend,BidStrategyType,LocalStoreCode,StoreId,AssistedClicks,AssistedConversions,AllConversions,AllRevenue,AllConversionRate,AllCostPerConversion,AllReturnOnAdSpend,AllRevenuePerConversion,CostPerConversion,ViewThroughConversions,Goal,GoalType,ProductBought,QuantityBought,AverageCpm,ConversionsQualified,AssistedConversionsQualified,ViewThroughConversionsQualified,ProductBoughtTitle,GTIN,MPN,ViewThroughRevenue,Sales,CostPerSale,RevenuePerSale,Installs,CostPerInstall,RevenuePerInstall,CampaignType,AssetGroupId,AssetGroupName,AssetGroupStatus,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-18,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-19,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-20,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-21,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-22,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-11-11T01:15:00+00:00,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 
+2023-12-24,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-25,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_hourly.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_hourly.csv new file mode 100644 index 000000000000..a4bc888df397 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_hourly.csv @@ -0,0 +1,9 @@ +TimePeriod,AccountName,AccountNumber,AdGroupName,AdGroupId,CampaignStatus,AccountStatus,AdGroupStatus,Network,AdId,CampaignId,CampaignName,CurrencyCode,DeviceType,Language,MerchantProductId,Title,Condition,Brand,Price,Impressions,Clicks,Ctr,AverageCpc,Spend,Conversions,ConversionRate,Revenue,RevenuePerConversion,SellerName,OfferLanguage,CountryOfSale,AdStatus,AdDistribution,ClickTypeId,TotalClicksOnAdElements,ClickType,ReturnOnAdSpend,BidStrategyType,LocalStoreCode,StoreId,AssistedClicks,AssistedConversions,AllConversions,AllRevenue,AllConversionRate,AllCostPerConversion,AllReturnOnAdSpend,AllRevenuePerConversion,CostPerConversion,ViewThroughConversions,Goal,GoalType,ProductBought,QuantityBought,AverageCpm,ConversionsQualified,AssistedConversionsQualified,ViewThroughConversionsQualified,ProductBoughtTitle,GTIN,MPN,ViewThroughRevenue,Sales,CostPerSale,RevenuePerSale,Installs,CostPerInstall,RevenuePerInstall,CampaignType,AssetGroupId,AssetGroupName,AssetGroupStatus,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-11-11|01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-11-11|01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-11-11|01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 
+2023-11-11|01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-11-11|01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-11-11|01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-11-11|01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-11-11|01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_hourly_incremental.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_hourly_incremental.csv new file mode 100644 index 000000000000..0c5dfa2da5bd --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_hourly_incremental.csv @@ -0,0 +1,9 @@ +TimePeriod,AccountName,AccountNumber,AdGroupName,AdGroupId,CampaignStatus,AccountStatus,AdGroupStatus,Network,AdId,CampaignId,CampaignName,CurrencyCode,DeviceType,Language,MerchantProductId,Title,Condition,Brand,Price,Impressions,Clicks,Ctr,AverageCpc,Spend,Conversions,ConversionRate,Revenue,RevenuePerConversion,SellerName,OfferLanguage,CountryOfSale,AdStatus,AdDistribution,ClickTypeId,TotalClicksOnAdElements,ClickType,ReturnOnAdSpend,BidStrategyType,LocalStoreCode,StoreId,AssistedClicks,AssistedConversions,AllConversions,AllRevenue,AllConversionRate,AllCostPerConversion,AllReturnOnAdSpend,AllRevenuePerConversion,CostPerConversion,ViewThroughConversions,Goal,GoalType,ProductBought,QuantityBought,AverageCpm,ConversionsQualified,AssistedConversionsQualified,ViewThroughConversionsQualified,ProductBoughtTitle,GTIN,MPN,ViewThroughRevenue,Sales,CostPerSale,RevenuePerSale,Installs,CostPerInstall,RevenuePerInstall,CampaignType,AssetGroupId,AssetGroupName,AssetGroupStatus,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 
+2023-11-12|01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-11-12|01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-11-12|01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-11-12|01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-11-12|01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-11-12|01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-11-12|01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-11-12|01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_monthly.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_monthly.csv new file mode 100644 index 000000000000..ebe759d45fe3 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_monthly.csv @@ -0,0 +1,9 @@ 
+TimePeriod,AccountName,AccountNumber,AdGroupName,AdGroupId,CampaignStatus,AccountStatus,AdGroupStatus,Network,AdId,CampaignId,CampaignName,CurrencyCode,DeviceType,Language,MerchantProductId,Title,Condition,Brand,Price,Impressions,Clicks,Ctr,AverageCpc,Spend,Conversions,ConversionRate,Revenue,RevenuePerConversion,SellerName,OfferLanguage,CountryOfSale,AdStatus,AdDistribution,ClickTypeId,TotalClicksOnAdElements,ClickType,ReturnOnAdSpend,BidStrategyType,LocalStoreCode,StoreId,AssistedClicks,AssistedConversions,AllConversions,AllRevenue,AllConversionRate,AllCostPerConversion,AllReturnOnAdSpend,AllRevenuePerConversion,CostPerConversion,ViewThroughConversions,Goal,GoalType,ProductBought,QuantityBought,AverageCpm,ConversionsQualified,AssistedConversionsQualified,ViewThroughConversionsQualified,ProductBoughtTitle,GTIN,MPN,ViewThroughRevenue,Sales,CostPerSale,RevenuePerSale,Installs,CostPerInstall,RevenuePerInstall,CampaignType,AssetGroupId,AssetGroupName,AssetGroupStatus,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 
+2023-12-01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_monthly_incremental.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_monthly_incremental.csv new file mode 100644 index 000000000000..abb1a3247ba7 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_monthly_incremental.csv @@ -0,0 +1,9 @@ +TimePeriod,AccountName,AccountNumber,AdGroupName,AdGroupId,CampaignStatus,AccountStatus,AdGroupStatus,Network,AdId,CampaignId,CampaignName,CurrencyCode,DeviceType,Language,MerchantProductId,Title,Condition,Brand,Price,Impressions,Clicks,Ctr,AverageCpc,Spend,Conversions,ConversionRate,Revenue,RevenuePerConversion,SellerName,OfferLanguage,CountryOfSale,AdStatus,AdDistribution,ClickTypeId,TotalClicksOnAdElements,ClickType,ReturnOnAdSpend,BidStrategyType,LocalStoreCode,StoreId,AssistedClicks,AssistedConversions,AllConversions,AllRevenue,AllConversionRate,AllCostPerConversion,AllReturnOnAdSpend,AllRevenuePerConversion,CostPerConversion,ViewThroughConversions,Goal,GoalType,ProductBought,QuantityBought,AverageCpm,ConversionsQualified,AssistedConversionsQualified,ViewThroughConversionsQualified,ProductBoughtTitle,GTIN,MPN,ViewThroughRevenue,Sales,CostPerSale,RevenuePerSale,Installs,CostPerInstall,RevenuePerInstall,CampaignType,AssetGroupId,AssetGroupName,AssetGroupStatus,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2024-01-01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2024-01-01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2024-01-01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 
+2024-01-01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2024-01-01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2024-01-01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2024-01-01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2024-01-01,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_weekly.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_weekly.csv new file mode 100644 index 000000000000..727529543895 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_weekly.csv @@ -0,0 +1,9 @@ +TimePeriod,AccountName,AccountNumber,AdGroupName,AdGroupId,CampaignStatus,AccountStatus,AdGroupStatus,Network,AdId,CampaignId,CampaignName,CurrencyCode,DeviceType,Language,MerchantProductId,Title,Condition,Brand,Price,Impressions,Clicks,Ctr,AverageCpc,Spend,Conversions,ConversionRate,Revenue,RevenuePerConversion,SellerName,OfferLanguage,CountryOfSale,AdStatus,AdDistribution,ClickTypeId,TotalClicksOnAdElements,ClickType,ReturnOnAdSpend,BidStrategyType,LocalStoreCode,StoreId,AssistedClicks,AssistedConversions,AllConversions,AllRevenue,AllConversionRate,AllCostPerConversion,AllReturnOnAdSpend,AllRevenuePerConversion,CostPerConversion,ViewThroughConversions,Goal,GoalType,ProductBought,QuantityBought,AverageCpm,ConversionsQualified,AssistedConversionsQualified,ViewThroughConversionsQualified,ProductBoughtTitle,GTIN,MPN,ViewThroughRevenue,Sales,CostPerSale,RevenuePerSale,Installs,CostPerInstall,RevenuePerInstall,CampaignType,AssetGroupId,AssetGroupName,AssetGroupStatus,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 
+2023-12-17,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-17,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-17,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-17,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-17,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-17,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-17,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-17,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_weekly_incremental.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_weekly_incremental.csv new file mode 100644 index 000000000000..ea0830b65172 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_dimension_performance_report_weekly_incremental.csv @@ -0,0 +1,9 @@ 
+TimePeriod,AccountName,AccountNumber,AdGroupName,AdGroupId,CampaignStatus,AccountStatus,AdGroupStatus,Network,AdId,CampaignId,CampaignName,CurrencyCode,DeviceType,Language,MerchantProductId,Title,Condition,Brand,Price,Impressions,Clicks,Ctr,AverageCpc,Spend,Conversions,ConversionRate,Revenue,RevenuePerConversion,SellerName,OfferLanguage,CountryOfSale,AdStatus,AdDistribution,ClickTypeId,TotalClicksOnAdElements,ClickType,ReturnOnAdSpend,BidStrategyType,LocalStoreCode,StoreId,AssistedClicks,AssistedConversions,AllConversions,AllRevenue,AllConversionRate,AllCostPerConversion,AllReturnOnAdSpend,AllRevenuePerConversion,CostPerConversion,ViewThroughConversions,Goal,GoalType,ProductBought,QuantityBought,AverageCpm,ConversionsQualified,AssistedConversionsQualified,ViewThroughConversionsQualified,ProductBoughtTitle,GTIN,MPN,ViewThroughRevenue,Sales,CostPerSale,RevenuePerSale,Installs,CostPerInstall,RevenuePerInstall,CampaignType,AssetGroupId,AssetGroupName,AssetGroupStatus,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-18,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-19,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-20,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-21,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-22,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-11-11T01:15:00+00:00,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 
+2023-12-24,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 +2023-12-25,TestAccount,123456,TestGroup,212344,Active,Active,,,12345,123456778,USD,Computer,English,123455,Title,,,45,45,0,0,0,0,0,0,0,0,0,TestName,English,USA,Active,,12124,1233,,,,,5675,0,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,23,234,CustomLabel0,CustomLabel1,CustomLabel2,CustomLabel3,CustomLabel4,ProductType1,ProductType2,ProductType3,ProductType4,ProductType5 \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/budget_state.json b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/budget_state.json new file mode 100644 index 000000000000..7c234188ff04 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/budget_state.json @@ -0,0 +1,5 @@ +{ + "180535609": { + "Modified Time": "2024-01-29T12:54:12.028+00:00" + } +} diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_dimension_performance_report_daily_state.json b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_dimension_performance_report_daily_state.json new file mode 100644 index 000000000000..daa759dfec58 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_dimension_performance_report_daily_state.json @@ -0,0 +1,5 @@ +{ + "180535609": { + "TimePeriod": "2023-11-11" + } +} diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_dimension_performance_report_hourly_state.json b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_dimension_performance_report_hourly_state.json new file mode 100644 index 000000000000..daa759dfec58 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_dimension_performance_report_hourly_state.json @@ -0,0 +1,5 @@ +{ + "180535609": { + "TimePeriod": "2023-11-11" + } +} diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_dimension_performance_report_monthly_state.json b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_dimension_performance_report_monthly_state.json new file mode 100644 index 000000000000..864a77903e29 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_dimension_performance_report_monthly_state.json @@ -0,0 +1,5 @@ +{ + "180535609": { + "TimePeriod": "2023-12-01" + } +} diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_dimension_performance_report_weekly_state.json b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_dimension_performance_report_weekly_state.json new file mode 100644 index 000000000000..daa759dfec58 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_dimension_performance_report_weekly_state.json @@ -0,0 +1,5 @@ +{ + "180535609": { + "TimePeriod": "2023-11-11" + } +} diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/test_bulk_streams.py 
b/airbyte-integrations/connectors/source-bing-ads/unit_tests/test_bulk_streams.py index c69f77e9bea2..9a96becee8c6 100644 --- a/airbyte-integrations/connectors/source-bing-ads/unit_tests/test_bulk_streams.py +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/test_bulk_streams.py @@ -103,6 +103,14 @@ def test_bulk_stream_read_with_chunks_app_install_ad_labels(mocked_client, confi } +@patch.object(source_bing_ads.source, "Client") +def test_bulk_stream_read_with_chunks_ioe_error(mocked_client, config, caplog): + app_install_ads = AppInstallAdLabels(mocked_client, config) + with pytest.raises(IOError): + list(app_install_ads.read_with_chunks(path=Path(__file__).parent / "non-existing-file.csv")) + assert "The IO/Error occurred while reading tmp data" in caplog.text + + @patch.object(source_bing_ads.source, "Client") @freeze_time("2023-11-01T12:00:00.000+00:00") @pytest.mark.parametrize( diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/test_source.py b/airbyte-integrations/connectors/source-bing-ads/unit_tests/test_source.py index 210ccf1031be..5293baef52b0 100644 --- a/airbyte-integrations/connectors/source-bing-ads/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/test_source.py @@ -16,7 +16,7 @@ @patch.object(source_bing_ads.source, "Client") def test_streams_config_based(mocked_client, config): streams = SourceBingAds().streams(config) - assert len(streams) == 60 + assert len(streams) == 65 @patch.object(source_bing_ads.source, "Client") diff --git a/docs/integrations/sources/bing-ads.md b/docs/integrations/sources/bing-ads.md index 3cedce121288..2214886b6137 100644 --- a/docs/integrations/sources/bing-ads.md +++ b/docs/integrations/sources/bing-ads.md @@ -113,6 +113,7 @@ The Bing Ads source connector supports the following streams. For more informati - [Ads](https://docs.microsoft.com/en-us/advertising/campaign-management-service/getadsbyadgroupid?view=bingads-13) - [App Install Ads](https://learn.microsoft.com/en-us/advertising/bulk-service/app-install-ad?view=bingads-13) - [App Install Ad Labels](https://learn.microsoft.com/en-us/advertising/bulk-service/app-install-ad-label?view=bingads-13) +- [Budget](https://learn.microsoft.com/en-us/advertising/bulk-service/budget?view=bingads-13&viewFallbackFrom=bingads-13) - [Campaigns](https://docs.microsoft.com/en-us/advertising/campaign-management-service/getcampaignsbyaccountid?view=bingads-13) - [Campaign Labels](https://learn.microsoft.com/en-us/advertising/bulk-service/campaign-label?view=bingads-13) - [Keywords](https://learn.microsoft.com/en-us/advertising/bulk-service/keyword?view=bingads-13) @@ -166,6 +167,10 @@ The Bing Ads source connector supports the following streams. 
For more informati - [User Location Performance Report Daily](https://learn.microsoft.com/en-us/advertising/reporting-service/userlocationperformancereportrequest?view=bingads-13) - [User Location Performance Report Weekly](https://learn.microsoft.com/en-us/advertising/reporting-service/userlocationperformancereportrequest?view=bingads-13) - [User Location Performance Report Monthly](https://learn.microsoft.com/en-us/advertising/reporting-service/userlocationperformancereportrequest?view=bingads-13) +- [Product Dimension Performance Report Hourly](https://learn.microsoft.com/en-us/advertising/reporting-service/productdimensionperformancereportrequest?view=bingads-13) +- [Product Dimension Performance Report Daily](https://learn.microsoft.com/en-us/advertising/reporting-service/productdimensionperformancereportrequest?view=bingads-13) +- [Product Dimension Performance Report Weekly](https://learn.microsoft.com/en-us/advertising/reporting-service/productdimensionperformancereportrequest?view=bingads-13) +- [Product Dimension Performance Report Monthly](https://learn.microsoft.com/en-us/advertising/reporting-service/productdimensionperformancereportrequest?view=bingads-13) - [Search Query Performance Report Hourly](https://learn.microsoft.com/en-us/advertising/reporting-service/searchqueryperformancereportrequest?view=bingads-13) - [Search Query Performance Report Daily](https://learn.microsoft.com/en-us/advertising/reporting-service/searchqueryperformancereportrequest?view=bingads-13) - [Search Query Performance Report Weekly](https://learn.microsoft.com/en-us/advertising/reporting-service/searchqueryperformancereportrequest?view=bingads-13) @@ -226,7 +231,8 @@ The Bing Ads API limits the number of requests for all Microsoft Advertising cli | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------| -| 2.1.4 | 2024-02-12 | [35179](https://github.com/airbytehq/airbyte/pull/35179) | Manage dependencies with Poetry. | +| 2.2.0 | 2024-02-13 | [35201](https://github.com/airbytehq/airbyte/pull/35201) | New streams: Budget and Product Dimension Performance Report | +| 2.1.4 | 2024-02-12 | [35179](https://github.com/airbytehq/airbyte/pull/35179) | Manage dependencies with Poetry.
| | 2.1.3 | 2024-01-31 | [34712](https://github.com/airbytehq/airbyte/pull/34712) | Fix duplicated records for report-based streams | | 2.1.2 | 2024-01-09 | [34045](https://github.com/airbytehq/airbyte/pull/34045) | Speed up record transformation | | 2.1.1 | 2023-12-15 | [33500](https://github.com/airbytehq/airbyte/pull/33500) | Fix state setter when state was provided | From 55d206b90e4c49758dde2853a72dabfa5465d11d Mon Sep 17 00:00:00 2001 From: Anton Karpets Date: Fri, 1 Mar 2024 14:10:46 +0200 Subject: [PATCH 042/172] =?UTF-8?q?=F0=9F=8F=A5Source=20Amplitude:=20fix?= =?UTF-8?q?=20CAT=20(#35743)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../connectors/source-amplitude/acceptance-test-config.yml | 7 +++++-- .../integration_tests/expected_records.jsonl | 4 ---- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/airbyte-integrations/connectors/source-amplitude/acceptance-test-config.yml b/airbyte-integrations/connectors/source-amplitude/acceptance-test-config.yml index 2af9f234c98c..f916910191bf 100644 --- a/airbyte-integrations/connectors/source-amplitude/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-amplitude/acceptance-test-config.yml @@ -7,7 +7,7 @@ acceptance_tests: tests: - spec_path: "source_amplitude/spec.yaml" backward_compatibility_tests_config: - disable_for_version: 0.3.2 # `start_date` format changed to format: date-time + disable_for_version: 0.3.2 # `start_date` format changed to format: date-time connection: tests: - config_path: "secrets/config.json" @@ -18,7 +18,7 @@ acceptance_tests: tests: - config_path: "secrets/config.json" backward_compatibility_tests_config: - disable_for_version: 0.3.5 # `date` format changed to format: date-time in the AverageSessionLength stream + disable_for_version: 0.3.5 # `date` format changed to format: date-time in the AverageSessionLength stream basic_read: tests: - config_path: "secrets/config.json" @@ -27,6 +27,8 @@ acceptance_tests: bypass_reason: "This stream is empty due to free subscription plan for the sandbox." - name: "annotations" bypass_reason: "This stream is empty due to free subscription plan for the sandbox." + - name: "average_session_length" + bypass_reason: "No data in the sandbox." 
expect_records: path: "integration_tests/expected_records.jsonl" exact_order: no @@ -37,6 +39,7 @@ acceptance_tests: future_state: future_state_path: "integration_tests/abnormal_state.json" timeout_seconds: 3600 + skip_comprehensive_incremental_tests: yes full_refresh: tests: - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-amplitude/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-amplitude/integration_tests/expected_records.jsonl index a23992c53d61..be624bdc5b72 100644 --- a/airbyte-integrations/connectors/source-amplitude/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-amplitude/integration_tests/expected_records.jsonl @@ -2,10 +2,6 @@ {"stream": "active_users", "data": {"date": "2023-08-29", "statistics": {"(none)": 0}}, "emitted_at": 1694709513302} {"stream": "active_users", "data": {"date": "2023-08-30", "statistics": {"(none)": 0}}, "emitted_at": 1694709513303} {"stream": "active_users", "data": {"date": "2023-08-31", "statistics": {"(none)": 0}}, "emitted_at": 1694709513305} -{"stream": "average_session_length", "data": {"date": "2023-08-11T00:00:00", "length": 0.0}, "emitted_at": 1694709517092} -{"stream": "average_session_length", "data": {"date": "2023-08-18T00:00:00", "length": 0.0}, "emitted_at": 1694709517088} -{"stream": "average_session_length", "data": {"date": "2023-08-23T00:00:00", "length": 0.0}, "emitted_at": 1694709517090} -{"stream": "average_session_length", "data": {"date": "2023-08-27T00:00:00", "length": 0.0}, "emitted_at": 1694709517086} {"stream": "events", "data": {"$insert_id": "google-ad-4651612872-643022056303-DESKTOP-2023-08-24", "$insert_key": null, "$schema": null, "adid": null, "amplitude_attribution_ids": null, "amplitude_event_type": null, "amplitude_id": 550106004607, "app": 434735, "city": null, "client_event_time": "2023-08-24T07:00:00+00:00", "client_upload_time": "2023-08-25T11:04:55.821000+00:00", "country": null, "data": {"path": "/batch", "user_properties_updated": true, "vacuum_source_id": "5955", "group_first_event": {}, "group_ids": {}}, "data_type": "event", "device_brand": null, "device_carrier": null, "device_family": null, "device_id": "google-ad-4651612872-643022056303", "device_manufacturer": null, "device_model": null, "device_type": null, "dma": null, "event_id": 355175889, "event_properties": {"ad_metrics.cost": 0.528957, "campaign_advertising_channel_type": "DISPLAY", "ad_segment_device": "DESKTOP", "ad_metrics.impressions": 1535, "ad_group_type": "DISPLAY_STANDARD", "campaign_name": "Brand awareness and reach-Display-1", "ad_group_name": "Ad group 1", "ad_id": 643022056303, "campaign_start_date": "2022-12-28", "final_url": "https://airbyte.com", "ad_platform": "google", "campaign_end_date": "2037-12-30", "ad_metrics.clicks": 0, "ad_group_id": 144799120517, "ad_metrics.conversions": 0.0, "ad_metrics.interactions": 0, "campaign_id": 19410069806}, "event_time": "2023-08-24T07:00:00+00:00", "event_type": "Daily Ad Metrics", "global_user_properties": null, "group_properties": {}, "groups": {}, "idfa": null, "ip_address": null, "is_attribution_event": null, "language": null, "library": "google_ads", "location_lat": null, "location_lng": null, "os_name": null, "os_version": null, "partner_id": null, "paying": null, "plan": {}, "platform": null, "processed_time": "2023-08-25T11:05:08.912000+00:00", "region": null, "sample_rate": null, "server_received_time": "2023-08-25T11:04:55.821000+00:00", "server_upload_time": 
"2023-08-25T11:05:08.013000+00:00", "session_id": -1, "source_id": null, "start_version": null, "user_creation_time": null, "user_id": null, "user_properties": {"country": "test", "device_model": "test", "city": "test", "os_version": "test", "City": "London", "platform": "test", "device_manufacturer": "test", "carrier": "test", "device_brand": "test", "Region": "London", "DMA": "London", "Country": "UK", "os_name": "test", "region": "test"}, "uuid": "37bcd2f0-2688-47d5-ba90-76b0bd13b0f8", "version_name": null}, "emitted_at": 1694709577929} {"stream": "events", "data": {"$insert_id": "google-ad-4651612872-643022056303-MOBILE-2023-08-24", "$insert_key": null, "$schema": null, "adid": null, "amplitude_attribution_ids": null, "amplitude_event_type": null, "amplitude_id": 550106004607, "app": 434735, "city": null, "client_event_time": "2023-08-24T07:00:00+00:00", "client_upload_time": "2023-08-25T11:04:55.821000+00:00", "country": null, "data": {"path": "/batch", "vacuum_source_id": "5955", "group_first_event": {}, "group_ids": {}}, "data_type": "event", "device_brand": null, "device_carrier": null, "device_family": null, "device_id": "google-ad-4651612872-643022056303", "device_manufacturer": null, "device_model": null, "device_type": null, "dma": null, "event_id": 604299598, "event_properties": {"ad_metrics.cost": 11.398659, "campaign_advertising_channel_type": "DISPLAY", "ad_segment_device": "MOBILE", "ad_metrics.impressions": 15084, "ad_group_type": "DISPLAY_STANDARD", "campaign_name": "Brand awareness and reach-Display-1", "ad_group_name": "Ad group 1", "ad_id": 643022056303, "campaign_start_date": "2022-12-28", "final_url": "https://airbyte.com", "ad_platform": "google", "campaign_end_date": "2037-12-30", "ad_metrics.clicks": 28, "ad_group_id": 144799120517, "ad_metrics.conversions": 0.0, "ad_metrics.interactions": 28, "campaign_id": 19410069806}, "event_time": "2023-08-24T07:00:00+00:00", "event_type": "Daily Ad Metrics", "global_user_properties": null, "group_properties": {}, "groups": {}, "idfa": null, "ip_address": null, "is_attribution_event": null, "language": null, "library": "google_ads", "location_lat": null, "location_lng": null, "os_name": null, "os_version": null, "partner_id": null, "paying": null, "plan": {}, "platform": null, "processed_time": "2023-08-25T11:05:08.912000+00:00", "region": null, "sample_rate": null, "server_received_time": "2023-08-25T11:04:55.821000+00:00", "server_upload_time": "2023-08-25T11:05:08.013000+00:00", "session_id": -1, "source_id": null, "start_version": null, "user_creation_time": null, "user_id": null, "user_properties": {"country": "test", "device_model": "test", "city": "test", "os_version": "test", "City": "London", "platform": "test", "device_manufacturer": "test", "carrier": "test", "device_brand": "test", "Region": "London", "DMA": "London", "Country": "UK", "os_name": "test", "region": "test"}, "uuid": "60320805-6886-43d6-b2ea-d3a6eccefee2", "version_name": null}, "emitted_at": 1694709577931} {"stream": "events", "data": {"$insert_id": "google-ad-4651612872-643022056303-TABLET-2023-08-24", "$insert_key": null, "$schema": null, "adid": null, "amplitude_attribution_ids": null, "amplitude_event_type": null, "amplitude_id": 550106004607, "app": 434735, "city": null, "client_event_time": "2023-08-24T07:00:00+00:00", "client_upload_time": "2023-08-25T11:04:55.821000+00:00", "country": null, "data": {"path": "/batch", "vacuum_source_id": "5955", "group_first_event": {}, "group_ids": {}}, "data_type": "event", "device_brand": null, 
"device_carrier": null, "device_family": null, "device_id": "google-ad-4651612872-643022056303", "device_manufacturer": null, "device_model": null, "device_type": null, "dma": null, "event_id": 798716893, "event_properties": {"ad_metrics.cost": 0.644529, "campaign_advertising_channel_type": "DISPLAY", "ad_segment_device": "TABLET", "ad_metrics.impressions": 931, "ad_group_type": "DISPLAY_STANDARD", "campaign_name": "Brand awareness and reach-Display-1", "ad_group_name": "Ad group 1", "ad_id": 643022056303, "campaign_start_date": "2022-12-28", "final_url": "https://airbyte.com", "ad_platform": "google", "campaign_end_date": "2037-12-30", "ad_metrics.clicks": 3, "ad_group_id": 144799120517, "ad_metrics.conversions": 0.0, "ad_metrics.interactions": 3, "campaign_id": 19410069806}, "event_time": "2023-08-24T07:00:00+00:00", "event_type": "Daily Ad Metrics", "global_user_properties": null, "group_properties": {}, "groups": {}, "idfa": null, "ip_address": null, "is_attribution_event": null, "language": null, "library": "google_ads", "location_lat": null, "location_lng": null, "os_name": null, "os_version": null, "partner_id": null, "paying": null, "plan": {}, "platform": null, "processed_time": "2023-08-25T11:05:08.912000+00:00", "region": null, "sample_rate": null, "server_received_time": "2023-08-25T11:04:55.821000+00:00", "server_upload_time": "2023-08-25T11:05:08.013000+00:00", "session_id": -1, "source_id": null, "start_version": null, "user_creation_time": null, "user_id": null, "user_properties": {"country": "test", "device_model": "test", "city": "test", "os_version": "test", "City": "London", "platform": "test", "device_manufacturer": "test", "carrier": "test", "device_brand": "test", "Region": "London", "DMA": "London", "Country": "UK", "os_name": "test", "region": "test"}, "uuid": "9ae0c1e5-3c39-4a4d-af8b-0902fe889410", "version_name": null}, "emitted_at": 1694709577932} From df71835d9a5574889524c1a4967a91189de0daf4 Mon Sep 17 00:00:00 2001 From: Anatolii Yatsuk <35109939+tolik0@users.noreply.github.com> Date: Fri, 1 Mar 2024 14:51:53 +0200 Subject: [PATCH 043/172] :bug: Source Google Ads: Fix error for new customers for incremental events streams (#35664) --- .../source-google-ads/metadata.yaml | 2 +- .../connectors/source-google-ads/poetry.lock | 183 +++++++++--------- .../source-google-ads/pyproject.toml | 2 +- .../source_google_ads/streams.py | 86 ++++---- .../source-google-ads/unit_tests/conftest.py | 4 + .../test_incremental_events_streams.py | 48 +++++ docs/integrations/sources/google-ads.md | 1 + 7 files changed, 199 insertions(+), 127 deletions(-) diff --git a/airbyte-integrations/connectors/source-google-ads/metadata.yaml b/airbyte-integrations/connectors/source-google-ads/metadata.yaml index 8d1582887623..087f70587419 100644 --- a/airbyte-integrations/connectors/source-google-ads/metadata.yaml +++ b/airbyte-integrations/connectors/source-google-ads/metadata.yaml @@ -11,7 +11,7 @@ data: connectorSubtype: api connectorType: source definitionId: 253487c0-2246-43ba-a21f-5116b20a2c50 - dockerImageTag: 3.3.5 + dockerImageTag: 3.3.6 dockerRepository: airbyte/source-google-ads documentationUrl: https://docs.airbyte.com/integrations/sources/google-ads githubIssueLabel: source-google-ads diff --git a/airbyte-integrations/connectors/source-google-ads/poetry.lock b/airbyte-integrations/connectors/source-google-ads/poetry.lock index eefcf8333c3f..1b8c7d1afff5 100644 --- a/airbyte-integrations/connectors/source-google-ads/poetry.lock +++ 
b/airbyte-integrations/connectors/source-google-ads/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. [[package]] name = "airbyte-cdk" @@ -104,13 +104,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -352,13 +352,13 @@ tests = ["nox (>=2020.12.31,<2022.6)"] [[package]] name = "google-api-core" -version = "2.17.0" +version = "2.17.1" description = "Google API client core library" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-core-2.17.0.tar.gz", hash = "sha256:de7ef0450faec7c75e0aea313f29ac870fdc44cfaec9d6499a9a17305980ef66"}, - {file = "google_api_core-2.17.0-py3-none-any.whl", hash = "sha256:08ed79ed8e93e329de5e3e7452746b734e6bf8438d8d64dd3319d21d3164890c"}, + {file = "google-api-core-2.17.1.tar.gz", hash = "sha256:9df18a1f87ee0df0bc4eea2770ebc4228392d8cc4066655b320e2cfccb15db95"}, + {file = "google_api_core-2.17.1-py3-none-any.whl", hash = "sha256:610c5b90092c360736baccf17bd3efbcb30dd380e7a6dc28a71059edb8bd0d8e"}, ] [package.dependencies] @@ -374,13 +374,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-auth" -version = "2.27.0" +version = "2.28.1" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google-auth-2.27.0.tar.gz", hash = "sha256:e863a56ccc2d8efa83df7a80272601e43487fa9a728a376205c86c26aaefa821"}, - {file = "google_auth-2.27.0-py2.py3-none-any.whl", hash = "sha256:8e4bad367015430ff253fe49d500fdc3396c1a434db5740828c728e45bcce245"}, + {file = "google-auth-2.28.1.tar.gz", hash = "sha256:34fc3046c257cedcf1622fc4b31fc2be7923d9b4d44973d481125ecc50d83885"}, + {file = "google_auth-2.28.1-py2.py3-none-any.whl", hash = "sha256:25141e2d7a14bfcba945f5e9827f98092716e99482562f15306e5b026e21aa72"}, ] [package.dependencies] @@ -432,84 +432,84 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] [[package]] name = "grpcio" -version = "1.60.1" +version = "1.62.0" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.7" files = [ - {file = "grpcio-1.60.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:14e8f2c84c0832773fb3958240c69def72357bc11392571f87b2d7b91e0bb092"}, - {file = "grpcio-1.60.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:33aed0a431f5befeffd9d346b0fa44b2c01aa4aeae5ea5b2c03d3e25e0071216"}, - {file = "grpcio-1.60.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:fead980fbc68512dfd4e0c7b1f5754c2a8e5015a04dea454b9cada54a8423525"}, - {file = "grpcio-1.60.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:082081e6a36b6eb5cf0fd9a897fe777dbb3802176ffd08e3ec6567edd85bc104"}, - {file = "grpcio-1.60.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:55ccb7db5a665079d68b5c7c86359ebd5ebf31a19bc1a91c982fd622f1e31ff2"}, - {file = "grpcio-1.60.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9b54577032d4f235452f77a83169b6527bf4b77d73aeada97d45b2aaf1bf5ce0"}, - {file = "grpcio-1.60.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7d142bcd604166417929b071cd396aa13c565749a4c840d6c702727a59d835eb"}, - {file = "grpcio-1.60.1-cp310-cp310-win32.whl", hash = "sha256:2a6087f234cb570008a6041c8ffd1b7d657b397fdd6d26e83d72283dae3527b1"}, - {file = "grpcio-1.60.1-cp310-cp310-win_amd64.whl", hash = "sha256:f2212796593ad1d0235068c79836861f2201fc7137a99aa2fea7beeb3b101177"}, - {file = "grpcio-1.60.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:79ae0dc785504cb1e1788758c588c711f4e4a0195d70dff53db203c95a0bd303"}, - {file = "grpcio-1.60.1-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:4eec8b8c1c2c9b7125508ff7c89d5701bf933c99d3910e446ed531cd16ad5d87"}, - {file = "grpcio-1.60.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:8c9554ca8e26241dabe7951aa1fa03a1ba0856688ecd7e7bdbdd286ebc272e4c"}, - {file = "grpcio-1.60.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91422ba785a8e7a18725b1dc40fbd88f08a5bb4c7f1b3e8739cab24b04fa8a03"}, - {file = "grpcio-1.60.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cba6209c96828711cb7c8fcb45ecef8c8859238baf15119daa1bef0f6c84bfe7"}, - {file = "grpcio-1.60.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c71be3f86d67d8d1311c6076a4ba3b75ba5703c0b856b4e691c9097f9b1e8bd2"}, - {file = "grpcio-1.60.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:af5ef6cfaf0d023c00002ba25d0751e5995fa0e4c9eec6cd263c30352662cbce"}, - {file = "grpcio-1.60.1-cp311-cp311-win32.whl", hash = "sha256:a09506eb48fa5493c58f946c46754ef22f3ec0df64f2b5149373ff31fb67f3dd"}, - {file = "grpcio-1.60.1-cp311-cp311-win_amd64.whl", hash = "sha256:49c9b6a510e3ed8df5f6f4f3c34d7fbf2d2cae048ee90a45cd7415abab72912c"}, - {file = "grpcio-1.60.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:b58b855d0071575ea9c7bc0d84a06d2edfbfccec52e9657864386381a7ce1ae9"}, - {file = "grpcio-1.60.1-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:a731ac5cffc34dac62053e0da90f0c0b8560396a19f69d9703e88240c8f05858"}, - {file = "grpcio-1.60.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:cf77f8cf2a651fbd869fbdcb4a1931464189cd210abc4cfad357f1cacc8642a6"}, - {file = "grpcio-1.60.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c557e94e91a983e5b1e9c60076a8fd79fea1e7e06848eb2e48d0ccfb30f6e073"}, - {file = "grpcio-1.60.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:069fe2aeee02dfd2135d562d0663fe70fbb69d5eed6eb3389042a7e963b54de8"}, - {file = "grpcio-1.60.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb0af13433dbbd1c806e671d81ec75bd324af6ef75171fd7815ca3074fe32bfe"}, - {file = "grpcio-1.60.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2f44c32aef186bbba254129cea1df08a20be414144ac3bdf0e84b24e3f3b2e05"}, - {file = "grpcio-1.60.1-cp312-cp312-win32.whl", hash = "sha256:a212e5dea1a4182e40cd3e4067ee46be9d10418092ce3627475e995cca95de21"}, - {file = "grpcio-1.60.1-cp312-cp312-win_amd64.whl", hash = "sha256:6e490fa5f7f5326222cb9f0b78f207a2b218a14edf39602e083d5f617354306f"}, - {file = "grpcio-1.60.1-cp37-cp37m-linux_armv7l.whl", hash = "sha256:4216e67ad9a4769117433814956031cb300f85edc855252a645a9a724b3b6594"}, - {file = "grpcio-1.60.1-cp37-cp37m-macosx_10_10_universal2.whl", hash = 
"sha256:73e14acd3d4247169955fae8fb103a2b900cfad21d0c35f0dcd0fdd54cd60367"}, - {file = "grpcio-1.60.1-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:6ecf21d20d02d1733e9c820fb5c114c749d888704a7ec824b545c12e78734d1c"}, - {file = "grpcio-1.60.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33bdea30dcfd4f87b045d404388469eb48a48c33a6195a043d116ed1b9a0196c"}, - {file = "grpcio-1.60.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53b69e79d00f78c81eecfb38f4516080dc7f36a198b6b37b928f1c13b3c063e9"}, - {file = "grpcio-1.60.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:39aa848794b887120b1d35b1b994e445cc028ff602ef267f87c38122c1add50d"}, - {file = "grpcio-1.60.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:72153a0d2e425f45b884540a61c6639436ddafa1829a42056aa5764b84108b8e"}, - {file = "grpcio-1.60.1-cp37-cp37m-win_amd64.whl", hash = "sha256:50d56280b482875d1f9128ce596e59031a226a8b84bec88cb2bf76c289f5d0de"}, - {file = "grpcio-1.60.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:6d140bdeb26cad8b93c1455fa00573c05592793c32053d6e0016ce05ba267549"}, - {file = "grpcio-1.60.1-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:bc808924470643b82b14fe121923c30ec211d8c693e747eba8a7414bc4351a23"}, - {file = "grpcio-1.60.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:70c83bb530572917be20c21f3b6be92cd86b9aecb44b0c18b1d3b2cc3ae47df0"}, - {file = "grpcio-1.60.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b106bc52e7f28170e624ba61cc7dc6829566e535a6ec68528f8e1afbed1c41f"}, - {file = "grpcio-1.60.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30e980cd6db1088c144b92fe376747328d5554bc7960ce583ec7b7d81cd47287"}, - {file = "grpcio-1.60.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0c5807e9152eff15f1d48f6b9ad3749196f79a4a050469d99eecb679be592acc"}, - {file = "grpcio-1.60.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f1c3dc536b3ee124e8b24feb7533e5c70b9f2ef833e3b2e5513b2897fd46763a"}, - {file = "grpcio-1.60.1-cp38-cp38-win32.whl", hash = "sha256:d7404cebcdb11bb5bd40bf94131faf7e9a7c10a6c60358580fe83913f360f929"}, - {file = "grpcio-1.60.1-cp38-cp38-win_amd64.whl", hash = "sha256:c8754c75f55781515a3005063d9a05878b2cfb3cb7e41d5401ad0cf19de14872"}, - {file = "grpcio-1.60.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:0250a7a70b14000fa311de04b169cc7480be6c1a769b190769d347939d3232a8"}, - {file = "grpcio-1.60.1-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:660fc6b9c2a9ea3bb2a7e64ba878c98339abaf1811edca904ac85e9e662f1d73"}, - {file = "grpcio-1.60.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:76eaaba891083fcbe167aa0f03363311a9f12da975b025d30e94b93ac7a765fc"}, - {file = "grpcio-1.60.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d97c65ea7e097056f3d1ead77040ebc236feaf7f71489383d20f3b4c28412a"}, - {file = "grpcio-1.60.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb2a2911b028f01c8c64d126f6b632fcd8a9ac975aa1b3855766c94e4107180"}, - {file = "grpcio-1.60.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:5a1ebbae7e2214f51b1f23b57bf98eeed2cf1ba84e4d523c48c36d5b2f8829ff"}, - {file = "grpcio-1.60.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a66f4d2a005bc78e61d805ed95dedfcb35efa84b7bba0403c6d60d13a3de2d6"}, - {file = "grpcio-1.60.1-cp39-cp39-win32.whl", hash = "sha256:8d488fbdbf04283f0d20742b64968d44825617aa6717b07c006168ed16488804"}, - {file = "grpcio-1.60.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:61b7199cd2a55e62e45bfb629a35b71fc2c0cb88f686a047f25b1112d3810904"}, - {file = "grpcio-1.60.1.tar.gz", hash = "sha256:dd1d3a8d1d2e50ad9b59e10aa7f07c7d1be2b367f3f2d33c5fade96ed5460962"}, + {file = "grpcio-1.62.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:136ffd79791b1eddda8d827b607a6285474ff8a1a5735c4947b58c481e5e4271"}, + {file = "grpcio-1.62.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:d6a56ba703be6b6267bf19423d888600c3f574ac7c2cc5e6220af90662a4d6b0"}, + {file = "grpcio-1.62.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:4cd356211579043fce9f52acc861e519316fff93980a212c8109cca8f47366b6"}, + {file = "grpcio-1.62.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e803e9b58d8f9b4ff0ea991611a8d51b31c68d2e24572cd1fe85e99e8cc1b4f8"}, + {file = "grpcio-1.62.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4c04fe33039b35b97c02d2901a164bbbb2f21fb9c4e2a45a959f0b044c3512c"}, + {file = "grpcio-1.62.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:95370c71b8c9062f9ea033a0867c4c73d6f0ff35113ebd2618171ec1f1e903e0"}, + {file = "grpcio-1.62.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c912688acc05e4ff012c8891803659d6a8a8b5106f0f66e0aed3fb7e77898fa6"}, + {file = "grpcio-1.62.0-cp310-cp310-win32.whl", hash = "sha256:821a44bd63d0f04e33cf4ddf33c14cae176346486b0df08b41a6132b976de5fc"}, + {file = "grpcio-1.62.0-cp310-cp310-win_amd64.whl", hash = "sha256:81531632f93fece32b2762247c4c169021177e58e725494f9a746ca62c83acaa"}, + {file = "grpcio-1.62.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:3fa15850a6aba230eed06b236287c50d65a98f05054a0f01ccedf8e1cc89d57f"}, + {file = "grpcio-1.62.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:36df33080cd7897623feff57831eb83c98b84640b016ce443305977fac7566fb"}, + {file = "grpcio-1.62.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:7a195531828b46ea9c4623c47e1dc45650fc7206f8a71825898dd4c9004b0928"}, + {file = "grpcio-1.62.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab140a3542bbcea37162bdfc12ce0d47a3cda3f2d91b752a124cc9fe6776a9e2"}, + {file = "grpcio-1.62.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f9d6c3223914abb51ac564dc9c3782d23ca445d2864321b9059d62d47144021"}, + {file = "grpcio-1.62.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:fbe0c20ce9a1cff75cfb828b21f08d0a1ca527b67f2443174af6626798a754a4"}, + {file = "grpcio-1.62.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38f69de9c28c1e7a8fd24e4af4264726637b72f27c2099eaea6e513e7142b47e"}, + {file = "grpcio-1.62.0-cp311-cp311-win32.whl", hash = "sha256:ce1aafdf8d3f58cb67664f42a617af0e34555fe955450d42c19e4a6ad41c84bd"}, + {file = "grpcio-1.62.0-cp311-cp311-win_amd64.whl", hash = "sha256:eef1d16ac26c5325e7d39f5452ea98d6988c700c427c52cbc7ce3201e6d93334"}, + {file = "grpcio-1.62.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:8aab8f90b2a41208c0a071ec39a6e5dbba16fd827455aaa070fec241624ccef8"}, + {file = "grpcio-1.62.0-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:62aa1659d8b6aad7329ede5d5b077e3d71bf488d85795db517118c390358d5f6"}, + {file = "grpcio-1.62.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:0d7ae7fc7dbbf2d78d6323641ded767d9ec6d121aaf931ec4a5c50797b886532"}, + {file = "grpcio-1.62.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f359d635ee9428f0294bea062bb60c478a8ddc44b0b6f8e1f42997e5dc12e2ee"}, + {file = "grpcio-1.62.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:77d48e5b1f8f4204889f1acf30bb57c30378e17c8d20df5acbe8029e985f735c"}, + {file = "grpcio-1.62.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:662d3df5314ecde3184cf87ddd2c3a66095b3acbb2d57a8cada571747af03873"}, + {file = "grpcio-1.62.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:92cdb616be44c8ac23a57cce0243af0137a10aa82234f23cd46e69e115071388"}, + {file = "grpcio-1.62.0-cp312-cp312-win32.whl", hash = "sha256:0b9179478b09ee22f4a36b40ca87ad43376acdccc816ce7c2193a9061bf35701"}, + {file = "grpcio-1.62.0-cp312-cp312-win_amd64.whl", hash = "sha256:614c3ed234208e76991992342bab725f379cc81c7dd5035ee1de2f7e3f7a9842"}, + {file = "grpcio-1.62.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:7e1f51e2a460b7394670fdb615e26d31d3260015154ea4f1501a45047abe06c9"}, + {file = "grpcio-1.62.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:bcff647e7fe25495e7719f779cc219bbb90b9e79fbd1ce5bda6aae2567f469f2"}, + {file = "grpcio-1.62.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:56ca7ba0b51ed0de1646f1735154143dcbdf9ec2dbe8cc6645def299bb527ca1"}, + {file = "grpcio-1.62.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e84bfb2a734e4a234b116be208d6f0214e68dcf7804306f97962f93c22a1839"}, + {file = "grpcio-1.62.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c1488b31a521fbba50ae86423f5306668d6f3a46d124f7819c603979fc538c4"}, + {file = "grpcio-1.62.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:98d8f4eb91f1ce0735bf0b67c3b2a4fea68b52b2fd13dc4318583181f9219b4b"}, + {file = "grpcio-1.62.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b3d3d755cfa331d6090e13aac276d4a3fb828bf935449dc16c3d554bf366136b"}, + {file = "grpcio-1.62.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a33f2bfd8a58a02aab93f94f6c61279be0f48f99fcca20ebaee67576cd57307b"}, + {file = "grpcio-1.62.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:5e709f7c8028ce0443bddc290fb9c967c1e0e9159ef7a030e8c21cac1feabd35"}, + {file = "grpcio-1.62.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:2f3d9a4d0abb57e5f49ed5039d3ed375826c2635751ab89dcc25932ff683bbb6"}, + {file = "grpcio-1.62.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:62ccb92f594d3d9fcd00064b149a0187c246b11e46ff1b7935191f169227f04c"}, + {file = "grpcio-1.62.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:921148f57c2e4b076af59a815467d399b7447f6e0ee10ef6d2601eb1e9c7f402"}, + {file = "grpcio-1.62.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f897b16190b46bc4d4aaf0a32a4b819d559a37a756d7c6b571e9562c360eed72"}, + {file = "grpcio-1.62.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1bc8449084fe395575ed24809752e1dc4592bb70900a03ca42bf236ed5bf008f"}, + {file = "grpcio-1.62.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81d444e5e182be4c7856cd33a610154fe9ea1726bd071d07e7ba13fafd202e38"}, + {file = "grpcio-1.62.0-cp38-cp38-win32.whl", hash = "sha256:88f41f33da3840b4a9bbec68079096d4caf629e2c6ed3a72112159d570d98ebe"}, + {file = "grpcio-1.62.0-cp38-cp38-win_amd64.whl", hash = "sha256:fc2836cb829895ee190813446dce63df67e6ed7b9bf76060262c55fcd097d270"}, + {file = "grpcio-1.62.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:fcc98cff4084467839d0a20d16abc2a76005f3d1b38062464d088c07f500d170"}, + {file = "grpcio-1.62.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:0d3dee701e48ee76b7d6fbbba18ba8bc142e5b231ef7d3d97065204702224e0e"}, + {file = "grpcio-1.62.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = 
"sha256:b7a6be562dd18e5d5bec146ae9537f20ae1253beb971c0164f1e8a2f5a27e829"}, + {file = "grpcio-1.62.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29cb592c4ce64a023712875368bcae13938c7f03e99f080407e20ffe0a9aa33b"}, + {file = "grpcio-1.62.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1eda79574aec8ec4d00768dcb07daba60ed08ef32583b62b90bbf274b3c279f7"}, + {file = "grpcio-1.62.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7eea57444a354ee217fda23f4b479a4cdfea35fb918ca0d8a0e73c271e52c09c"}, + {file = "grpcio-1.62.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0e97f37a3b7c89f9125b92d22e9c8323f4e76e7993ba7049b9f4ccbe8bae958a"}, + {file = "grpcio-1.62.0-cp39-cp39-win32.whl", hash = "sha256:39cd45bd82a2e510e591ca2ddbe22352e8413378852ae814549c162cf3992a93"}, + {file = "grpcio-1.62.0-cp39-cp39-win_amd64.whl", hash = "sha256:b71c65427bf0ec6a8b48c68c17356cb9fbfc96b1130d20a07cb462f4e4dcdcd5"}, + {file = "grpcio-1.62.0.tar.gz", hash = "sha256:748496af9238ac78dcd98cce65421f1adce28c3979393e3609683fcd7f3880d7"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.60.1)"] +protobuf = ["grpcio-tools (>=1.62.0)"] [[package]] name = "grpcio-status" -version = "1.60.1" +version = "1.62.0" description = "Status proto mapping for gRPC" optional = false python-versions = ">=3.6" files = [ - {file = "grpcio-status-1.60.1.tar.gz", hash = "sha256:61b5aab8989498e8aa142c20b88829ea5d90d18c18c853b9f9e6d407d37bf8b4"}, - {file = "grpcio_status-1.60.1-py3-none-any.whl", hash = "sha256:3034fdb239185b6e0f3169d08c268c4507481e4b8a434c21311a03d9eb5889a0"}, + {file = "grpcio-status-1.62.0.tar.gz", hash = "sha256:0d693e9c09880daeaac060d0c3dba1ae470a43c99e5d20dfeafd62cf7e08a85d"}, + {file = "grpcio_status-1.62.0-py3-none-any.whl", hash = "sha256:3baac03fcd737310e67758c4082a188107f771d32855bce203331cd4c9aa687a"}, ] [package.dependencies] googleapis-common-protos = ">=1.5.5" -grpcio = ">=1.60.1" +grpcio = ">=1.62.0" protobuf = ">=4.21.6" [[package]] @@ -981,13 +981,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.tar.gz", hash = "sha256:78e73e19c63f5b20ffa567001531680d939dc042bf7850431877645523c66709"}, + {file = "python_dateutil-2.9.0-py2.py3-none-any.whl", hash = "sha256:cbf2f1da5e6083ac2fbfd4da39a25f34312230110440f424a14c7558bb85d82e"}, ] [package.dependencies] @@ -1029,7 +1029,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -1087,13 +1086,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -1105,15 +1104,15 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" @@ -1168,19 +1167,19 @@ pyasn1 = ">=0.1.3" [[package]] name = "setuptools" -version = "69.1.0" +version = "69.1.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56"}, + {file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", 
"sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -1206,13 +1205,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -1231,13 +1230,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] diff --git a/airbyte-integrations/connectors/source-google-ads/pyproject.toml b/airbyte-integrations/connectors/source-google-ads/pyproject.toml index f46539d35eeb..b91a9d991725 100644 --- a/airbyte-integrations/connectors/source-google-ads/pyproject.toml +++ b/airbyte-integrations/connectors/source-google-ads/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "3.3.5" +version = "3.3.6" name = "source-google-ads" description = "Source implementation for Google Ads." 
authors = [ "Airbyte ",] diff --git a/airbyte-integrations/connectors/source-google-ads/source_google_ads/streams.py b/airbyte-integrations/connectors/source-google-ads/source_google_ads/streams.py index 499bceca367e..65292f028631 100644 --- a/airbyte-integrations/connectors/source-google-ads/source_google_ads/streams.py +++ b/airbyte-integrations/connectors/source-google-ads/source_google_ads/streams.py @@ -556,6 +556,37 @@ def query_limit(self) -> Optional[int]: """Queries for ChangeStatus resource have to include limit in it""" return 10000 + def stream_slices(self, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[MutableMapping[str, any]]]: + """Modifies the original stream_slices to return one empty slice for new customers that doesn't have state yet""" + stream_state = stream_state or {} + for customer in self.customers: + if stream_state.get(customer.id): + start_date = stream_state[customer.id].get(self.cursor_field) or self._start_date + # We should keep backward compatibility with the previous version + elif stream_state.get(self.cursor_field) and len(self.customers) == 1: + start_date = stream_state.get(self.cursor_field) or self._start_date + else: + # child stream doesn't need parent stream as it is used only for the updates + yield {"customer_id": customer.id, "login_customer_id": customer.login_customer_id} + continue + + end_date = self._end_date + + for chunk in chunk_date_range( + start_date=start_date, + end_date=end_date, + conversion_window=self.conversion_window_days, + days_of_data_storage=self.days_of_data_storage, + time_zone=customer.time_zone, + time_format=self.cursor_time_format, + slice_duration=self.slice_duration, + slice_step=self.slice_step, + ): + if chunk: + chunk["customer_id"] = customer.id + chunk["login_customer_id"] = customer.login_customer_id + yield chunk + def read_records( self, sync_mode: SyncMode, cursor_field: List[str] = None, stream_slice: MutableMapping[str, Any] = None, **kwargs ) -> Iterable[Mapping[str, Any]]: @@ -658,39 +689,11 @@ def stream_slices(self, stream_state: Mapping[str, Any] = None, **kwargs) -> Ite """ If state exists read updates from parent stream otherwise return slices with only customer id to sync all records for stream """ - if stream_state: - slices_generator = self.read_parent_stream(SyncMode.incremental, self.parent_cursor_field, stream_state) - yield from slices_generator - else: - for customer in self.customers: - yield { - "customer_id": customer.id, - "login_customer_id": customer.login_customer_id, - "updated_ids": set(), - "deleted_ids": set(), - "record_changed_time_map": dict(), - } - - def _process_parent_record(self, parent_record: MutableMapping[str, Any], child_slice: MutableMapping[str, Any]) -> bool: - """Process a single parent_record and update the child_slice.""" - substream_id = parent_record.get(self.parent_id_field) - if not substream_id: - return False - - # Save time of change - child_slice["record_changed_time_map"][substream_id] = parent_record[self.parent_cursor_field] - - # Add record id to list of changed or deleted items depending on status - slice_id_list = "deleted_ids" if parent_record.get("change_status.resource_status") == "REMOVED" else "updated_ids" - child_slice[slice_id_list].add(substream_id) - - return True - - def read_parent_stream( - self, sync_mode: SyncMode, cursor_field: Optional[str], stream_state: Mapping[str, Any] - ) -> Iterable[Mapping[str, Any]]: + stream_state = stream_state or {} for parent_slice in self.parent_stream.stream_slices( - 
-            sync_mode=sync_mode, cursor_field=cursor_field, stream_state=stream_state.get(self.parent_stream_name)
+            sync_mode=SyncMode.incremental,
+            cursor_field=self.parent_cursor_field,
+            stream_state=stream_state.get(self.parent_stream_name, {}),
         ):
             customer_id = parent_slice.get("customer_id")
             child_slice = {
@@ -705,13 +708,30 @@ def read_parent_stream(
                 continue
 
             parent_slice["resource_type"] = self.resource_type
-            for parent_record in self.parent_stream.read_records(sync_mode=sync_mode, cursor_field=cursor_field, stream_slice=parent_slice):
+            for parent_record in self.parent_stream.read_records(
+                sync_mode=SyncMode.incremental, cursor_field=self.parent_cursor_field, stream_slice=parent_slice
+            ):
                 self._process_parent_record(parent_record, child_slice)
 
             # yield child slice if any records were read
             if child_slice["record_changed_time_map"]:
                 yield child_slice
 
+    def _process_parent_record(self, parent_record: MutableMapping[str, Any], child_slice: MutableMapping[str, Any]) -> bool:
+        """Process a single parent_record and update the child_slice."""
+        substream_id = parent_record.get(self.parent_id_field)
+        if not substream_id:
+            return False
+
+        # Save time of change
+        child_slice["record_changed_time_map"][substream_id] = parent_record[self.parent_cursor_field]
+
+        # Add record id to list of changed or deleted items depending on status
+        slice_id_list = "deleted_ids" if parent_record.get("change_status.resource_status") == "REMOVED" else "updated_ids"
+        child_slice[slice_id_list].add(substream_id)
+
+        return True
+
     def parse_response(self, response: SearchPager, stream_slice: MutableMapping[str, Any] = None) -> Iterable[Mapping]:
         # update records with time obtained from parent stream
         for record in super().parse_response(response):
diff --git a/airbyte-integrations/connectors/source-google-ads/unit_tests/conftest.py b/airbyte-integrations/connectors/source-google-ads/unit_tests/conftest.py
index 7054284da5db..f409e0f1ac52 100644
--- a/airbyte-integrations/connectors/source-google-ads/unit_tests/conftest.py
+++ b/airbyte-integrations/connectors/source-google-ads/unit_tests/conftest.py
@@ -55,6 +55,10 @@ def mock_oauth_call(requests_mock):
 def customers(config):
     return [CustomerModel(id=_id, time_zone="local", is_manager_account=False) for _id in config["customer_id"].split(",")]
 
+@pytest.fixture
+def additional_customers(config, customers):
+    return customers + [CustomerModel(id="789", time_zone="local", is_manager_account=False)]
+
 @pytest.fixture
 def customers_manager(config):
diff --git a/airbyte-integrations/connectors/source-google-ads/unit_tests/test_incremental_events_streams.py b/airbyte-integrations/connectors/source-google-ads/unit_tests/test_incremental_events_streams.py
index 8ddf8bd80fba..34b89d1e2e88 100644
--- a/airbyte-integrations/connectors/source-google-ads/unit_tests/test_incremental_events_streams.py
+++ b/airbyte-integrations/connectors/source-google-ads/unit_tests/test_incremental_events_streams.py
@@ -81,6 +81,54 @@ def test_change_status_stream(config, customers):
     stream.get_query.assert_called_with({"customer_id": customer_id, "login_customer_id": "default"})
 
+def test_change_status_stream_slices(config, additional_customers):
+    """ Change status stream slices should return correct empty slices for the new customers """
+    google_api = MockGoogleAds(credentials=config["credentials"])
+
+    stream = ChangeStatus(api=google_api, customers=additional_customers)
+
+    now = pendulum.datetime(2023, 11, 2, 12, 53, 7)
+    pendulum.set_test_now(now)
+
+    stream_state = {"123": {"change_status.last_change_date_time": "2023-11-01 12:36:04.772447"}}
{"change_status.last_change_date_time": "2023-11-01 12:36:04.772447"}} + + result_slices = list(stream.stream_slices(stream_state=stream_state)) + assert len(result_slices) == 2 + assert result_slices == [{'start_date': '2023-11-01 12:36:04.772447', 'end_date': '2023-11-02 00:00:00.000000', 'customer_id': '123', + 'login_customer_id': None}, {'customer_id': '789', 'login_customer_id': None}] + + +def test_incremental_events_stream_slices(config, additional_customers): + """ Test if the empty slice will be produced for the new customers """ + stream_state = {"change_status": {"123": {"change_status.last_change_date_time": "2023-06-12 13:20:01.003295"}}} + + google_api = MockGoogleAds(credentials=config["credentials"]) + + stream = CampaignCriterion(api=google_api, customers=additional_customers) + parent_stream = stream.parent_stream + + parent_stream.get_query = Mock() + parent_stream.get_query.return_value = "query_parent" + + parent_stream.state = stream_state["change_status"] + + stream.get_query = Mock() + stream.get_query.return_value = "query_child" + + now = pendulum.datetime(2023, 6, 15, 12, 53, 7) + pendulum.set_test_now(now) + + stream_slices = list(stream.stream_slices(stream_state=stream_state)) + + assert len(stream_slices) == 2 + assert stream_slices == [{'customer_id': '123', 'updated_ids': {'2', '1'}, 'deleted_ids': {'3', '4'}, + 'record_changed_time_map': {'1': '2023-06-13 12:36:01.772447', '2': '2023-06-13 12:36:02.772447', + '3': '2023-06-13 12:36:03.772447', '4': '2023-06-13 12:36:04.772447'}, + 'login_customer_id': None}, + {'customer_id': '789', 'updated_ids': set(), 'deleted_ids': set(), 'record_changed_time_map': {}, + 'login_customer_id': None}] + + def test_child_incremental_events_read(config, customers): """ Page token expired while reading records on date 2021-01-03 diff --git a/docs/integrations/sources/google-ads.md b/docs/integrations/sources/google-ads.md index e7acc051bb90..1bc3c0733a7d 100644 --- a/docs/integrations/sources/google-ads.md +++ b/docs/integrations/sources/google-ads.md @@ -280,6 +280,7 @@ Due to a limitation in the Google Ads API which does not allow getting performan | Version | Date | Pull Request | Subject | |:---------|:-----------|:---------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------| +| `3.3.6` | 2024-03-01 | [35664](https://github.com/airbytehq/airbyte/pull/35664) | Fix error for new customers for incremental events streams | | `3.3.5` | 2024-02-28 | [35709](https://github.com/airbytehq/airbyte/pull/35709) | Handle 2-Step Verification exception as config error | | `3.3.4` | 2024-02-21 | [35493](https://github.com/airbytehq/airbyte/pull/35493) | Rolling back the patch 3.3.3 made for `user_interest` steam | | `3.3.3` | 2024-02-14 | [35280](https://github.com/airbytehq/airbyte/pull/35280) | Temporary patch that disables some fields to avoid 500 error when syncing `user_interest` steam | From eb041d508929c728c740ee0b3aed44f099b68ca0 Mon Sep 17 00:00:00 2001 From: Anton Karpets Date: Fri, 1 Mar 2024 15:02:08 +0200 Subject: [PATCH 044/172] =?UTF-8?q?=F0=9F=8F=A5Source=20Amplitude:=20fix?= =?UTF-8?q?=20formatting=20(#35748)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../connectors/source-amplitude/acceptance-test-config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
From eb041d508929c728c740ee0b3aed44f099b68ca0 Mon Sep 17 00:00:00 2001
From: Anton Karpets
Date: Fri, 1 Mar 2024 15:02:08 +0200
Subject: [PATCH 044/172] =?UTF-8?q?=F0=9F=8F=A5Source=20Amplitude:=20fix?=
 =?UTF-8?q?=20formatting=20(#35748)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../connectors/source-amplitude/acceptance-test-config.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/airbyte-integrations/connectors/source-amplitude/acceptance-test-config.yml b/airbyte-integrations/connectors/source-amplitude/acceptance-test-config.yml
index f916910191bf..be073275f480 100644
--- a/airbyte-integrations/connectors/source-amplitude/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-amplitude/acceptance-test-config.yml
@@ -7,7 +7,7 @@ acceptance_tests:
     tests:
       - spec_path: "source_amplitude/spec.yaml"
        backward_compatibility_tests_config:
-          disable_for_version: 0.3.2 # `start_date` format changed to format: date-time
+          disable_for_version: 0.3.2 # `start_date` format changed to format: date-time
   connection:
     tests:
       - config_path: "secrets/config.json"
@@ -18,7 +18,7 @@ acceptance_tests:
     tests:
      - config_path: "secrets/config.json"
       backward_compatibility_tests_config:
-          disable_for_version: 0.3.5 # `date` format changed to format: date-time in the AverageSessionLength stream
+          disable_for_version: 0.3.5 # `date` format changed to format: date-time in the AverageSessionLength stream
   basic_read:
     tests:
       - config_path: "secrets/config.json"

From 62daec657da2572a73c049cf50f08997f2263f53 Mon Sep 17 00:00:00 2001
From: Anatolii Yatsuk <35109939+tolik0@users.noreply.github.com>
Date: Fri, 1 Mar 2024 15:16:47 +0200
Subject: [PATCH 045/172] =?UTF-8?q?=E2=9C=A8=20Source=20Microsoft=20OneDri?=
 =?UTF-8?q?ve:=20Enable=20connector=20in=20Cloud=20(#35584)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../source-microsoft-onedrive/README.md       |  163 +-
 .../source-microsoft-onedrive/metadata.yaml   |    4 +-
 .../source-microsoft-onedrive/poetry.lock     | 2278 +++++++++++++++++
 .../source-microsoft-onedrive/pyproject.toml  |   34 +
 .../source-microsoft-onedrive/setup.py        |   49 -
 .../sources/microsoft-onedrive.md             |   19 +-
 6 files changed, 2368 insertions(+), 179 deletions(-)
 create mode 100644 airbyte-integrations/connectors/source-microsoft-onedrive/poetry.lock
 create mode 100644 airbyte-integrations/connectors/source-microsoft-onedrive/pyproject.toml
 delete mode 100644 airbyte-integrations/connectors/source-microsoft-onedrive/setup.py

diff --git a/airbyte-integrations/connectors/source-microsoft-onedrive/README.md b/airbyte-integrations/connectors/source-microsoft-onedrive/README.md
index ee6043e14b38..90cdae8aafc6 100644
--- a/airbyte-integrations/connectors/source-microsoft-onedrive/README.md
+++ b/airbyte-integrations/connectors/source-microsoft-onedrive/README.md
@@ -1,118 +1,55 @@
-# Microsoft Onedrive Source
+# Microsoft OneDrive source connector
 
-This is the repository for the Microsoft Onedrive source connector, written in Python.
+
+This is the repository for the Microsoft OneDrive source connector, written in Python.
 For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/sources/microsoft-onedrive).
 
 ## Local development
 
 ### Prerequisites
-**To iterate on this connector, make sure to complete this prerequisites section.**
+* Python (~=3.9)
+* Poetry (~=1.7) - installation instructions [here](https://python-poetry.org/docs/#installation)
 
-#### Minimum Python version required `= 3.9.0`
-#### Activate Virtual Environment and install dependencies
-From this connector directory, create a virtual environment:
-```
-python -m venv .venv
-```
-
-This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your
-development environment of choice. To activate it from the terminal, run:
-```
-source .venv/bin/activate
-pip install -r requirements.txt
-pip install '.[tests]'
+### Installing the connector
+From this connector directory, run:
+```bash
+poetry install --with dev
 ```
-If you are in an IDE, follow your IDE's instructions to activate the virtualenv.
-Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is
-used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`.
-If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything
-should work as you expect.
-#### Create credentials
+
+### Create credentials
 **If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/microsoft-onedrive)
 to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_microsoft_onedrive/spec.yaml` file.
 Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information.
-See `integration_tests/sample_config.json` for a sample config file.
+See `sample_files/sample_config.json` for a sample config file.
-
-**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source microsoft-onedrive test creds`
-and place them into `secrets/config.json`.
 
 ### Locally running the connector
 ```
-python main.py spec
-python main.py check --config secrets/config.json
-python main.py discover --config secrets/config.json
-python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json
+poetry run source-microsoft-onedrive spec
+poetry run source-microsoft-onedrive check --config secrets/config.json
+poetry run source-microsoft-onedrive discover --config secrets/config.json
+poetry run source-microsoft-onedrive read --config secrets/config.json --catalog sample_files/configured_catalog.json
 ```
-### Locally running the connector docker image
-
-#### Use `airbyte-ci` to build your connector
-The Airbyte way of building this connector is to use our `airbyte-ci` tool.
-You can follow install instructions [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md#L1).
-Then running the following command will build your connector:
-
-```bash
-airbyte-ci connectors --name source-microsoft-onedrive build
+### Running unit tests
+To run unit tests locally, from the connector directory run:
 ```
-Once the command is done, you will find your connector image in your local docker registry: `airbyte/source-microsoft-onedrive:dev`.
-
-##### Customizing our build process
-When contributing on our connector you might need to customize the build process to add a system dependency or set an env var.
-You can customize our build process by adding a `build_customization.py` module to your connector.
-This module should contain a `pre_connector_install` and `post_connector_install` async function that will mutate the base image and the connector container respectively.
-It will be imported at runtime by our build process and the functions will be called if they exist.
-
-Here is an example of a `build_customization.py` module:
-```python
-from __future__ import annotations
-
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    # Feel free to check the dagger documentation for more information on the Container object and its methods.
-    # https://dagger-io.readthedocs.io/en/sdk-python-v0.6.4/
-    from dagger import Container
-
-
-async def pre_connector_install(base_image_container: Container) -> Container:
-    return await base_image_container.with_env_variable("MY_PRE_BUILD_ENV_VAR", "my_pre_build_env_var_value")
-
-async def post_connector_install(connector_container: Container) -> Container:
-    return await connector_container.with_env_variable("MY_POST_BUILD_ENV_VAR", "my_post_build_env_var_value")
 ```
-#### Build your own connector image
-This connector is built using our dynamic built process in `airbyte-ci`.
-The base image used to build it is defined within the metadata.yaml file under the `connectorBuildOptions`.
-The build logic is defined using [Dagger](https://dagger.io/) [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/pipelines/builds/python_connectors.py).
-It does not rely on a Dockerfile.
-
-If you would like to patch our connector and build your own a simple approach would be to:
-
-1. Create your own Dockerfile based on the latest version of the connector image.
-```Dockerfile
-FROM airbyte/source-microsoft-onedrive:latest
-
-COPY . ./airbyte/integration_code
-RUN pip install ./airbyte/integration_code
-
-# The entrypoint and default env vars are already set in the base image
-# ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py"
-# ENTRYPOINT ["python", "/airbyte/integration_code/main.py"]
+### Building the docker image
+1. Install [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md)
+2. Run the following command to build the docker image:
+```bash
+airbyte-ci connectors --name=source-microsoft-onedrive build
 ```
-Please use this as an example. This is not optimized.
-2. Build your image:
-```bash
-docker build -t airbyte/source-microsoft-onedrive:dev .
-# Running the spec command against your patched connector
-docker run airbyte/source-microsoft-onedrive:dev spec
-````
+
+An image will be available on your host with the tag `airbyte/source-microsoft-onedrive:dev`.
-#### Run
+
+### Running as a docker container
 Then run any of the connector commands as follows:
 ```
 docker run --rm airbyte/source-microsoft-onedrive:dev spec
@@ -120,47 +57,35 @@
 docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-microsoft-onedrive:dev discover --config /secrets/config.json
 docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-microsoft-onedrive:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
 ```
-## Testing
-Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named.
-First install test dependencies into your virtual environment:
-```
-pip install .[tests]
-```
-### Unit Tests
-To run unit tests locally, from the connector directory run:
-```
-python -m pytest unit_tests
-```
-### Integration Tests
-There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector).
-#### Custom Integration tests
-Place custom tests inside `integration_tests/` folder, then, from the connector root, run
-```
-python -m pytest integration_tests
+### Running our CI test suite
+You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md):
+```bash
+airbyte-ci connectors --name=source-microsoft-onedrive test
 ```
-### Acceptance Tests
-Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information.
+### Customizing Acceptance Tests
+Customize the `acceptance-test-config.yml` file to configure acceptance tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information.
 If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py.
-Please run acceptance tests via [airbyte-ci](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md#connectors-test-command):
+
+### Dependency Management
+All of your dependencies should be managed via Poetry.
+To add a new dependency, run:
 ```bash
-airbyte-ci connectors --name source-microsoft-onedrive test
+poetry add <package-name>
 ```
-## Dependency Management
-All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
-We split dependencies between two groups, dependencies that are:
-* required for your connector to work need to go to `MAIN_REQUIREMENTS` list.
-* required for the testing need to go to `TEST_REQUIREMENTS` list
+
+Please commit the changes to `pyproject.toml` and `poetry.lock` files.
-### Publishing a new version of the connector
+## Publishing a new version of the connector
 You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what?
 1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-microsoft-onedrive test`
 2. Bump the connector version (please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors)):
+   - bump the `dockerImageTag` value in `metadata.yaml`
+   - bump the `version` value in `pyproject.toml`
 3. Make sure the `metadata.yaml` content is up to date.
-4. Make the connector documentation and its changelog is up to date (`docs/integrations/sources/microsoft-onedrive.md`).
+4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/sources/microsoft-onedrive.md`).
 5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention).
 6. Pat yourself on the back for being an awesome contributor.
 7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
-
+8. Once your PR is merged, the new version of the connector will be automatically published to Docker Hub and our connector registry.
\ No newline at end of file
diff --git a/airbyte-integrations/connectors/source-microsoft-onedrive/metadata.yaml b/airbyte-integrations/connectors/source-microsoft-onedrive/metadata.yaml
index f8833c9d4d99..209d5dab6938 100644
--- a/airbyte-integrations/connectors/source-microsoft-onedrive/metadata.yaml
+++ b/airbyte-integrations/connectors/source-microsoft-onedrive/metadata.yaml
@@ -14,13 +14,13 @@ data:
     oss:
       enabled: true
     cloud:
-      enabled: false # We need to either implement OAuth for cloud or remove OAuth from the config for cloud
+      enabled: true
   connectorBuildOptions:
     baseImage: docker.io/airbyte/python-connector-base:1.2.0@sha256:c22a9d97464b69d6ef01898edf3f8612dc11614f05a84984451dde195f337db9
   connectorSubtype: api
   connectorType: source
   definitionId: 01d1c685-fd4a-4837-8f4c-93fe5a0d2188
-  dockerImageTag: 0.1.6
+  dockerImageTag: 0.1.7
   dockerRepository: airbyte/source-microsoft-onedrive
   githubIssueLabel: source-microsoft-onedrive
   icon: microsoft-onedrive.svg
diff --git a/airbyte-integrations/connectors/source-microsoft-onedrive/poetry.lock b/airbyte-integrations/connectors/source-microsoft-onedrive/poetry.lock
new file mode 100644
index 000000000000..5dacbf85016b
--- /dev/null
+++ b/airbyte-integrations/connectors/source-microsoft-onedrive/poetry.lock
@@ -0,0 +1,2278 @@
+# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
+
+[[package]]
+name = "airbyte-cdk"
+version = "0.61.0"
+description = "A framework for writing Airbyte Connectors."
+optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte-cdk-0.61.0.tar.gz", hash = "sha256:8beda008c5a177041ac02860a431ce7b1ecd00062a4a8f31fe6ac446cbed3e70"}, + {file = "airbyte_cdk-0.61.0-py3-none-any.whl", hash = "sha256:3f989bfe692c9519d61f9120ddb744ab82c432c2caf25374d4d6f5cdc374a1e9"}, +] + +[package.dependencies] +airbyte-protocol-models = "0.5.1" +avro = {version = ">=1.11.2,<1.12.0", optional = true, markers = "extra == \"file-based\""} +backoff = "*" +cachetools = "*" +Deprecated = ">=1.2,<2.0" +dpath = ">=2.0.1,<2.1.0" +fastavro = {version = ">=1.8.0,<1.9.0", optional = true, markers = "extra == \"file-based\""} +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<1.0" +jsonschema = ">=3.2.0,<3.3.0" +markdown = {version = "*", optional = true, markers = "extra == \"file-based\""} +pdf2image = {version = "1.16.3", optional = true, markers = "extra == \"file-based\""} +"pdfminer.six" = {version = "20221105", optional = true, markers = "extra == \"file-based\""} +pendulum = "<3.0.0" +pyarrow = {version = "12.0.1", optional = true, markers = "extra == \"file-based\""} +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +pytesseract = {version = "0.3.10", optional = true, markers = "extra == \"file-based\""} +python-dateutil = "*" +PyYAML = ">=6.0.1" +requests = "*" +requests-cache = "*" +unstructured = [ + {version = "0.10.27", optional = true, markers = "extra == \"file-based\""}, + {version = "0.10.27", extras = ["docx", "pptx"], optional = true, markers = "extra == \"file-based\""}, +] +"unstructured.pytesseract" = {version = ">=0.3.12", optional = true, markers = "extra == \"file-based\""} +wcmatch = "8.4" + +[package.extras] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.5.1" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "atomicwrites" +version = "1.4.1" +description = "Atomic file writes." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "avro" +version = "1.11.3" +description = "Avro is a serialization and RPC framework." +optional = false +python-versions = ">=3.6" +files = [ + {file = "avro-1.11.3.tar.gz", hash = "sha256:3393bb5139f9cf0791d205756ce1e39a5b58586af5b153d6a3b5a199610e9d17"}, +] + +[package.extras] +snappy = ["python-snappy"] +zstandard = ["zstandard"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "beautifulsoup4" +version = "4.12.3" +description = "Screen-scraping library" +optional = false +python-versions = ">=3.6.0" +files = [ + {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, + {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, +] + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +cchardet = ["cchardet"] +chardet = ["chardet"] +charset-normalizer = ["charset-normalizer"] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "cffi" +version = "1.16.0" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = 
"cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "chardet" +version = "5.2.0" +description = "Universal encoding detector for Python 3" +optional = false +python-versions = ">=3.7" +files = [ + {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, + {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false
+python-versions = ">=3.7.0"
+files = [
+    {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"},
+    {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"},
+]
+
+[[package]]
+name = "click"
+version = "8.1.7"
+description = "Composable command line interface toolkit"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
+    {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+description = "Cross-platform colored terminal text."
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+files = [
+    {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+    {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+
+[[package]]
+name = "cryptography"
+version = "42.0.5"
+description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"},
+    {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"},
+    {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"},
+    {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"},
+    {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"},
+    {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"},
+    {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"},
+    {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"},
+    {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"},
+    {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"},
+    {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"},
+    {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"},
+    {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"},
+    {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"},
+    {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"},
+    {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"},
+    {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"},
+    {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"},
+    {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"},
+    {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"},
+    {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"},
+    {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"},
+    {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"},
+    {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"},
+    {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"},
+    {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"},
+    {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"},
+    {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"},
+    {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"},
+    {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"},
+    {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"},
+    {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"},
+]
+
+[package.dependencies]
+cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""}
+
+[package.extras]
+docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"]
+docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"]
+nox = ["nox"]
+pep8test = ["check-sdist", "click", "mypy", "ruff"]
+sdist = ["build"]
+ssh = ["bcrypt (>=3.1.5)"]
+test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
+test-randomorder = ["pytest-randomly"]
+
+[[package]]
+name = "dataclasses-json"
+version = "0.6.4"
+description = "Easily serialize dataclasses to and from JSON."
+optional = false
+python-versions = ">=3.7,<4.0"
+files = [
+    {file = "dataclasses_json-0.6.4-py3-none-any.whl", hash = "sha256:f90578b8a3177f7552f4e1a6e535e84293cd5da421fcce0642d49c0d7bdf8df2"},
+    {file = "dataclasses_json-0.6.4.tar.gz", hash = "sha256:73696ebf24936560cca79a2430cbc4f3dd23ac7bf46ed17f38e5e5e7657a6377"},
+]
+
+[package.dependencies]
+marshmallow = ">=3.18.0,<4.0.0"
+typing-inspect = ">=0.4.0,<1"
+
+[[package]]
+name = "deprecated"
+version = "1.2.14"
+description = "Python @deprecated decorator to deprecate old python classes, functions or methods."
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+    {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"},
+    {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"},
+]
+
+[package.dependencies]
+wrapt = ">=1.10,<2"
+
+[package.extras]
+dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"]
+
+[[package]]
+name = "dpath"
+version = "2.0.8"
+description = "Filesystem-like pathing and searching for dictionaries"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"},
+    {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"},
+]
+
+[[package]]
+name = "emoji"
+version = "2.10.1"
+description = "Emoji for Python"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+    {file = "emoji-2.10.1-py2.py3-none-any.whl", hash = "sha256:11fb369ea79d20c14efa4362c732d67126df294a7959a2c98bfd7447c12a218e"},
+    {file = "emoji-2.10.1.tar.gz", hash = "sha256:16287283518fb7141bde00198f9ffff4e1c1cb570efb68b2f1ec50975c3a581d"},
+]
+
+[package.extras]
+dev = ["coverage", "coveralls", "pytest"]
+
+[[package]]
+name = "exceptiongroup"
+version = "1.2.0"
+description = "Backport of PEP 654 (exception groups)"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"},
+    {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"},
+]
+
+[package.extras]
+test = ["pytest (>=6)"]
+
+[[package]]
+name = "fastavro"
+version = "1.8.4"
+description = "Fast read/write of AVRO files"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "fastavro-1.8.4-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:7afe1475e8a967c04e2b0ef4d33bc10bffa66b4fa6e08bd2ee9d91b6768cba2a"},
+    {file = "fastavro-1.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5fd73609f3c1ac0d90ae3179d2fb9d788f842245db2656ff9225fce871fc5b7"},
+    {file = "fastavro-1.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78fdf1ba47e43146af72ac48d7b2247a06c4f2d95dfdaad6129c481014b07a6b"},
+    {file = "fastavro-1.8.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d950542b3263653f00b695cbc728b5c60ab9ea6df32a7017ad9a6a67235386e7"},
+    {file = "fastavro-1.8.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ce2ccfa9aff8df6da683c48542b7b2a216dde6d3a4d1c505c5e1b8ca2ec0abbb"},
+    {file = "fastavro-1.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:f12f9914d6196695d3208ea348145a80d0defefe16b8a226373fe8ce68f66139"},
+    {file = "fastavro-1.8.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d353aec9c000b96c33ad285651a2cba0f87fe50fcdecc6120689996af427194d"},
+    {file = "fastavro-1.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1eaed91d6e1fb06c172e0aaf4b1ca1fd019c3f4a481e314bf783a4c74f6b7015"},
+    {file = "fastavro-1.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9293b303955acd34a6f69dd4ef3465bd575dbde0cd3e3f00227a0ba5012430b4"},
+    {file = "fastavro-1.8.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b79baefd61554d9f03c4beaebbe638ef175d0efc1fb01f25e88ee6ae97985ab3"},
+    {file = "fastavro-1.8.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:14d7cd3df019d41c66733b8bf5d983af9e1f601d4cb162853a49238a4087d6b0"},
+    {file = "fastavro-1.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:c8fb27001b7023910969f15bee2c9205c4e9f40713929d6c1dca8f470fc8fc80"},
+    {file = "fastavro-1.8.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e331229acef15f858d9863ced7b629ebef4bd5f80766d367255e51cbf44f8dab"},
+    {file = "fastavro-1.8.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04e26b3ba288bd423f25630a3b9bd70cc61b46c6f6161de35e398a6fc8f260f0"},
+    {file = "fastavro-1.8.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6281f4555659ed658b195d1618a637504013e57b680d6cbad7c726e9a4e2cf0b"},
+    {file = "fastavro-1.8.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3201880149e1fb807d616ab46b338a26788173a9f4e8a3396ae145e86af878a1"},
+    {file = "fastavro-1.8.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:39771719fa04b8321eeebfb0813eaa2723c20e5bf570bcca3f53f1169099a0d7"},
+    {file = "fastavro-1.8.4-cp312-cp312-win_amd64.whl", hash = "sha256:7095ae37a5c46dacb7ef430092e5f94650f576be281487b72050c1cf12e4ee20"},
+    {file = "fastavro-1.8.4-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:eb76f5bfcde91cde240c93594dae47670cdf1a95d7e5d0dc3ccdef57c6c1c183"},
+    {file = "fastavro-1.8.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71ebe1cf090f800ca7d4c64d50c81c2a88c56e6ef6aa5eb61ec425e7ae723617"},
+    {file = "fastavro-1.8.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9f0ef601943ea11cd02a59c57f5588cea3e300ac67608f53c904ec7aeddd232"},
+    {file = "fastavro-1.8.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1060318f3ab31bcc4b2184cea3763305b773163381601e304000da81a2f7e11f"},
+    {file = "fastavro-1.8.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:01c8c7f22172174f2c2c0922801b552fbca75758f84b0ad3cd6f3e505a76ed05"},
+    {file = "fastavro-1.8.4-cp38-cp38-win_amd64.whl", hash = "sha256:bc8a1af80b8face4a41d8526a34b6474a874f7367a900d0b14752eacebb7a2b8"},
+    {file = "fastavro-1.8.4-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:687a2f8fa83a76906c4ec35c9d0500e13a567fc631845f0e47646c48233c7725"},
+    {file = "fastavro-1.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b921c63fcfb9149a32c40a9cd27b0e900fcda602455cbce4d773300019b9ce2"},
+    {file = "fastavro-1.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2610a8683b10be7aaa532ddddbcb719883ee2d6f09dafd4a4a7b46d5d719fc07"},
+    {file = "fastavro-1.8.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:94448dc749d098f846f6a6d82d59f85483bd6fcdecfb6234daac5f4494ae4156"},
+    {file = "fastavro-1.8.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2d39c6b5db7014a3722a7d206310874430486f4895161911b6b6574cb1a6c48f"},
+    {file = "fastavro-1.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:3b73472e8da33bcbf08ec989996637aea04eaca71058bb6d45def6fa4168f541"},
+    {file = "fastavro-1.8.4.tar.gz", hash = "sha256:dae6118da27e81abf5957dc79a6d778888fc1bbf67645f52959cb2faba95beff"},
+]
+
+[package.extras]
+codecs = ["lz4", "python-snappy", "zstandard"]
+lz4 = ["lz4"]
+snappy = ["python-snappy"]
+zstandard = ["zstandard"]
+
+[[package]]
+name = "filetype"
+version = "1.2.0"
+description = "Infer file type and MIME type of any file/buffer. No external dependencies."
+optional = false
+python-versions = "*"
+files = [
+    {file = "filetype-1.2.0-py2.py3-none-any.whl", hash = "sha256:7ce71b6880181241cf7ac8697a2f1eb6a8bd9b429f7ad6d27b8db9ba5f1c2d25"},
+    {file = "filetype-1.2.0.tar.gz", hash = "sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb"},
+]
+
+[[package]]
+name = "genson"
+version = "1.2.2"
+description = "GenSON is a powerful, user-friendly JSON Schema generator."
+optional = false
+python-versions = "*"
+files = [
+    {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"},
+]
+
+[[package]]
+name = "idna"
+version = "3.6"
+description = "Internationalized Domain Names in Applications (IDNA)"
+optional = false
+python-versions = ">=3.5"
+files = [
+    {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"},
+    {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"},
+]
+
+[[package]]
+name = "importlib-metadata"
+version = "7.0.1"
+description = "Read metadata from Python packages"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"},
+    {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"},
+]
+
+[package.dependencies]
+zipp = ">=0.5"
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"]
+perf = ["ipython"]
+testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"]
+
+[[package]]
+name = "iniconfig"
+version = "2.0.0"
+description = "brain-dead simple config-ini parsing"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
+    {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
+]
+
+[[package]]
+name = "isodate"
+version = "0.6.1"
+description = "An ISO 8601 date/time/duration parser and formatter"
+optional = false
+python-versions = "*"
+files = [
+    {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"},
+    {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"},
+]
+
+[package.dependencies]
+six = "*"
+
+[[package]]
+name = "jinja2"
+version = "3.1.3"
+description = "A very fast and expressive template engine."
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"},
+    {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"},
+]
+
+[package.dependencies]
+MarkupSafe = ">=2.0"
+
+[package.extras]
+i18n = ["Babel (>=2.7)"]
+
+[[package]]
+name = "joblib"
+version = "1.3.2"
+description = "Lightweight pipelining with Python functions"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "joblib-1.3.2-py3-none-any.whl", hash = "sha256:ef4331c65f239985f3f2220ecc87db222f08fd22097a3dd5698f693875f8cbb9"},
+    {file = "joblib-1.3.2.tar.gz", hash = "sha256:92f865e621e17784e7955080b6d042489e3b8e294949cc44c6eac304f59772b1"},
+]
+
+[[package]]
+name = "jsonref"
+version = "0.3.0"
+description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python."
+optional = false
+python-versions = ">=3.3,<4.0"
+files = [
+    {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"},
+    {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"},
+]
+
+[[package]]
+name = "jsonschema"
+version = "3.2.0"
+description = "An implementation of JSON Schema validation for Python"
+optional = false
+python-versions = "*"
+files = [
+    {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"},
+    {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"},
+]
+
+[package.dependencies]
+attrs = ">=17.4.0"
+pyrsistent = ">=0.14.0"
+setuptools = "*"
+six = ">=1.11.0"
+
+[package.extras]
+format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"]
+format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"]
+
+[[package]]
+name = "langdetect"
+version = "1.0.9"
+description = "Language detection library ported from Google's language-detection."
+optional = false
+python-versions = "*"
+files = [
+    {file = "langdetect-1.0.9-py2-none-any.whl", hash = "sha256:7cbc0746252f19e76f77c0b1690aadf01963be835ef0cd4b56dddf2a8f1dfc2a"},
+    {file = "langdetect-1.0.9.tar.gz", hash = "sha256:cbc1fef89f8d062739774bd51eda3da3274006b3661d199c2655f6b3f6d605a0"},
+]
+
+[package.dependencies]
+six = "*"
+
+[[package]]
+name = "lxml"
+version = "5.1.0"
+description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API."
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "lxml-5.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:704f5572ff473a5f897745abebc6df40f22d4133c1e0a1f124e4f2bd3330ff7e"},
+    {file = "lxml-5.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9d3c0f8567ffe7502d969c2c1b809892dc793b5d0665f602aad19895f8d508da"},
+    {file = "lxml-5.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5fcfbebdb0c5d8d18b84118842f31965d59ee3e66996ac842e21f957eb76138c"},
+    {file = "lxml-5.1.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f37c6d7106a9d6f0708d4e164b707037b7380fcd0b04c5bd9cae1fb46a856fb"},
+    {file = "lxml-5.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2befa20a13f1a75c751f47e00929fb3433d67eb9923c2c0b364de449121f447c"},
+    {file = "lxml-5.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22b7ee4c35f374e2c20337a95502057964d7e35b996b1c667b5c65c567d2252a"},
+    {file = "lxml-5.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bf8443781533b8d37b295016a4b53c1494fa9a03573c09ca5104550c138d5c05"},
+    {file = "lxml-5.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:82bddf0e72cb2af3cbba7cec1d2fd11fda0de6be8f4492223d4a268713ef2147"},
+    {file = "lxml-5.1.0-cp310-cp310-win32.whl", hash = "sha256:b66aa6357b265670bb574f050ffceefb98549c721cf28351b748be1ef9577d93"},
+    {file = "lxml-5.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:4946e7f59b7b6a9e27bef34422f645e9a368cb2be11bf1ef3cafc39a1f6ba68d"},
+    {file = "lxml-5.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:14deca1460b4b0f6b01f1ddc9557704e8b365f55c63070463f6c18619ebf964f"},
+    {file = "lxml-5.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ed8c3d2cd329bf779b7ed38db176738f3f8be637bb395ce9629fc76f78afe3d4"},
+    {file = "lxml-5.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:436a943c2900bb98123b06437cdd30580a61340fbdb7b28aaf345a459c19046a"},
+    {file = "lxml-5.1.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:acb6b2f96f60f70e7f34efe0c3ea34ca63f19ca63ce90019c6cbca6b676e81fa"},
+    {file = "lxml-5.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af8920ce4a55ff41167ddbc20077f5698c2e710ad3353d32a07d3264f3a2021e"},
+    {file = "lxml-5.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cfced4a069003d8913408e10ca8ed092c49a7f6cefee9bb74b6b3e860683b45"},
+    {file = "lxml-5.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9e5ac3437746189a9b4121db2a7b86056ac8786b12e88838696899328fc44bb2"},
+    {file = "lxml-5.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f4c9bda132ad108b387c33fabfea47866af87f4ea6ffb79418004f0521e63204"},
+    {file = "lxml-5.1.0-cp311-cp311-win32.whl", hash = "sha256:bc64d1b1dab08f679fb89c368f4c05693f58a9faf744c4d390d7ed1d8223869b"},
+    {file = "lxml-5.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:a5ab722ae5a873d8dcee1f5f45ddd93c34210aed44ff2dc643b5025981908cda"},
+    {file = "lxml-5.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9aa543980ab1fbf1720969af1d99095a548ea42e00361e727c58a40832439114"},
+    {file = "lxml-5.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6f11b77ec0979f7e4dc5ae081325a2946f1fe424148d3945f943ceaede98adb8"},
+    {file = "lxml-5.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a36c506e5f8aeb40680491d39ed94670487ce6614b9d27cabe45d94cd5d63e1e"},
+    {file = "lxml-5.1.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f643ffd2669ffd4b5a3e9b41c909b72b2a1d5e4915da90a77e119b8d48ce867a"},
+    {file = "lxml-5.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16dd953fb719f0ffc5bc067428fc9e88f599e15723a85618c45847c96f11f431"},
+    {file = "lxml-5.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16018f7099245157564d7148165132c70adb272fb5a17c048ba70d9cc542a1a1"},
+    {file = "lxml-5.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:82cd34f1081ae4ea2ede3d52f71b7be313756e99b4b5f829f89b12da552d3aa3"},
+    {file = "lxml-5.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:19a1bc898ae9f06bccb7c3e1dfd73897ecbbd2c96afe9095a6026016e5ca97b8"},
+    {file = "lxml-5.1.0-cp312-cp312-win32.whl", hash = "sha256:13521a321a25c641b9ea127ef478b580b5ec82aa2e9fc076c86169d161798b01"},
+    {file = "lxml-5.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:1ad17c20e3666c035db502c78b86e58ff6b5991906e55bdbef94977700c72623"},
+    {file = "lxml-5.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:24ef5a4631c0b6cceaf2dbca21687e29725b7c4e171f33a8f8ce23c12558ded1"},
+    {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d2900b7f5318bc7ad8631d3d40190b95ef2aa8cc59473b73b294e4a55e9f30f"},
+    {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:601f4a75797d7a770daed8b42b97cd1bb1ba18bd51a9382077a6a247a12aa38d"},
+    {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4b68c961b5cc402cbd99cca5eb2547e46ce77260eb705f4d117fd9c3f932b95"},
+    {file = "lxml-5.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:afd825e30f8d1f521713a5669b63657bcfe5980a916c95855060048b88e1adb7"},
+    {file = "lxml-5.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:262bc5f512a66b527d026518507e78c2f9c2bd9eb5c8aeeb9f0eb43fcb69dc67"},
+    {file = "lxml-5.1.0-cp36-cp36m-win32.whl", hash = "sha256:e856c1c7255c739434489ec9c8aa9cdf5179785d10ff20add308b5d673bed5cd"},
+    {file = "lxml-5.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:c7257171bb8d4432fe9d6fdde4d55fdbe663a63636a17f7f9aaba9bcb3153ad7"},
+    {file = "lxml-5.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b9e240ae0ba96477682aa87899d94ddec1cc7926f9df29b1dd57b39e797d5ab5"},
+    {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a96f02ba1bcd330807fc060ed91d1f7a20853da6dd449e5da4b09bfcc08fdcf5"},
+    {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3898ae2b58eeafedfe99e542a17859017d72d7f6a63de0f04f99c2cb125936"},
+    {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61c5a7edbd7c695e54fca029ceb351fc45cd8860119a0f83e48be44e1c464862"},
+    {file = "lxml-5.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3aeca824b38ca78d9ee2ab82bd9883083d0492d9d17df065ba3b94e88e4d7ee6"},
+    {file = "lxml-5.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8f52fe6859b9db71ee609b0c0a70fea5f1e71c3462ecf144ca800d3f434f0764"},
+    {file = "lxml-5.1.0-cp37-cp37m-win32.whl", hash = "sha256:d42e3a3fc18acc88b838efded0e6ec3edf3e328a58c68fbd36a7263a874906c8"},
+    {file = "lxml-5.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:eac68f96539b32fce2c9b47eb7c25bb2582bdaf1bbb360d25f564ee9e04c542b"},
+    {file = "lxml-5.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ae15347a88cf8af0949a9872b57a320d2605ae069bcdf047677318bc0bba45b1"},
+    {file = "lxml-5.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c26aab6ea9c54d3bed716b8851c8bfc40cb249b8e9880e250d1eddde9f709bf5"},
+    {file = "lxml-5.1.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:342e95bddec3a698ac24378d61996b3ee5ba9acfeb253986002ac53c9a5f6f84"},
+    {file = "lxml-5.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:725e171e0b99a66ec8605ac77fa12239dbe061482ac854d25720e2294652eeaa"},
+    {file = "lxml-5.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d184e0d5c918cff04cdde9dbdf9600e960161d773666958c9d7b565ccc60c45"},
+    {file = "lxml-5.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:98f3f020a2b736566c707c8e034945c02aa94e124c24f77ca097c446f81b01f1"},
+    {file = "lxml-5.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d48fc57e7c1e3df57be5ae8614bab6d4e7b60f65c5457915c26892c41afc59e"},
+    {file = "lxml-5.1.0-cp38-cp38-win32.whl", hash = "sha256:7ec465e6549ed97e9f1e5ed51c657c9ede767bc1c11552f7f4d022c4df4a977a"},
+    {file = "lxml-5.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:b21b4031b53d25b0858d4e124f2f9131ffc1530431c6d1321805c90da78388d1"},
+    {file = "lxml-5.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:52427a7eadc98f9e62cb1368a5079ae826f94f05755d2d567d93ee1bc3ceb354"},
+    {file = "lxml-5.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6a2a2c724d97c1eb8cf966b16ca2915566a4904b9aad2ed9a09c748ffe14f969"},
+    {file = "lxml-5.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:843b9c835580d52828d8f69ea4302537337a21e6b4f1ec711a52241ba4a824f3"},
+    {file = "lxml-5.1.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b99f564659cfa704a2dd82d0684207b1aadf7d02d33e54845f9fc78e06b7581"},
+    {file = "lxml-5.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f8b0c78e7aac24979ef09b7f50da871c2de2def043d468c4b41f512d831e912"},
+    {file = "lxml-5.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bcf86dfc8ff3e992fed847c077bd875d9e0ba2fa25d859c3a0f0f76f07f0c8d"},
+    {file = "lxml-5.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:49a9b4af45e8b925e1cd6f3b15bbba2c81e7dba6dce170c677c9cda547411e14"},
+    {file = "lxml-5.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:280f3edf15c2a967d923bcfb1f8f15337ad36f93525828b40a0f9d6c2ad24890"},
+    {file = "lxml-5.1.0-cp39-cp39-win32.whl", hash = "sha256:ed7326563024b6e91fef6b6c7a1a2ff0a71b97793ac33dbbcf38f6005e51ff6e"},
+    {file = "lxml-5.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:8d7b4beebb178e9183138f552238f7e6613162a42164233e2bda00cb3afac58f"},
+    {file = "lxml-5.1.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9bd0ae7cc2b85320abd5e0abad5ccee5564ed5f0cc90245d2f9a8ef330a8deae"},
+    {file = "lxml-5.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8c1d679df4361408b628f42b26a5d62bd3e9ba7f0c0e7969f925021554755aa"},
+    {file = "lxml-5.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2ad3a8ce9e8a767131061a22cd28fdffa3cd2dc193f399ff7b81777f3520e372"},
+    {file = "lxml-5.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:304128394c9c22b6569eba2a6d98392b56fbdfbad58f83ea702530be80d0f9df"},
+    {file = "lxml-5.1.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d74fcaf87132ffc0447b3c685a9f862ffb5b43e70ea6beec2fb8057d5d2a1fea"},
+    {file = "lxml-5.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:8cf5877f7ed384dabfdcc37922c3191bf27e55b498fecece9fd5c2c7aaa34c33"},
+    {file = "lxml-5.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:877efb968c3d7eb2dad540b6cabf2f1d3c0fbf4b2d309a3c141f79c7e0061324"},
+    {file = "lxml-5.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f14a4fb1c1c402a22e6a341a24c1341b4a3def81b41cd354386dcb795f83897"},
+    {file = "lxml-5.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:25663d6e99659544ee8fe1b89b1a8c0aaa5e34b103fab124b17fa958c4a324a6"},
+    {file = "lxml-5.1.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8b9f19df998761babaa7f09e6bc169294eefafd6149aaa272081cbddc7ba4ca3"},
+    {file = "lxml-5.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e53d7e6a98b64fe54775d23a7c669763451340c3d44ad5e3a3b48a1efbdc96f"},
+    {file = "lxml-5.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c3cd1fc1dc7c376c54440aeaaa0dcc803d2126732ff5c6b68ccd619f2e64be4f"},
+    {file = "lxml-5.1.0.tar.gz", hash = "sha256:3eea6ed6e6c918e468e693c41ef07f3c3acc310b70ddd9cc72d9ef84bc9564ca"},
+]
+
+[package.extras]
+cssselect = ["cssselect (>=0.7)"]
+html5 = ["html5lib"]
+htmlsoup = ["BeautifulSoup4"]
+source = ["Cython (>=3.0.7)"]
+
+[[package]]
+name = "markdown"
+version = "3.5.2"
+description = "Python implementation of John Gruber's Markdown."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "Markdown-3.5.2-py3-none-any.whl", hash = "sha256:d43323865d89fc0cb9b20c75fc8ad313af307cc087e84b657d9eec768eddeadd"},
+    {file = "Markdown-3.5.2.tar.gz", hash = "sha256:e1ac7b3dc550ee80e602e71c1d168002f062e49f1b11e26a36264dafd4df2ef8"},
+]
+
+[package.dependencies]
+importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""}
+
+[package.extras]
+docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"]
+testing = ["coverage", "pyyaml"]
+
+[[package]]
+name = "markupsafe"
+version = "2.1.5"
+description = "Safely add untrusted strings to HTML/XML markup."
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"},
+    {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"},
+]
+
+[[package]]
+name = "marshmallow"
+version = "3.21.0"
+description = "A lightweight library for converting complex datatypes to and from native Python datatypes."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "marshmallow-3.21.0-py3-none-any.whl", hash = "sha256:e7997f83571c7fd476042c2c188e4ee8a78900ca5e74bd9c8097afa56624e9bd"},
+    {file = "marshmallow-3.21.0.tar.gz", hash = "sha256:20f53be28c6e374a711a16165fb22a8dc6003e3f7cda1285e3ca777b9193885b"},
+]
+
+[package.dependencies]
+packaging = ">=17.0"
+
+[package.extras]
+dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"]
+docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==4.0.0)", "sphinx-version-warning (==1.1.2)"]
+tests = ["pytest", "pytz", "simplejson"]
+
+[[package]]
+name = "msal"
+version = "1.25.0"
+description = "The Microsoft Authentication Library (MSAL) for Python library"
+optional = false
+python-versions = ">=2.7"
+files = [
+    {file = "msal-1.25.0-py2.py3-none-any.whl", hash = "sha256:386df621becb506bc315a713ec3d4d5b5d6163116955c7dde23622f156b81af6"},
+    {file = "msal-1.25.0.tar.gz", hash = "sha256:f44329fdb59f4f044c779164a34474b8a44ad9e4940afbc4c3a3a2bbe90324d9"},
+]
+
+[package.dependencies]
+cryptography = ">=0.6,<44"
+PyJWT = {version = ">=1.0.0,<3", extras = ["crypto"]}
+requests = ">=2.0.0,<3"
+
+[package.extras]
+broker = ["pymsalruntime (>=0.13.2,<0.14)"]
+
+[[package]]
+name = "mypy-extensions"
+version = "1.0.0"
+description = "Type system extensions for programs checked with the mypy type checker."
+optional = false
+python-versions = ">=3.5"
+files = [
+    {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
+    {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
+]
+
+[[package]]
+name = "nltk"
+version = "3.8.1"
+description = "Natural Language Toolkit"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "nltk-3.8.1-py3-none-any.whl", hash = "sha256:fd5c9109f976fa86bcadba8f91e47f5e9293bd034474752e92a520f81c93dda5"},
+    {file = "nltk-3.8.1.zip", hash = "sha256:1834da3d0682cba4f2cede2f9aad6b0fafb6461ba451db0efb6f9c39798d64d3"},
+]
+
+[package.dependencies]
+click = "*"
+joblib = "*"
+regex = ">=2021.8.3"
+tqdm = "*"
+
+[package.extras]
+all = ["matplotlib", "numpy", "pyparsing", "python-crfsuite", "requests", "scikit-learn", "scipy", "twython"]
+corenlp = ["requests"]
+machine-learning = ["numpy", "python-crfsuite", "scikit-learn", "scipy"]
+plot = ["matplotlib"]
+tgrep = ["pyparsing"]
+twitter = ["twython"]
+
+[[package]]
+name = "numpy"
+version = "1.26.4"
+description = "Fundamental package for array computing in Python"
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"},
+    {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"},
+    {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"},
+    {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"},
+    {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"},
+    {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"},
+    {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"},
+    {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"},
+    {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"},
+    {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"},
+    {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"},
+    {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"},
+    {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"},
+    {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"},
+    {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"},
+    {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"},
+    {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"},
+    {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"},
+    {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"},
+    {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"},
+    {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"},
+    {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"},
+    {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"},
+    {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"},
+    {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"},
+    {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"},
+    {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"},
+    {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"},
+    {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"},
+    {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"},
+    {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"},
+    {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"},
+    {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"},
+    {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"},
+    {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"},
+    {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"},
+]
+
+[[package]]
+name = "office365-rest-python-client"
+version = "2.5.5"
+description = "Microsoft 365 & Microsoft Graph Library for Python"
+optional = false
+python-versions = "*"
+files = [
+    {file = "Office365-REST-Python-Client-2.5.5.tar.gz", hash = "sha256:2396f3ac1bc544646abff3db9e45f0e43a28d20668ed9a4736554c5262e70a86"},
+    {file = "Office365_REST_Python_Client-2.5.5-py3-none-any.whl", hash = "sha256:d64dcb9b3fe76859f8d570136c0e448a36ae26a8d71b52b4c5127eb9ae2290ca"},
+]
+
+[package.dependencies]
+msal = "*"
+pytz = "*"
+requests = "*"
+typing-extensions = ">=4.0.0"
+
+[package.extras]
+ntlmprovider = ["requests-ntlm"]
+
+[[package]]
+name = "packaging"
+version = "23.2"
+description = "Core utilities for Python packages"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
+    {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
+]
+
+[[package]]
+name = "pdf2image"
+version = "1.16.3"
+description = "A wrapper around the pdftoppm and pdftocairo command line tools to convert PDF to a PIL Image list."
+optional = false +python-versions = "*" +files = [ + {file = "pdf2image-1.16.3-py3-none-any.whl", hash = "sha256:b6154164af3677211c22cbb38b2bd778b43aca02758e962fe1e231f6d3b0e380"}, + {file = "pdf2image-1.16.3.tar.gz", hash = "sha256:74208810c2cef4d9e347769b8e62a52303982ddb4f2dfd744c7ab4b940ae287e"}, +] + +[package.dependencies] +pillow = "*" + +[[package]] +name = "pdfminer-six" +version = "20221105" +description = "PDF parser and analyzer" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pdfminer.six-20221105-py3-none-any.whl", hash = "sha256:1eaddd712d5b2732f8ac8486824533514f8ba12a0787b3d5fe1e686cd826532d"}, + {file = "pdfminer.six-20221105.tar.gz", hash = "sha256:8448ab7b939d18b64820478ecac5394f482d7a79f5f7eaa7703c6c959c175e1d"}, +] + +[package.dependencies] +charset-normalizer = ">=2.0.0" +cryptography = ">=36.0.0" + +[package.extras] +dev = ["black", "mypy (==0.931)", "nox", "pytest"] +docs = ["sphinx", "sphinx-argparse"] +image = ["Pillow"] + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = 
"sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "pillow" +version = "10.2.0" +description = "Python Imaging Library (Fork)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pillow-10.2.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:7823bdd049099efa16e4246bdf15e5a13dbb18a51b68fa06d6c1d4d8b99a796e"}, + {file = "pillow-10.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:83b2021f2ade7d1ed556bc50a399127d7fb245e725aa0113ebd05cfe88aaf588"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fad5ff2f13d69b7e74ce5b4ecd12cc0ec530fcee76356cac6742785ff71c452"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da2b52b37dad6d9ec64e653637a096905b258d2fc2b984c41ae7d08b938a67e4"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:47c0995fc4e7f79b5cfcab1fc437ff2890b770440f7696a3ba065ee0fd496563"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:322bdf3c9b556e9ffb18f93462e5f749d3444ce081290352c6070d014c93feb2"}, + {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:51f1a1bffc50e2e9492e87d8e09a17c5eea8409cda8d3f277eb6edc82813c17c"}, + {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:69ffdd6120a4737710a9eee73e1d2e37db89b620f702754b8f6e62594471dee0"}, + {file = "pillow-10.2.0-cp310-cp310-win32.whl", hash = "sha256:c6dafac9e0f2b3c78df97e79af707cdc5ef8e88208d686a4847bab8266870023"}, + {file = "pillow-10.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:aebb6044806f2e16ecc07b2a2637ee1ef67a11840a66752751714a0d924adf72"}, + {file = "pillow-10.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:7049e301399273a0136ff39b84c3678e314f2158f50f517bc50285fb5ec847ad"}, + {file = "pillow-10.2.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:35bb52c37f256f662abdfa49d2dfa6ce5d93281d323a9af377a120e89a9eafb5"}, + {file = "pillow-10.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c23f307202661071d94b5e384e1e1dc7dfb972a28a2310e4ee16103e66ddb67"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:773efe0603db30c281521a7c0214cad7836c03b8ccff897beae9b47c0b657d61"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11fa2e5984b949b0dd6d7a94d967743d87c577ff0b83392f17cb3990d0d2fd6e"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:716d30ed977be8b37d3ef185fecb9e5a1d62d110dfbdcd1e2a122ab46fddb03f"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a086c2af425c5f62a65e12fbf385f7c9fcb8f107d0849dba5839461a129cf311"}, + {file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c8de2789052ed501dd829e9cae8d3dcce7acb4777ea4a479c14521c942d395b1"}, + 
{file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:609448742444d9290fd687940ac0b57fb35e6fd92bdb65386e08e99af60bf757"}, + {file = "pillow-10.2.0-cp311-cp311-win32.whl", hash = "sha256:823ef7a27cf86df6597fa0671066c1b596f69eba53efa3d1e1cb8b30f3533068"}, + {file = "pillow-10.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:1da3b2703afd040cf65ec97efea81cfba59cdbed9c11d8efc5ab09df9509fc56"}, + {file = "pillow-10.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:edca80cbfb2b68d7b56930b84a0e45ae1694aeba0541f798e908a49d66b837f1"}, + {file = "pillow-10.2.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:1b5e1b74d1bd1b78bc3477528919414874748dd363e6272efd5abf7654e68bef"}, + {file = "pillow-10.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0eae2073305f451d8ecacb5474997c08569fb4eb4ac231ffa4ad7d342fdc25ac"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7c2286c23cd350b80d2fc9d424fc797575fb16f854b831d16fd47ceec078f2c"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e23412b5c41e58cec602f1135c57dfcf15482013ce6e5f093a86db69646a5aa"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:52a50aa3fb3acb9cf7213573ef55d31d6eca37f5709c69e6858fe3bc04a5c2a2"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:127cee571038f252a552760076407f9cff79761c3d436a12af6000cd182a9d04"}, + {file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8d12251f02d69d8310b046e82572ed486685c38f02176bd08baf216746eb947f"}, + {file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:54f1852cd531aa981bc0965b7d609f5f6cc8ce8c41b1139f6ed6b3c54ab82bfb"}, + {file = "pillow-10.2.0-cp312-cp312-win32.whl", hash = "sha256:257d8788df5ca62c980314053197f4d46eefedf4e6175bc9412f14412ec4ea2f"}, + {file = "pillow-10.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:154e939c5f0053a383de4fd3d3da48d9427a7e985f58af8e94d0b3c9fcfcf4f9"}, + {file = "pillow-10.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:f379abd2f1e3dddb2b61bc67977a6b5a0a3f7485538bcc6f39ec76163891ee48"}, + {file = "pillow-10.2.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8373c6c251f7ef8bda6675dd6d2b3a0fcc31edf1201266b5cf608b62a37407f9"}, + {file = "pillow-10.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:870ea1ada0899fd0b79643990809323b389d4d1d46c192f97342eeb6ee0b8483"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4b6b1e20608493548b1f32bce8cca185bf0480983890403d3b8753e44077129"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3031709084b6e7852d00479fd1d310b07d0ba82765f973b543c8af5061cf990e"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:3ff074fc97dd4e80543a3e91f69d58889baf2002b6be64347ea8cf5533188213"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:cb4c38abeef13c61d6916f264d4845fab99d7b711be96c326b84df9e3e0ff62d"}, + {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b1b3020d90c2d8e1dae29cf3ce54f8094f7938460fb5ce8bc5c01450b01fbaf6"}, + {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:170aeb00224ab3dc54230c797f8404507240dd868cf52066f66a41b33169bdbe"}, + {file = "pillow-10.2.0-cp38-cp38-win32.whl", hash = "sha256:c4225f5220f46b2fde568c74fca27ae9771536c2e29d7c04f4fb62c83275ac4e"}, + {file = 
"pillow-10.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:0689b5a8c5288bc0504d9fcee48f61a6a586b9b98514d7d29b840143d6734f39"}, + {file = "pillow-10.2.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:b792a349405fbc0163190fde0dc7b3fef3c9268292586cf5645598b48e63dc67"}, + {file = "pillow-10.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c570f24be1e468e3f0ce7ef56a89a60f0e05b30a3669a459e419c6eac2c35364"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8ecd059fdaf60c1963c58ceb8997b32e9dc1b911f5da5307aab614f1ce5c2fb"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c365fd1703040de1ec284b176d6af5abe21b427cb3a5ff68e0759e1e313a5e7e"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:70c61d4c475835a19b3a5aa42492409878bbca7438554a1f89d20d58a7c75c01"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b6f491cdf80ae540738859d9766783e3b3c8e5bd37f5dfa0b76abdecc5081f13"}, + {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d189550615b4948f45252d7f005e53c2040cea1af5b60d6f79491a6e147eef7"}, + {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:49d9ba1ed0ef3e061088cd1e7538a0759aab559e2e0a80a36f9fd9d8c0c21591"}, + {file = "pillow-10.2.0-cp39-cp39-win32.whl", hash = "sha256:babf5acfede515f176833ed6028754cbcd0d206f7f614ea3447d67c33be12516"}, + {file = "pillow-10.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:0304004f8067386b477d20a518b50f3fa658a28d44e4116970abfcd94fac34a8"}, + {file = "pillow-10.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:0fb3e7fc88a14eacd303e90481ad983fd5b69c761e9e6ef94c983f91025da869"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:322209c642aabdd6207517e9739c704dc9f9db943015535783239022002f054a"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3eedd52442c0a5ff4f887fab0c1c0bb164d8635b32c894bc1faf4c618dd89df2"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb28c753fd5eb3dd859b4ee95de66cc62af91bcff5db5f2571d32a520baf1f04"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:33870dc4653c5017bf4c8873e5488d8f8d5f8935e2f1fb9a2208c47cdd66efd2"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3c31822339516fb3c82d03f30e22b1d038da87ef27b6a78c9549888f8ceda39a"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a2b56ba36e05f973d450582fb015594aaa78834fefe8dfb8fcd79b93e64ba4c6"}, + {file = "pillow-10.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d8e6aeb9201e655354b3ad049cb77d19813ad4ece0df1249d3c793de3774f8c7"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:2247178effb34a77c11c0e8ac355c7a741ceca0a732b27bf11e747bbc950722f"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15587643b9e5eb26c48e49a7b33659790d28f190fc514a322d55da2fb5c2950e"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753cd8f2086b2b80180d9b3010dd4ed147efc167c90d3bf593fe2af21265e5a5"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7c8f97e8e7a9009bcacbe3766a36175056c12f9a44e6e6f2d5caad06dcfbf03b"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", 
hash = "sha256:d1b35bcd6c5543b9cb547dee3150c93008f8dd0f1fef78fc0cd2b141c5baf58a"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe4c15f6c9285dc54ce6553a3ce908ed37c8f3825b5a51a15c91442bb955b868"}, + {file = "pillow-10.2.0.tar.gz", hash = "sha256:e87f0b2c78157e12d7686b27d63c070fd65d994e8ddae6f328e0dcf4a0cd007e"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +typing = ["typing-extensions"] +xmp = ["defusedxml"] + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + +[[package]] +name = "pyarrow" +version = "12.0.1" +description = "Python library for Apache Arrow" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyarrow-12.0.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:6d288029a94a9bb5407ceebdd7110ba398a00412c5b0155ee9813a40d246c5df"}, + {file = "pyarrow-12.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345e1828efdbd9aa4d4de7d5676778aba384a2c3add896d995b23d368e60e5af"}, + {file = "pyarrow-12.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d6009fdf8986332b2169314da482baed47ac053311c8934ac6651e614deacd6"}, + {file = "pyarrow-12.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d3c4cbbf81e6dd23fe921bc91dc4619ea3b79bc58ef10bce0f49bdafb103daf"}, + {file = "pyarrow-12.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:cdacf515ec276709ac8042c7d9bd5be83b4f5f39c6c037a17a60d7ebfd92c890"}, + {file = "pyarrow-12.0.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:749be7fd2ff260683f9cc739cb862fb11be376de965a2a8ccbf2693b098db6c7"}, + {file = 
"pyarrow-12.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6895b5fb74289d055c43db3af0de6e16b07586c45763cb5e558d38b86a91e3a7"}, + {file = "pyarrow-12.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1887bdae17ec3b4c046fcf19951e71b6a619f39fa674f9881216173566c8f718"}, + {file = "pyarrow-12.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2c9cb8eeabbadf5fcfc3d1ddea616c7ce893db2ce4dcef0ac13b099ad7ca082"}, + {file = "pyarrow-12.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:ce4aebdf412bd0eeb800d8e47db854f9f9f7e2f5a0220440acf219ddfddd4f63"}, + {file = "pyarrow-12.0.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:e0d8730c7f6e893f6db5d5b86eda42c0a130842d101992b581e2138e4d5663d3"}, + {file = "pyarrow-12.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43364daec02f69fec89d2315f7fbfbeec956e0d991cbbef471681bd77875c40f"}, + {file = "pyarrow-12.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:051f9f5ccf585f12d7de836e50965b3c235542cc896959320d9776ab93f3b33d"}, + {file = "pyarrow-12.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:be2757e9275875d2a9c6e6052ac7957fbbfc7bc7370e4a036a9b893e96fedaba"}, + {file = "pyarrow-12.0.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:cf812306d66f40f69e684300f7af5111c11f6e0d89d6b733e05a3de44961529d"}, + {file = "pyarrow-12.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:459a1c0ed2d68671188b2118c63bac91eaef6fc150c77ddd8a583e3c795737bf"}, + {file = "pyarrow-12.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85e705e33eaf666bbe508a16fd5ba27ca061e177916b7a317ba5a51bee43384c"}, + {file = "pyarrow-12.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9120c3eb2b1f6f516a3b7a9714ed860882d9ef98c4b17edcdc91d95b7528db60"}, + {file = "pyarrow-12.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:c780f4dc40460015d80fcd6a6140de80b615349ed68ef9adb653fe351778c9b3"}, + {file = "pyarrow-12.0.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:a3c63124fc26bf5f95f508f5d04e1ece8cc23a8b0af2a1e6ab2b1ec3fdc91b24"}, + {file = "pyarrow-12.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b13329f79fa4472324f8d32dc1b1216616d09bd1e77cfb13104dec5463632c36"}, + {file = "pyarrow-12.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb656150d3d12ec1396f6dde542db1675a95c0cc8366d507347b0beed96e87ca"}, + {file = "pyarrow-12.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6251e38470da97a5b2e00de5c6a049149f7b2bd62f12fa5dbb9ac674119ba71a"}, + {file = "pyarrow-12.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:3de26da901216149ce086920547dfff5cd22818c9eab67ebc41e863a5883bac7"}, + {file = "pyarrow-12.0.1.tar.gz", hash = "sha256:cce317fc96e5b71107bf1f9f184d5e54e2bd14bbf3f9a3d62819961f0af86fec"}, +] + +[package.dependencies] +numpy = ">=1.16.6" + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] + +[[package]] +name = "pydantic" +version = "1.10.14" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + 
{file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, + {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, + {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, + {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, + {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, + {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, + {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, + {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, + {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyjwt" +version = "2.8.0" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, + {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, +] + +[package.dependencies] +cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""} + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + 
+[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = 
"pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytesseract" +version = "0.3.10" +description = "Python-tesseract is a python wrapper for Google's Tesseract-OCR" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytesseract-0.3.10-py3-none-any.whl", hash = "sha256:8f22cc98f765bf13517ead0c70effedb46c153540d25783e04014f28b55a5fc6"}, + {file = "pytesseract-0.3.10.tar.gz", hash = "sha256:f1c3a8b0f07fd01a1085d451f5b8315be6eec1d5577a6796d46dc7a62bd4120f"}, +] + +[package.dependencies] +packaging = ">=21.3" +Pillow = ">=8.0.0" + +[[package]] +name = "pytest" +version = "6.2.5" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, +] + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +toml = "*" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +name = "pytest-mock" +version = "3.12.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = 
"pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, + {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, +] + +[package.dependencies] +pytest = ">=5.0" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dateutil" +version = "2.9.0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.tar.gz", hash = "sha256:78e73e19c63f5b20ffa567001531680d939dc042bf7850431877645523c66709"}, + {file = "python_dateutil-2.9.0-py2.py3-none-any.whl", hash = "sha256:cbf2f1da5e6083ac2fbfd4da39a25f34312230110440f424a14c7558bb85d82e"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-docx" +version = "1.1.0" +description = "Create, read, and update Microsoft Word .docx files." +optional = false +python-versions = ">=3.7" +files = [ + {file = "python-docx-1.1.0.tar.gz", hash = "sha256:5829b722141cf1ab79aedf0c34d9fe9924b29764584c0f2164eb2b02dcdf17c9"}, + {file = "python_docx-1.1.0-py3-none-any.whl", hash = "sha256:bac9773278098a1ddc43a52d84e22f5909c4a3080a624530b3ecb3771b07c6cd"}, +] + +[package.dependencies] +lxml = ">=3.1.0" +typing-extensions = "*" + +[[package]] +name = "python-iso639" +version = "2024.2.7" +description = "Look-up utilities for ISO 639 language codes and names" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-iso639-2024.2.7.tar.gz", hash = "sha256:c323233348c34d57c601e3e6d824088e492896bcb97a61a87f7d93401a305377"}, + {file = "python_iso639-2024.2.7-py3-none-any.whl", hash = "sha256:7b149623ff74230f4ee3061fb01d18e57a8d07c5fee2aa72907f39b7f6d16cbc"}, +] + +[package.extras] +dev = ["black (==24.1.1)", "build (==1.0.3)", "flake8 (==7.0.0)", "pytest (==8.0.0)", "twine (==4.0.2)"] + +[[package]] +name = "python-magic" +version = "0.4.27" +description = "File type identification using libmagic" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "python-magic-0.4.27.tar.gz", hash = "sha256:c1ba14b08e4a5f5c31a302b7721239695b2f0f058d125bd5ce1ee36b9d9d3c3b"}, + {file = "python_magic-0.4.27-py2.py3-none-any.whl", hash = "sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3"}, +] + +[[package]] +name = "python-pptx" +version = "0.6.21" +description = "Generate and manipulate Open XML PowerPoint (.pptx) files" +optional = false +python-versions = "*" +files = [ + {file = "python-pptx-0.6.21.tar.gz", hash = "sha256:7798a2aaf89563565b3c7120c0acfe9aff775db0db3580544e3bf4840c2e378f"}, +] + +[package.dependencies] +lxml = ">=3.1.0" +Pillow = ">=3.3.2" +XlsxWriter = ">=0.5.7" + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = 
"PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file 
= "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "rapidfuzz" +version = "3.6.1" +description = "rapid fuzzy string matching" +optional = false +python-versions = ">=3.8" +files = [ + {file = "rapidfuzz-3.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ac434fc71edda30d45db4a92ba5e7a42c7405e1a54cb4ec01d03cc668c6dcd40"}, + {file = "rapidfuzz-3.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2a791168e119cfddf4b5a40470620c872812042f0621e6a293983a2d52372db0"}, + {file = "rapidfuzz-3.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5a2f3e9df346145c2be94e4d9eeffb82fab0cbfee85bd4a06810e834fe7c03fa"}, + {file = "rapidfuzz-3.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23de71e7f05518b0bbeef55d67b5dbce3bcd3e2c81e7e533051a2e9401354eb0"}, + {file = "rapidfuzz-3.6.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d056e342989248d2bdd67f1955bb7c3b0ecfa239d8f67a8dfe6477b30872c607"}, + {file = "rapidfuzz-3.6.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01835d02acd5d95c1071e1da1bb27fe213c84a013b899aba96380ca9962364bc"}, + {file = "rapidfuzz-3.6.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed0f712e0bb5fea327e92aec8a937afd07ba8de4c529735d82e4c4124c10d5a0"}, + {file = "rapidfuzz-3.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96cd19934f76a1264e8ecfed9d9f5291fde04ecb667faef5f33bdbfd95fe2d1f"}, + {file = "rapidfuzz-3.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e06c4242a1354cf9d48ee01f6f4e6e19c511d50bb1e8d7d20bcadbb83a2aea90"}, + {file = "rapidfuzz-3.6.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d73dcfe789d37c6c8b108bf1e203e027714a239e50ad55572ced3c004424ed3b"}, + {file = "rapidfuzz-3.6.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:06e98ff000e2619e7cfe552d086815671ed09b6899408c2c1b5103658261f6f3"}, + {file = "rapidfuzz-3.6.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:08b6fb47dd889c69fbc0b915d782aaed43e025df6979b6b7f92084ba55edd526"}, + {file = "rapidfuzz-3.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a1788ebb5f5b655a15777e654ea433d198f593230277e74d51a2a1e29a986283"}, + {file = "rapidfuzz-3.6.1-cp310-cp310-win32.whl", hash = "sha256:c65f92881753aa1098c77818e2b04a95048f30edbe9c3094dc3707d67df4598b"}, + {file = "rapidfuzz-3.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:4243a9c35667a349788461aae6471efde8d8800175b7db5148a6ab929628047f"}, + {file = "rapidfuzz-3.6.1-cp310-cp310-win_arm64.whl", hash = 
"sha256:f59d19078cc332dbdf3b7b210852ba1f5db8c0a2cd8cc4c0ed84cc00c76e6802"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fbc07e2e4ac696497c5f66ec35c21ddab3fc7a406640bffed64c26ab2f7ce6d6"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:40cced1a8852652813f30fb5d4b8f9b237112a0bbaeebb0f4cc3611502556764"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:82300e5f8945d601c2daaaac139d5524d7c1fdf719aa799a9439927739917460"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edf97c321fd641fea2793abce0e48fa4f91f3c202092672f8b5b4e781960b891"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7420e801b00dee4a344ae2ee10e837d603461eb180e41d063699fb7efe08faf0"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:060bd7277dc794279fa95522af355034a29c90b42adcb7aa1da358fc839cdb11"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7e3375e4f2bfec77f907680328e4cd16cc64e137c84b1886d547ab340ba6928"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a490cd645ef9d8524090551016f05f052e416c8adb2d8b85d35c9baa9d0428ab"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2e03038bfa66d2d7cffa05d81c2f18fd6acbb25e7e3c068d52bb7469e07ff382"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2b19795b26b979c845dba407fe79d66975d520947b74a8ab6cee1d22686f7967"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:064c1d66c40b3a0f488db1f319a6e75616b2e5fe5430a59f93a9a5e40a656d15"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3c772d04fb0ebeece3109d91f6122b1503023086a9591a0b63d6ee7326bd73d9"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:841eafba6913c4dfd53045835545ba01a41e9644e60920c65b89c8f7e60c00a9"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-win32.whl", hash = "sha256:266dd630f12696ea7119f31d8b8e4959ef45ee2cbedae54417d71ae6f47b9848"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:d79aec8aeee02ab55d0ddb33cea3ecd7b69813a48e423c966a26d7aab025cdfe"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-win_arm64.whl", hash = "sha256:484759b5dbc5559e76fefaa9170147d1254468f555fd9649aea3bad46162a88b"}, + {file = "rapidfuzz-3.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b2ef4c0fd3256e357b70591ffb9e8ed1d439fb1f481ba03016e751a55261d7c1"}, + {file = "rapidfuzz-3.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:588c4b20fa2fae79d60a4e438cf7133d6773915df3cc0a7f1351da19eb90f720"}, + {file = "rapidfuzz-3.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7142ee354e9c06e29a2636b9bbcb592bb00600a88f02aa5e70e4f230347b373e"}, + {file = "rapidfuzz-3.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1dfc557c0454ad22382373ec1b7df530b4bbd974335efe97a04caec936f2956a"}, + {file = "rapidfuzz-3.6.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:03f73b381bdeccb331a12c3c60f1e41943931461cdb52987f2ecf46bfc22f50d"}, + {file = "rapidfuzz-3.6.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b0ccc2ec1781c7e5370d96aef0573dd1f97335343e4982bdb3a44c133e27786"}, + {file = 
"rapidfuzz-3.6.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da3e8c9f7e64bb17faefda085ff6862ecb3ad8b79b0f618a6cf4452028aa2222"}, + {file = "rapidfuzz-3.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fde9b14302a31af7bdafbf5cfbb100201ba21519be2b9dedcf4f1048e4fbe65d"}, + {file = "rapidfuzz-3.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1a23eee225dfb21c07f25c9fcf23eb055d0056b48e740fe241cbb4b22284379"}, + {file = "rapidfuzz-3.6.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e49b9575d16c56c696bc7b06a06bf0c3d4ef01e89137b3ddd4e2ce709af9fe06"}, + {file = "rapidfuzz-3.6.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:0a9fc714b8c290261669f22808913aad49553b686115ad0ee999d1cb3df0cd66"}, + {file = "rapidfuzz-3.6.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:a3ee4f8f076aa92184e80308fc1a079ac356b99c39408fa422bbd00145be9854"}, + {file = "rapidfuzz-3.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f056ba42fd2f32e06b2c2ba2443594873cfccc0c90c8b6327904fc2ddf6d5799"}, + {file = "rapidfuzz-3.6.1-cp312-cp312-win32.whl", hash = "sha256:5d82b9651e3d34b23e4e8e201ecd3477c2baa17b638979deeabbb585bcb8ba74"}, + {file = "rapidfuzz-3.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:dad55a514868dae4543ca48c4e1fc0fac704ead038dafedf8f1fc0cc263746c1"}, + {file = "rapidfuzz-3.6.1-cp312-cp312-win_arm64.whl", hash = "sha256:3c84294f4470fcabd7830795d754d808133329e0a81d62fcc2e65886164be83b"}, + {file = "rapidfuzz-3.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e19d519386e9db4a5335a4b29f25b8183a1c3f78cecb4c9c3112e7f86470e37f"}, + {file = "rapidfuzz-3.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01eb03cd880a294d1bf1a583fdd00b87169b9cc9c9f52587411506658c864d73"}, + {file = "rapidfuzz-3.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:be368573255f8fbb0125a78330a1a40c65e9ba3c5ad129a426ff4289099bfb41"}, + {file = "rapidfuzz-3.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3e5af946f419c30f5cb98b69d40997fe8580efe78fc83c2f0f25b60d0e56efb"}, + {file = "rapidfuzz-3.6.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f382f7ffe384ce34345e1c0b2065451267d3453cadde78946fbd99a59f0cc23c"}, + {file = "rapidfuzz-3.6.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be156f51f3a4f369e758505ed4ae64ea88900dcb2f89d5aabb5752676d3f3d7e"}, + {file = "rapidfuzz-3.6.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1936d134b6c513fbe934aeb668b0fee1ffd4729a3c9d8d373f3e404fbb0ce8a0"}, + {file = "rapidfuzz-3.6.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12ff8eaf4a9399eb2bebd838f16e2d1ded0955230283b07376d68947bbc2d33d"}, + {file = "rapidfuzz-3.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae598a172e3a95df3383634589660d6b170cc1336fe7578115c584a99e0ba64d"}, + {file = "rapidfuzz-3.6.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cd4ba4c18b149da11e7f1b3584813159f189dc20833709de5f3df8b1342a9759"}, + {file = "rapidfuzz-3.6.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:0402f1629e91a4b2e4aee68043a30191e5e1b7cd2aa8dacf50b1a1bcf6b7d3ab"}, + {file = "rapidfuzz-3.6.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:1e12319c6b304cd4c32d5db00b7a1e36bdc66179c44c5707f6faa5a889a317c0"}, + {file = "rapidfuzz-3.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0bbfae35ce4de4c574b386c43c78a0be176eeddfdae148cb2136f4605bebab89"}, + {file = "rapidfuzz-3.6.1-cp38-cp38-win32.whl", hash = 
"sha256:7fec74c234d3097612ea80f2a80c60720eec34947066d33d34dc07a3092e8105"}, + {file = "rapidfuzz-3.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:a553cc1a80d97459d587529cc43a4c7c5ecf835f572b671107692fe9eddf3e24"}, + {file = "rapidfuzz-3.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:757dfd7392ec6346bd004f8826afb3bf01d18a723c97cbe9958c733ab1a51791"}, + {file = "rapidfuzz-3.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2963f4a3f763870a16ee076796be31a4a0958fbae133dbc43fc55c3968564cf5"}, + {file = "rapidfuzz-3.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d2f0274595cc5b2b929c80d4e71b35041104b577e118cf789b3fe0a77b37a4c5"}, + {file = "rapidfuzz-3.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f211e366e026de110a4246801d43a907cd1a10948082f47e8a4e6da76fef52"}, + {file = "rapidfuzz-3.6.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a59472b43879012b90989603aa5a6937a869a72723b1bf2ff1a0d1edee2cc8e6"}, + {file = "rapidfuzz-3.6.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a03863714fa6936f90caa7b4b50ea59ea32bb498cc91f74dc25485b3f8fccfe9"}, + {file = "rapidfuzz-3.6.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd95b6b7bfb1584f806db89e1e0c8dbb9d25a30a4683880c195cc7f197eaf0c"}, + {file = "rapidfuzz-3.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7183157edf0c982c0b8592686535c8b3e107f13904b36d85219c77be5cefd0d8"}, + {file = "rapidfuzz-3.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ad9d74ef7c619b5b0577e909582a1928d93e07d271af18ba43e428dc3512c2a1"}, + {file = "rapidfuzz-3.6.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b53137d81e770c82189e07a8f32722d9e4260f13a0aec9914029206ead38cac3"}, + {file = "rapidfuzz-3.6.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:49b9ed2472394d306d5dc967a7de48b0aab599016aa4477127b20c2ed982dbf9"}, + {file = "rapidfuzz-3.6.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:dec307b57ec2d5054d77d03ee4f654afcd2c18aee00c48014cb70bfed79597d6"}, + {file = "rapidfuzz-3.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4381023fa1ff32fd5076f5d8321249a9aa62128eb3f21d7ee6a55373e672b261"}, + {file = "rapidfuzz-3.6.1-cp39-cp39-win32.whl", hash = "sha256:8d7a072f10ee57c8413c8ab9593086d42aaff6ee65df4aa6663eecdb7c398dca"}, + {file = "rapidfuzz-3.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:ebcfb5bfd0a733514352cfc94224faad8791e576a80ffe2fd40b2177bf0e7198"}, + {file = "rapidfuzz-3.6.1-cp39-cp39-win_arm64.whl", hash = "sha256:1c47d592e447738744905c18dda47ed155620204714e6df20eb1941bb1ba315e"}, + {file = "rapidfuzz-3.6.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:eef8b346ab331bec12bbc83ac75641249e6167fab3d84d8f5ca37fd8e6c7a08c"}, + {file = "rapidfuzz-3.6.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53251e256017e2b87f7000aee0353ba42392c442ae0bafd0f6b948593d3f68c6"}, + {file = "rapidfuzz-3.6.1-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6dede83a6b903e3ebcd7e8137e7ff46907ce9316e9d7e7f917d7e7cdc570ee05"}, + {file = "rapidfuzz-3.6.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e4da90e4c2b444d0a171d7444ea10152e07e95972bb40b834a13bdd6de1110c"}, + {file = "rapidfuzz-3.6.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:ca3dfcf74f2b6962f411c33dd95b0adf3901266e770da6281bc96bb5a8b20de9"}, + {file = "rapidfuzz-3.6.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:bcc957c0a8bde8007f1a8a413a632a1a409890f31f73fe764ef4eac55f59ca87"}, + {file = "rapidfuzz-3.6.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:692c9a50bea7a8537442834f9bc6b7d29d8729a5b6379df17c31b6ab4df948c2"}, + {file = "rapidfuzz-3.6.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c23ceaea27e790ddd35ef88b84cf9d721806ca366199a76fd47cfc0457a81b"}, + {file = "rapidfuzz-3.6.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b155e67fff215c09f130555002e42f7517d0ea72cbd58050abb83cb7c880cec"}, + {file = "rapidfuzz-3.6.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3028ee8ecc48250607fa8a0adce37b56275ec3b1acaccd84aee1f68487c8557b"}, + {file = "rapidfuzz-3.6.1.tar.gz", hash = "sha256:35660bee3ce1204872574fa041c7ad7ec5175b3053a4cb6e181463fc07013de7"}, +] + +[package.extras] +full = ["numpy"] + +[[package]] +name = "regex" +version = "2023.12.25" +description = "Alternative regular expression module, to replace re." +optional = false +python-versions = ">=3.7" +files = [ + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, + {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, + {file = 
"regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, + {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, + {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, + {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, + {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, + {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, + {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, + {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = 
"sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, + {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, + {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, + {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, + {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, + {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "requests-mock" +version = "1.11.0" +description = "Mock out responses from the requests package" +optional = false +python-versions = "*" +files = [ + {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, + {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, +] + +[package.dependencies] +requests = ">=2.3,<3" +six = "*" + +[package.extras] +fixture = ["fixtures"] +test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] + +[[package]] +name = "setuptools" +version = "69.1.1" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56"}, + {file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", 
"pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "smart-open" +version = "6.4.0" +description = "Utils for streaming large files (S3, HDFS, GCS, Azure Blob Storage, gzip, bz2...)" +optional = false +python-versions = ">=3.6,<4.0" +files = [ + {file = "smart_open-6.4.0-py3-none-any.whl", hash = "sha256:8d3ef7e6997e8e42dd55c74166ed21e6ac70664caa32dd940b26d54a8f6b4142"}, + {file = "smart_open-6.4.0.tar.gz", hash = "sha256:be3c92c246fbe80ebce8fbacb180494a481a77fcdcb7c1aadb2ea5b9c2bee8b9"}, +] + +[package.extras] +all = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage (>=2.6.0)", "paramiko", "requests"] +azure = ["azure-common", "azure-core", "azure-storage-blob"] +gcs = ["google-cloud-storage (>=2.6.0)"] +http = ["requests"] +s3 = ["boto3"] +ssh = ["paramiko"] +test = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage (>=2.6.0)", "moto[server]", "paramiko", "pytest", "pytest-rerunfailures", "requests", "responses"] +webhdfs = ["requests"] + +[[package]] +name = "soupsieve" +version = "2.5" +description = "A modern CSS selector implementation for Beautiful Soup." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, + {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, +] + +[[package]] +name = "tabulate" +version = "0.9.0" +description = "Pretty-print tabular data" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, + {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, +] + +[package.extras] +widechars = ["wcwidth"] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "tqdm" +version = "4.66.2" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "typing-extensions" +version = "4.10.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, +] + +[[package]] +name = "typing-inspect" +version = "0.9.0" +description = "Runtime inspection utilities for typing module." +optional = false +python-versions = "*" +files = [ + {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, + {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, +] + +[package.dependencies] +mypy-extensions = ">=0.3.0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "unstructured" +version = "0.10.27" +description = "A library that prepares raw documents for downstream ML tasks." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "unstructured-0.10.27-py3-none-any.whl", hash = "sha256:3a8a8e44302388ddc39c184059e8b4458f1cdc58032540b9af7d85f6c3eca3be"}, + {file = "unstructured-0.10.27.tar.gz", hash = "sha256:f567b5c4385993a9ab48db5563dd7b413aac4f2002bb22e6250496ea8f440f5e"}, +] + +[package.dependencies] +backoff = "*" +beautifulsoup4 = "*" +chardet = "*" +dataclasses-json = "*" +emoji = "*" +filetype = "*" +langdetect = "*" +lxml = "*" +nltk = "*" +numpy = "*" +python-docx = {version = ">=1.0.1", optional = true, markers = "extra == \"docx\""} +python-iso639 = "*" +python-magic = "*" +python-pptx = {version = "<=0.6.21", optional = true, markers = "extra == \"pptx\""} +rapidfuzz = "*" +requests = "*" +tabulate = "*" +typing-extensions = "*" + +[package.extras] +airtable = ["pyairtable"] +all-docs = ["markdown", "msg-parser", "networkx", "onnx", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pypandoc", "python-docx (>=1.0.1)", "python-pptx (<=0.6.21)", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)", "xlrd"] +azure = ["adlfs", "fsspec (==2023.9.1)"] +azure-cognitive-search = ["azure-search-documents"] +bedrock = ["boto3", "langchain"] +biomed = ["bs4"] +box = ["boxfs", "fsspec (==2023.9.1)"] +confluence = ["atlassian-python-api"] +csv = ["pandas"] +delta-table = ["deltalake", "fsspec (==2023.9.1)"] +discord = ["discord-py"] +doc = ["python-docx (>=1.0.1)"] +docx = ["python-docx (>=1.0.1)"] +dropbox = ["dropboxdrivefs", "fsspec (==2023.9.1)"] +elasticsearch = ["elasticsearch", "jq"] +embed-huggingface = ["huggingface", "langchain", "sentence-transformers"] +epub = ["pypandoc"] +gcs = ["bs4", "fsspec (==2023.9.1)", "gcsfs"] +github = ["pygithub (>1.58.0)"] +gitlab = ["python-gitlab"] +google-drive = ["google-api-python-client"] +huggingface = ["langdetect", "sacremoses", "sentencepiece", "torch", "transformers"] +image = ["onnx", "pdf2image", "pdfminer.six", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)"] +jira = ["atlassian-python-api"] +local-inference = ["markdown", "msg-parser", "networkx", "onnx", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pypandoc", "python-docx (>=1.0.1)", "python-pptx (<=0.6.21)", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)", "xlrd"] +md = ["markdown"] +msg = ["msg-parser"] +notion = ["htmlBuilder", "notion-client"] +odt = ["pypandoc", "python-docx (>=1.0.1)"] +onedrive = ["Office365-REST-Python-Client (<2.4.3)", "bs4", "msal"] +openai = ["langchain", "openai", "tiktoken"] +org = ["pypandoc"] +outlook = ["Office365-REST-Python-Client (<2.4.3)", "msal"] +paddleocr = ["unstructured.paddleocr (==2.6.1.3)"] +pdf = ["onnx", "pdf2image", "pdfminer.six", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)"] +ppt = ["python-pptx (<=0.6.21)"] +pptx = ["python-pptx (<=0.6.21)"] +reddit = ["praw"] +rst = ["pypandoc"] +rtf = ["pypandoc"] +s3 = ["fsspec (==2023.9.1)", "s3fs"] +salesforce = ["simple-salesforce"] +sharepoint = ["Office365-REST-Python-Client (<2.4.3)", "msal"] +slack = ["slack-sdk"] +tsv = ["pandas"] +wikipedia = ["wikipedia"] +xlsx = ["networkx", "openpyxl", "pandas", "xlrd"] + +[[package]] +name = "unstructured-pytesseract" +version = "0.3.12" +description = "Python-tesseract is a python wrapper for Google's Tesseract-OCR" +optional = false +python-versions = ">=3.8" +files = [ + {file = "unstructured.pytesseract-0.3.12-py3-none-any.whl", hash = 
"sha256:6ed42530fc697bb08d1ae4884cc517ee808620c1c1414efe8d5d90334da068d3"}, + {file = "unstructured.pytesseract-0.3.12.tar.gz", hash = "sha256:751a21d67b1f109036bf4daf796d3e04631697a355efd650f3373412b249de2e"}, +] + +[package.dependencies] +packaging = ">=21.3" +Pillow = ">=8.0.0" + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = 
"wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = 
"wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", 
hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[[package]] +name = "xlsxwriter" +version = "3.2.0" +description = "A Python module for creating Excel XLSX files." +optional = false +python-versions = ">=3.6" +files = [ + {file = "XlsxWriter-3.2.0-py3-none-any.whl", hash = "sha256:ecfd5405b3e0e228219bcaf24c2ca0915e012ca9464a14048021d21a995d490e"}, + {file = "XlsxWriter-3.2.0.tar.gz", hash = "sha256:9977d0c661a72866a61f9f7a809e25ebbb0fb7036baa3b9fe74afcfca6b3cb8c"}, +] + +[[package]] +name = "zipp" +version = "3.17.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, + {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "eee8676fc20f015e8f2496c3cb4c46ef8e8d81d828f49466448868efadb0b53a" diff --git a/airbyte-integrations/connectors/source-microsoft-onedrive/pyproject.toml b/airbyte-integrations/connectors/source-microsoft-onedrive/pyproject.toml new file mode 100644 index 000000000000..97cf3c7f5189 --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-onedrive/pyproject.toml @@ -0,0 +1,34 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +version = "0.1.7" +name = "source-microsoft-onedrive" +description = "Source implementation for Microsoft OneDrive." +authors = [ "Airbyte ",] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/sources/microsoft-onedrive" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" +[[tool.poetry.packages]] +include = "source_microsoft_onedrive" + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +Office365-REST-Python-Client = "==2.5.5" +smart-open = "==6.4.0" +msal = "==1.25.0" + +[tool.poetry.scripts] +source-microsoft-onedrive = "source_microsoft_onedrive.run:run" + +[tool.poetry.dependencies.airbyte-cdk] +extras = [ "file-based",] +version = "==0.61.0" + +[tool.poetry.group.dev.dependencies] +requests-mock = "^1.11.0" +pytest = "^6.1" +pytest-mock = "^3.6.1" diff --git a/airbyte-integrations/connectors/source-microsoft-onedrive/setup.py b/airbyte-integrations/connectors/source-microsoft-onedrive/setup.py deleted file mode 100644 index 526e39f5c813..000000000000 --- a/airbyte-integrations/connectors/source-microsoft-onedrive/setup.py +++ /dev/null @@ -1,49 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk[file-based]>=0.61.0", - "msal~=1.25.0", - "Office365-REST-Python-Client~=2.5.2", - "smart-open~=6.4.0", -] - -TEST_REQUIREMENTS = [ - "pytest-mock~=3.6.1", - "pytest~=6.1", - "requests-mock~=1.11.0", -] - -setup( - entry_points={ - "console_scripts": [ - "source-microsoft-onedrive=source_microsoft_onedrive.run:run", - ], - }, - name="source_microsoft_onedrive", - description="Source implementation for Microsoft OneDrive.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={ - "": [ - # Include yaml files in the package (if any) - "*.yml", - "*.yaml", - # Include all json files in the package, up to 4 levels deep - "*.json", - "*/*.json", - "*/*/*.json", - "*/*/*/*.json", - "*/*/*/*/*.json", - ] - }, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/docs/integrations/sources/microsoft-onedrive.md b/docs/integrations/sources/microsoft-onedrive.md index c840f58b9517..099761b28b4c 100644 --- a/docs/integrations/sources/microsoft-onedrive.md +++ b/docs/integrations/sources/microsoft-onedrive.md @@ -119,12 +119,13 @@ The connector is restricted by normal Microsoft Graph [requests limitation](http ## Changelog -| Version | Date | Pull Request | Subject | -|:--------|:-----------|:---------------------------------------------------------|:--------------------------| -| 0.1.6 | 2024-02-06 | [34936](https://github.com/airbytehq/airbyte/pull/34936) | Bump CDK version to avoid missing SyncMode errors | -| 0.1.5 | 2024-01-30 | [34681](https://github.com/airbytehq/airbyte/pull/34681) | Unpin CDK version to make compatible with the Concurrent CDK | -| 0.1.4 | 2024-01-30 | [34661](https://github.com/airbytehq/airbyte/pull/34661) | Pin CDK version until upgrade for compatibility with the Concurrent CDK | -| 0.1.3 | 2024-01-24 | [34478](https://github.com/airbytehq/airbyte/pull/34478) | Fix OAuth | -| 0.1.2 | 2021-12-22 | [33745](https://github.com/airbytehq/airbyte/pull/33745) | Add ql and sl to metadata | -| 0.1.1 | 2021-12-15 | [33758](https://github.com/airbytehq/airbyte/pull/33758) | Fix for docs name | -| 0.1.0 | 2021-12-06 | [32655](https://github.com/airbytehq/airbyte/pull/32655) | New source | +| Version | Date | Pull Request | Subject | +|:--------|:-----------|:---------------------------------------------------------|:------------------------------------------------------------------------| +| 0.1.7 | 2024-03-01 | [34936](https://github.com/airbytehq/airbyte/pull/34936) | Enable in Cloud | +| 0.1.6 | 2024-02-06 | [34936](https://github.com/airbytehq/airbyte/pull/34936) | Bump CDK version to avoid missing SyncMode errors | +| 0.1.5 | 2024-01-30 | [34681](https://github.com/airbytehq/airbyte/pull/34681) | Unpin CDK version to make compatible with the Concurrent CDK | +| 0.1.4 | 2024-01-30 | [34661](https://github.com/airbytehq/airbyte/pull/34661) | Pin CDK version until upgrade for compatibility with the Concurrent CDK | +| 0.1.3 | 2024-01-24 | [34478](https://github.com/airbytehq/airbyte/pull/34478) | Fix OAuth | +| 0.1.2 | 2021-12-22 | [33745](https://github.com/airbytehq/airbyte/pull/33745) | Add ql and sl to metadata | +| 0.1.1 | 2021-12-15 | [33758](https://github.com/airbytehq/airbyte/pull/33758) | Fix for docs name | +| 0.1.0 | 2021-12-06 | [32655](https://github.com/airbytehq/airbyte/pull/32655) | New source | From 1da8836f6141a569136ef34bdca58e5e52c56e93 Mon Sep 17 00:00:00 2001 From: Anton
Karpets Date: Fri, 1 Mar 2024 16:38:48 +0200 Subject: [PATCH 046/172] =?UTF-8?q?=F0=9F=8F=A5Source=20Chargebee:=20fix?= =?UTF-8?q?=20CAT=20(#35750)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../connectors/source-chargebee/README.md | 2 +- .../source-chargebee/acceptance-test-config.yml | 10 ++++++---- .../integration_tests/expected_records.jsonl | 3 --- .../connectors/source-chargebee/pyproject.toml | 2 +- 4 files changed, 8 insertions(+), 9 deletions(-) diff --git a/airbyte-integrations/connectors/source-chargebee/README.md b/airbyte-integrations/connectors/source-chargebee/README.md index 5169efb56baa..84399c6eb84e 100644 --- a/airbyte-integrations/connectors/source-chargebee/README.md +++ b/airbyte-integrations/connectors/source-chargebee/README.md @@ -30,7 +30,7 @@ See `sample_files/sample_config.json` for a sample config file. poetry run source-chargebee spec poetry run source-chargebee check --config secrets/config.json poetry run source-chargebee discover --config secrets/config.json -poetry run source-chargebee read --config secrets/config.json --catalog sample_files/configured_catalog.json +poetry run source-chargebee read --config secrets/config.json --catalog integration_tests/configured_catalog.json ``` ### Running unit tests diff --git a/airbyte-integrations/connectors/source-chargebee/acceptance-test-config.yml b/airbyte-integrations/connectors/source-chargebee/acceptance-test-config.yml index 333354b0dc91..850da25aec46 100644 --- a/airbyte-integrations/connectors/source-chargebee/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-chargebee/acceptance-test-config.yml @@ -31,13 +31,15 @@ acceptance_tests: - name: "event" bypass_reason: "Unstable data. Test data is not persistent." - name: "site_migration_detail" - bypass_reason: "Cannnot populate with test data." + bypass_reason: "Cannot populate with test data." - name: "customer" - bypass_reason: "To be Tested with integration tests." + bypass_reason: "To be tested with integration tests." - name: "subscription" - bypass_reason: "To be Tested with integration tests." + bypass_reason: "To be tested with integration tests." - name: "coupon" - bypass_reason: "To be Tested with integration tests." + bypass_reason: "To be tested with integration tests." + - name: "hosted_page" + bypass_reason: "To be tested with integration tests." 
expect_records: path: "integration_tests/expected_records.jsonl" exact_order: no diff --git a/airbyte-integrations/connectors/source-chargebee/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-chargebee/integration_tests/expected_records.jsonl index 1ef91d437747..4576e894a548 100644 --- a/airbyte-integrations/connectors/source-chargebee/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-chargebee/integration_tests/expected_records.jsonl @@ -42,9 +42,6 @@ {"stream": "gift", "data": {"id": "Azz5jBTTJ96eclvRDvCs2SkyRM3cdsflXE5ClcIpcdbOPaa950", "status": "unclaimed", "scheduled_at": 1674057609, "auto_claim": false, "updated_at": 1674057613, "resource_version": 1674057613941, "object": "gift", "no_expiry": true, "gifter": {"customer_id": "Azz5jBTTJ96Mjlv5", "invoice_id": "27", "signature": "Airbyte", "note": "Test gift", "object": "gifter"}, "gift_receiver": {"customer_id": "Azz5jBTTJ96UqlvE", "subscription_id": "Azz5jBTTJ96Y2lvK", "first_name": "Test", "last_name": "2", "email": "integration-tgest@airbyte.io", "object": "gift_receiver"}, "gift_timelines": [{"status": "unclaimed", "occurred_at": 1674057613, "object": "gift_timeline"}, {"status": "scheduled", "occurred_at": 1674057604, "object": "gift_timeline"}], "custom_fields": []}, "emitted_at": 1705083808513} {"stream": "unbilled_charge", "data": {"id": "li_AzyhFLU1ehAb9vUH", "customer_id": "cbdemo_douglas", "subscription_id": "AzZTZgTTHdIU1NP", "date_from": 1705478400, "date_to": 1705564799, "unit_amount": 123, "pricing_model": "flat_fee", "quantity": 1, "amount": 123, "discount_amount": 0, "description": "Test charge #2", "is_voided": false, "updated_at": 1705524542, "deleted": false, "object": "unbilled_charge", "entity_type": "adhoc", "currency_code": "USD", "custom_fields": []}, "emitted_at": 1705535324699} {"stream": "unbilled_charge", "data": {"id": "li_6oap6U1egpE4vAs", "customer_id": "cbdemo_douglas", "subscription_id": "AzZTZgTTHdIU1NP", "date_from": 1705478400, "date_to": 1705564799, "unit_amount": 100, "pricing_model": "flat_fee", "quantity": 1, "amount": 100, "discount_amount": 0, "description": "Implementation charge", "is_voided": false, "updated_at": 1705524460, "deleted": false, "object": "unbilled_charge", "entity_type": "adhoc", "currency_code": "USD", "custom_fields": []}, "emitted_at": 1705535324701} -{"stream": "hosted_page", "data": {"id": "lxlrZIGiyRcuJCr2Uk3lfsqfOBhk2qwdA", "type": "checkout_gift", "url": "https://airbyte-test.chargebee.com/pages/v3/lxlrZIGiyRcuJCr2Uk3lfsqfOBhk2qwdA/", "state": "requested", "embed": false, "created_at": 1705600066, "object": "hosted_page", "updated_at": 1705600066, "resource_version": 1705600066437, "custom_fields": []}, "emitted_at": 1705600142998} -{"stream": "hosted_page", "data": {"id": "JgmYiyyrUG00JUQmjYcuVhcdOwnqoaCBw8", "type": "collect_now", "url": "https://airbyte-test.chargebee.com/pages/v3/JgmYiyyrUG00JUQmjYcuVhcdOwnqoaCBw8/collect_now", "state": "requested", "embed": false, "created_at": 1705600053, "expires_at": 1706032053, "object": "hosted_page", "updated_at": 1705600053, "resource_version": 1705600053347, "custom_fields": []}, "emitted_at": 1705600143003} -{"stream": "hosted_page", "data": {"id": "DEj7ybCXRlg2QBdtsPk80h0cuzyWfFcdHn", "type": "checkout_gift", "url": "https://airbyte-test.chargebee.com/pages/v3/DEj7ybCXRlg2QBdtsPk80h0cuzyWfFcdHn/", "state": "requested", "embed": false, "created_at": 1705599992, "object": "hosted_page", "updated_at": 1705599992, "resource_version": 1705599992794, 
"custom_fields": []}, "emitted_at": 1705600143008} {"stream": "item_family", "data": {"id": "test-4", "name": "test item family 4", "status": "active", "resource_version": 1705960880668, "updated_at": 1705960880, "object": "item_family", "custom_fields": []}, "emitted_at": 1705960929497} {"stream": "item_family", "data": {"id": "test-3", "name": "test item family 3", "status": "active", "resource_version": 1705956309899, "updated_at": 1705956309, "object": "item_family", "custom_fields": []}, "emitted_at": 1705960929501} {"stream": "item_family", "data": {"id": "test-2", "name": "test item family 2", "status": "active", "resource_version": 1705956286577, "updated_at": 1705956286, "object": "item_family", "custom_fields": []}, "emitted_at": 1705960929506} diff --git a/airbyte-integrations/connectors/source-chargebee/pyproject.toml b/airbyte-integrations/connectors/source-chargebee/pyproject.toml index 62f247cc3c9b..a3926727b9ff 100644 --- a/airbyte-integrations/connectors/source-chargebee/pyproject.toml +++ b/airbyte-integrations/connectors/source-chargebee/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "0.3.1" +version = "0.4.0" name = "source-chargebee" description = "Source implementation for Chargebee." authors = [ "Airbyte ",] From 540e0aab4933791cb92b3a93aa5a8b72440d5c26 Mon Sep 17 00:00:00 2001 From: Christo Grabowski <108154848+ChristoGrab@users.noreply.github.com> Date: Fri, 1 Mar 2024 10:33:48 -0500 Subject: [PATCH 047/172] Omit run.py from test coverage for source Sendgrid, Sentry, Zendesk Chat (#35728) --- airbyte-integrations/connectors/source-sendgrid/.coveragerc | 3 +++ airbyte-integrations/connectors/source-sentry/.coveragerc | 3 +++ .../connectors/source-zendesk-chat/.coveragerc | 3 +++ 3 files changed, 9 insertions(+) create mode 100644 airbyte-integrations/connectors/source-sendgrid/.coveragerc create mode 100644 airbyte-integrations/connectors/source-sentry/.coveragerc create mode 100644 airbyte-integrations/connectors/source-zendesk-chat/.coveragerc diff --git a/airbyte-integrations/connectors/source-sendgrid/.coveragerc b/airbyte-integrations/connectors/source-sendgrid/.coveragerc new file mode 100644 index 000000000000..5703402afaeb --- /dev/null +++ b/airbyte-integrations/connectors/source-sendgrid/.coveragerc @@ -0,0 +1,3 @@ +[run] +omit = + source_sendgrid/run.py diff --git a/airbyte-integrations/connectors/source-sentry/.coveragerc b/airbyte-integrations/connectors/source-sentry/.coveragerc new file mode 100644 index 000000000000..748997278499 --- /dev/null +++ b/airbyte-integrations/connectors/source-sentry/.coveragerc @@ -0,0 +1,3 @@ +[run] +omit = + source_sentry/run.py diff --git a/airbyte-integrations/connectors/source-zendesk-chat/.coveragerc b/airbyte-integrations/connectors/source-zendesk-chat/.coveragerc new file mode 100644 index 000000000000..9c659194e647 --- /dev/null +++ b/airbyte-integrations/connectors/source-zendesk-chat/.coveragerc @@ -0,0 +1,3 @@ +[run] +omit = + source_zendesk_chat/run.py From 49f5dc1fbd136b0d71bfb4bc4cd321fc1f692865 Mon Sep 17 00:00:00 2001 From: Evan Tahler Date: Fri, 1 Mar 2024 08:18:27 -0800 Subject: [PATCH 048/172] remove archived connectors code (#35689) --- .../destination-amazon-sqs/.dockerignore | 5 - .../destination-amazon-sqs/Dockerfile | 38 - .../destination-amazon-sqs/README.md | 99 -- .../destination-amazon-sqs/bootstrap.md | 59 - .../destination_amazon_sqs/__init__.py | 8 - .../destination_amazon_sqs/destination.py | 176 --- 
.../destination_amazon_sqs/spec.json | 101 -- .../integration_tests/input_records_json | 1 - .../integration_tests/integration_test.py | 46 - .../connectors/destination-amazon-sqs/main.py | 11 - .../destination-amazon-sqs/requirements.txt | 1 - .../sample_files/configured_catalog.json | 27 - .../destination-amazon-sqs/setup.py | 23 - .../unit_tests/unit_test.py | 226 ---- .../.dockerignore | 3 - .../destination-cassandra/README.md | 72 -- .../destination-cassandra/bootstrap.md | 30 - .../destination-cassandra/build.gradle | 39 - .../destination-cassandra/docker-compose.yml | 23 - .../sample_secrets/config.json | 4 - .../cassandra/CassandraConfig.java | 114 -- .../cassandra/CassandraCqlProvider.java | 180 --- .../cassandra/CassandraDestination.java | 66 - .../cassandra/CassandraMessageConsumer.java | 107 -- .../cassandra/CassandraNameTransformer.java | 42 - .../cassandra/CassandraRecord.java | 49 - .../cassandra/CassandraStreamConfig.java | 58 - .../destination/cassandra/SessionManager.java | 66 - .../destination/cassandra/Tuple.java | 38 - .../src/main/resources/spec.json | 65 - .../CassandraContainerInitializr.java | 34 - .../cassandra/CassandraCqlProviderIT.java | 135 -- .../CassandraDestinationAcceptanceTest.java | 90 -- .../cassandra/CassandraDestinationIT.java | 57 - .../cassandra/CassandraMessageConsumerIT.java | 134 -- .../cassandra/TestDataFactory.java | 80 -- .../cassandra/CassandraConfigTest.java | 40 - .../CassandraNameTransformerTest.java | 64 - .../CassandraRecordConsumerTest.java | 48 - .../cassandra/TestDataFactory.java | 80 -- .../connectors/destination-cumulio/Dockerfile | 42 - .../connectors/destination-cumulio/README.md | 98 -- .../destination_cumulio/__init__.py | 8 - .../destination_cumulio/client.py | 367 ------ .../destination_cumulio/destination.py | 101 -- .../destination_cumulio/spec.json | 37 - .../destination_cumulio/writer.py | 205 --- .../integration_tests/configured_catalog.json | 29 - .../integration_tests/integration_test.py | 276 ---- .../integration_tests/sample_config.json | 5 - .../connectors/destination-cumulio/main.py | 11 - .../destination-cumulio/requirements.txt | 1 - .../connectors/destination-cumulio/setup.py | 23 - .../unit_tests/test_client.py | 629 ---------- .../unit_tests/test_destination.py | 155 --- .../unit_tests/test_writer.py | 512 -------- .../destination-databend/.dockerignore | 5 - .../destination-databend/Dockerfile | 38 - .../connectors/destination-databend/README.md | 99 -- .../destination_databend/__init__.py | 8 - .../destination_databend/__init__.pyc | Bin 307 -> 0 bytes .../destination_databend/client.py | 20 - .../destination_databend/destination.py | 89 -- .../destination_databend/spec.json | 57 - .../destination_databend/writer.py | 134 -- .../integration_tests/integration_test.py | 159 --- .../integration_tests/sample_config.json | 9 - .../connectors/destination-databend/main.py | 11 - .../destination-databend/requirements.txt | 1 - .../connectors/destination-databend/setup.py | 22 - .../unit_tests/test_databend_destination.py | 161 --- .../unit_tests/test_writer.py | 46 - .../connectors/destination-doris/README.md | 72 -- .../connectors/destination-doris/bootstrap.md | 29 - .../connectors/destination-doris/build.gradle | 28 - .../doris/DorisConnectionOptions.java | 102 -- .../destination/doris/DorisConsumer.java | 124 -- .../destination/doris/DorisDestination.java | 164 --- .../destination/doris/DorisLabelInfo.java | 31 - .../destination/doris/DorisStreamLoad.java | 235 ---- .../destination/doris/DorisWriteConfig.java | 
34 - .../destination/doris/HttpUtil.java | 30 - .../doris/StreamLoadHttpPutBuilder.java | 104 -- .../doris/StreamLoadRespContent.java | 103 -- .../doris/exception/DorisException.java | 32 - .../exception/DorisRuntimeException.java | 35 - .../exception/IllegalArgumentException.java | 17 - .../doris/exception/StreamLoadException.java | 32 - .../src/main/resources/spec.json | 60 - .../doris/DorisDestinationAcceptanceTest.java | 128 -- .../doris/DorisDestinationTest.java | 172 --- .../connectors/destination-exasol/README.md | 71 -- .../destination-exasol/bootstrap.md | 19 - .../destination-exasol/build.gradle | 37 - .../destination/exasol/ExasolDestination.java | 61 - .../exasol/ExasolSQLNameTransformer.java | 47 - .../exasol/ExasolSqlOperations.java | 73 -- .../src/main/resources/spec.json | 64 - .../ExasolDestinationAcceptanceTest.java | 150 --- .../ExasolSqlOperationsAcceptanceTest.java | 93 -- .../exasol/ExasolDestinationTest.java | 85 -- .../exasol/ExasolSQLNameTransformerTest.java | 69 - .../destination-firebolt/Dockerfile | 29 - .../connectors/destination-firebolt/README.md | 99 -- .../destination-firebolt/bootstrap.md | 22 - .../destination_firebolt/__init__.py | 8 - .../destination_firebolt/destination.py | 128 -- .../destination_firebolt/spec.json | 109 -- .../destination_firebolt/writer.py | 235 ---- .../integration_tests/configured_catalog.json | 38 - .../integration_tests/integration_test.py | 147 --- .../integration_tests/invalid_config.json | 9 - .../integration_tests/invalid_config_s3.json | 13 - .../integration_tests/messages.jsonl | 2 - .../connectors/destination-firebolt/main.py | 11 - .../destination-firebolt/requirements.txt | 1 - .../connectors/destination-firebolt/setup.py | 23 - .../unit_tests/test_firebolt_destination.py | 241 ---- .../unit_tests/test_writer.py | 156 --- .../connectors/destination-keen/build.gradle | 33 - .../keen/KeenCharactersStripper.java | 18 - .../destination/keen/KeenDestination.java | 93 -- .../destination/keen/KeenHttpClient.java | 85 -- .../destination/keen/KeenRecordsConsumer.java | 130 -- .../keen/KeenTimestampService.java | 126 -- .../src/main/resources/spec.json | 35 - .../destination/keen/KeenDestinationTest.java | 129 -- .../keen/KeenRecordConsumerTest.java | 65 - .../keen/KeenTimestampServiceTest.java | 200 --- .../src/test/resources/cursors_catalog.json | 124 -- .../test/resources/nested_cursor_catalog.json | 21 - .../test/resources/number_cursor_catalog.json | 19 - .../test/resources/string_cursor_catalog.json | 19 - .../connectors/destination-kinesis/README.md | 72 -- .../destination-kinesis/bootstrap.md | 22 - .../destination-kinesis/build.gradle | 37 - .../destination-kinesis/docker-compose.yml | 14 - .../kinesis/KinesisClientPool.java | 65 - .../destination/kinesis/KinesisConfig.java | 107 -- .../kinesis/KinesisDestination.java | 79 -- .../kinesis/KinesisMessageConsumer.java | 110 -- .../kinesis/KinesisNameTransformer.java | 28 - .../destination/kinesis/KinesisRecord.java | 64 - .../destination/kinesis/KinesisStream.java | 193 --- .../kinesis/KinesisStreamConfig.java | 39 - .../destination/kinesis/KinesisUtils.java | 72 -- .../destination/kinesis/Tuple.java | 41 - .../src/main/resources/spec.json | 67 - .../kinesis/KinesisContainerInitializr.java | 48 - .../kinesis/KinesisDataFactory.java | 28 - .../KinesisDestinationAcceptanceTest.java | 110 -- .../kinesis/KinesisDestinationTest.java | 59 - .../kinesis/KinesisStreamTest.java | 126 -- .../kinesis/KinesisConfigTest.java | 41 - .../kinesis/KinesisDataFactory.java | 28 - 
.../kinesis/KinesisNameTransformerTest.java | 39 - .../kinesis/KinesisRecordConsumerTest.java | 47 - .../connectors/destination-kvdb/README.md | 118 -- .../destination_kvdb/__init__.py | 26 - .../destination_kvdb/client.py | 78 -- .../destination_kvdb/destination.py | 72 -- .../destination_kvdb/spec.json | 26 - .../destination_kvdb/writer.py | 46 - .../connectors/destination-kvdb/main.py | 11 - .../connectors/destination-kvdb/poetry.lock | 1108 ----------------- .../destination-kvdb/pyproject.toml | 31 - .../destination-kvdb/requirements.txt | 1 - .../destination-kvdb/unit_tests/unit_test.py | 7 - .../destination-mariadb-columnstore/README.md | 72 -- .../bootstrap.md | 24 - .../build.gradle | 32 - .../MariadbColumnstoreDestination.java | 109 -- .../MariadbColumnstoreNameTransformer.java | 21 - .../MariadbColumnstoreSqlOperations.java | 161 --- .../src/main/resources/spec.json | 57 - .../MariaDbTestDataComparator.java | 26 - ...bColumnstoreDestinationAcceptanceTest.java | 137 -- ...bColumnstoreDestinationAcceptanceTest.java | 16 - ...bColumnstoreDestinationAcceptanceTest.java | 155 --- ...bColumnstoreDestinationAcceptanceTest.java | 16 - .../MariadbColumnstoreDestinationTest.java | 29 - .../mariadb_columnstore/MariadbSpecTest.java | 97 -- .../destination-meilisearch/.dockerignore | 5 - .../destination-meilisearch/Dockerfile | 38 - .../destination-meilisearch/README.md | 99 -- .../destination_meilisearch/__init__.py | 8 - .../destination_meilisearch/destination.py | 84 -- .../destination_meilisearch/spec.json | 27 - .../destination_meilisearch/writer.py | 39 - .../integration_tests/integration_test.py | 103 -- .../integration_tests/messages.jsonl | 2 - .../destination-meilisearch/main.py | 11 - .../destination-meilisearch/requirements.txt | 1 - .../sample_files/configured_catalog.json | 27 - .../destination-meilisearch/setup.py | 23 - .../unit_tests/unit_test.py | 29 - .../connectors/destination-mqtt/README.md | 72 -- .../connectors/destination-mqtt/build.gradle | 31 - .../destination/mqtt/MqttDestination.java | 88 -- .../mqtt/MqttDestinationConfig.java | 140 --- .../destination/mqtt/MqttRecordConsumer.java | 155 --- .../src/main/resources/spec.json | 108 -- .../mqtt/MqttDestinationAcceptanceTest.java | 165 --- .../mqtt/MqttRecordConsumerTest.java | 175 --- .../connectors/destination-pulsar/README.md | 72 -- .../destination-pulsar/build.gradle | 31 - .../destination/pulsar/PulsarDestination.java | 98 -- .../pulsar/PulsarDestinationConfig.java | 114 -- .../pulsar/PulsarRecordConsumer.java | 114 -- .../destination/pulsar/PulsarUtils.java | 40 - .../src/main/resources/spec.json | 137 -- .../PulsarDestinationAcceptanceTest.java | 193 --- .../pulsar/PulsarRecordConsumerTest.java | 258 ---- .../connectors/destination-r2/README.md | 74 -- .../connectors/destination-r2/build.gradle | 49 - .../destination-r2/sample_secrets/config.json | 7 - .../destination/r2/R2Destination.java | 24 - .../src/main/resources/spec.json | 296 ----- .../r2/R2AvroDestinationAcceptanceTest.java | 22 - .../r2/R2CsvDestinationAcceptanceTest.java | 22 - .../R2CsvGzipDestinationAcceptanceTest.java | 22 - .../r2/R2JsonlDestinationAcceptanceTest.java | 22 - .../R2JsonlGzipDestinationAcceptanceTest.java | 22 - .../R2ParquetDestinationAcceptanceTest.java | 27 - .../destination-rabbitmq/.dockerignore | 5 - .../destination-rabbitmq/Dockerfile | 38 - .../connectors/destination-rabbitmq/README.md | 99 -- .../destination_rabbitmq/__init__.py | 8 - .../destination_rabbitmq/__init__.pyc | Bin 300 -> 0 bytes 
.../destination_rabbitmq/destination.py | 84 -- .../destination_rabbitmq/spec.json | 49 - .../integration_tests/integration_test.py | 90 -- .../integration_tests/invalid_config.json | 9 - .../connectors/destination-rabbitmq/main.py | 11 - .../destination-rabbitmq/requirements.txt | 1 - .../connectors/destination-rabbitmq/setup.py | 23 - .../unit_tests/unit_test.py | 130 -- .../connectors/destination-redpanda/README.md | 72 -- .../destination-redpanda/build.gradle | 32 - .../destination/redpanda/RedpandaConfig.java | 100 -- .../redpanda/RedpandaDestination.java | 67 - .../redpanda/RedpandaMessageConsumer.java | 101 -- .../redpanda/RedpandaNameTransformer.java | 19 - .../redpanda/RedpandaOperations.java | 144 --- .../redpanda/RedpandaWriteConfig.java | 15 - .../src/main/resources/spec.json | 76 -- .../redpanda/RedpandaConsumer.java | 16 - .../redpanda/RedpandaConsumerFactory.java | 30 - .../redpanda/RedpandaContainerFactory.java | 19 - .../RedpandaDestinationAcceptanceTest.java | 151 --- .../redpanda/RedpandaDestinationTest.java | 78 -- .../redpanda/RedpandaOperationsTest.java | 122 -- .../redpanda/RedpandaConfigTest.java | 56 - .../redpanda/RedpandaNameTransformerTest.java | 24 - .../redpanda/RedpandaWriteConfigTest.java | 25 - .../destination-rockset/BOOTSTRAP.md | 6 - .../connectors/destination-rockset/README.md | 72 -- .../destination-rockset/build.gradle | 30 - .../rockset/RocksetDestination.java | 90 -- .../rockset/RocksetSQLNameTransformer.java | 21 - .../destination/rockset/RocksetUtils.java | 264 ---- .../rockset/RocksetWriteApiConsumer.java | 194 --- .../src/main/resources/spec.json | 39 - .../RocksetDestinationAcceptanceTest.java | 196 --- .../rockset/RocksetWriteApiConsumerTest.java | 59 - .../README.md | 159 --- .../__init__.py | 8 - .../destination.py | 53 - .../spec.json | 20 - .../integration_tests/integration_test.py | 8 - .../main.py | 11 - .../requirements.txt | 1 - .../setup.py | 25 - .../unit_tests/unit_test.py | 7 - .../connectors/destination-scylla/README.md | 72 -- .../destination-scylla/bootstrap.md | 32 - .../destination-scylla/build.gradle | 37 - .../destination-scylla/docker-compose.yml | 19 - .../destination/scylla/ScyllaConfig.java | 96 -- .../destination/scylla/ScyllaCqlProvider.java | 177 --- .../destination/scylla/ScyllaDestination.java | 63 - .../scylla/ScyllaMessageConsumer.java | 105 -- .../scylla/ScyllaNameTransformer.java | 42 - .../destination/scylla/ScyllaSessionPool.java | 54 - .../scylla/ScyllaStreamConfig.java | 58 - .../destination/scylla/Triplet.java | 46 - .../destination/scylla/Tuple.java | 38 - .../src/main/resources/spec.json | 57 - .../scylla/ScyllaContainerInitializr.java | 36 - .../scylla/ScyllaCqlProviderTest.java | 134 -- .../ScyllaDestinationAcceptanceTest.java | 109 -- .../scylla/ScyllaDestinationTest.java | 52 - .../destination/scylla/TestDataFactory.java | 38 - .../destination/scylla/ScyllaConfigTest.java | 35 - .../scylla/ScyllaNameTransformerTest.java | 60 - .../scylla/ScyllaRecordConsumerTest.java | 77 -- .../destination/scylla/TestDataFactory.java | 34 - .../connectors/destination-selectdb/README.md | 72 -- .../destination-selectdb/bootstrap.md | 41 - .../destination-selectdb/build.gradle | 28 - .../destination/selectdb/BaseResponse.java | 29 - .../destination/selectdb/CopyIntoResp.java | 33 - .../destination/selectdb/LabelInfo.java | 24 - .../selectdb/SelectdbConnectionOptions.java | 115 -- .../selectdb/SelectdbConsumer.java | 116 -- .../selectdb/SelectdbCopyInto.java | 261 ---- .../selectdb/SelectdbDestination.java | 138 -- 
.../selectdb/SelectdbOperations.java | 67 - .../selectdb/SelectdbWriteConfig.java | 34 - .../selectdb/exception/CopyIntoException.java | 13 - .../exception/SelectdbRuntimeException.java | 20 - .../selectdb/exception/UploadException.java | 13 - .../selectdb/http/HttpPostBuilder.java | 55 - .../selectdb/http/HttpPutBuilder.java | 72 -- .../destination/selectdb/http/HttpUtil.java | 22 - .../selectdb/utils/ResponseUtils.java | 27 - .../src/main/resources/spec.json | 59 - .../SelectdbDestinationAcceptanceTest.java | 127 -- .../selectdb/SelectdbDestinationTest.java | 175 --- .../connectors/destination-tidb/README.md | 72 -- .../connectors/destination-tidb/build.gradle | 31 - .../destination/tidb/TiDBDestination.java | 106 -- .../tidb/TiDBSQLNameTransformer.java | 64 - .../destination/tidb/TiDBSqlOperations.java | 97 -- .../src/main/resources/spec.json | 65 - .../tidb/TiDBDestinationAcceptanceTest.java | 151 --- .../tidb/TiDBTestDataComparator.java | 48 - .../destination/tidb/TiDBDestinationTest.java | 131 -- .../destination-timeplus/.dockerignore | 5 - .../destination-timeplus/Dockerfile | 38 - .../connectors/destination-timeplus/README.md | 108 -- .../destination_timeplus/__init__.py | 8 - .../destination_timeplus/destination.py | 160 --- .../destination_timeplus/spec.json | 31 - .../integration_tests/configured_catalog.json | 263 ---- .../integration_tests/integration_test.py | 74 -- .../integration_tests/messages.jsonl | 5 - .../connectors/destination-timeplus/main.py | 11 - .../destination-timeplus/requirements.txt | 1 - .../connectors/destination-timeplus/setup.py | 26 - .../unit_tests/unit_test.py | 17 - .../destination-vectara/.dockerignore | 5 - .../connectors/destination-vectara/Dockerfile | 38 - .../connectors/destination-vectara/README.md | 123 -- .../destination_vectara/__init__.py | 8 - .../destination_vectara/client.py | 199 --- .../destination_vectara/config.py | 75 -- .../destination_vectara/destination.py | 95 -- .../destination_vectara/writer.py | 128 -- .../integration_tests/integration_test.py | 127 -- .../connectors/destination-vectara/main.py | 11 - .../destination-vectara/requirements.txt | 1 - .../connectors/destination-vectara/setup.py | 25 - .../unit_tests/__init__.py | 0 .../connectors/destination-xata/.dockerignore | 5 - .../connectors/destination-xata/Dockerfile | 38 - .../connectors/destination-xata/README.md | 99 -- .../connectors/destination-xata/bootstrap.md | 1 - .../destination_xata/__init__.py | 8 - .../destination_xata/destination.py | 79 -- .../destination_xata/spec.json | 28 - .../integration_tests/integration_test.py | 120 -- .../integration_tests/invalid_config.json | 4 - .../connectors/destination-xata/main.py | 11 - .../destination-xata/requirements.txt | 1 - .../sample_files/configured_catalog.json | 13 - .../connectors/destination-xata/setup.py | 23 - .../destination-xata/unit_tests/unit_test.py | 28 - .../destination-yugabytedb/README.md | 72 -- .../destination-yugabytedb/bootstrap.md | 0 .../destination-yugabytedb/build.gradle | 33 - .../destination-yugabytedb/docker-compose.yml | 36 - .../yugabytedb/YugabytedbDestination.java | 66 - .../YugabytedbNamingTransformer.java | 16 - .../yugabytedb/YugabytedbSqlOperations.java | 58 - .../src/main/resources/spec.json | 65 - .../yugabytedb/YugabyteDataSource.java | 28 - .../YugabytedbContainerInitializr.java | 75 -- .../YugabytedbDestinationAcceptanceTest.java | 154 --- .../yugabytedb/YugabytedbDestinationTest.java | 54 - .../YugabytedbNamingTransformerTest.java | 30 - 381 files changed, 27113 deletions(-) 
delete mode 100644 airbyte-integrations/connectors/destination-amazon-sqs/.dockerignore delete mode 100644 airbyte-integrations/connectors/destination-amazon-sqs/Dockerfile delete mode 100644 airbyte-integrations/connectors/destination-amazon-sqs/README.md delete mode 100644 airbyte-integrations/connectors/destination-amazon-sqs/bootstrap.md delete mode 100644 airbyte-integrations/connectors/destination-amazon-sqs/destination_amazon_sqs/__init__.py delete mode 100644 airbyte-integrations/connectors/destination-amazon-sqs/destination_amazon_sqs/destination.py delete mode 100644 airbyte-integrations/connectors/destination-amazon-sqs/destination_amazon_sqs/spec.json delete mode 100644 airbyte-integrations/connectors/destination-amazon-sqs/integration_tests/input_records_json delete mode 100644 airbyte-integrations/connectors/destination-amazon-sqs/integration_tests/integration_test.py delete mode 100644 airbyte-integrations/connectors/destination-amazon-sqs/main.py delete mode 100644 airbyte-integrations/connectors/destination-amazon-sqs/requirements.txt delete mode 100644 airbyte-integrations/connectors/destination-amazon-sqs/sample_files/configured_catalog.json delete mode 100644 airbyte-integrations/connectors/destination-amazon-sqs/setup.py delete mode 100644 airbyte-integrations/connectors/destination-amazon-sqs/unit_tests/unit_test.py delete mode 100644 airbyte-integrations/connectors/destination-bigquery-denormalized/.dockerignore delete mode 100644 airbyte-integrations/connectors/destination-cassandra/README.md delete mode 100644 airbyte-integrations/connectors/destination-cassandra/bootstrap.md delete mode 100644 airbyte-integrations/connectors/destination-cassandra/build.gradle delete mode 100644 airbyte-integrations/connectors/destination-cassandra/docker-compose.yml delete mode 100644 airbyte-integrations/connectors/destination-cassandra/sample_secrets/config.json delete mode 100644 airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraConfig.java delete mode 100644 airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraCqlProvider.java delete mode 100644 airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraDestination.java delete mode 100644 airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraMessageConsumer.java delete mode 100644 airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraNameTransformer.java delete mode 100644 airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraRecord.java delete mode 100644 airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraStreamConfig.java delete mode 100644 airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/SessionManager.java delete mode 100644 airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/Tuple.java delete mode 100644 airbyte-integrations/connectors/destination-cassandra/src/main/resources/spec.json delete mode 100644 
airbyte-integrations/connectors/destination-cassandra/src/test-integration/java/io/airbyte/integrations/destination/cassandra/CassandraContainerInitializr.java delete mode 100644 airbyte-integrations/connectors/destination-cassandra/src/test-integration/java/io/airbyte/integrations/destination/cassandra/CassandraCqlProviderIT.java delete mode 100644 airbyte-integrations/connectors/destination-cassandra/src/test-integration/java/io/airbyte/integrations/destination/cassandra/CassandraDestinationAcceptanceTest.java delete mode 100644 airbyte-integrations/connectors/destination-cassandra/src/test-integration/java/io/airbyte/integrations/destination/cassandra/CassandraDestinationIT.java delete mode 100644 airbyte-integrations/connectors/destination-cassandra/src/test-integration/java/io/airbyte/integrations/destination/cassandra/CassandraMessageConsumerIT.java delete mode 100644 airbyte-integrations/connectors/destination-cassandra/src/test-integration/java/io/airbyte/integrations/destination/cassandra/TestDataFactory.java delete mode 100644 airbyte-integrations/connectors/destination-cassandra/src/test/java/io/airbyte/integrations/destination/cassandra/CassandraConfigTest.java delete mode 100644 airbyte-integrations/connectors/destination-cassandra/src/test/java/io/airbyte/integrations/destination/cassandra/CassandraNameTransformerTest.java delete mode 100644 airbyte-integrations/connectors/destination-cassandra/src/test/java/io/airbyte/integrations/destination/cassandra/CassandraRecordConsumerTest.java delete mode 100644 airbyte-integrations/connectors/destination-cassandra/src/test/java/io/airbyte/integrations/destination/cassandra/TestDataFactory.java delete mode 100644 airbyte-integrations/connectors/destination-cumulio/Dockerfile delete mode 100644 airbyte-integrations/connectors/destination-cumulio/README.md delete mode 100644 airbyte-integrations/connectors/destination-cumulio/destination_cumulio/__init__.py delete mode 100644 airbyte-integrations/connectors/destination-cumulio/destination_cumulio/client.py delete mode 100644 airbyte-integrations/connectors/destination-cumulio/destination_cumulio/destination.py delete mode 100644 airbyte-integrations/connectors/destination-cumulio/destination_cumulio/spec.json delete mode 100644 airbyte-integrations/connectors/destination-cumulio/destination_cumulio/writer.py delete mode 100644 airbyte-integrations/connectors/destination-cumulio/integration_tests/configured_catalog.json delete mode 100644 airbyte-integrations/connectors/destination-cumulio/integration_tests/integration_test.py delete mode 100644 airbyte-integrations/connectors/destination-cumulio/integration_tests/sample_config.json delete mode 100644 airbyte-integrations/connectors/destination-cumulio/main.py delete mode 100644 airbyte-integrations/connectors/destination-cumulio/requirements.txt delete mode 100644 airbyte-integrations/connectors/destination-cumulio/setup.py delete mode 100644 airbyte-integrations/connectors/destination-cumulio/unit_tests/test_client.py delete mode 100644 airbyte-integrations/connectors/destination-cumulio/unit_tests/test_destination.py delete mode 100644 airbyte-integrations/connectors/destination-cumulio/unit_tests/test_writer.py delete mode 100644 airbyte-integrations/connectors/destination-databend/.dockerignore delete mode 100644 airbyte-integrations/connectors/destination-databend/Dockerfile delete mode 100644 airbyte-integrations/connectors/destination-databend/README.md delete mode 100644 
airbyte-integrations/connectors/destination-databend/destination_databend/__init__.py delete mode 100644 airbyte-integrations/connectors/destination-databend/destination_databend/__init__.pyc delete mode 100644 airbyte-integrations/connectors/destination-databend/destination_databend/client.py delete mode 100644 airbyte-integrations/connectors/destination-databend/destination_databend/destination.py delete mode 100644 airbyte-integrations/connectors/destination-databend/destination_databend/spec.json delete mode 100644 airbyte-integrations/connectors/destination-databend/destination_databend/writer.py delete mode 100644 airbyte-integrations/connectors/destination-databend/integration_tests/integration_test.py delete mode 100644 airbyte-integrations/connectors/destination-databend/integration_tests/sample_config.json delete mode 100644 airbyte-integrations/connectors/destination-databend/main.py delete mode 100644 airbyte-integrations/connectors/destination-databend/requirements.txt delete mode 100644 airbyte-integrations/connectors/destination-databend/setup.py delete mode 100644 airbyte-integrations/connectors/destination-databend/unit_tests/test_databend_destination.py delete mode 100644 airbyte-integrations/connectors/destination-databend/unit_tests/test_writer.py delete mode 100644 airbyte-integrations/connectors/destination-doris/README.md delete mode 100644 airbyte-integrations/connectors/destination-doris/bootstrap.md delete mode 100644 airbyte-integrations/connectors/destination-doris/build.gradle delete mode 100644 airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisConnectionOptions.java delete mode 100644 airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisConsumer.java delete mode 100644 airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisDestination.java delete mode 100644 airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisLabelInfo.java delete mode 100644 airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisStreamLoad.java delete mode 100644 airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisWriteConfig.java delete mode 100644 airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/HttpUtil.java delete mode 100644 airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/StreamLoadHttpPutBuilder.java delete mode 100644 airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/StreamLoadRespContent.java delete mode 100644 airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/exception/DorisException.java delete mode 100644 airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/exception/DorisRuntimeException.java delete mode 100644 airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/exception/IllegalArgumentException.java delete mode 100644 airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/exception/StreamLoadException.java delete mode 100644 
airbyte-integrations/connectors/destination-doris/src/main/resources/spec.json delete mode 100644 airbyte-integrations/connectors/destination-doris/src/test-integration/java/io/airbyte/integrations/destination/doris/DorisDestinationAcceptanceTest.java delete mode 100644 airbyte-integrations/connectors/destination-doris/src/test/java/io/airbyte/integrations/destination/doris/DorisDestinationTest.java delete mode 100644 airbyte-integrations/connectors/destination-exasol/README.md delete mode 100644 airbyte-integrations/connectors/destination-exasol/bootstrap.md delete mode 100644 airbyte-integrations/connectors/destination-exasol/build.gradle delete mode 100644 airbyte-integrations/connectors/destination-exasol/src/main/java/io/airbyte/integrations/destination/exasol/ExasolDestination.java delete mode 100644 airbyte-integrations/connectors/destination-exasol/src/main/java/io/airbyte/integrations/destination/exasol/ExasolSQLNameTransformer.java delete mode 100644 airbyte-integrations/connectors/destination-exasol/src/main/java/io/airbyte/integrations/destination/exasol/ExasolSqlOperations.java delete mode 100644 airbyte-integrations/connectors/destination-exasol/src/main/resources/spec.json delete mode 100644 airbyte-integrations/connectors/destination-exasol/src/test-integration/java/io/airbyte/integrations/destination/exasol/ExasolDestinationAcceptanceTest.java delete mode 100644 airbyte-integrations/connectors/destination-exasol/src/test-integration/java/io/airbyte/integrations/destination/exasol/ExasolSqlOperationsAcceptanceTest.java delete mode 100644 airbyte-integrations/connectors/destination-exasol/src/test/java/io/airbyte/integrations/destination/exasol/ExasolDestinationTest.java delete mode 100644 airbyte-integrations/connectors/destination-exasol/src/test/java/io/airbyte/integrations/destination/exasol/ExasolSQLNameTransformerTest.java delete mode 100644 airbyte-integrations/connectors/destination-firebolt/Dockerfile delete mode 100644 airbyte-integrations/connectors/destination-firebolt/README.md delete mode 100644 airbyte-integrations/connectors/destination-firebolt/bootstrap.md delete mode 100644 airbyte-integrations/connectors/destination-firebolt/destination_firebolt/__init__.py delete mode 100644 airbyte-integrations/connectors/destination-firebolt/destination_firebolt/destination.py delete mode 100644 airbyte-integrations/connectors/destination-firebolt/destination_firebolt/spec.json delete mode 100644 airbyte-integrations/connectors/destination-firebolt/destination_firebolt/writer.py delete mode 100644 airbyte-integrations/connectors/destination-firebolt/integration_tests/configured_catalog.json delete mode 100644 airbyte-integrations/connectors/destination-firebolt/integration_tests/integration_test.py delete mode 100644 airbyte-integrations/connectors/destination-firebolt/integration_tests/invalid_config.json delete mode 100644 airbyte-integrations/connectors/destination-firebolt/integration_tests/invalid_config_s3.json delete mode 100644 airbyte-integrations/connectors/destination-firebolt/integration_tests/messages.jsonl delete mode 100644 airbyte-integrations/connectors/destination-firebolt/main.py delete mode 100644 airbyte-integrations/connectors/destination-firebolt/requirements.txt delete mode 100644 airbyte-integrations/connectors/destination-firebolt/setup.py delete mode 100644 airbyte-integrations/connectors/destination-firebolt/unit_tests/test_firebolt_destination.py delete mode 100644 
airbyte-integrations/connectors/destination-firebolt/unit_tests/test_writer.py delete mode 100644 airbyte-integrations/connectors/destination-keen/build.gradle delete mode 100644 airbyte-integrations/connectors/destination-keen/src/main/java/io/airbyte/integrations/destination/keen/KeenCharactersStripper.java delete mode 100644 airbyte-integrations/connectors/destination-keen/src/main/java/io/airbyte/integrations/destination/keen/KeenDestination.java delete mode 100644 airbyte-integrations/connectors/destination-keen/src/main/java/io/airbyte/integrations/destination/keen/KeenHttpClient.java delete mode 100644 airbyte-integrations/connectors/destination-keen/src/main/java/io/airbyte/integrations/destination/keen/KeenRecordsConsumer.java delete mode 100644 airbyte-integrations/connectors/destination-keen/src/main/java/io/airbyte/integrations/destination/keen/KeenTimestampService.java delete mode 100644 airbyte-integrations/connectors/destination-keen/src/main/resources/spec.json delete mode 100644 airbyte-integrations/connectors/destination-keen/src/test-integration/java/io/airbyte/integrations/destination/keen/KeenDestinationTest.java delete mode 100644 airbyte-integrations/connectors/destination-keen/src/test/java/io/airbyte/integrations/destination/keen/KeenRecordConsumerTest.java delete mode 100644 airbyte-integrations/connectors/destination-keen/src/test/java/io/airbyte/integrations/destination/keen/KeenTimestampServiceTest.java delete mode 100644 airbyte-integrations/connectors/destination-keen/src/test/resources/cursors_catalog.json delete mode 100644 airbyte-integrations/connectors/destination-keen/src/test/resources/nested_cursor_catalog.json delete mode 100644 airbyte-integrations/connectors/destination-keen/src/test/resources/number_cursor_catalog.json delete mode 100644 airbyte-integrations/connectors/destination-keen/src/test/resources/string_cursor_catalog.json delete mode 100644 airbyte-integrations/connectors/destination-kinesis/README.md delete mode 100644 airbyte-integrations/connectors/destination-kinesis/bootstrap.md delete mode 100644 airbyte-integrations/connectors/destination-kinesis/build.gradle delete mode 100644 airbyte-integrations/connectors/destination-kinesis/docker-compose.yml delete mode 100644 airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisClientPool.java delete mode 100644 airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisConfig.java delete mode 100644 airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisDestination.java delete mode 100644 airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisMessageConsumer.java delete mode 100644 airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisNameTransformer.java delete mode 100644 airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisRecord.java delete mode 100644 airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisStream.java delete mode 100644 airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisStreamConfig.java delete mode 100644 
airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisUtils.java delete mode 100644 airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/Tuple.java delete mode 100644 airbyte-integrations/connectors/destination-kinesis/src/main/resources/spec.json delete mode 100644 airbyte-integrations/connectors/destination-kinesis/src/test-integration/java/io/airbyte/integrations/destination/kinesis/KinesisContainerInitializr.java delete mode 100644 airbyte-integrations/connectors/destination-kinesis/src/test-integration/java/io/airbyte/integrations/destination/kinesis/KinesisDataFactory.java delete mode 100644 airbyte-integrations/connectors/destination-kinesis/src/test-integration/java/io/airbyte/integrations/destination/kinesis/KinesisDestinationAcceptanceTest.java delete mode 100644 airbyte-integrations/connectors/destination-kinesis/src/test-integration/java/io/airbyte/integrations/destination/kinesis/KinesisDestinationTest.java delete mode 100644 airbyte-integrations/connectors/destination-kinesis/src/test-integration/java/io/airbyte/integrations/destination/kinesis/KinesisStreamTest.java delete mode 100644 airbyte-integrations/connectors/destination-kinesis/src/test/java/io/airbyte/integrations/destination/kinesis/KinesisConfigTest.java delete mode 100644 airbyte-integrations/connectors/destination-kinesis/src/test/java/io/airbyte/integrations/destination/kinesis/KinesisDataFactory.java delete mode 100644 airbyte-integrations/connectors/destination-kinesis/src/test/java/io/airbyte/integrations/destination/kinesis/KinesisNameTransformerTest.java delete mode 100644 airbyte-integrations/connectors/destination-kinesis/src/test/java/io/airbyte/integrations/destination/kinesis/KinesisRecordConsumerTest.java delete mode 100644 airbyte-integrations/connectors/destination-kvdb/README.md delete mode 100644 airbyte-integrations/connectors/destination-kvdb/destination_kvdb/__init__.py delete mode 100644 airbyte-integrations/connectors/destination-kvdb/destination_kvdb/client.py delete mode 100644 airbyte-integrations/connectors/destination-kvdb/destination_kvdb/destination.py delete mode 100644 airbyte-integrations/connectors/destination-kvdb/destination_kvdb/spec.json delete mode 100644 airbyte-integrations/connectors/destination-kvdb/destination_kvdb/writer.py delete mode 100644 airbyte-integrations/connectors/destination-kvdb/main.py delete mode 100644 airbyte-integrations/connectors/destination-kvdb/poetry.lock delete mode 100644 airbyte-integrations/connectors/destination-kvdb/pyproject.toml delete mode 100644 airbyte-integrations/connectors/destination-kvdb/requirements.txt delete mode 100644 airbyte-integrations/connectors/destination-kvdb/unit_tests/unit_test.py delete mode 100644 airbyte-integrations/connectors/destination-mariadb-columnstore/README.md delete mode 100644 airbyte-integrations/connectors/destination-mariadb-columnstore/bootstrap.md delete mode 100644 airbyte-integrations/connectors/destination-mariadb-columnstore/build.gradle delete mode 100644 airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestination.java delete mode 100644 airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreNameTransformer.java delete mode 100644 
airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreSqlOperations.java delete mode 100644 airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/resources/spec.json delete mode 100644 airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariaDbTestDataComparator.java delete mode 100644 airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationAcceptanceTest.java delete mode 100644 airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/SshKeyMariadbColumnstoreDestinationAcceptanceTest.java delete mode 100644 airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/SshMariadbColumnstoreDestinationAcceptanceTest.java delete mode 100644 airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/SshPasswordMariadbColumnstoreDestinationAcceptanceTest.java delete mode 100644 airbyte-integrations/connectors/destination-mariadb-columnstore/src/test/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationTest.java delete mode 100644 airbyte-integrations/connectors/destination-mariadb-columnstore/src/test/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbSpecTest.java delete mode 100644 airbyte-integrations/connectors/destination-meilisearch/.dockerignore delete mode 100644 airbyte-integrations/connectors/destination-meilisearch/Dockerfile delete mode 100644 airbyte-integrations/connectors/destination-meilisearch/README.md delete mode 100644 airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/__init__.py delete mode 100644 airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/destination.py delete mode 100644 airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/spec.json delete mode 100644 airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/writer.py delete mode 100644 airbyte-integrations/connectors/destination-meilisearch/integration_tests/integration_test.py delete mode 100644 airbyte-integrations/connectors/destination-meilisearch/integration_tests/messages.jsonl delete mode 100644 airbyte-integrations/connectors/destination-meilisearch/main.py delete mode 100644 airbyte-integrations/connectors/destination-meilisearch/requirements.txt delete mode 100644 airbyte-integrations/connectors/destination-meilisearch/sample_files/configured_catalog.json delete mode 100644 airbyte-integrations/connectors/destination-meilisearch/setup.py delete mode 100644 airbyte-integrations/connectors/destination-meilisearch/unit_tests/unit_test.py delete mode 100644 airbyte-integrations/connectors/destination-mqtt/README.md delete mode 100644 airbyte-integrations/connectors/destination-mqtt/build.gradle delete mode 100644 airbyte-integrations/connectors/destination-mqtt/src/main/java/io/airbyte/integrations/destination/mqtt/MqttDestination.java delete mode 100644 
airbyte-integrations/connectors/destination-mqtt/src/main/java/io/airbyte/integrations/destination/mqtt/MqttDestinationConfig.java delete mode 100644 airbyte-integrations/connectors/destination-mqtt/src/main/java/io/airbyte/integrations/destination/mqtt/MqttRecordConsumer.java delete mode 100644 airbyte-integrations/connectors/destination-mqtt/src/main/resources/spec.json delete mode 100644 airbyte-integrations/connectors/destination-mqtt/src/test-integration/java/io/airbyte/integrations/destination/mqtt/MqttDestinationAcceptanceTest.java delete mode 100644 airbyte-integrations/connectors/destination-mqtt/src/test/java/io/airbyte/integrations/destination/mqtt/MqttRecordConsumerTest.java delete mode 100644 airbyte-integrations/connectors/destination-pulsar/README.md delete mode 100644 airbyte-integrations/connectors/destination-pulsar/build.gradle delete mode 100644 airbyte-integrations/connectors/destination-pulsar/src/main/java/io/airbyte/integrations/destination/pulsar/PulsarDestination.java delete mode 100644 airbyte-integrations/connectors/destination-pulsar/src/main/java/io/airbyte/integrations/destination/pulsar/PulsarDestinationConfig.java delete mode 100644 airbyte-integrations/connectors/destination-pulsar/src/main/java/io/airbyte/integrations/destination/pulsar/PulsarRecordConsumer.java delete mode 100644 airbyte-integrations/connectors/destination-pulsar/src/main/java/io/airbyte/integrations/destination/pulsar/PulsarUtils.java delete mode 100644 airbyte-integrations/connectors/destination-pulsar/src/main/resources/spec.json delete mode 100644 airbyte-integrations/connectors/destination-pulsar/src/test-integration/java/io/airbyte/integrations/destination/pulsar/PulsarDestinationAcceptanceTest.java delete mode 100644 airbyte-integrations/connectors/destination-pulsar/src/test/java/io/airbyte/integrations/destination/pulsar/PulsarRecordConsumerTest.java delete mode 100644 airbyte-integrations/connectors/destination-r2/README.md delete mode 100644 airbyte-integrations/connectors/destination-r2/build.gradle delete mode 100644 airbyte-integrations/connectors/destination-r2/sample_secrets/config.json delete mode 100644 airbyte-integrations/connectors/destination-r2/src/main/java/io/airbyte/integrations/destination/r2/R2Destination.java delete mode 100644 airbyte-integrations/connectors/destination-r2/src/main/resources/spec.json delete mode 100644 airbyte-integrations/connectors/destination-r2/src/test-integration/java/io/airbyte/integrations/destination/r2/R2AvroDestinationAcceptanceTest.java delete mode 100644 airbyte-integrations/connectors/destination-r2/src/test-integration/java/io/airbyte/integrations/destination/r2/R2CsvDestinationAcceptanceTest.java delete mode 100644 airbyte-integrations/connectors/destination-r2/src/test-integration/java/io/airbyte/integrations/destination/r2/R2CsvGzipDestinationAcceptanceTest.java delete mode 100644 airbyte-integrations/connectors/destination-r2/src/test-integration/java/io/airbyte/integrations/destination/r2/R2JsonlDestinationAcceptanceTest.java delete mode 100644 airbyte-integrations/connectors/destination-r2/src/test-integration/java/io/airbyte/integrations/destination/r2/R2JsonlGzipDestinationAcceptanceTest.java delete mode 100644 airbyte-integrations/connectors/destination-r2/src/test-integration/java/io/airbyte/integrations/destination/r2/R2ParquetDestinationAcceptanceTest.java delete mode 100644 airbyte-integrations/connectors/destination-rabbitmq/.dockerignore delete mode 100644 
airbyte-integrations/connectors/destination-rabbitmq/Dockerfile delete mode 100644 airbyte-integrations/connectors/destination-rabbitmq/README.md delete mode 100644 airbyte-integrations/connectors/destination-rabbitmq/destination_rabbitmq/__init__.py delete mode 100644 airbyte-integrations/connectors/destination-rabbitmq/destination_rabbitmq/__init__.pyc delete mode 100644 airbyte-integrations/connectors/destination-rabbitmq/destination_rabbitmq/destination.py delete mode 100644 airbyte-integrations/connectors/destination-rabbitmq/destination_rabbitmq/spec.json delete mode 100644 airbyte-integrations/connectors/destination-rabbitmq/integration_tests/integration_test.py delete mode 100644 airbyte-integrations/connectors/destination-rabbitmq/integration_tests/invalid_config.json delete mode 100644 airbyte-integrations/connectors/destination-rabbitmq/main.py delete mode 100644 airbyte-integrations/connectors/destination-rabbitmq/requirements.txt delete mode 100644 airbyte-integrations/connectors/destination-rabbitmq/setup.py delete mode 100644 airbyte-integrations/connectors/destination-rabbitmq/unit_tests/unit_test.py delete mode 100644 airbyte-integrations/connectors/destination-redpanda/README.md delete mode 100644 airbyte-integrations/connectors/destination-redpanda/build.gradle delete mode 100644 airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaConfig.java delete mode 100644 airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaDestination.java delete mode 100644 airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaMessageConsumer.java delete mode 100644 airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaNameTransformer.java delete mode 100644 airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaOperations.java delete mode 100644 airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaWriteConfig.java delete mode 100644 airbyte-integrations/connectors/destination-redpanda/src/main/resources/spec.json delete mode 100644 airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaConsumer.java delete mode 100644 airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaConsumerFactory.java delete mode 100644 airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaContainerFactory.java delete mode 100644 airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaDestinationAcceptanceTest.java delete mode 100644 airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaDestinationTest.java delete mode 100644 airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaOperationsTest.java delete mode 100644 airbyte-integrations/connectors/destination-redpanda/src/test/java/io/airbyte/integrations/destination/redpanda/RedpandaConfigTest.java delete mode 100644 
airbyte-integrations/connectors/destination-redpanda/src/test/java/io/airbyte/integrations/destination/redpanda/RedpandaNameTransformerTest.java delete mode 100644 airbyte-integrations/connectors/destination-redpanda/src/test/java/io/airbyte/integrations/destination/redpanda/RedpandaWriteConfigTest.java delete mode 100644 airbyte-integrations/connectors/destination-rockset/BOOTSTRAP.md delete mode 100644 airbyte-integrations/connectors/destination-rockset/README.md delete mode 100644 airbyte-integrations/connectors/destination-rockset/build.gradle delete mode 100644 airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetDestination.java delete mode 100644 airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetSQLNameTransformer.java delete mode 100644 airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetUtils.java delete mode 100644 airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetWriteApiConsumer.java delete mode 100644 airbyte-integrations/connectors/destination-rockset/src/main/resources/spec.json delete mode 100644 airbyte-integrations/connectors/destination-rockset/src/test-integration/java/io/airbyte/integrations/destination/rockset/RocksetDestinationAcceptanceTest.java delete mode 100644 airbyte-integrations/connectors/destination-rockset/src/test/java/io/airbyte/integrations/destination/rockset/RocksetWriteApiConsumerTest.java delete mode 100644 airbyte-integrations/connectors/destination-scaffold-destination-python/README.md delete mode 100644 airbyte-integrations/connectors/destination-scaffold-destination-python/destination_scaffold_destination_python/__init__.py delete mode 100644 airbyte-integrations/connectors/destination-scaffold-destination-python/destination_scaffold_destination_python/destination.py delete mode 100644 airbyte-integrations/connectors/destination-scaffold-destination-python/destination_scaffold_destination_python/spec.json delete mode 100644 airbyte-integrations/connectors/destination-scaffold-destination-python/integration_tests/integration_test.py delete mode 100644 airbyte-integrations/connectors/destination-scaffold-destination-python/main.py delete mode 100644 airbyte-integrations/connectors/destination-scaffold-destination-python/requirements.txt delete mode 100644 airbyte-integrations/connectors/destination-scaffold-destination-python/setup.py delete mode 100644 airbyte-integrations/connectors/destination-scaffold-destination-python/unit_tests/unit_test.py delete mode 100644 airbyte-integrations/connectors/destination-scylla/README.md delete mode 100644 airbyte-integrations/connectors/destination-scylla/bootstrap.md delete mode 100644 airbyte-integrations/connectors/destination-scylla/build.gradle delete mode 100644 airbyte-integrations/connectors/destination-scylla/docker-compose.yml delete mode 100644 airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaConfig.java delete mode 100644 airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaCqlProvider.java delete mode 100644 airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaDestination.java delete mode 100644 
airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaMessageConsumer.java delete mode 100644 airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaNameTransformer.java delete mode 100644 airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaSessionPool.java delete mode 100644 airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaStreamConfig.java delete mode 100644 airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/Triplet.java delete mode 100644 airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/Tuple.java delete mode 100644 airbyte-integrations/connectors/destination-scylla/src/main/resources/spec.json delete mode 100644 airbyte-integrations/connectors/destination-scylla/src/test-integration/java/io/airbyte/integrations/destination/scylla/ScyllaContainerInitializr.java delete mode 100644 airbyte-integrations/connectors/destination-scylla/src/test-integration/java/io/airbyte/integrations/destination/scylla/ScyllaCqlProviderTest.java delete mode 100644 airbyte-integrations/connectors/destination-scylla/src/test-integration/java/io/airbyte/integrations/destination/scylla/ScyllaDestinationAcceptanceTest.java delete mode 100644 airbyte-integrations/connectors/destination-scylla/src/test-integration/java/io/airbyte/integrations/destination/scylla/ScyllaDestinationTest.java delete mode 100644 airbyte-integrations/connectors/destination-scylla/src/test-integration/java/io/airbyte/integrations/destination/scylla/TestDataFactory.java delete mode 100644 airbyte-integrations/connectors/destination-scylla/src/test/java/io/airbyte/integrations/destination/scylla/ScyllaConfigTest.java delete mode 100644 airbyte-integrations/connectors/destination-scylla/src/test/java/io/airbyte/integrations/destination/scylla/ScyllaNameTransformerTest.java delete mode 100644 airbyte-integrations/connectors/destination-scylla/src/test/java/io/airbyte/integrations/destination/scylla/ScyllaRecordConsumerTest.java delete mode 100644 airbyte-integrations/connectors/destination-scylla/src/test/java/io/airbyte/integrations/destination/scylla/TestDataFactory.java delete mode 100644 airbyte-integrations/connectors/destination-selectdb/README.md delete mode 100644 airbyte-integrations/connectors/destination-selectdb/bootstrap.md delete mode 100644 airbyte-integrations/connectors/destination-selectdb/build.gradle delete mode 100644 airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/BaseResponse.java delete mode 100644 airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/CopyIntoResp.java delete mode 100644 airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/LabelInfo.java delete mode 100644 airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/SelectdbConnectionOptions.java delete mode 100644 airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/SelectdbConsumer.java delete mode 100644 
airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/SelectdbCopyInto.java delete mode 100644 airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/SelectdbDestination.java delete mode 100644 airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/SelectdbOperations.java delete mode 100644 airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/SelectdbWriteConfig.java delete mode 100644 airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/exception/CopyIntoException.java delete mode 100644 airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/exception/SelectdbRuntimeException.java delete mode 100644 airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/exception/UploadException.java delete mode 100644 airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/http/HttpPostBuilder.java delete mode 100644 airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/http/HttpPutBuilder.java delete mode 100644 airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/http/HttpUtil.java delete mode 100644 airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/utils/ResponseUtils.java delete mode 100644 airbyte-integrations/connectors/destination-selectdb/src/main/resources/spec.json delete mode 100644 airbyte-integrations/connectors/destination-selectdb/src/test-integration/java/io/airbyte/integrations/destination/selectdb/SelectdbDestinationAcceptanceTest.java delete mode 100644 airbyte-integrations/connectors/destination-selectdb/src/test/java/io/airbyte/integrations/destination/selectdb/SelectdbDestinationTest.java delete mode 100644 airbyte-integrations/connectors/destination-tidb/README.md delete mode 100644 airbyte-integrations/connectors/destination-tidb/build.gradle delete mode 100644 airbyte-integrations/connectors/destination-tidb/src/main/java/io/airbyte/integrations/destination/tidb/TiDBDestination.java delete mode 100644 airbyte-integrations/connectors/destination-tidb/src/main/java/io/airbyte/integrations/destination/tidb/TiDBSQLNameTransformer.java delete mode 100644 airbyte-integrations/connectors/destination-tidb/src/main/java/io/airbyte/integrations/destination/tidb/TiDBSqlOperations.java delete mode 100644 airbyte-integrations/connectors/destination-tidb/src/main/resources/spec.json delete mode 100644 airbyte-integrations/connectors/destination-tidb/src/test-integration/java/io/airbyte/integrations/destination/tidb/TiDBDestinationAcceptanceTest.java delete mode 100644 airbyte-integrations/connectors/destination-tidb/src/test-integration/java/io/airbyte/integrations/destination/tidb/TiDBTestDataComparator.java delete mode 100644 airbyte-integrations/connectors/destination-tidb/src/test/java/io/airbyte/integrations/destination/tidb/TiDBDestinationTest.java delete mode 100755 airbyte-integrations/connectors/destination-timeplus/.dockerignore delete mode 100755 airbyte-integrations/connectors/destination-timeplus/Dockerfile delete mode 100755 
airbyte-integrations/connectors/destination-timeplus/README.md delete mode 100755 airbyte-integrations/connectors/destination-timeplus/destination_timeplus/__init__.py delete mode 100755 airbyte-integrations/connectors/destination-timeplus/destination_timeplus/destination.py delete mode 100755 airbyte-integrations/connectors/destination-timeplus/destination_timeplus/spec.json delete mode 100644 airbyte-integrations/connectors/destination-timeplus/integration_tests/configured_catalog.json delete mode 100755 airbyte-integrations/connectors/destination-timeplus/integration_tests/integration_test.py delete mode 100644 airbyte-integrations/connectors/destination-timeplus/integration_tests/messages.jsonl delete mode 100755 airbyte-integrations/connectors/destination-timeplus/main.py delete mode 100755 airbyte-integrations/connectors/destination-timeplus/requirements.txt delete mode 100755 airbyte-integrations/connectors/destination-timeplus/setup.py delete mode 100755 airbyte-integrations/connectors/destination-timeplus/unit_tests/unit_test.py delete mode 100644 airbyte-integrations/connectors/destination-vectara/.dockerignore delete mode 100644 airbyte-integrations/connectors/destination-vectara/Dockerfile delete mode 100644 airbyte-integrations/connectors/destination-vectara/README.md delete mode 100644 airbyte-integrations/connectors/destination-vectara/destination_vectara/__init__.py delete mode 100644 airbyte-integrations/connectors/destination-vectara/destination_vectara/client.py delete mode 100644 airbyte-integrations/connectors/destination-vectara/destination_vectara/config.py delete mode 100644 airbyte-integrations/connectors/destination-vectara/destination_vectara/destination.py delete mode 100644 airbyte-integrations/connectors/destination-vectara/destination_vectara/writer.py delete mode 100644 airbyte-integrations/connectors/destination-vectara/integration_tests/integration_test.py delete mode 100644 airbyte-integrations/connectors/destination-vectara/main.py delete mode 100644 airbyte-integrations/connectors/destination-vectara/requirements.txt delete mode 100644 airbyte-integrations/connectors/destination-vectara/setup.py delete mode 100644 airbyte-integrations/connectors/destination-vectara/unit_tests/__init__.py delete mode 100644 airbyte-integrations/connectors/destination-xata/.dockerignore delete mode 100644 airbyte-integrations/connectors/destination-xata/Dockerfile delete mode 100644 airbyte-integrations/connectors/destination-xata/README.md delete mode 100644 airbyte-integrations/connectors/destination-xata/bootstrap.md delete mode 100644 airbyte-integrations/connectors/destination-xata/destination_xata/__init__.py delete mode 100644 airbyte-integrations/connectors/destination-xata/destination_xata/destination.py delete mode 100644 airbyte-integrations/connectors/destination-xata/destination_xata/spec.json delete mode 100644 airbyte-integrations/connectors/destination-xata/integration_tests/integration_test.py delete mode 100644 airbyte-integrations/connectors/destination-xata/integration_tests/invalid_config.json delete mode 100644 airbyte-integrations/connectors/destination-xata/main.py delete mode 100644 airbyte-integrations/connectors/destination-xata/requirements.txt delete mode 100644 airbyte-integrations/connectors/destination-xata/sample_files/configured_catalog.json delete mode 100644 airbyte-integrations/connectors/destination-xata/setup.py delete mode 100644 airbyte-integrations/connectors/destination-xata/unit_tests/unit_test.py delete mode 100644 
airbyte-integrations/connectors/destination-yugabytedb/README.md delete mode 100644 airbyte-integrations/connectors/destination-yugabytedb/bootstrap.md delete mode 100644 airbyte-integrations/connectors/destination-yugabytedb/build.gradle delete mode 100644 airbyte-integrations/connectors/destination-yugabytedb/docker-compose.yml delete mode 100644 airbyte-integrations/connectors/destination-yugabytedb/src/main/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbDestination.java delete mode 100644 airbyte-integrations/connectors/destination-yugabytedb/src/main/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbNamingTransformer.java delete mode 100644 airbyte-integrations/connectors/destination-yugabytedb/src/main/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbSqlOperations.java delete mode 100644 airbyte-integrations/connectors/destination-yugabytedb/src/main/resources/spec.json delete mode 100644 airbyte-integrations/connectors/destination-yugabytedb/src/test-integration/java/io/airbyte/integrations/destination/yugabytedb/YugabyteDataSource.java delete mode 100644 airbyte-integrations/connectors/destination-yugabytedb/src/test-integration/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbContainerInitializr.java delete mode 100644 airbyte-integrations/connectors/destination-yugabytedb/src/test-integration/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbDestinationAcceptanceTest.java delete mode 100644 airbyte-integrations/connectors/destination-yugabytedb/src/test/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbDestinationTest.java delete mode 100644 airbyte-integrations/connectors/destination-yugabytedb/src/test/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbNamingTransformerTest.java diff --git a/airbyte-integrations/connectors/destination-amazon-sqs/.dockerignore b/airbyte-integrations/connectors/destination-amazon-sqs/.dockerignore deleted file mode 100644 index efa69d407fd8..000000000000 --- a/airbyte-integrations/connectors/destination-amazon-sqs/.dockerignore +++ /dev/null @@ -1,5 +0,0 @@ -* -!Dockerfile -!main.py -!destination_amazon_sqs -!setup.py diff --git a/airbyte-integrations/connectors/destination-amazon-sqs/Dockerfile b/airbyte-integrations/connectors/destination-amazon-sqs/Dockerfile deleted file mode 100644 index 9861de2b6843..000000000000 --- a/airbyte-integrations/connectors/destination-amazon-sqs/Dockerfile +++ /dev/null @@ -1,38 +0,0 @@ -FROM python:3.9.11-alpine3.15 as base - -# build and load all requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -# upgrade pip to the latest version -RUN apk --no-cache upgrade \ - && pip install --upgrade pip \ - && apk --no-cache add tzdata build-base - - -COPY setup.py ./ -# install necessary packages to a temporary folder -RUN pip install --prefix=/install . - -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# bash is installed for more convenient debugging. 
-RUN apk --no-cache add bash - -# copy payload code only -COPY main.py ./ -COPY destination_amazon_sqs ./destination_amazon_sqs - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=0.1.1 -LABEL io.airbyte.name=airbyte/destination-amazon-sqs diff --git a/airbyte-integrations/connectors/destination-amazon-sqs/README.md b/airbyte-integrations/connectors/destination-amazon-sqs/README.md deleted file mode 100644 index 2856f60b1ae7..000000000000 --- a/airbyte-integrations/connectors/destination-amazon-sqs/README.md +++ /dev/null @@ -1,99 +0,0 @@ -# Amazon Sqs Destination - -This is the repository for the Amazon Sqs destination connector, written in Python. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/destinations/amazon-sqs). - -## Local development - -### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** - -#### Minimum Python version required `= 3.7.0` - -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` - -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt -``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. - -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. - -#### Create credentials -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/destinations/amazon-sqs) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `destination_amazon_sqs/spec.json` file. -Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. - -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `destination amazon-sqs test creds` -and place them into `secrets/config.json`. - -### Locally running the connector -``` -python main.py spec -python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json -``` - -### Locally running the connector docker image - - -#### Build -**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** -```bash -airbyte-ci connectors --name=destination-amazon-sqs build -``` - -An image will be built with the tag `airbyte/destination-amazon-sqs:dev`. - -**Via `docker build`:** -```bash -docker build -t airbyte/destination-amazon-sqs:dev . 
-```
-
-#### Run
-Then run any of the connector commands as follows:
-```
-docker run --rm airbyte/destination-amazon-sqs:dev spec
-docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-amazon-sqs:dev check --config /secrets/config.json
-# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages
-cat messages.jsonl | docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-amazon-sqs:dev write --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
-```
-
-## Testing
-You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md):
-```bash
-airbyte-ci connectors --name=destination-amazon-sqs test
-```
-
-### Customizing Acceptance Tests
-Customize the `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information.
-If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py.
-
-## Dependency Management
-All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
-We split dependencies between two groups, dependencies that are:
-* required for your connector to work, which go in the `MAIN_REQUIREMENTS` list.
-* required for testing, which go in the `TEST_REQUIREMENTS` list.
-
-### Publishing a new version of the connector
-You've checked out the repo, implemented a million-dollar feature, and you're ready to share your changes with the world. Now what?
-1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-amazon-sqs test`
-2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors).
-3. Make sure the `metadata.yaml` content is up to date.
-4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/destinations/amazon-sqs.md`).
-5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention).
-6. Pat yourself on the back for being an awesome contributor.
-7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
-
diff --git a/airbyte-integrations/connectors/destination-amazon-sqs/bootstrap.md b/airbyte-integrations/connectors/destination-amazon-sqs/bootstrap.md
deleted file mode 100644
index ce91ec1ef142..000000000000
--- a/airbyte-integrations/connectors/destination-amazon-sqs/bootstrap.md
+++ /dev/null
@@ -1,59 +0,0 @@
-# Amazon SQS Destination
-
-## What
-This is a connector for producing messages to an [Amazon SQS Queue](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/welcome.html).
-
-## How
-### Sending messages
-Amazon SQS allows messages to be sent individually or in batches. Currently, this Destination only supports sending messages individually, which can have performance implications when sending high volumes of messages; a sketch of what batch sending could look like is shown below.
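Batch sending is left as a TODO in this connector. Purely as an illustration of what it could look like, here is a minimal sketch using boto3's `Queue.send_messages`, which accepts up to 10 entries per call; the helper name and batching policy are assumptions for the example, not part of the connector:

```python
# Hedged sketch of SQS batch sending with boto3 (illustrative only; the
# connector currently sends messages one at a time). Targets standard queues;
# FIFO queues would additionally need a MessageGroupId per entry.
import json
from uuid import uuid4

import boto3


def send_batch_messages(queue, records, batch_size=10):
    """Send dicts as SQS messages in batches of up to 10 (the per-call API limit)."""
    for start in range(0, len(records), batch_size):
        entries = [
            {"Id": str(uuid4()), "MessageBody": json.dumps(record)}
            for record in records[start : start + batch_size]
        ]
        response = queue.send_messages(Entries=entries)
        for ok in response.get("Successful", []):
            print("Message sent: " + ok["MessageId"])
        for failed in response.get("Failed", []):
            print("Message failed: " + failed["Id"])


# Example usage, assuming credentials and region come from the environment:
# sqs = boto3.Session().resource("sqs")
# queue = sqs.Queue("https://sqs.eu-west-1.amazonaws.com/1234567890/my-example-queue")
# send_batch_messages(queue, [{"id": 1}, {"id": 2}])
```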
-
-#### Message Body
-By default, the SQS Message body is built using the AirbyteMessageRecord's 'data' property.
-
-If the **message_body_key** config item is set, we use the value as a key within the AirbyteMessageRecord's 'data' property. This could be improved to handle nested keys by using JSONPath syntax to look up values.
-
-For example, given the input Record:
-```
-{
-  "data":
-    {
-      "parent_key": {
-        "nested_key": "nested_value"
-      },
-      "top_key": "top_value"
-    }
-}
-```
-
-With no **message_body_key** set, the output SQS Message body will be
-```
-{
-  "parent_key": {
-    "nested_key": "nested_value"
-  },
-  "top_key": "top_value"
-}
-```
-
-With **message_body_key** set to `parent_key`, the output SQS Message body will be
-```
-{
-  "nested_key": "nested_value"
-}
-```
-
-#### Message attributes
-The airbyte_emitted_at timestamp is added to every message as an Attribute by default. This could be improved to allow the user to set Attributes through the UI, or to take keys from the Record as Attributes.
-
-#### FIFO Queues
-A Queue URL that ends with '.fifo' **must** be a valid FIFO Queue. When the queue is FIFO, the *message_group_id* property is required.
-
-Currently, a unique uuid4 is generated as the dedupe ID for every message. This could be improved to allow the user to specify a path in the Record to use as a dedupe ID.
-
-### Credentials
-Requires an AWS IAM Access Key ID and Secret Key.
-
-This could be improved to add support for configured AWS profiles, env vars, etc.
diff --git a/airbyte-integrations/connectors/destination-amazon-sqs/destination_amazon_sqs/__init__.py b/airbyte-integrations/connectors/destination-amazon-sqs/destination_amazon_sqs/__init__.py
deleted file mode 100644
index ff5ba7b7242c..000000000000
--- a/airbyte-integrations/connectors/destination-amazon-sqs/destination_amazon_sqs/__init__.py
+++ /dev/null
@@ -1,8 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
-
-
-from .destination import DestinationAmazonSqs
-
-__all__ = ["DestinationAmazonSqs"]
diff --git a/airbyte-integrations/connectors/destination-amazon-sqs/destination_amazon_sqs/destination.py b/airbyte-integrations/connectors/destination-amazon-sqs/destination_amazon_sqs/destination.py
deleted file mode 100644
index 1eb0249bccc5..000000000000
--- a/airbyte-integrations/connectors/destination-amazon-sqs/destination_amazon_sqs/destination.py
+++ /dev/null
@@ -1,176 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
-
-
-import json
-from typing import Any, Iterable, Mapping
-from uuid import uuid4
-
-import boto3
-from airbyte_cdk import AirbyteLogger
-from airbyte_cdk.destinations import Destination
-from airbyte_cdk.models import AirbyteConnectionStatus, AirbyteMessage, ConfiguredAirbyteCatalog, Status, Type
-from botocore.exceptions import ClientError
-
-
-class DestinationAmazonSqs(Destination):
-    def queue_is_fifo(self, url: str) -> bool:
-        return url.endswith(".fifo")
-
-    def parse_queue_name(self, url: str) -> str:
-        return url.rsplit("/", 1)[-1]
-
-    def send_single_message(self, queue, message) -> dict:
-        return queue.send_message(**message)
-
-    def build_sqs_message(self, record, message_body_key=None):
-        data = None
-        if message_body_key:
-            data = record.data.get(message_body_key)
-            if data is None:
-                raise Exception("Message had no attribute of the configured Message Body Key: " + message_body_key)
-        else:
-            data = json.dumps(record.data)
-
-        message = {"MessageBody": data}
-
-        return message
-
-    def add_attributes_to_message(self, record, message):
-        attributes = {"airbyte_emitted_at": {"StringValue": str(record.emitted_at), "DataType": "String"}}
-        message["MessageAttributes"] = attributes
-        return message
-
-    def set_message_delay(self, message, message_delay):
-        message["DelaySeconds"] = message_delay
-        return message
-
-    # MessageGroupID and MessageDeduplicationID are required properties for FIFO queues
-    # https://docs.aws.amazon.com/AWSSimpleQueueService/latest/APIReference/API_SendMessage.html
-    def set_message_fifo_properties(self, message, message_group_id, use_content_dedupe=False):
-        # https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/using-messagegroupid-property.html
-        if not message_group_id:
-            raise Exception("Failed to build message - Message Group ID is required for FIFO queues")
-        else:
-            message["MessageGroupId"] = message_group_id
-        # https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/using-messagededuplicationid-property.html
-        if not use_content_dedupe:
-            message["MessageDeduplicationId"] = str(uuid4())
-        # TODO: Support getting MessageDeduplicationId from a key in the record
-        # if message_dedupe_id:
-        #     message['MessageDeduplicationId'] = message_dedupe_id
-        return message
-
-    # TODO: Support batch send
-    # def send_batch_messages(messages, queue):
-    #     entry = {
-    #         'Id': "1",
-    #         'MessageBody': str(record.data),
-    #     }
-    #     response = queue.send_messages(Entries=messages)
-    #     if 'Successful' in response:
-    #         for status in response['Successful']:
-    #             print("Message sent: " + status['MessageId'])
-    #     if 'Failed' in response:
-    #         for status in response['Failed']:
-    #             print("Message failed: " + status['Id'])
-
-    # https://docs.aws.amazon.com/AWSSimpleQueueService/latest/APIReference/API_SendMessage.html
-    def write(
-        self, config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog, input_messages: Iterable[AirbyteMessage]
-    ) -> Iterable[AirbyteMessage]:
-
-        # Required properties
-        queue_url = config["queue_url"]
-        queue_region = config["region"]
-
-        # TODO: Implement optional params for batch
-        # Optional Properties
-        # max_batch_size = config.get("max_batch_size", 10)
-        # send_as_batch = config.get("send_as_batch", False)
-        message_delay = config.get("message_delay")
-        message_body_key = config.get("message_body_key")
-
-        # FIFO Properties
-        message_group_id = config.get("message_group_id")
-
-        # Sensitive Properties
-        access_key = config["access_key"]
-        secret_key = config["secret_key"]
-
-        session = boto3.Session(aws_access_key_id=access_key, aws_secret_access_key=secret_key, region_name=queue_region)
-        sqs = session.resource("sqs")
-        queue = sqs.Queue(url=queue_url)
-
-        # TODO: Make access/secret key optional, support public access & profiles
-        # TODO: Support adding/setting attributes in the UI
-        # TODO: Support extracting a specific path as message attributes
-
-        for message in input_messages:
-            if message.type == Type.RECORD:
-                sqs_message = self.build_sqs_message(message.record, message_body_key)
-
-                if message_delay:
-                    sqs_message = self.set_message_delay(sqs_message, message_delay)
-
-                sqs_message = self.add_attributes_to_message(message.record, sqs_message)
-
-                if self.queue_is_fifo(queue_url):
-                    use_content_dedupe = queue.attributes.get("ContentBasedDeduplication") != "false"
-                    self.set_message_fifo_properties(sqs_message, message_group_id, use_content_dedupe)
-
-                self.send_single_message(queue, sqs_message)
-            if message.type == Type.STATE:
-                yield message
-
-    def check(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> AirbyteConnectionStatus:
-        try:
-            # Required properties
-            queue_url = config["queue_url"]
-            logger.debug("Amazon SQS Destination Config Check - queue_url: " + queue_url)
-            queue_region = config["region"]
-            logger.debug("Amazon SQS Destination Config Check - region: " + queue_region)
-
-            # Sensitive Properties
-            access_key = config["access_key"]
-            logger.debug("Amazon SQS Destination Config Check - access_key (ends with): " + access_key[-1])
-            secret_key = config["secret_key"]
-            logger.debug("Amazon SQS Destination Config Check - secret_key (ends with): " + secret_key[-1])
-
-            logger.debug("Amazon SQS Destination Config Check - Starting connection test ---")
-            session = boto3.Session(aws_access_key_id=access_key, aws_secret_access_key=secret_key, region_name=queue_region)
-            sqs = session.resource("sqs")
-            queue = sqs.Queue(url=queue_url)
-            if hasattr(queue, "attributes"):
-                logger.debug("Amazon SQS Destination Config Check - Connection test successful ---")
-
-                if self.queue_is_fifo(queue_url):
-                    fifo = queue.attributes.get("FifoQueue", False)
-                    if not fifo:
-                        raise Exception("FIFO Queue URL set but Queue is not FIFO")
-
-                    message_group_id = config.get("message_group_id")
-                    if message_group_id is None:
-                        raise Exception("Message Group ID is not set, but is required for FIFO Queues.")
-
-                    # TODO: Support referencing an ID inside the Record to use as de-dupe ID
-                    # message_dedupe_key = config.get("message_dedupe_key")
-                    # content_dedupe = queue.attributes.get('ContentBasedDeduplication')
-                    # if content_dedupe == "false":
-                    #     if message_dedupe_id is None:
-                    #         raise Exception("You must provide a Message Deduplication ID when ContentBasedDeduplication is not used.")
-
-                return AirbyteConnectionStatus(status=Status.SUCCEEDED)
-            else:
-                return AirbyteConnectionStatus(
-                    status=Status.FAILED, message="Amazon SQS Destination Config Check - Could not connect to queue"
-                )
-        except ClientError as e:
-            return AirbyteConnectionStatus(
-                status=Status.FAILED, message=f"Amazon SQS Destination Config Check - Error in AWS Client: {str(e)}"
-            )
-        except Exception as e:
-            return AirbyteConnectionStatus(
-                status=Status.FAILED, message=f"Amazon SQS Destination Config Check - An exception occurred: {str(e)}"
-            )
diff --git a/airbyte-integrations/connectors/destination-amazon-sqs/destination_amazon_sqs/spec.json b/airbyte-integrations/connectors/destination-amazon-sqs/destination_amazon_sqs/spec.json
deleted file mode 100644
index f94d7d023e81..000000000000
--- a/airbyte-integrations/connectors/destination-amazon-sqs/destination_amazon_sqs/spec.json +++ /dev/null @@ -1,101 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/destinations/amazon-sqs", - "supported_destination_sync_modes": ["append"], - "supportsIncremental": true, - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Destination Amazon Sqs", - "type": "object", - "required": ["queue_url", "region"], - "additionalProperties": false, - "properties": { - "queue_url": { - "title": "Queue URL", - "description": "URL of the SQS Queue", - "type": "string", - "examples": [ - "https://sqs.eu-west-1.amazonaws.com/1234567890/my-example-queue" - ], - "order": 0 - }, - "region": { - "title": "AWS Region", - "description": "AWS Region of the SQS Queue", - "type": "string", - "enum": [ - "af-south-1", - "ap-east-1", - "ap-northeast-1", - "ap-northeast-2", - "ap-northeast-3", - "ap-south-1", - "ap-south-2", - "ap-southeast-1", - "ap-southeast-2", - "ap-southeast-3", - "ap-southeast-4", - "ca-central-1", - "ca-west-1", - "cn-north-1", - "cn-northwest-1", - "eu-central-1", - "eu-central-2", - "eu-north-1", - "eu-south-1", - "eu-south-2", - "eu-west-1", - "eu-west-2", - "eu-west-3", - "il-central-1", - "me-central-1", - "me-south-1", - "sa-east-1", - "us-east-1", - "us-east-2", - "us-gov-east-1", - "us-gov-west-1", - "us-west-1", - "us-west-2" - ], - "order": 1 - }, - "message_delay": { - "title": "Message Delay", - "description": "Modify the Message Delay of the individual message from the Queue's default (seconds).", - "type": "integer", - "examples": ["15"], - "order": 2 - }, - "access_key": { - "title": "AWS IAM Access Key ID", - "description": "The Access Key ID of the AWS IAM Role to use for sending messages", - "type": "string", - "examples": ["xxxxxHRNxxx3TBxxxxxx"], - "order": 3, - "airbyte_secret": true - }, - "secret_key": { - "title": "AWS IAM Secret Key", - "description": "The Secret Key of the AWS IAM Role to use for sending messages", - "type": "string", - "examples": ["hu+qE5exxxxT6o/ZrKsxxxxxxBhxxXLexxxxxVKz"], - "order": 4, - "airbyte_secret": true - }, - "message_body_key": { - "title": "Message Body Key", - "description": "Use this property to extract the contents of the named key in the input record to use as the SQS message body. If not set, the entire content of the input record data is used as the message body.", - "type": "string", - "examples": ["myDataPath"], - "order": 5 - }, - "message_group_id": { - "title": "Message Group Id", - "description": "The tag that specifies that a message belongs to a specific message group. 
This parameter applies only to, and is REQUIRED by, FIFO queues.",
-        "type": "string",
-        "examples": ["my-fifo-group"],
-        "order": 6
-      }
-    }
-  }
-}
diff --git a/airbyte-integrations/connectors/destination-amazon-sqs/integration_tests/input_records_json b/airbyte-integrations/connectors/destination-amazon-sqs/integration_tests/input_records_json
deleted file mode 100644
index b46977c1c13d..000000000000
--- a/airbyte-integrations/connectors/destination-amazon-sqs/integration_tests/input_records_json
+++ /dev/null
@@ -1 +0,0 @@
-{"type": "RECORD", "record": {"stream": "ab-airbyte-testing", "data": {"id": "ba0f237b-abf5-41ae-9d94-1dbd346f38dd", "body": "test 1", "attributes": null}, "emitted_at": 1633881878000}}
\ No newline at end of file
diff --git a/airbyte-integrations/connectors/destination-amazon-sqs/integration_tests/integration_test.py b/airbyte-integrations/connectors/destination-amazon-sqs/integration_tests/integration_test.py
deleted file mode 100644
index 5d1e7112b133..000000000000
--- a/airbyte-integrations/connectors/destination-amazon-sqs/integration_tests/integration_test.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
-
-import json
-from typing import Any, Mapping
-
-import pytest
-from airbyte_cdk import AirbyteLogger
-from airbyte_cdk.models import AirbyteStream, ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, DestinationSyncMode, Status, SyncMode
-from destination_amazon_sqs import DestinationAmazonSqs
-
-
-@pytest.fixture(name="config")
-def config_fixture() -> Mapping[str, Any]:
-    with open("secrets/config.json", "r") as f:
-        return json.loads(f.read())
-
-
-@pytest.fixture(name="configured_catalog")
-def configured_catalog_fixture() -> ConfiguredAirbyteCatalog:
-    stream_schema = {"type": "object", "properties": {"string_col": {"type": "string"}, "int_col": {"type": "integer"}}}
-
-    append_stream = ConfiguredAirbyteStream(
-        stream=AirbyteStream(name="append_stream", json_schema=stream_schema, supported_sync_modes=[SyncMode.incremental]),
-        sync_mode=SyncMode.incremental,
-        destination_sync_mode=DestinationSyncMode.append,
-    )
-
-    overwrite_stream = ConfiguredAirbyteStream(
-        stream=AirbyteStream(name="overwrite_stream", json_schema=stream_schema, supported_sync_modes=[SyncMode.incremental]),
-        sync_mode=SyncMode.incremental,
-        destination_sync_mode=DestinationSyncMode.overwrite,
-    )
-
-    return ConfiguredAirbyteCatalog(streams=[append_stream, overwrite_stream])
-
-
-def test_check_valid_config(config: Mapping):
-    outcome = DestinationAmazonSqs().check(AirbyteLogger(), config)
-    assert outcome.status == Status.SUCCEEDED
-
-
-def test_check_invalid_config():
-    outcome = DestinationAmazonSqs().check(AirbyteLogger(), {"secret_key": "not_a_real_secret"})
-    assert outcome.status == Status.FAILED
diff --git a/airbyte-integrations/connectors/destination-amazon-sqs/main.py b/airbyte-integrations/connectors/destination-amazon-sqs/main.py
deleted file mode 100644
index bc6076972a29..000000000000
--- a/airbyte-integrations/connectors/destination-amazon-sqs/main.py
+++ /dev/null
@@ -1,11 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-# - - -import sys - -from destination_amazon_sqs import DestinationAmazonSqs - -if __name__ == "__main__": - DestinationAmazonSqs().run(sys.argv[1:]) diff --git a/airbyte-integrations/connectors/destination-amazon-sqs/requirements.txt b/airbyte-integrations/connectors/destination-amazon-sqs/requirements.txt deleted file mode 100644 index d6e1198b1ab1..000000000000 --- a/airbyte-integrations/connectors/destination-amazon-sqs/requirements.txt +++ /dev/null @@ -1 +0,0 @@ --e . diff --git a/airbyte-integrations/connectors/destination-amazon-sqs/sample_files/configured_catalog.json b/airbyte-integrations/connectors/destination-amazon-sqs/sample_files/configured_catalog.json deleted file mode 100644 index ee132a2e53a7..000000000000 --- a/airbyte-integrations/connectors/destination-amazon-sqs/sample_files/configured_catalog.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "streams": [ - { - "sync_mode": "full_refresh", - "destination_sync_mode": "append", - "stream": { - "name": "ab-airbyte-testing", - "supported_sync_modes": ["full_refresh"], - "source_defined_cursor": false, - "json_schema": { - "type": "object", - "properties": { - "id": { - "type": "string" - }, - "body": { - "type": "string" - }, - "attributes": { - "type": ["null", "object"] - } - } - } - } - } - ] -} diff --git a/airbyte-integrations/connectors/destination-amazon-sqs/setup.py b/airbyte-integrations/connectors/destination-amazon-sqs/setup.py deleted file mode 100644 index f1df0009ea38..000000000000 --- a/airbyte-integrations/connectors/destination-amazon-sqs/setup.py +++ /dev/null @@ -1,23 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = ["airbyte-cdk", "boto3"] - -TEST_REQUIREMENTS = ["pytest~=6.1", "moto"] - -setup( - name="destination_amazon_sqs", - description="Destination implementation for Amazon Sqs.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json"]}, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/destination-amazon-sqs/unit_tests/unit_test.py b/airbyte-integrations/connectors/destination-amazon-sqs/unit_tests/unit_test.py deleted file mode 100644 index 719671fa281b..000000000000 --- a/airbyte-integrations/connectors/destination-amazon-sqs/unit_tests/unit_test.py +++ /dev/null @@ -1,226 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-#
-
-import json
-import time
-from typing import Any, Mapping
-
-import boto3
-from airbyte_cdk.logger import AirbyteLogger
-from airbyte_cdk.models import AirbyteMessage, ConfiguredAirbyteCatalog, Status
-from destination_amazon_sqs import DestinationAmazonSqs
-
-# from airbyte_cdk.sources.source import Source
-from moto import mock_iam, mock_sqs
-from moto.core import set_initial_no_auth_action_count
-
-
-@mock_iam
-def create_user_with_all_permissions():
-    client = boto3.client("iam", region_name="eu-west-1")
-    client.create_user(UserName="test_user1")
-
-    policy_document = {
-        "Version": "2012-10-17",
-        "Statement": [{"Effect": "Allow", "Action": ["sqs:*"], "Resource": "*"}],
-    }
-
-    client.put_user_policy(
-        UserName="test_user1",
-        PolicyName="policy1",
-        PolicyDocument=json.dumps(policy_document),
-    )
-
-    return client.create_access_key(UserName="test_user1")["AccessKey"]
-
-
-def create_config(queue_url, queue_region, access_key, secret_key, message_delay):
-    return {
-        "queue_url": queue_url,
-        "region": queue_region,
-        "access_key": access_key,
-        "secret_key": secret_key,
-        "message_delay": message_delay,
-    }
-
-
-def create_fifo_config(queue_url, queue_region, access_key, secret_key, message_group_id, message_delay):
-    return {
-        "queue_url": queue_url,
-        "region": queue_region,
-        "access_key": access_key,
-        "secret_key": secret_key,
-        "message_group_id": message_group_id,
-        "message_delay": message_delay,
-    }
-
-
-def create_config_with_body_key(queue_url, queue_region, access_key, secret_key, message_body_key, message_delay):
-    return {
-        "queue_url": queue_url,
-        "region": queue_region,
-        "access_key": access_key,
-        "secret_key": secret_key,
-        "message_body_key": message_body_key,
-        "message_delay": message_delay,
-    }
-
-
-def get_catalog() -> Mapping[str, Any]:
-    with open("sample_files/configured_catalog.json", "r") as f:
-        return json.load(f)
-
-
-@set_initial_no_auth_action_count(3)
-@mock_sqs
-@mock_iam
-def test_check():
-    # Create User
-    user = create_user_with_all_permissions()
-    # Create Queue
-    queue_name = "amazon-sqs-mock-queue"
-    queue_region = "eu-west-1"
-    client = boto3.client(
-        "sqs", aws_access_key_id=user["AccessKeyId"], aws_secret_access_key=user["SecretAccessKey"], region_name=queue_region
-    )
-    queue_url = client.create_queue(QueueName=queue_name)["QueueUrl"]
-    # Create config
-    config = create_config(queue_url, queue_region, user["AccessKeyId"], user["SecretAccessKey"], 10)
-    # Create AirbyteLogger
-    logger = AirbyteLogger()
-    # Create Destination
-    destination = DestinationAmazonSqs()
-    # Run check
-    status = destination.check(logger, config)
-    assert status.status == Status.SUCCEEDED
-
-    # Create FIFO queue
-    fifo_queue_name = "amazon-sqs-mock-queue.fifo"
-    fifo_queue_url = client.create_queue(QueueName=fifo_queue_name, Attributes={"FifoQueue": "true"})["QueueUrl"]
-    # Create config for FIFO
-    fifo_config = create_fifo_config(fifo_queue_url, queue_region, user["AccessKeyId"], user["SecretAccessKey"], "fifo-group", 10)
-    # Run check
-    status = destination.check(logger, fifo_config)
-    assert status.status == Status.SUCCEEDED
-
-
-@set_initial_no_auth_action_count(4)
-@mock_sqs
-@mock_iam
-def test_write():
-    # Create User
-    user = create_user_with_all_permissions()
-
-    test_message = {
-        "type": "RECORD",
-        "record": {
-            "stream": "ab-airbyte-testing",
-            "data": {"id": "ba0f237b-abf5-41ae-9d94-1dbd346f38dd", "body": "test 1", "attributes": None},
-            "emitted_at": 1633881878000,
-        },
-    }
-    ab_message = AirbyteMessage(**test_message)
-
-    # Common params
message_delay = 1 - queue_region = "eu-west-1" - - # Standard Queue Test - print("## Starting standard queue test ##") - # Create Queue - queue_name = "amazon-sqs-mock-queue" - client = boto3.client( - "sqs", aws_access_key_id=user["AccessKeyId"], aws_secret_access_key=user["SecretAccessKey"], region_name=queue_region - ) - queue_url = client.create_queue(QueueName=queue_name)["QueueUrl"] - # Create config - config = create_config(queue_url, queue_region, user["AccessKeyId"], user["SecretAccessKey"], message_delay) - # Create ConfiguredAirbyteCatalog - catalog = ConfiguredAirbyteCatalog(streams=get_catalog()["streams"]) - # Create Destination - destination = DestinationAmazonSqs() - # Send messages using write() - for message in destination.write(config, catalog, [ab_message]): - print(f"Message Sent with delay of {message_delay} seconds") - # Listen for messages for max 20 seconds - timeout = time.time() + 20 - print("Listening for messages.") - while True: - message_received = client.receive_message(QueueUrl=queue_url) - if message_received.get("Messages"): - print("Message received.") - message_body = json.loads(message_received["Messages"][0]["Body"]) - # Compare the body of the received message, with the body of the message we sent - if message_body == test_message["record"]["data"]: - print("Received message matches for standard queue write.") - assert True - break - else: - continue - if time.time() > timeout: - print("Timed out waiting for message after 20 seconds.") - assert False - - # Standard Queue with a Message Key Test - print("## Starting body key queue test ##") - # Create Queue - key_queue_name = "amazon-sqs-mock-queue-key" - key_queue_url = client.create_queue(QueueName=key_queue_name)["QueueUrl"] - # Create config - message_body_key = "body" - key_config = create_config_with_body_key( - key_queue_url, queue_region, user["AccessKeyId"], user["SecretAccessKey"], message_body_key, message_delay - ) - # Send messages using write() - for message in destination.write(key_config, catalog, [ab_message]): - print(f"Message Sent with delay of {message_delay} seconds") - # Listen for messages for max 20 seconds - timeout = time.time() + 20 - print("Listening for messages.") - while True: - message_received = client.receive_message(QueueUrl=key_queue_url) - if message_received.get("Messages"): - print("Message received.") - message_body = message_received["Messages"][0]["Body"] - # Compare the body of the received message, with the body of the message we sent - if message_body == test_message["record"]["data"][message_body_key]: - print("Received message matches for body key queue write.") - assert True - break - else: - continue - if time.time() > timeout: - print("Timed out waiting for message after 20 seconds.") - assert False - - # FIFO Queue Test - print("## Starting FIFO queue test ##") - # Create Queue - fifo_queue_name = "amazon-sqs-mock-queue.fifo" - fifo_queue_url = client.create_queue(QueueName=fifo_queue_name, Attributes={"FifoQueue": "true"})["QueueUrl"] - # Create config - fifo_config = create_fifo_config( - fifo_queue_url, queue_region, user["AccessKeyId"], user["SecretAccessKey"], "fifo-group", message_delay - ) - # Send messages using write() - for message in destination.write(fifo_config, catalog, [ab_message]): - print(f"Message Sent with delay of {message_delay} seconds") - # Listen for messages for max 20 seconds - timeout = time.time() + 20 - print("Listening for messages.") - while True: - message_received = client.receive_message(QueueUrl=fifo_queue_url) - 
if message_received.get("Messages"):
-            print("Message received.")
-            message_body = json.loads(message_received["Messages"][0]["Body"])
-            # Compare the body of the received message, with the body of the message we sent
-            if message_body == test_message["record"]["data"]:
-                print("Received message matches for FIFO queue write.")
-                assert True
-                break
-            else:
-                continue
-        if time.time() > timeout:
-            print("Timed out waiting for message after 20 seconds.")
-            assert False
diff --git a/airbyte-integrations/connectors/destination-bigquery-denormalized/.dockerignore b/airbyte-integrations/connectors/destination-bigquery-denormalized/.dockerignore
deleted file mode 100644
index e4fbece78752..000000000000
--- a/airbyte-integrations/connectors/destination-bigquery-denormalized/.dockerignore
+++ /dev/null
@@ -1,3 +0,0 @@
-*
-!Dockerfile
-!build/distributions
diff --git a/airbyte-integrations/connectors/destination-cassandra/README.md b/airbyte-integrations/connectors/destination-cassandra/README.md
deleted file mode 100644
index 21c6cde72284..000000000000
--- a/airbyte-integrations/connectors/destination-cassandra/README.md
+++ /dev/null
@@ -1,72 +0,0 @@
-# Destination Cassandra
-
-This is the repository for the Cassandra destination connector in Java.
-For information about how to use this connector within Airbyte, see [the User Documentation](https://docs.airbyte.io/integrations/destinations/cassandra).
-
-## Local development
-
-#### Building via Gradle
-From the Airbyte repository root, run:
-```
-./gradlew :airbyte-integrations:connectors:destination-cassandra:build
-```
-
-#### Create credentials
-**If you are a community contributor**, generate the necessary credentials and place them in `secrets/config.json` conforming to the spec file in `src/main/resources/spec.json`.
-Note that the `secrets` directory is git-ignored by default, so there is no danger of accidentally checking in sensitive information.
-
-**If you are an Airbyte core member**, follow the [instructions](https://docs.airbyte.io/connector-development#using-credentials-in-ci) to set up the credentials.
-
-### Locally running the connector docker image
-
-#### Build
-Build the connector image via Gradle:
-
-```
-./gradlew :airbyte-integrations:connectors:destination-cassandra:buildConnectorImage
-```
-Once built, the docker image name and tag on your host will be `airbyte/destination-cassandra:dev`.
-
-#### Run
-Then run any of the connector commands as follows:
-```
-docker run --rm airbyte/destination-cassandra:dev spec
-docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-cassandra:dev check --config /secrets/config.json
-docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-cassandra:dev discover --config /secrets/config.json
-docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-cassandra:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
-```
-
-## Testing
-We use `JUnit` for Java tests.
-
-### Unit and Integration Tests
-Place unit tests under `src/test/java/io/airbyte/integrations/destination/cassandra`.
-
-#### Acceptance Tests
-Airbyte has a standard test suite that all destination connectors must pass. Implement the `TODO`s in
-`src/test-integration/java/io/airbyte/integrations/destination/cassandra/CassandraDestinationAcceptanceTest.java`.
-
-### Using Gradle to run tests
-All commands should be run from the Airbyte project root.
-To run unit tests:
-```
-./gradlew :airbyte-integrations:connectors:destination-cassandra:unitTest
-```
-To run acceptance and custom integration tests:
-```
-./gradlew :airbyte-integrations:connectors:destination-cassandra:integrationTest
-```
-
-## Dependency Management
-
-### Publishing a new version of the connector
-You've checked out the repo, implemented a million-dollar feature, and you're ready to share your changes with the world. Now what?
-1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-cassandra test`
-2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors).
-3. Make sure the `metadata.yaml` content is up to date.
-4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/destinations/cassandra.md`).
-5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention).
-6. Pat yourself on the back for being an awesome contributor.
-7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
-
diff --git a/airbyte-integrations/connectors/destination-cassandra/bootstrap.md b/airbyte-integrations/connectors/destination-cassandra/bootstrap.md
deleted file mode 100644
index 35c19425c395..000000000000
--- a/airbyte-integrations/connectors/destination-cassandra/bootstrap.md
+++ /dev/null
@@ -1,30 +0,0 @@
-# Cassandra Destination
-
-Cassandra is a free and open-source, distributed, wide-column store, NoSQL database management system designed to handle large amounts of data across many commodity servers, providing high availability with no single point of failure.
-
-The data is structured in keyspaces and tables and is partitioned and replicated across different nodes in the cluster.
-[Read more about Cassandra](https://cassandra.apache.org/_/index.html)
-
-This connector maps an incoming `stream` to a Cassandra `table` and a `namespace` to a Cassandra `keyspace`.
-When using destination sync mode `append` or `append_dedup`, an `insert` operation is performed against an existing Cassandra table.
-When using `overwrite`, the records are first placed in a temp table. When all the messages have been received, the data is copied to the final table, which is first truncated, and the temp table is then deleted (a sketch of this flow is shown below).
-
-The implementation uses the [Datastax](https://github.com/datastax/java-driver) driver in order to access Cassandra. [CassandraCqlProvider](./src/main/java/io/airbyte/integrations/destination/cassandra/CassandraCqlProvider.java) handles the communication with the Cassandra cluster, and internally it uses the [SessionManager](./src/main/java/io/airbyte/integrations/destination/cassandra/SessionManager.java) to retrieve a CqlSession to the cluster.
-
-The [CassandraMessageConsumer](./src/main/java/io/airbyte/integrations/destination/cassandra/CassandraMessageConsumer.java) class contains the logic for handling Airbyte messages and events, and for copying data between tables.
-
-## Development
-
-See the [CassandraCqlProvider](./src/main/java/io/airbyte/integrations/destination/cassandra/CassandraCqlProvider.java) class on how to use the Datastax driver.
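Purely as an illustration of the temp-table overwrite flow described above, here is a minimal sketch of the same sequence of CQL statements using the DataStax Python driver (`cassandra-driver`). The connector itself does this in Java via `CassandraCqlProvider`; the keyspace, table names, and local contact point below are assumptions for the example, and the columns only mirror the connector's id/data/emitted-at layout.

```python
# Sketch of the overwrite flow: buffer rows in a temp table, then truncate the
# final table, copy the rows over, and drop the temp table. Assumes a local
# Cassandra node and `pip install cassandra-driver`; names are illustrative.
from cassandra.cluster import Cluster

cluster = Cluster(["127.0.0.1"], port=9042)
session = cluster.connect()

session.execute(
    "CREATE KEYSPACE IF NOT EXISTS demo "
    "WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1}"
)
# Three-column layout analogous to the connector's: id, raw JSON payload, timestamp.
for table in ("final_table", "tmp_table"):
    session.execute(
        f"CREATE TABLE IF NOT EXISTS demo.{table} "
        "(id uuid PRIMARY KEY, data text, emitted_at timestamp)"
    )

# 1. Incoming records are buffered in the temp table during the sync.
session.execute(
    "INSERT INTO demo.tmp_table (id, data, emitted_at) "
    "VALUES (uuid(), '{\"key\": \"value\"}', toTimestamp(now()))"
)

# 2. Once all messages have arrived: truncate the final table, copy, clean up.
session.execute("TRUNCATE demo.final_table")
for row in session.execute("SELECT id, data, emitted_at FROM demo.tmp_table"):
    session.execute(
        "INSERT INTO demo.final_table (id, data, emitted_at) VALUES (%s, %s, %s)",
        (row.id, row.data, row.emitted_at),
    )
session.execute("DROP TABLE demo.tmp_table")
cluster.shutdown()
```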
- -[Datastax docs.](https://docs.datastax.com/en/developer/java-driver/3.0/) \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-cassandra/build.gradle b/airbyte-integrations/connectors/destination-cassandra/build.gradle deleted file mode 100644 index b9774a9b9c7f..000000000000 --- a/airbyte-integrations/connectors/destination-cassandra/build.gradle +++ /dev/null @@ -1,39 +0,0 @@ -plugins { - id 'application' - id 'airbyte-java-connector' -} - -airbyteJavaConnector { - cdkVersionRequired = '0.2.0' - features = ['db-destinations'] - useLocalCdk = false -} - -//remove once upgrading the CDK version to 0.4.x or later -java { - compileJava { - options.compilerArgs.remove("-Werror") - } -} - -airbyteJavaConnector.addCdkDependencies() - -application { - mainClass = 'io.airbyte.integrations.destination.cassandra.CassandraDestination' - applicationDefaultJvmArgs = ['-XX:+ExitOnOutOfMemoryError', '-XX:MaxRAMPercentage=75.0'] -} - -def cassandraDriver = '4.13.0' -def assertVersion = '3.21.0' - -dependencies { - - implementation "com.datastax.oss:java-driver-core:${cassandraDriver}" - implementation "com.datastax.oss:java-driver-query-builder:${cassandraDriver}" - implementation "com.datastax.oss:java-driver-mapper-runtime:${cassandraDriver}" - - - // https://mvnrepository.com/artifact/org.assertj/assertj-core - testImplementation "org.assertj:assertj-core:${assertVersion}" - testImplementation libs.testcontainers.cassandra -} diff --git a/airbyte-integrations/connectors/destination-cassandra/docker-compose.yml b/airbyte-integrations/connectors/destination-cassandra/docker-compose.yml deleted file mode 100644 index a4786dda1b66..000000000000 --- a/airbyte-integrations/connectors/destination-cassandra/docker-compose.yml +++ /dev/null @@ -1,23 +0,0 @@ -version: "3.7" - -services: - cassandra1: - image: cassandra:4.0 - ports: - - "9042:9042" - environment: - - "MAX_HEAP_SIZE=2048M" - - "HEAP_NEWSIZE=1024M" - - "CASSANDRA_CLUSTER_NAME=cassandra_cluster" -# Uncomment if you want to run a Cassandra cluster -# cassandra2: -# image: cassandra:4.0 -# ports: -# - "9043:9042" -# environment: -# - "MAX_HEAP_SIZE=2048M" -# - "HEAP_NEWSIZE=1024M" -# - "CASSANDRA_SEEDS=cassandra1" -# - "CASSANDRA_CLUSTER_NAME=cassandra_cluster" -# depends_on: -# - cassandra1 diff --git a/airbyte-integrations/connectors/destination-cassandra/sample_secrets/config.json b/airbyte-integrations/connectors/destination-cassandra/sample_secrets/config.json deleted file mode 100644 index 644fd54c1ab9..000000000000 --- a/airbyte-integrations/connectors/destination-cassandra/sample_secrets/config.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "username": "paste-username-here", - "password": "paste-password-here" -} diff --git a/airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraConfig.java b/airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraConfig.java deleted file mode 100644 index 5ea984f3e154..000000000000 --- a/airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraConfig.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.cassandra; - -import com.fasterxml.jackson.databind.JsonNode; -import java.util.Objects; - -/* - * Immutable configuration class for storing cassandra related config. 
- */ -class CassandraConfig { - - private final String keyspace; - - private final String username; - - private final String password; - - private final String address; - - private final int port; - - private final String datacenter; - - private final int replication; - - public CassandraConfig(String keyspace, - String username, - String password, - String address, - int port, - String datacenter, - int replication) { - this.keyspace = keyspace; - this.username = username; - this.password = password; - this.address = address; - this.port = port; - this.datacenter = datacenter; - this.replication = replication; - } - - public CassandraConfig(JsonNode config) { - this.keyspace = config.get("keyspace").asText(); - this.username = config.get("username").asText(); - this.password = config.get("password").asText(); - this.address = config.get("address").asText(); - this.port = config.get("port").asInt(9042); - this.datacenter = config.get("datacenter").asText("datacenter1"); - this.replication = config.get("replication").asInt(1); - } - - public String getKeyspace() { - return keyspace; - } - - public String getUsername() { - return username; - } - - public String getPassword() { - return password; - } - - public String getAddress() { - return address; - } - - public int getPort() { - return port; - } - - public String getDatacenter() { - return datacenter; - } - - public int getReplication() { - return replication; - } - - @Override - public String toString() { - return "CassandraConfig{" + - "keyspace='" + keyspace + '\'' + - ", username='" + username + '\'' + - ", password='" + password + '\'' + - ", address='" + address + '\'' + - ", port=" + port + - ", datacenter='" + datacenter + '\'' + - ", replication=" + replication + - '}'; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - CassandraConfig that = (CassandraConfig) o; - return port == that.port && username.equals(that.username) && password.equals(that.password) && - address.equals(that.address) && datacenter.equals(that.datacenter); - } - - @Override - public int hashCode() { - return Objects.hash(username, password, address, port, datacenter); - } - -} diff --git a/airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraCqlProvider.java b/airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraCqlProvider.java deleted file mode 100644 index 0e48b8d8aecc..000000000000 --- a/airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraCqlProvider.java +++ /dev/null @@ -1,180 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.cassandra; - -import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.now; - -import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.cql.BoundStatement; -import com.datastax.oss.driver.api.core.cql.PreparedStatement; -import com.datastax.oss.driver.api.core.metadata.TokenMap; -import com.datastax.oss.driver.api.core.type.DataTypes; -import com.datastax.oss.driver.api.core.uuid.Uuids; -import com.datastax.oss.driver.api.querybuilder.QueryBuilder; -import com.datastax.oss.driver.api.querybuilder.SchemaBuilder; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import java.io.Closeable; -import java.time.Instant; -import java.util.List; -import java.util.UUID; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -class CassandraCqlProvider implements Closeable { - - private static final Logger LOGGER = LoggerFactory.getLogger(CassandraCqlProvider.class); - - private static final int N_THREADS = Runtime.getRuntime().availableProcessors(); - - private final ExecutorService executorService; - - private final CqlSession cqlSession; - - private final CassandraConfig cassandraConfig; - - private final String columnId; - - private final String columnData; - - private final String columnTimestamp; - - public CassandraCqlProvider(CassandraConfig cassandraConfig) { - this.cassandraConfig = cassandraConfig; - this.cqlSession = SessionManager.initSession(cassandraConfig); - var nameTransformer = new CassandraNameTransformer(cassandraConfig); - this.columnId = nameTransformer.outputColumn(JavaBaseConstants.COLUMN_NAME_AB_ID); - this.columnData = nameTransformer.outputColumn(JavaBaseConstants.COLUMN_NAME_DATA); - this.columnTimestamp = nameTransformer.outputColumn(JavaBaseConstants.COLUMN_NAME_EMITTED_AT); - this.executorService = Executors.newFixedThreadPool(N_THREADS); - } - - public void createKeySpaceIfNotExists(String keyspace, int replicationFactor) { - var query = SchemaBuilder.createKeyspace(keyspace) - .ifNotExists() - .withSimpleStrategy(replicationFactor) - .build(); - cqlSession.execute(query); - } - - public void createTableIfNotExists(String keyspace, String tableName) { - var query = SchemaBuilder.createTable(keyspace, tableName) - .ifNotExists() - .withPartitionKey(columnId, DataTypes.UUID) - .withColumn(columnData, DataTypes.TEXT) - .withColumn(columnTimestamp, DataTypes.TIMESTAMP) - .build(); - cqlSession.execute(query); - } - - public void dropTableIfExists(String keyspace, String tableName) { - var query = SchemaBuilder.dropTable(keyspace, tableName) - .ifExists() - .build(); - cqlSession.execute(query); - } - - public void insert(String keyspace, String tableName, String jsonData) { - var query = QueryBuilder.insertInto(keyspace, tableName) - .value(columnId, QueryBuilder.literal(Uuids.random())) - .value(columnData, QueryBuilder.literal(jsonData)) - .value(columnTimestamp, QueryBuilder.toTimestamp(now())) - .build(); - cqlSession.execute(query); - } - - public void truncate(String keyspace, String tableName) { - var query = QueryBuilder.truncate(keyspace, tableName).build(); - cqlSession.execute(query); - } - - public List select(String keyspace, String tableName) { - var query = QueryBuilder.selectFrom(keyspace, tableName) - .columns(columnId, columnData, 
columnTimestamp) - .build(); - return cqlSession.execute(query) - .map(result -> new CassandraRecord( - result.get(columnId, UUID.class), - result.get(columnData, String.class), - result.get(columnTimestamp, Instant.class))) - .all(); - } - - public List>> retrieveMetadata() { - return cqlSession.getMetadata().getKeyspaces().values().stream() - .map(keyspace -> Tuple.of(keyspace.getName().toString(), keyspace.getTables().values() - .stream() - .map(table -> table.getName().toString()) - .collect(Collectors.toList()))) - .collect(Collectors.toList()); - } - - public void copy(String keyspace, String sourceTable, String destinationTable) { - var select = String.format("SELECT * FROM %s.%s WHERE token(%s) > ? AND token(%s) <= ?", - keyspace, sourceTable, columnId, columnId); - - var selectStatement = cqlSession.prepare(select); - - var insert = String.format("INSERT INTO %s.%s (%s, %s, %s) VALUES (?, ?, ?)", - keyspace, destinationTable, columnId, columnData, columnTimestamp); - - var insertStatement = cqlSession.prepare(insert); - - // perform full table scan in parallel using token ranges - // optimal for copying large amounts of data - cqlSession.getMetadata().getTokenMap() - .map(TokenMap::getTokenRanges) - .orElseThrow(IllegalStateException::new) - .stream() - .flatMap(range -> range.unwrap().stream()) - .map(range -> selectStatement.bind(range.getStart(), range.getEnd())) - // explore datastax 4.x async api as an alternative for async processing - .map(selectBoundStatement -> executorService.submit(() -> asyncInsert(selectBoundStatement, insertStatement))) - .forEach(this::awaitThread); - - } - - private void asyncInsert(BoundStatement select, PreparedStatement insert) { - var boundStatements = cqlSession.execute(select).all().stream() - .map(r -> CassandraRecord.of( - r.get(columnId, UUID.class), - r.get(columnData, String.class), - r.get(columnTimestamp, Instant.class))) - .map(r -> insert.bind(r.getId(), r.getData(), r.getTimestamp())).toList(); - - boundStatements.forEach(boundStatement -> { - var resultSetCompletionStage = cqlSession.executeAsync(boundStatement); - resultSetCompletionStage.whenCompleteAsync((res, err) -> { - if (err != null) { - LOGGER.error("Something went wrong during async insertion: " + err.getMessage()); - } - }); - }); - } - - private void awaitThread(Future future) { - try { - future.get(); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - LOGGER.error("Interrupted thread while copying data with reason: ", e); - } catch (ExecutionException e) { - LOGGER.error("Error while copying data with reason: ", e); - } - } - - @Override - public void close() { - // wait for tasks completion and terminate executor gracefully - executorService.shutdown(); - // close cassandra session for the given config - SessionManager.closeSession(cassandraConfig); - } - -} diff --git a/airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraDestination.java b/airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraDestination.java deleted file mode 100644 index e2727ba734c2..000000000000 --- a/airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraDestination.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
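The `copy` method above parallelizes a full table scan by splitting the cluster's token ring into ranges and issuing one `token(id) > ? AND token(id) <= ?` query per sub-range. A standalone sketch of that pattern with the DataStax 4.x driver (keyspace, table, and a reachable local cluster are assumptions here, not part of the removed code):

```java
import com.datastax.oss.driver.api.core.CqlSession;
import com.datastax.oss.driver.api.core.metadata.TokenMap;
import com.datastax.oss.driver.api.core.metadata.token.TokenRange;

public class TokenRangeScanSketch {

  public static void main(String[] args) {
    try (CqlSession session = CqlSession.builder().build()) { // assumes a local contact point
      var select = session.prepare(
          "SELECT \"_airbyte_data\" FROM demo_keyspace.demo_table "
              + "WHERE token(\"_airbyte_ab_id\") > ? AND token(\"_airbyte_ab_id\") <= ?");
      TokenMap tokenMap = session.getMetadata().getTokenMap().orElseThrow();
      for (TokenRange range : tokenMap.getTokenRanges()) {
        // unwrap() splits a range that wraps around the ring into contiguous pieces,
        // so each piece fits a single (start, end] predicate.
        for (TokenRange piece : range.unwrap()) {
          session.execute(select.bind(piece.getStart(), piece.getEnd()))
              .forEach(row -> System.out.println(row.getString(0)));
        }
      }
    }
  }

}
```

Scanning by token range keeps each query on a bounded slice of the ring, which is why the removed `copy` can fan the slices out across an executor without coordinating the workers.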
- */ - -package io.airbyte.integrations.destination.cassandra; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.BaseConnector; -import io.airbyte.cdk.integrations.base.AirbyteMessageConsumer; -import io.airbyte.cdk.integrations.base.Destination; -import io.airbyte.cdk.integrations.base.IntegrationRunner; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.util.UUID; -import java.util.function.Consumer; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -class CassandraDestination extends BaseConnector implements Destination { - - private static final Logger LOGGER = LoggerFactory.getLogger(CassandraDestination.class); - - public static void main(String[] args) throws Exception { - new IntegrationRunner(new CassandraDestination()).run(args); - } - - @Override - public AirbyteConnectionStatus check(final JsonNode config) { - var cassandraConfig = new CassandraConfig(config); - // add random uuid to avoid conflicts with existing tables. - String tableName = "table_" + UUID.randomUUID().toString().replace("-", ""); - CassandraCqlProvider cassandraCqlProvider = null; - try { - cassandraCqlProvider = new CassandraCqlProvider(cassandraConfig); - // check connection and write permissions - cassandraCqlProvider.createKeySpaceIfNotExists(cassandraConfig.getKeyspace(), - cassandraConfig.getReplication()); - cassandraCqlProvider.createTableIfNotExists(cassandraConfig.getKeyspace(), tableName); - cassandraCqlProvider.insert(cassandraConfig.getKeyspace(), tableName, "{}"); - return new AirbyteConnectionStatus().withStatus(AirbyteConnectionStatus.Status.SUCCEEDED); - } catch (Exception e) { - LOGGER.error("Can't establish Cassandra connection with reason: ", e); - return new AirbyteConnectionStatus().withStatus(AirbyteConnectionStatus.Status.FAILED); - } finally { - if (cassandraCqlProvider != null) { - try { - cassandraCqlProvider.dropTableIfExists(cassandraConfig.getKeyspace(), tableName); - } catch (Exception e) { - LOGGER.error("Error while deleting temp table {} with reason: ", tableName, e); - } - cassandraCqlProvider.close(); - } - } - } - - @Override - public AirbyteMessageConsumer getConsumer(final JsonNode config, - final ConfiguredAirbyteCatalog configuredCatalog, - final Consumer outputRecordCollector) { - final CassandraConfig cassandraConfig = new CassandraConfig(config); - final CassandraCqlProvider cassandraCqlProvider = new CassandraCqlProvider(cassandraConfig); - return new CassandraMessageConsumer(cassandraConfig, configuredCatalog, cassandraCqlProvider, outputRecordCollector); - } - -} diff --git a/airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraMessageConsumer.java b/airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraMessageConsumer.java deleted file mode 100644 index 803cde8ffe34..000000000000 --- a/airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraMessageConsumer.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
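`check` above validates a config by round-tripping a throwaway table: create the keyspace, create a randomly named table, insert an empty JSON record, then drop the table in `finally`. Exercised directly, that flow looks roughly like this (placeholder credentials, a running cluster, and a caller in the same package are all assumed, since the class is package-private):

```java
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.airbyte.protocol.models.v0.AirbyteConnectionStatus;

public class CheckSketch {

  public static void main(String[] args) throws Exception {
    JsonNode config = new ObjectMapper().readTree("""
        {"keyspace": "default_keyspace", "username": "cassandra", "password": "cassandra",
         "address": "127.0.0.1", "port": 9042, "datacenter": "datacenter1", "replication": 1}
        """);
    AirbyteConnectionStatus status = new CassandraDestination().check(config);
    // SUCCEEDED only if the keyspace/table/insert round trip worked end to end.
    System.out.println(status.getStatus());
  }

}
```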
- */ - -package io.airbyte.integrations.destination.cassandra; - -import io.airbyte.cdk.integrations.base.FailureTrackingAirbyteMessageConsumer; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.util.Map; -import java.util.function.Consumer; -import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -class CassandraMessageConsumer extends FailureTrackingAirbyteMessageConsumer { - - private static final Logger LOGGER = LoggerFactory.getLogger(CassandraMessageConsumer.class); - - private final CassandraConfig cassandraConfig; - - private final Consumer outputRecordCollector; - - private final Map cassandraStreams; - - private final CassandraCqlProvider cassandraCqlProvider; - - public CassandraMessageConsumer(final CassandraConfig cassandraConfig, - final ConfiguredAirbyteCatalog configuredCatalog, - final CassandraCqlProvider provider, - final Consumer outputRecordCollector) { - this.cassandraConfig = cassandraConfig; - this.outputRecordCollector = outputRecordCollector; - this.cassandraCqlProvider = provider; - var nameTransformer = new CassandraNameTransformer(cassandraConfig); - this.cassandraStreams = configuredCatalog.getStreams().stream() - .collect(Collectors.toUnmodifiableMap( - AirbyteStreamNameNamespacePair::fromConfiguredAirbyteSteam, - k -> new CassandraStreamConfig( - nameTransformer.outputKeyspace(k.getStream().getNamespace()), - nameTransformer.outputTable(k.getStream().getName()), - nameTransformer.outputTmpTable(k.getStream().getName()), - k.getDestinationSyncMode()))); - } - - @Override - protected void startTracked() { - cassandraStreams.forEach((k, v) -> { - cassandraCqlProvider.createKeySpaceIfNotExists(v.getKeyspace(), cassandraConfig.getReplication()); - cassandraCqlProvider.createTableIfNotExists(v.getKeyspace(), v.getTempTableName()); - }); - } - - @Override - protected void acceptTracked(final AirbyteMessage message) { - if (message.getType() == AirbyteMessage.Type.RECORD) { - var messageRecord = message.getRecord(); - var streamConfig = - cassandraStreams.get(AirbyteStreamNameNamespacePair.fromRecordMessage(messageRecord)); - if (streamConfig == null) { - throw new IllegalArgumentException("Unrecognized destination stream"); - } - var data = Jsons.serialize(messageRecord.getData()); - cassandraCqlProvider.insert(streamConfig.getKeyspace(), streamConfig.getTempTableName(), data); - } else if (message.getType() == AirbyteMessage.Type.STATE) { - outputRecordCollector.accept(message); - } else { - LOGGER.warn("Unsupported airbyte message type: {}", message.getType()); - } - } - - @Override - protected void close(final boolean hasFailed) { - if (!hasFailed) { - cassandraStreams.forEach((k, v) -> { - try { - cassandraCqlProvider.createTableIfNotExists(v.getKeyspace(), v.getTableName()); - switch (v.getDestinationSyncMode()) { - case APPEND -> { - cassandraCqlProvider.copy(v.getKeyspace(), v.getTempTableName(), v.getTableName()); - } - case OVERWRITE -> { - cassandraCqlProvider.truncate(v.getKeyspace(), v.getTableName()); - cassandraCqlProvider.copy(v.getKeyspace(), v.getTempTableName(), v.getTableName()); - } - default -> throw new UnsupportedOperationException(); - } - } catch (final Exception e) { - LOGGER.error("Error while copying data to table {}: : ", v.getTableName(), e); - } - }); - } - - cassandraStreams.forEach((k, v) -> { - try { - 
cassandraCqlProvider.dropTableIfExists(v.getKeyspace(), v.getTempTableName()); - } catch (final Exception e) { - LOGGER.error("Error while deleting temp table {} with reason: ", v.getTempTableName(), e); - } - }); - cassandraCqlProvider.close(); - - } - -} diff --git a/airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraNameTransformer.java b/airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraNameTransformer.java deleted file mode 100644 index da7f60bfba62..000000000000 --- a/airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraNameTransformer.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.cassandra; - -import com.google.common.base.CharMatcher; -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; -import io.airbyte.commons.text.Names; - -class CassandraNameTransformer extends StandardNameTransformer { - - private final CassandraConfig cassandraConfig; - - public CassandraNameTransformer(CassandraConfig cassandraConfig) { - this.cassandraConfig = cassandraConfig; - } - - String outputKeyspace(String namespace) { - if (namespace == null || namespace.isBlank()) { - return cassandraConfig.getKeyspace(); - } - return CharMatcher.is('_').trimLeadingFrom(Names.toAlphanumericAndUnderscore(namespace)); - } - - String outputTable(String streamName) { - var tableName = super.getRawTableName(streamName.toLowerCase()).substring(1); - // max allowed length for a cassandra table is 48 characters - return tableName.length() > 48 ? tableName.substring(0, 48) : tableName; - } - - String outputTmpTable(String streamName) { - var tableName = super.getTmpTableName(streamName.toLowerCase()).substring(1); - // max allowed length for a cassandra table is 48 characters - return tableName.length() > 48 ? tableName.substring(0, 48) : tableName; - } - - String outputColumn(String columnName) { - return Names.doubleQuote(columnName.toLowerCase()); - } - -} diff --git a/airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraRecord.java b/airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraRecord.java deleted file mode 100644 index 63af6d92fb6d..000000000000 --- a/airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraRecord.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
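The name transformer above lower-cases stream names, strips the leading underscore produced by `getRawTableName`/`getTmpTableName`, and caps the result at Cassandra's 48-character table-name limit. The truncation rule in isolation, as plain Java with no Airbyte classes:

```java
public class NameLengthSketch {

  /** Mirrors the cap applied in outputTable/outputTmpTable above. */
  static String capTo48(String tableName) {
    return tableName.length() > 48 ? tableName.substring(0, 48) : tableName;
  }

  public static void main(String[] args) {
    String longName = "airbyte_raw_" + "x".repeat(50); // illustrative stream-derived name
    System.out.println(capTo48(longName).length());     // 48
    System.out.println(capTo48("airbyte_raw_events"));  // short names pass through unchanged
  }

}
```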
- */ - -package io.airbyte.integrations.destination.cassandra; - -import java.time.Instant; -import java.util.UUID; - -class CassandraRecord { - - private final UUID id; - - private final String data; - - private final Instant timestamp; - - public CassandraRecord(UUID id, String data, Instant timestamp) { - this.id = id; - this.data = data; - this.timestamp = timestamp; - } - - static CassandraRecord of(UUID id, String data, Instant timestamp) { - return new CassandraRecord(id, data, timestamp); - } - - public UUID getId() { - return id; - } - - public String getData() { - return data; - } - - public Instant getTimestamp() { - return timestamp; - } - - @Override - public String toString() { - return "CassandraRecord{" + - "id=" + id + - ", data='" + data + '\'' + - ", timestamp=" + timestamp + - '}'; - } - -} diff --git a/airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraStreamConfig.java b/airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraStreamConfig.java deleted file mode 100644 index dd7d85d87563..000000000000 --- a/airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraStreamConfig.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.cassandra; - -import io.airbyte.protocol.models.v0.DestinationSyncMode; - -/* - * Immutable configuration class for storing destination stream config. - */ -class CassandraStreamConfig { - - private final String keyspace; - - private final String tableName; - - private final String tempTableName; - - private final DestinationSyncMode destinationSyncMode; - - public CassandraStreamConfig(String keyspace, - String tableName, - String tempTableName, - DestinationSyncMode destinationSyncMode) { - this.keyspace = keyspace; - this.tableName = tableName; - this.tempTableName = tempTableName; - this.destinationSyncMode = destinationSyncMode; - } - - public String getKeyspace() { - return keyspace; - } - - public String getTableName() { - return tableName; - } - - public String getTempTableName() { - return tempTableName; - } - - public DestinationSyncMode getDestinationSyncMode() { - return destinationSyncMode; - } - - @Override - public String toString() { - return "CassandraStreamConfig{" + - "keyspace='" + keyspace + '\'' + - ", tableName='" + tableName + '\'' + - ", tempTableName='" + tempTableName + '\'' + - ", destinationSyncMode=" + destinationSyncMode + - '}'; - } - -} diff --git a/airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/SessionManager.java b/airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/SessionManager.java deleted file mode 100644 index 3837725f7970..000000000000 --- a/airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/SessionManager.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
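`CassandraStreamConfig` above is the per-stream routing record: which keyspace and which final and temporary tables a stream writes to, plus its sync mode. Resolving one stream by hand with the transformer shown earlier (same-package access and illustrative names are assumptions):

```java
import io.airbyte.protocol.models.v0.DestinationSyncMode;

public class StreamConfigSketch {

  public static void main(String[] args) {
    var config = new CassandraConfig(
        "default_keyspace", "usr", "pw", "127.0.0.1", 9042, "datacenter1", 1);
    var nameTransformer = new CassandraNameTransformer(config);
    var streamConfig = new CassandraStreamConfig(
        nameTransformer.outputKeyspace("analytics"), // blank/null namespaces fall back to the config keyspace
        nameTransformer.outputTable("events"),       // "airbyte_raw_events"
        nameTransformer.outputTmpTable("events"),    // "airbyte_tmp_<random>_events"
        DestinationSyncMode.APPEND);
    System.out.println(streamConfig);
  }

}
```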
- */ - -package io.airbyte.integrations.destination.cassandra; - -import com.datastax.oss.driver.api.core.CqlSession; -import java.net.InetSocketAddress; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.atomic.AtomicInteger; - -class SessionManager { - - // AtomicInteger is used for convenience, this class is not thread safe - // and needs additional synchronization for that. - private static final ConcurrentHashMap> sessions; - - static { - sessions = new ConcurrentHashMap<>(); - } - - private SessionManager() { - - } - - /* - * CqlSession objects are heavyweight and can hold several tcp connections to the Cassandra cluster, - * for that reason it is better if sessions are reused per configuration. Sessions are thread-safe - * and can be accessed from different threads. - * - */ - public static CqlSession initSession(CassandraConfig cassandraConfig) { - var cachedSession = sessions.get(cassandraConfig); - if (cachedSession != null) { - cachedSession.value2().incrementAndGet(); - return cachedSession.value1(); - } else { - var session = CqlSession.builder() - .withLocalDatacenter(cassandraConfig.getDatacenter()) - .addContactPoint(new InetSocketAddress(cassandraConfig.getAddress(), cassandraConfig.getPort())) - .withAuthCredentials(cassandraConfig.getUsername(), cassandraConfig.getPassword()) - .build(); - sessions.put(cassandraConfig, Tuple.of(session, new AtomicInteger(1))); - return session; - } - } - - /* - * Close session configured with cassandra config. if the session is being used by more than one - * external instance only decrease the usage count, otherwise close the session and remove it from - * the map. - * - */ - public static void closeSession(CassandraConfig cassandraConfig) { - var cachedSession = sessions.get(cassandraConfig); - if (cachedSession == null) { - throw new IllegalStateException("No session for the provided config"); - } - int count = cachedSession.value2().decrementAndGet(); - if (count < 1) { - cachedSession.value1().close(); - sessions.remove(cassandraConfig); - } - } - -} diff --git a/airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/Tuple.java b/airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/Tuple.java deleted file mode 100644 index 224f9b917906..000000000000 --- a/airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/Tuple.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
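`SessionManager` above reference-counts one shared `CqlSession` per config, since sessions are heavyweight objects holding pools of TCP connections. As its own comment concedes, the check-then-act logic is not atomic. A generic sketch of the same counting idea that closes that gap with `compute` (an illustration of the pattern, not a drop-in replacement):

```java
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;

public class RefCountedCache<K, V> {

  private record Entry<V>(V value, AtomicInteger refs) {}

  private final ConcurrentHashMap<K, Entry<V>> cache = new ConcurrentHashMap<>();

  /** Reuse the value cached for this key, or build one; mirrors initSession above, but atomically. */
  public V acquire(K key, Function<K, V> factory) {
    return cache.compute(key, (k, entry) -> {
      if (entry != null) {
        entry.refs().incrementAndGet();
        return entry;
      }
      return new Entry<>(factory.apply(k), new AtomicInteger(1));
    }).value();
  }

  /** Drop one reference; returns the value to close once the count reaches zero, else null. */
  public V release(K key) {
    var released = new Object() { V value; };
    cache.compute(key, (k, entry) -> {
      if (entry == null) {
        throw new IllegalStateException("No entry for the provided key");
      }
      if (entry.refs().decrementAndGet() < 1) {
        released.value = entry.value(); // last reference gone: remove the mapping
        return null;
      }
      return entry;
    });
    return released.value;
  }

}
```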
- */ - -package io.airbyte.integrations.destination.cassandra; - -public class Tuple { - - private final V1 value1; - - private final V2 value2; - - public Tuple(V1 value1, V2 value2) { - this.value1 = value1; - this.value2 = value2; - } - - public static Tuple of(V1 value1, V2 value2) { - return new Tuple<>(value1, value2); - } - - public V1 value1() { - return value1; - } - - public V2 value2() { - return value2; - } - - @Override - public String toString() { - return "Tuple{" + - "value1=" + value1 + - ", value2=" + value2 + - '}'; - } - -} diff --git a/airbyte-integrations/connectors/destination-cassandra/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-cassandra/src/main/resources/spec.json deleted file mode 100644 index fac77fe847be..000000000000 --- a/airbyte-integrations/connectors/destination-cassandra/src/main/resources/spec.json +++ /dev/null @@ -1,65 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/destinations/cassandra", - "supportsIncremental": true, - "supportsNormalization": false, - "supportsDBT": false, - "supported_destination_sync_modes": ["overwrite", "append"], - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Cassandra Destination Spec", - "type": "object", - "required": ["keyspace", "username", "password", "address", "port"], - "additionalProperties": true, - "properties": { - "keyspace": { - "title": "Keyspace", - "description": "Default Cassandra keyspace to create data in.", - "type": "string", - "order": 0 - }, - "username": { - "title": "Username", - "description": "Username to use to access Cassandra.", - "type": "string", - "order": 1 - }, - "password": { - "title": "Password", - "description": "Password associated with Cassandra.", - "type": "string", - "airbyte_secret": true, - "order": 2 - }, - "address": { - "title": "Address", - "description": "Address to connect to.", - "type": "string", - "examples": ["localhost,127.0.0.1"], - "order": 3 - }, - "port": { - "title": "Port", - "description": "Port of Cassandra.", - "type": "integer", - "minimum": 0, - "maximum": 65536, - "default": 9042, - "order": 4 - }, - "datacenter": { - "title": "Datacenter", - "description": "Datacenter of the cassandra cluster.", - "type": "string", - "default": "datacenter1", - "order": 5 - }, - "replication": { - "title": "Replication factor", - "type": "integer", - "description": "Indicates to how many nodes the data should be replicated to.", - "default": 1, - "order": 6 - } - } - } -} diff --git a/airbyte-integrations/connectors/destination-cassandra/src/test-integration/java/io/airbyte/integrations/destination/cassandra/CassandraContainerInitializr.java b/airbyte-integrations/connectors/destination-cassandra/src/test-integration/java/io/airbyte/integrations/destination/cassandra/CassandraContainerInitializr.java deleted file mode 100644 index 76cb904a5d4f..000000000000 --- a/airbyte-integrations/connectors/destination-cassandra/src/test-integration/java/io/airbyte/integrations/destination/cassandra/CassandraContainerInitializr.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.cassandra; - -import org.testcontainers.containers.CassandraContainer; - -class CassandraContainerInitializr { - - private static ConfiguredCassandraContainer cassandraContainer; - - private CassandraContainerInitializr() { - - } - - public static ConfiguredCassandraContainer initContainer() { - if (cassandraContainer == null) { - cassandraContainer = new ConfiguredCassandraContainer(); - } - cassandraContainer.start(); - return cassandraContainer; - } - - public static class ConfiguredCassandraContainer extends CassandraContainer { - - ConfiguredCassandraContainer() { - // latest compatible version with the internal testcontainers datastax driver. - super("cassandra:3.11.11"); - } - - } - -} diff --git a/airbyte-integrations/connectors/destination-cassandra/src/test-integration/java/io/airbyte/integrations/destination/cassandra/CassandraCqlProviderIT.java b/airbyte-integrations/connectors/destination-cassandra/src/test-integration/java/io/airbyte/integrations/destination/cassandra/CassandraCqlProviderIT.java deleted file mode 100644 index 9f0ebaec3205..000000000000 --- a/airbyte-integrations/connectors/destination-cassandra/src/test-integration/java/io/airbyte/integrations/destination/cassandra/CassandraCqlProviderIT.java +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.cassandra; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.TestInstance; - -@TestInstance(TestInstance.Lifecycle.PER_CLASS) -class CassandraCqlProviderIT { - - private static final String CASSANDRA_KEYSPACE = "cassandra_keyspace"; - - private static final String CASSANDRA_TABLE = "cassandra_table"; - - private CassandraCqlProvider cassandraCqlProvider; - - private CassandraNameTransformer nameTransformer; - - @BeforeAll - void setup() { - var cassandraContainer = CassandraContainerInitializr.initContainer(); - var cassandraConfig = TestDataFactory.createCassandraConfig( - cassandraContainer.getUsername(), - cassandraContainer.getPassword(), - cassandraContainer.getHost(), - cassandraContainer.getFirstMappedPort()); - this.cassandraCqlProvider = new CassandraCqlProvider(cassandraConfig); - this.nameTransformer = new CassandraNameTransformer(cassandraConfig); - cassandraCqlProvider.createKeySpaceIfNotExists(CASSANDRA_KEYSPACE, 1); - cassandraCqlProvider.createTableIfNotExists(CASSANDRA_KEYSPACE, CASSANDRA_TABLE); - } - - @AfterEach - void clean() { - cassandraCqlProvider.truncate(CASSANDRA_KEYSPACE, CASSANDRA_TABLE); - } - - @Test - void testCreateKeySpaceIfNotExists() { - String keyspace = nameTransformer.outputKeyspace("test_keyspace"); - assertDoesNotThrow(() -> cassandraCqlProvider.createKeySpaceIfNotExists(keyspace, 1)); - } - - @Test - void testCreateTableIfNotExists() { - String table = nameTransformer.outputTable("test_stream"); - assertDoesNotThrow(() -> cassandraCqlProvider.createTableIfNotExists(CASSANDRA_KEYSPACE, table)); - } - - @Test - void testInsert() { - // given - cassandraCqlProvider.insert(CASSANDRA_KEYSPACE, CASSANDRA_TABLE, "{\"property\":\"data1\"}"); - 
cassandraCqlProvider.insert(CASSANDRA_KEYSPACE, CASSANDRA_TABLE, "{\"property\":\"data2\"}"); - cassandraCqlProvider.insert(CASSANDRA_KEYSPACE, CASSANDRA_TABLE, "{\"property\":\"data3\"}"); - - // when - var resultSet = cassandraCqlProvider.select(CASSANDRA_KEYSPACE, CASSANDRA_TABLE); - - // then - assertThat(resultSet) - .isNotNull() - .hasSize(3) - .anyMatch(r -> r.getData().equals("{\"property\":\"data1\"}")) - .anyMatch(r -> r.getData().equals("{\"property\":\"data2\"}")) - .anyMatch(r -> r.getData().equals("{\"property\":\"data3\"}")); - - } - - @Test - void testTruncate() { - // given - cassandraCqlProvider.insert(CASSANDRA_KEYSPACE, CASSANDRA_TABLE, "{\"property\":\"data1\"}"); - cassandraCqlProvider.insert(CASSANDRA_KEYSPACE, CASSANDRA_TABLE, "{\"property\":\"data2\"}"); - cassandraCqlProvider.insert(CASSANDRA_KEYSPACE, CASSANDRA_TABLE, "{\"property\":\"data3\"}"); - - // when - cassandraCqlProvider.truncate(CASSANDRA_KEYSPACE, CASSANDRA_TABLE); - var resultSet = cassandraCqlProvider.select(CASSANDRA_KEYSPACE, CASSANDRA_TABLE); - - // then - assertThat(resultSet) - .isNotNull() - .isEmpty(); - } - - @Test - void testDropTableIfExists() { - // given - String table = nameTransformer.outputTmpTable("test_stream"); - cassandraCqlProvider.createTableIfNotExists(CASSANDRA_KEYSPACE, table); - - // when - cassandraCqlProvider.dropTableIfExists(CASSANDRA_KEYSPACE, table); - - // then - assertThrows(InvalidQueryException.class, () -> cassandraCqlProvider.select(CASSANDRA_KEYSPACE, table)); - } - - @Test - void testCopy() { - // given - String tmpTable = nameTransformer.outputTmpTable("test_stream_copy"); - cassandraCqlProvider.createTableIfNotExists(CASSANDRA_KEYSPACE, tmpTable); - cassandraCqlProvider.insert(CASSANDRA_KEYSPACE, tmpTable, "{\"property\":\"data1\"}"); - cassandraCqlProvider.insert(CASSANDRA_KEYSPACE, tmpTable, "{\"property\":\"data2\"}"); - cassandraCqlProvider.insert(CASSANDRA_KEYSPACE, tmpTable, "{\"property\":\"data3\"}"); - - String rawTable = nameTransformer.outputTable("test_stream_copy"); - cassandraCqlProvider.createTableIfNotExists(CASSANDRA_KEYSPACE, rawTable); - - // when - cassandraCqlProvider.copy(CASSANDRA_KEYSPACE, tmpTable, rawTable); - var resultSet = cassandraCqlProvider.select(CASSANDRA_KEYSPACE, rawTable); - - // then - assertThat(resultSet) - .isNotNull() - .hasSize(3) - .anyMatch(r -> r.getData().equals("{\"property\":\"data1\"}")) - .anyMatch(r -> r.getData().equals("{\"property\":\"data2\"}")) - .anyMatch(r -> r.getData().equals("{\"property\":\"data3\"}")); - - } - -} diff --git a/airbyte-integrations/connectors/destination-cassandra/src/test-integration/java/io/airbyte/integrations/destination/cassandra/CassandraDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-cassandra/src/test-integration/java/io/airbyte/integrations/destination/cassandra/CassandraDestinationAcceptanceTest.java deleted file mode 100644 index 44c7bf00b5bf..000000000000 --- a/airbyte-integrations/connectors/destination-cassandra/src/test-integration/java/io/airbyte/integrations/destination/cassandra/CassandraDestinationAcceptanceTest.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
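These integration tests run against a throwaway Cassandra started by `CassandraContainerInitializr` via Testcontainers. Outside JUnit, the same bootstrap looks like this (assumes Docker is available and the `org.testcontainers:cassandra` module is on the classpath):

```java
import org.testcontainers.containers.CassandraContainer;

public class ContainerSketch {

  public static void main(String[] args) {
    try (CassandraContainer<?> container = new CassandraContainer<>("cassandra:3.11.11")) {
      container.start();
      // Host and mapped port vary per run; credentials default to cassandra/cassandra.
      System.out.printf("contact point %s:%d (user %s)%n",
          container.getHost(), container.getFirstMappedPort(), container.getUsername());
    }
  }

}
```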
- */ - -package io.airbyte.integrations.destination.cassandra; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.standardtest.destination.DestinationAcceptanceTest; -import io.airbyte.cdk.integrations.util.HostPortResolver; -import io.airbyte.commons.json.Jsons; -import java.util.Comparator; -import java.util.HashSet; -import java.util.List; -import java.util.stream.Collectors; -import org.junit.jupiter.api.BeforeAll; - -public class CassandraDestinationAcceptanceTest extends DestinationAcceptanceTest { - - private JsonNode configJson; - - private CassandraCqlProvider cassandraCqlProvider; - - private CassandraNameTransformer cassandraNameTransformer; - - private static CassandraContainerInitializr.ConfiguredCassandraContainer cassandraContainer; - - @BeforeAll - static void initContainer() { - cassandraContainer = CassandraContainerInitializr.initContainer(); - } - - @Override - protected void setup(final TestDestinationEnv testEnv, final HashSet TEST_SCHEMAS) { - configJson = TestDataFactory.createJsonConfig( - cassandraContainer.getUsername(), - cassandraContainer.getPassword(), - HostPortResolver.resolveHost(cassandraContainer), - HostPortResolver.resolvePort(cassandraContainer)); - final var cassandraConfig = new CassandraConfig(configJson); - cassandraCqlProvider = new CassandraCqlProvider(cassandraConfig); - cassandraNameTransformer = new CassandraNameTransformer(cassandraConfig); - } - - @Override - protected void tearDown(final TestDestinationEnv testEnv) { - cassandraCqlProvider.retrieveMetadata().forEach(meta -> { - final var keyspace = meta.value1(); - meta.value2().forEach(table -> cassandraCqlProvider.truncate(keyspace, table)); - }); - } - - @Override - protected String getImageName() { - return "airbyte/destination-cassandra:dev"; - } - - @Override - protected JsonNode getConfig() { - return configJson; - } - - @Override - protected boolean implementsNamespaces() { - return true; - } - - @Override - protected JsonNode getFailCheckConfig() { - return TestDataFactory.createJsonConfig( - "usr", - "pw", - "127.0.192.1", - 8080); - } - - @Override - protected List retrieveRecords(final TestDestinationEnv testEnv, - final String streamName, - final String namespace, - final JsonNode streamSchema) { - final var keyspace = cassandraNameTransformer.outputKeyspace(namespace); - final var table = cassandraNameTransformer.outputTable(streamName); - return cassandraCqlProvider.select(keyspace, table).stream() - .sorted(Comparator.comparing(CassandraRecord::getTimestamp)) - .map(CassandraRecord::getData) - .map(Jsons::deserialize) - .collect(Collectors.toList()); - } - -} diff --git a/airbyte-integrations/connectors/destination-cassandra/src/test-integration/java/io/airbyte/integrations/destination/cassandra/CassandraDestinationIT.java b/airbyte-integrations/connectors/destination-cassandra/src/test-integration/java/io/airbyte/integrations/destination/cassandra/CassandraDestinationIT.java deleted file mode 100644 index ea30e16cd0b4..000000000000 --- a/airbyte-integrations/connectors/destination-cassandra/src/test-integration/java/io/airbyte/integrations/destination/cassandra/CassandraDestinationIT.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.cassandra; - -import static org.assertj.core.api.Assertions.assertThat; - -import io.airbyte.integrations.destination.cassandra.CassandraContainerInitializr.ConfiguredCassandraContainer; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.TestInstance; - -@TestInstance(TestInstance.Lifecycle.PER_CLASS) -class CassandraDestinationIT { - - private CassandraDestination cassandraDestination; - - private ConfiguredCassandraContainer cassandraContainer; - - @BeforeAll - void setup() { - this.cassandraContainer = CassandraContainerInitializr.initContainer(); - this.cassandraDestination = new CassandraDestination(); - } - - @Test - void testCheckWithStatusSucceeded() { - - var jsonConfiguration = TestDataFactory.createJsonConfig( - cassandraContainer.getUsername(), - cassandraContainer.getPassword(), - cassandraContainer.getHost(), - cassandraContainer.getFirstMappedPort()); - - var connectionStatus = cassandraDestination.check(jsonConfiguration); - - assertThat(connectionStatus.getStatus()).isEqualTo(AirbyteConnectionStatus.Status.SUCCEEDED); - } - - @Test - void testCheckWithStatusFailed() { - - var jsonConfiguration = TestDataFactory.createJsonConfig( - "usr", - "pw", - "192.0.2.1", - 8080); - - var connectionStatus = cassandraDestination.check(jsonConfiguration); - - assertThat(connectionStatus.getStatus()).isEqualTo(AirbyteConnectionStatus.Status.FAILED); - - } - -} diff --git a/airbyte-integrations/connectors/destination-cassandra/src/test-integration/java/io/airbyte/integrations/destination/cassandra/CassandraMessageConsumerIT.java b/airbyte-integrations/connectors/destination-cassandra/src/test-integration/java/io/airbyte/integrations/destination/cassandra/CassandraMessageConsumerIT.java deleted file mode 100644 index 678301dd867e..000000000000 --- a/airbyte-integrations/connectors/destination-cassandra/src/test-integration/java/io/airbyte/integrations/destination/cassandra/CassandraMessageConsumerIT.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.cassandra; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.DestinationSyncMode; -import java.util.function.Function; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.MethodOrderer.OrderAnnotation; -import org.junit.jupiter.api.Order; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.TestInstance; -import org.junit.jupiter.api.TestMethodOrder; - -@TestMethodOrder(OrderAnnotation.class) -@TestInstance(TestInstance.Lifecycle.PER_CLASS) -class CassandraMessageConsumerIT { - - private static final String AIRBYTE_NAMESPACE_1 = "airbyte_namespace_1"; - private static final String AIRBYTE_NAMESPACE_2 = "airbyte_namespace_2"; - - private static final String AIRBYTE_STREAM_1 = "airbyte_stream_1"; - private static final String AIRBYTE_STREAM_2 = "airbyte_stream_2"; - - private CassandraContainerInitializr.ConfiguredCassandraContainer cassandraContainer; - - private CassandraConfig cassandraConfig; - - private CassandraMessageConsumer cassandraMessageConsumer; - - private CassandraNameTransformer nameTransformer; - - @BeforeAll - void setup() { - cassandraContainer = CassandraContainerInitializr.initContainer(); - cassandraConfig = TestDataFactory.createCassandraConfig( - cassandraContainer.getUsername(), - cassandraContainer.getPassword(), - cassandraContainer.getHost(), - cassandraContainer.getFirstMappedPort()); - - final var stream1 = TestDataFactory.createAirbyteStream(AIRBYTE_STREAM_1, AIRBYTE_NAMESPACE_1); - final var stream2 = TestDataFactory.createAirbyteStream(AIRBYTE_STREAM_2, AIRBYTE_NAMESPACE_2); - - final var cStream1 = TestDataFactory.createConfiguredAirbyteStream(DestinationSyncMode.APPEND, stream1); - final var cStream2 = TestDataFactory.createConfiguredAirbyteStream(DestinationSyncMode.OVERWRITE, stream2); - - final var catalog = TestDataFactory.createConfiguredAirbyteCatalog(cStream1, cStream2); - - final CassandraCqlProvider cassandraCqlProvider = new CassandraCqlProvider(cassandraConfig); - cassandraMessageConsumer = new CassandraMessageConsumer(cassandraConfig, catalog, cassandraCqlProvider, message -> {}); - nameTransformer = new CassandraNameTransformer(cassandraConfig); - } - - @AfterAll - void close() { - cassandraContainer.close(); - } - - @Test - @Order(1) - void testStartTracked() { - assertDoesNotThrow(() -> cassandraMessageConsumer.startTracked()); - } - - @Test - @Order(2) - void testAcceptTracked() { - - final Function function = - data -> Jsons.jsonNode(ImmutableMap.builder().put("property", data).build()); - - assertDoesNotThrow(() -> { - cassandraMessageConsumer.acceptTracked( - TestDataFactory.createAirbyteMessage(AirbyteMessage.Type.RECORD, AIRBYTE_STREAM_1, AIRBYTE_NAMESPACE_1, - function.apply("data1"))); - cassandraMessageConsumer.acceptTracked( - TestDataFactory.createAirbyteMessage(AirbyteMessage.Type.RECORD, AIRBYTE_STREAM_1, AIRBYTE_NAMESPACE_1, - function.apply("data2"))); - cassandraMessageConsumer.acceptTracked( - TestDataFactory.createAirbyteMessage(AirbyteMessage.Type.RECORD, AIRBYTE_STREAM_2, AIRBYTE_NAMESPACE_2, - function.apply("data3"))); - cassandraMessageConsumer.acceptTracked( - 
TestDataFactory.createAirbyteMessage(AirbyteMessage.Type.RECORD, AIRBYTE_STREAM_2, AIRBYTE_NAMESPACE_2, - function.apply("data4"))); - cassandraMessageConsumer.acceptTracked( - TestDataFactory.createAirbyteMessage(AirbyteMessage.Type.STATE, AIRBYTE_STREAM_2, AIRBYTE_NAMESPACE_2, - function.apply("data5"))); - }); - - } - - @Test - @Order(3) - void testClose() { - - assertDoesNotThrow(() -> cassandraMessageConsumer.close(false)); - - } - - @Test - @Order(4) - void testFinalState() { - final var keyspace1 = nameTransformer.outputKeyspace(AIRBYTE_NAMESPACE_1); - final var keyspace2 = nameTransformer.outputKeyspace(AIRBYTE_NAMESPACE_2); - final var table1 = nameTransformer.outputTable(AIRBYTE_STREAM_1); - final var table2 = nameTransformer.outputTable(AIRBYTE_STREAM_2); - try (final var cassandraCqlProvider = new CassandraCqlProvider(cassandraConfig)) { - final var resultSet1 = cassandraCqlProvider.select(keyspace1, table1); - final var resultSet2 = cassandraCqlProvider.select(keyspace2, table2); - assertThat(resultSet1) - .isNotNull() - .hasSize(2) - .anyMatch(r -> r.getData().equals("{\"property\":\"data1\"}")) - .anyMatch(r -> r.getData().equals("{\"property\":\"data2\"}")); - - assertThat(resultSet2) - .isNotNull() - .hasSize(2) - .anyMatch(r -> r.getData().equals("{\"property\":\"data3\"}")) - .anyMatch(r -> r.getData().equals("{\"property\":\"data4\"}")); - } - } - -} diff --git a/airbyte-integrations/connectors/destination-cassandra/src/test-integration/java/io/airbyte/integrations/destination/cassandra/TestDataFactory.java b/airbyte-integrations/connectors/destination-cassandra/src/test-integration/java/io/airbyte/integrations/destination/cassandra/TestDataFactory.java deleted file mode 100644 index da3af9ec770b..000000000000 --- a/airbyte-integrations/connectors/destination-cassandra/src/test-integration/java/io/airbyte/integrations/destination/cassandra/TestDataFactory.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
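The ordered tests above walk the consumer through its whole lifecycle. Stripped of JUnit, the calling convention is roughly the following (hypothetical wiring that leans on the `TestDataFactory` helpers shown next, placed in the same package; a reachable cluster is assumed):

```java
import com.google.common.collect.ImmutableMap;
import io.airbyte.commons.json.Jsons;
import io.airbyte.protocol.models.v0.AirbyteMessage;
import io.airbyte.protocol.models.v0.DestinationSyncMode;

public class ConsumerLifecycleSketch {

  public static void main(String[] args) throws Exception {
    var config = TestDataFactory.createCassandraConfig("usr", "pw", "127.0.0.1", 9042);
    var stream = TestDataFactory.createAirbyteStream("events", "analytics"); // illustrative names
    var catalog = TestDataFactory.createConfiguredAirbyteCatalog(
        TestDataFactory.createConfiguredAirbyteStream(DestinationSyncMode.APPEND, stream));

    var consumer = new CassandraMessageConsumer(
        config, catalog, new CassandraCqlProvider(config), state -> {});

    consumer.start(); // creates the keyspace and the temporary table
    consumer.accept(TestDataFactory.createAirbyteMessage(
        AirbyteMessage.Type.RECORD, "events", "analytics",
        Jsons.jsonNode(ImmutableMap.of("property", "data1"))));
    consumer.close(); // copies the temp table into the final table, then drops the temp table
  }

}
```

The two-phase shape (stage into a temp table, then copy or truncate-and-copy on close depending on the sync mode) is why `testFinalState` above only finds rows in the final tables after `close` has run.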
- */ - -package io.airbyte.integrations.destination.cassandra; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.DestinationSyncMode; -import io.airbyte.protocol.models.v0.SyncMode; -import java.time.Instant; -import java.util.List; - -public class TestDataFactory { - - private TestDataFactory() { - - } - - static CassandraConfig createCassandraConfig(String username, String password, String address, int port) { - return new CassandraConfig( - "default_keyspace", - username, - password, - address, - port, - "datacenter1", - 1); - } - - static JsonNode createJsonConfig(String username, String password, String address, int port) { - return Jsons.jsonNode(ImmutableMap.builder() - .put("keyspace", "default_keyspace") - .put("username", username) - .put("password", password) - .put("address", address) - .put("port", port) - .put("datacenter", "datacenter1") - .put("replication", 1) - .build()); - } - - static AirbyteMessage createAirbyteMessage(AirbyteMessage.Type type, - String streamName, - String namespace, - JsonNode data) { - return new AirbyteMessage() - .withType(type) - .withRecord(new AirbyteRecordMessage() - .withStream(streamName) - .withNamespace(namespace) - .withData(data) - .withEmittedAt(Instant.now().toEpochMilli())); - } - - static AirbyteStream createAirbyteStream(String name, String namespace) { - return new AirbyteStream() - .withName(name) - .withNamespace(namespace) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH)); - } - - static ConfiguredAirbyteStream createConfiguredAirbyteStream(DestinationSyncMode syncMode, AirbyteStream stream) { - return new ConfiguredAirbyteStream() - .withDestinationSyncMode(syncMode) - .withStream(stream); - } - - static ConfiguredAirbyteCatalog createConfiguredAirbyteCatalog(ConfiguredAirbyteStream... configuredStreams) { - return new ConfiguredAirbyteCatalog().withStreams(List.of(configuredStreams)); - } - -} diff --git a/airbyte-integrations/connectors/destination-cassandra/src/test/java/io/airbyte/integrations/destination/cassandra/CassandraConfigTest.java b/airbyte-integrations/connectors/destination-cassandra/src/test/java/io/airbyte/integrations/destination/cassandra/CassandraConfigTest.java deleted file mode 100644 index c425481f27b7..000000000000 --- a/airbyte-integrations/connectors/destination-cassandra/src/test/java/io/airbyte/integrations/destination/cassandra/CassandraConfigTest.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.cassandra; - -import static org.assertj.core.api.Assertions.assertThat; - -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class CassandraConfigTest { - - private CassandraConfig cassandraConfig; - - @BeforeEach - void setup() { - var jsonNode = TestDataFactory.createJsonConfig( - "usr", - "pw", - "127.0.0.1", - 9042); - this.cassandraConfig = new CassandraConfig(jsonNode); - } - - @Test - void testConfig() { - - assertThat(cassandraConfig) - .hasFieldOrPropertyWithValue("keyspace", "default_keyspace") - .hasFieldOrPropertyWithValue("username", "usr") - .hasFieldOrPropertyWithValue("password", "pw") - .hasFieldOrPropertyWithValue("address", "127.0.0.1") - .hasFieldOrPropertyWithValue("port", 9042) - .hasFieldOrPropertyWithValue("datacenter", "datacenter1") - .hasFieldOrPropertyWithValue("replication", 1); - - } - -} diff --git a/airbyte-integrations/connectors/destination-cassandra/src/test/java/io/airbyte/integrations/destination/cassandra/CassandraNameTransformerTest.java b/airbyte-integrations/connectors/destination-cassandra/src/test/java/io/airbyte/integrations/destination/cassandra/CassandraNameTransformerTest.java deleted file mode 100644 index 6922de7323a7..000000000000 --- a/airbyte-integrations/connectors/destination-cassandra/src/test/java/io/airbyte/integrations/destination/cassandra/CassandraNameTransformerTest.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.cassandra; - -import static org.assertj.core.api.Assertions.assertThat; - -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.TestInstance; - -@TestInstance(TestInstance.Lifecycle.PER_CLASS) -class CassandraNameTransformerTest { - - private CassandraNameTransformer cassandraNameTransformer; - - @BeforeAll - void setup() { - var cassandraConfig = TestDataFactory.createCassandraConfig( - "usr", - "pw", - "127.0.0.1", - 9042); - this.cassandraNameTransformer = new CassandraNameTransformer(cassandraConfig); - } - - @Test - void testOutputTable() { - - var table = cassandraNameTransformer.outputTable("stream_name"); - - assertThat(table).matches("airbyte_raw_stream_name"); - - } - - @Test - void testOutputTmpTable() { - - var table = cassandraNameTransformer.outputTmpTable("stream_name"); - - assertThat(table).matches("airbyte_tmp_+[a-z]+_stream_name"); - - } - - @Test - void testOutputKeyspace() { - - var keyspace = cassandraNameTransformer.outputKeyspace("***keyspace^h"); - - assertThat(keyspace).matches("keyspace_h"); - - } - - @Test - void outputColumn() { - - var column = cassandraNameTransformer.outputColumn("_airbyte_data"); - - assertThat(column).matches("\"_airbyte_data\""); - - } - -} diff --git a/airbyte-integrations/connectors/destination-cassandra/src/test/java/io/airbyte/integrations/destination/cassandra/CassandraRecordConsumerTest.java b/airbyte-integrations/connectors/destination-cassandra/src/test/java/io/airbyte/integrations/destination/cassandra/CassandraRecordConsumerTest.java deleted file mode 100644 index dc35e4bffa02..000000000000 --- a/airbyte-integrations/connectors/destination-cassandra/src/test/java/io/airbyte/integrations/destination/cassandra/CassandraRecordConsumerTest.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.cassandra; - -import io.airbyte.cdk.integrations.base.FailureTrackingAirbyteMessageConsumer; -import io.airbyte.cdk.integrations.standardtest.destination.PerStreamStateMessageTest; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.util.function.Consumer; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.InjectMocks; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; - -@ExtendWith(MockitoExtension.class) -public class CassandraRecordConsumerTest extends PerStreamStateMessageTest { - - @Mock - private Consumer outputRecordCollector; - - @InjectMocks - private CassandraMessageConsumer consumer; - @Mock - private CassandraConfig config; - @Mock - private ConfiguredAirbyteCatalog catalog; - @Mock - private CassandraCqlProvider provider; - - @BeforeEach - public void init() { - consumer = new CassandraMessageConsumer(config, catalog, provider, outputRecordCollector); - } - - @Override - protected Consumer getMockedConsumer() { - return outputRecordCollector; - } - - @Override - protected FailureTrackingAirbyteMessageConsumer getMessageConsumer() { - return consumer; - } - -} diff --git a/airbyte-integrations/connectors/destination-cassandra/src/test/java/io/airbyte/integrations/destination/cassandra/TestDataFactory.java b/airbyte-integrations/connectors/destination-cassandra/src/test/java/io/airbyte/integrations/destination/cassandra/TestDataFactory.java deleted file mode 100644 index da3af9ec770b..000000000000 --- a/airbyte-integrations/connectors/destination-cassandra/src/test/java/io/airbyte/integrations/destination/cassandra/TestDataFactory.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.cassandra; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.DestinationSyncMode; -import io.airbyte.protocol.models.v0.SyncMode; -import java.time.Instant; -import java.util.List; - -public class TestDataFactory { - - private TestDataFactory() { - - } - - static CassandraConfig createCassandraConfig(String username, String password, String address, int port) { - return new CassandraConfig( - "default_keyspace", - username, - password, - address, - port, - "datacenter1", - 1); - } - - static JsonNode createJsonConfig(String username, String password, String address, int port) { - return Jsons.jsonNode(ImmutableMap.builder() - .put("keyspace", "default_keyspace") - .put("username", username) - .put("password", password) - .put("address", address) - .put("port", port) - .put("datacenter", "datacenter1") - .put("replication", 1) - .build()); - } - - static AirbyteMessage createAirbyteMessage(AirbyteMessage.Type type, - String streamName, - String namespace, - JsonNode data) { - return new AirbyteMessage() - .withType(type) - .withRecord(new AirbyteRecordMessage() - .withStream(streamName) - .withNamespace(namespace) - .withData(data) - .withEmittedAt(Instant.now().toEpochMilli())); - } - - static AirbyteStream createAirbyteStream(String name, String namespace) { - return new AirbyteStream() - .withName(name) - .withNamespace(namespace) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH)); - } - - static ConfiguredAirbyteStream createConfiguredAirbyteStream(DestinationSyncMode syncMode, AirbyteStream stream) { - return new ConfiguredAirbyteStream() - .withDestinationSyncMode(syncMode) - .withStream(stream); - } - - static ConfiguredAirbyteCatalog createConfiguredAirbyteCatalog(ConfiguredAirbyteStream... configuredStreams) { - return new ConfiguredAirbyteCatalog().withStreams(List.of(configuredStreams)); - } - -} diff --git a/airbyte-integrations/connectors/destination-cumulio/Dockerfile b/airbyte-integrations/connectors/destination-cumulio/Dockerfile deleted file mode 100644 index 90e3f08bd96d..000000000000 --- a/airbyte-integrations/connectors/destination-cumulio/Dockerfile +++ /dev/null @@ -1,42 +0,0 @@ -FROM python:3.9.11 as base -# FROM python:3.9.11-alpine3.15 as base -# switched from alpine as there were tons of errors (in case you want to switch back to alpine) -# - https://stackoverflow.com/a/57485724/5246670 -# - numpy error: https://stackoverflow.com/a/22411624/5246670 -# - libstdc++ https://github.com/amancevice/docker-pandas/issues/12#issuecomment-717215043 -# - musl-dev linux-headers g++ because of: https://stackoverflow.com/a/40407099/5246670 - -# build and load all requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -# upgrade pip to the latest version -RUN apt-get update && apt-get -y upgrade \ - && pip install --upgrade pip - -COPY setup.py ./ -# install necessary packages to a temporary folder -RUN pip install --prefix=/install . 
-# build a clean environment -FROM base -# RUN conda install -c conda-forge python-duckdb -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -#adding duckdb manually (outside of setup.py - lots of errors) -RUN pip install duckdb - -# copy payload code only -COPY main.py ./ -COPY destination_cumulio ./destination_cumulio - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=0.1.0 -LABEL io.airbyte.name=airbyte/destination-cumulio diff --git a/airbyte-integrations/connectors/destination-cumulio/README.md b/airbyte-integrations/connectors/destination-cumulio/README.md deleted file mode 100644 index 62261106b05f..000000000000 --- a/airbyte-integrations/connectors/destination-cumulio/README.md +++ /dev/null @@ -1,98 +0,0 @@ -# Cumulio Destination - -This is the repository for the Cumulio destination connector, written in Python. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/destinations/cumulio). - -## Local development - -### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** - -#### Minimum Python version required `= 3.7.0` - -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` - -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt -``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. - -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. - -#### Create credentials -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/destinations/cumulio) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `destination_cumulio/spec.json` file. -Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. - -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `destination cumulio test creds` -and place them into `secrets/config.json`. 
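For orientation, a `secrets/config.json` for this connector carries the three fields that `client.py` (further down in this patch) reads from the config: `api_key`, `api_token`, and `api_host`. A placeholder example follows; the host shown is the typical public Cumul.io endpoint, so verify it against `destination_cumulio/spec.json`:

```json
{
  "api_host": "https://api.cumul.io",
  "api_key": "<your Cumul.io API key>",
  "api_token": "<your Cumul.io API token>"
}
```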
- -### Locally running the connector -``` -python main.py spec -python main.py check --config secrets/config.json -python main.py write --config secrets/config.json --catalog integration_tests/configured_catalog.json -``` - -### Locally running the connector docker image - - -#### Build -**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** -```bash -airbyte-ci connectors --name=destination-cumulio build -``` - -An image will be built with the tag `airbyte/destination-cumulio:dev`. - -**Via `docker build`:** -```bash -docker build -t airbyte/destination-cumulio:dev . -``` - -#### Run -Then run any of the connector commands as follows: -``` -docker run --rm airbyte/destination-cumulio:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-cumulio:dev check --config /secrets/config.json -# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages -cat messages.jsonl | docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-cumulio:dev write --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` - -## Testing -You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): -```bash -airbyte-ci connectors --name=destination-cumulio test -``` - -### Customizing acceptance Tests -Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. -If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. - -## Dependency Management -All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. -We split dependencies between two groups, dependencies that are: -* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. -* required for the testing need to go to `TEST_REQUIREMENTS` list - -### Publishing a new version of the connector -You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? -1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-cumulio test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). -3. Make sure the `metadata.yaml` content is up to date. -4. Make the connector documentation and its changelog is up to date (`docs/integrations/destinations/cumulio.md`). -5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). -6. Pat yourself on the back for being an awesome contributor. -7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. 
- diff --git a/airbyte-integrations/connectors/destination-cumulio/destination_cumulio/__init__.py b/airbyte-integrations/connectors/destination-cumulio/destination_cumulio/__init__.py deleted file mode 100644 index 5dda7de9dfe7..000000000000 --- a/airbyte-integrations/connectors/destination-cumulio/destination_cumulio/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. -# - - -from .destination import DestinationCumulio - -__all__ = ["DestinationCumulio"] diff --git a/airbyte-integrations/connectors/destination-cumulio/destination_cumulio/client.py b/airbyte-integrations/connectors/destination-cumulio/destination_cumulio/client.py deleted file mode 100644 index 10728e374f54..000000000000 --- a/airbyte-integrations/connectors/destination-cumulio/destination_cumulio/client.py +++ /dev/null @@ -1,367 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import time -from logging import Logger -from typing import Any, Mapping - -from cumulio.cumulio import Cumulio # type: ignore - -# def _retry_with_backoff( -# fn: Callable, -# backoff_times_in_seconds: list[int] -# ): -# while True: -# try: -# return fn() - - -class CumulioClient: - # Cumul.io will auto-generate a UUID that is unique to the dataset created. - # To ensure a consistent flow to the same dataset, we'll add a tag to the dataset: - # the tag is a combination of the prefix below and the stream name. - # This allows us to retrieve the same dataset resource upon further sync schedules. - TAG_PREFIX = "[AIRBYTE - DO NOT DELETE] - " - - REPLACE_TAG = "REPLACE DATA" - - INITIAL_DATASET_NAME_PREFIX = "Airbyte - " - - BACKOFF_TIMES_IN_SECONDS = [300, 600, 1200] - - def __init__(self, config: Mapping[str, Any], logger: Logger): - self.logger = logger - self.client = Cumulio(config["api_key"], config["api_token"], config["api_host"]) - - def batch_write( - self, - stream_name: str, - write_buffer: list, - column_headers: list, - is_in_overwrite_sync_mode: bool, - is_first_batch: bool, - update_metadata: bool, - ): - """Write a list of data (array of arrays) in a specific sync mode to Cumul.io.""" - if len(write_buffer) == 0 or (len(write_buffer) == 1 and len(write_buffer[0]) == 0): - return - - dataset_id = self._get_dataset_id_from_stream_name(stream_name) - if dataset_id is None: - dataset_id = self._push_batch_to_new_dataset(stream_name, write_buffer, column_headers) - else: - is_in_replace_mode = self._dataset_contains_replace_tag(dataset_id) - first_batch_replace = is_first_batch and (is_in_overwrite_sync_mode or is_in_replace_mode) - self._push_batch_to_existing_dataset( - dataset_id, - write_buffer, - column_headers, - first_batch_replace, - update_metadata, - ) - - self.logger.info(f"Successfully pushed {len(write_buffer)} rows to Cumul.io's data warehouse in a dataset with id {dataset_id}.") - - def test_api_token(self): - """Test an API key and token by retrieving it.""" - self.logger.info("Checking API host, key and token.") - data = self.client.get("authorization", {"where": {"type": "api"}}) - # if response contains a count 0, the API host, key and token combination is unknown to Cumul.io. - if data["count"] == 0: - raise Exception( - "Unknown combination of API host, key and token. Can you verify whether you've specified the correct combination of " - "Cumul.io API host, key, and token?" 
-            )
-        self.logger.info("API host, key and token combination is valid.")
-
-    def test_data_push(self, stream_name: str, data: list[list[Any]], columns: list[str]):
-        """[DEPRECATED] This method is no longer in use as it results in a lot of overhead.
-        Test pushing dummy data into a dataset, and delete the dataset afterwards."""
-
-        self.logger.info("Starting data push of dummy data.")
-        self.batch_write(stream_name, data, columns, True, True, True)
-        self.logger.info("Finished data push of dummy data. Will delete dummy dataset.")
-
-        self.delete_dataset(stream_name)
-        self.logger.info("Finished deleting dummy dataset.")
-
-    def delete_dataset(self, stream_name: str):
-        """Delete a dataset in Cumul.io.
-        This should only be used for testing purposes. Currently used in:
-        - Integration tests
-        - When pushing dummy data to an example dataset during "check" of Airbyte destination connector (see destination.py check method)
-        """
-        dataset_id = self._get_dataset_id_from_stream_name(stream_name)
-        if dataset_id is not None:
-            return self.client.delete("securable", dataset_id)
-
-        self.logger.info(f"No dataset for stream {stream_name} found to delete.")
-
-    def get_ordered_columns(self, stream_name: str):
-        """Return a list of ordered columns (based on their order in Cumul.io).
-        The dataset is retrieved based on a Cumul.io tag that includes the stream_name.
-        """
-        dataset_and_columns = self.get_dataset_and_columns_from_stream_name(stream_name)
-        if dataset_and_columns is None:
-            # Dataset hasn't been created yet on Cumul.io's side.
-            return []
-        # Sort columns based on the order property.
-        order_sorted_columns = sorted(dataset_and_columns["columns"], key=lambda x: x["order"])
-        # Return a list of column source names.
-        return [column["source_name"] for column in order_sorted_columns]
-
-    def get_dataset_and_columns_from_stream_name(self, stream_name: str):
-        """Return a dataset and its columns based on a Cumul.io tag that includes the stream_name."""
-        result = self.client.get(
-            "securable",
-            {
-                "where": {"type": "dataset"},
-                "attributes": ["id", "name"],
-                "include": [
-                    {
-                        "model": "Tag",
-                        "where": {"tag": self.TAG_PREFIX + stream_name},
-                        "attributes": ["id", "tag"],
-                        "jointype": "inner",
-                    },
-                    {
-                        "model": "Column",
-                        "attributes": ["id", "source_name", "order"],
-                        "jointype": "inner",
-                    },
-                ],
-            },
-        )
-        if result["count"] > 1:
-            raise Exception(
-                f"More than one dataset has been returned, could you verify whether the tag for stream {stream_name} is set up "
-                f"correctly in Cumul.io (expected a tag '{self.TAG_PREFIX}{stream_name}')?"
-            )
-        # A count of zero means that the dataset has not been created on Cumul.io's side yet.
-        # We'll return None to indicate this.
-        elif result["count"] == 0:
-            return None
-        # return dataset and its columns.
-        return result["rows"][0]
-
-    def set_replace_tag_on_dataset(self, stream_name: str):
-        """Add a "replace" tag to a specific dataset based on the stream_name.
-        The "replace" tag is used to ensure that the next sync will replace the existing data.
-        """
-        dataset_id = self._get_dataset_id_from_stream_name(stream_name)
-        if dataset_id is not None:
-            self.logger.info(
-                f"A tag will be added to the dataset with id {dataset_id} to replace the existing data upon next sync. "
-                f"As a result, the existing data will not be replaced until the next sync has run. "
-                f"This avoids empty datasets which cause 'No data' to be displayed upon querying them."
- ) - return self._associate_tag_dataset_id(self.REPLACE_TAG, dataset_id) - self.logger.debug( - f"No dataset found to set Replace tag on (looking for stream name '{stream_name}'), " - f"this might be due to the dataset not existing yet on Cumul.io's side." - ) - - def _push_batch_to_new_dataset(self, stream_name: str, write_buffer: list[list[Any]], column_headers: list[str]): - properties = { - "type": "create", - "data": write_buffer, - "options": { - "header": column_headers, - "update_metadata": True, - "name": {"en": self.INITIAL_DATASET_NAME_PREFIX + stream_name}, - }, - } - result: Mapping[str, Any] = {} - data_is_pushed = False - try_count = 0 - while (not data_is_pushed) and try_count < len(self.BACKOFF_TIMES_IN_SECONDS): - try: - self.logger.info( - f"Pushing {len(write_buffer)} rows to Cumul.io's data warehouse in a new Cumul.io dataset " - f"with name {self.INITIAL_DATASET_NAME_PREFIX}{stream_name}." - ) - - result = self.client.create("data", properties) - data_is_pushed = True - - except Exception as e: - if "Unauthorized" in str(e): - raise Exception( - f"Not able to push a batch of data to a new dataset due to an 'Unauthorized' error. " - f"Please verify that your API key and token are still valid!" - f"Error: {e}" - ) - elif try_count + 1 >= len(self.BACKOFF_TIMES_IN_SECONDS): - raise Exception(f"Exception while creating new dataset after {len(self.BACKOFF_TIMES_IN_SECONDS)} retries: {e}") - - seconds_to_backoff = self.BACKOFF_TIMES_IN_SECONDS[try_count] - try_count += 1 - self.logger.info( - f"Error pushing data to a new dataset during try {try_count}, retrying in {seconds_to_backoff} seconds. Error: {e}" - ) - time.sleep(seconds_to_backoff) - - dataset_id = result["rows"][0]["id"] - try: - # Add a tag to the dataset to allow retrieving it upon further syncs / batch writes - self._associate_tag_dataset_id(stream_name, dataset_id) - except Exception as e: - raise Exception( - f"The data has been stored successfully, but an error occurred while associating a required tag to the " - f"dataset (id: {dataset_id}). This will likely cause issues upon further synchronizations. The following " - f"error occurred: ", - e, - ) - - return dataset_id - - def _push_batch_to_existing_dataset( - self, - dataset_id: str, - write_buffer: list[list[Any]], - column_headers: list[str], - first_batch_replace: bool, - update_metadata: bool, - ): - cumulio_sync_type = "replace" if first_batch_replace else "append" - - properties = { - "type": cumulio_sync_type, - "data": write_buffer, - "securable_id": dataset_id, - "options": { - "header": column_headers, - "update_metadata": update_metadata, - }, - } - data_is_pushed = False - try_count = 0 - while (not data_is_pushed) and try_count < len(self.BACKOFF_TIMES_IN_SECONDS): - try: - self.logger.info( - f"Pushing {len(write_buffer)} rows to Cumul.io dataset with id {dataset_id} in {cumulio_sync_type} mode, " - f"{'while' if update_metadata else 'not'} updating the columns of that dataset." - ) - self.client.create("data", properties) - - data_is_pushed = True - - if first_batch_replace: - # Try to remove replace tag to ensure next syncs do not replace existing data. - self._remove_replace_tag_dataset_id_association(dataset_id) - - except RuntimeError as e: - if "Unauthorized" in str(e): - raise Exception( - f"Not able to push a batch of data to dataset {dataset_id} due to an 'Unauthorized' error. " - f"Please verify that your API key and token are still valid!" 
- f"Error: {e}" - ) - elif try_count + 1 >= len(self.BACKOFF_TIMES_IN_SECONDS): - raise Exception( - f"Exception while pushing to existing dataset {dataset_id} after {len(self.BACKOFF_TIMES_IN_SECONDS)} retries: ", - e, - ) - - seconds_to_backoff = self.BACKOFF_TIMES_IN_SECONDS[try_count] - try_count += 1 - - self.logger.info( - f"Error pushing data to existing dataset {dataset_id} during try {try_count}, retrying in {seconds_to_backoff} seconds." - ) - - time.sleep(seconds_to_backoff) - - def _dataset_contains_replace_tag(self, dataset_id: str): - """Return a boolean to indicate whether a dataset contains the "replace" tag.""" - result = self.client.get( - "securable", - { - "where": {"type": "dataset", "id": dataset_id}, - "attributes": ["id", "name"], - "include": [ - { - "model": "Tag", - "where": {"tag": self.TAG_PREFIX + self.REPLACE_TAG}, - "attributes": ["id", "tag"], - "jointype": "inner", - } - ], - }, - ) - return False if result["count"] == 0 else True - - def _remove_replace_tag_dataset_id_association(self, dataset_id: str): - """Remove the "replace" tag from a specific dataset.""" - tag_id = self._get_tag_id(self.REPLACE_TAG) - if tag_id is not None: - return self._dissociate_tag_with_dataset_id(tag_id, dataset_id) - self.logger.debug( - f"No replace tag found, so could not remove for Cumul.io dataset with id {dataset_id}." - f"This could be expected as the stream might be configured in overwrite mode." - ) - - def _get_dataset_id_from_stream_name(self, stream_name: str): - """Return a dataset ID based on a Cumul.io tag that includes the stream_name.""" - result = self.client.get( - "securable", - { - "where": {"type": "dataset"}, - "attributes": ["id", "name"], - "include": [ - { - "model": "Tag", - "where": {"tag": self.TAG_PREFIX + stream_name}, - "attributes": ["id", "tag"], - "jointype": "inner", - } - ], - }, - ) - if result["count"] > 1: - raise Exception( - f"More than one dataset has been found, could you verify whether the tag for stream {stream_name} is set up " - f"correctly in Cumul.io (expected a tag '{self.TAG_PREFIX}{stream_name}' on a single dataset)?" - ) - # A count of zero means that the dataset has not been created on Cumul.io's side yet. - # We'll return None to indicate this. - elif result["count"] == 0: - return None - # return dataset ID - return result["rows"][0]["id"] - - def _associate_tag_dataset_id(self, tag_name: str, dataset_id: str): - """Ensure that a specific stream name tag is associated to a dataset ID. - Optionally the Tag is created and associated if not existing yet. - """ - # A tag should be unique and cannot be created multiple times. - # In order to ensure that the association doesn't fail, - # we'll first try to retrieve the tag and then either - # associate it with the newly created securable, - # or create & associate it. 
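        # (Illustrative walk-through, assuming a hypothetical stream "orders": _get_tag_id
        # looks up the tag "[AIRBYTE - DO NOT DELETE] - orders"; if it exists, its id is
        # associated with the dataset, otherwise the tag is created and associated with the
        # dataset in a single create() call.)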
- tag_id = self._get_tag_id(tag_name) - if tag_id is not None: - return self._associate_tag_with_dataset_id(tag_id, dataset_id) - return self._create_and_associate_stream_name_tag_with_dataset_id(tag_name, dataset_id) - - def _get_tag_id(self, tag_name: str): - """Return a Tag ID using the stream name.""" - result = self.client.get("tag", {"where": {"tag": self.TAG_PREFIX + tag_name}}) - if result["count"] == 0: - return None - return result["rows"][0]["id"] - - def _associate_tag_with_dataset_id(self, tag_id: str, dataset_id: str): - return self.client.associate("tag", tag_id, "Securables", dataset_id) - - def _dissociate_tag_with_dataset_id(self, tag_id: str, dataset_id: str): - return self.client.dissociate("tag", tag_id, "Securables", dataset_id) - - def _create_and_associate_stream_name_tag_with_dataset_id(self, tag_name: str, dataset_id: str): - return self.client.create( - "tag", - {"tag": self.TAG_PREFIX + tag_name}, - [{"role": "Securables", "id": dataset_id}], - ) diff --git a/airbyte-integrations/connectors/destination-cumulio/destination_cumulio/destination.py b/airbyte-integrations/connectors/destination-cumulio/destination_cumulio/destination.py deleted file mode 100644 index 61c6c5ac4afb..000000000000 --- a/airbyte-integrations/connectors/destination-cumulio/destination_cumulio/destination.py +++ /dev/null @@ -1,101 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from logging import Logger, getLogger -from typing import Any, Iterable, Mapping - -from airbyte_cdk.destinations import Destination -from airbyte_cdk.models import AirbyteConnectionStatus, AirbyteMessage, ConfiguredAirbyteCatalog, DestinationSyncMode, Status, Type -from destination_cumulio.client import CumulioClient -from destination_cumulio.writer import CumulioWriter - -logger = getLogger("airbyte") - - -class DestinationCumulio(Destination): - def write( - self, - config: Mapping[str, Any], - configured_catalog: ConfiguredAirbyteCatalog, - input_messages: Iterable[AirbyteMessage], - ) -> Iterable[AirbyteMessage]: - """Reads the input stream of messages, config, and catalog to write data to the destination. - - This method returns an iterable (typically a generator of AirbyteMessages via yield) containing state messages received in the - input message stream. Outputting a state message means that every AirbyteRecordMessage which came before it has been successfully - persisted to the destination. This is used to ensure fault tolerance in the case that a sync fails before fully completing, - then the source is given the last state message output from this method as the starting point of the next sync. - - :param config: dict of JSON configuration matching the configuration declared in spec.json. Current format: - { - 'api_host': '', - 'api_key': '', - 'api_token': '' - } - :param configured_catalog: schema of the data being received and how it should be persisted in the destination. - :param input_messages: stream of input messages received from the source. - - :return: Iterable of AirbyteStateMessages wrapped in AirbyteMessage structs. - """ - writer = CumulioWriter(config, configured_catalog, logger) - - for configured_stream in configured_catalog.streams: - # Cumul.io does not support removing all data from an existing dataset, and removing the dataset itself will break existing - # dashboards built on top of it. 
-            # Instead, the connector will make sure to push the first batch of data as a "replace" action: this will cause all existing data
-            # to be replaced with the first batch of data. All subsequent batches will be pushed as an "append" action.
-            if configured_stream.destination_sync_mode == DestinationSyncMode.overwrite:
-                writer.delete_stream_entries(configured_stream.stream.name)
-
-        for message in input_messages:
-            if message.type == Type.STATE:
-                # Yielding a state message indicates that all records which came before it have been written to the destination.
-                # We flush all write buffers in the writer, and then output the state message itself.
-                writer.flush_all()
-                yield message
-            elif message.type == Type.RECORD:
-                record = message.record
-                assert record is not None
-                assert record.stream is not None
-                assert record.data is not None
-                writer.queue_write_operation(record.stream, record.data)
-            else:
-                # ignore other message types for now
-                continue
-
-        # Make sure to flush any records still in the queue
-        writer.flush_all()
-
-    def check(self, logger: Logger, config: Mapping[str, Any]) -> AirbyteConnectionStatus:
-        """Tests if the input configuration can be used to successfully connect to the destination with the needed permissions.
-
-        This will test whether the combination of the Cumul.io API host, API key and API token is valid.
-
-        :param logger: Logging object to display debug/info/error to the logs
-            (logs will not be accessible via airbyte UI if they are not passed to this logger)
-        :param config: Json object containing the configuration of this destination, content of this json is as specified in
-            the properties of the spec.json file
-
-        :return: AirbyteConnectionStatus indicating a Success or Failure
-        """
-        try:
-            client = CumulioClient(config, logger)
-            # Verify access by hitting Cumul.io authentication endpoint
-            client.test_api_token()
-
-            # We no longer test an actual data push, as this might take some time.
-            # If the API host, key, and token are valid, we can assume data can be pushed using them.
-
-            return AirbyteConnectionStatus(status=Status.SUCCEEDED)
-        except Exception as e:
-            # The Cumul.io Python SDK currently returns a generic error message when an issue occurs during the request,
-            # or when the request returns e.g. a 401 Unauthorized HTTP response code.
-            # We'll assume that either the API host is incorrect, or the API key and token are no longer valid.
-            # Compare the exception's message rather than the exception object itself.
-            if str(e) != "Something went wrong":
-                return AirbyteConnectionStatus(status=Status.FAILED, message=f"An exception occurred: {repr(e)}")
-            return AirbyteConnectionStatus(
-                status=Status.FAILED,
-                message="An exception occurred: could it be that the API host is incorrect, or the API key and token are no longer valid?",
-            )
diff --git a/airbyte-integrations/connectors/destination-cumulio/destination_cumulio/spec.json b/airbyte-integrations/connectors/destination-cumulio/destination_cumulio/spec.json
deleted file mode 100644
index dff9ec31cb64..000000000000
--- a/airbyte-integrations/connectors/destination-cumulio/destination_cumulio/spec.json
+++ /dev/null
@@ -1,37 +0,0 @@
-{
-  "documentationUrl": "https://docs.airbyte.com/integrations/destinations/cumulio",
-  "supported_destination_sync_modes": ["overwrite", "append"],
-  "supportsIncremental": true,
-  "supportsDBT": false,
-  "supportsNormalization": false,
-  "connectionSpecification": {
-    "$schema": "http://json-schema.org/draft-07/schema#",
-    "title": "Destination Cumulio",
-    "type": "object",
-    "required": ["api_host", "api_key", "api_token"],
-    "additionalProperties": true,
-    "properties": {
-      "api_host": {
-        "title": "Cumul.io API Host URL",
-        "description": "URL of the Cumul.io API (e.g. 'https://api.cumul.io', 'https://api.us.cumul.io', or VPC-specific API url). Defaults to 'https://api.cumul.io'.",
-        "default": "https://api.cumul.io",
-        "type": "string",
-        "order": 0
-      },
-      "api_key": {
-        "title": "Cumul.io API Key",
-        "description": "An API key generated in Cumul.io's platform (can be generated here: https://app.cumul.io/start/profile/integration).",
-        "type": "string",
-        "airbyte_secret": true,
-        "order": 1
-      },
-      "api_token": {
-        "title": "Cumul.io API Token",
-        "description": "The corresponding API token generated in Cumul.io's platform (can be generated here: https://app.cumul.io/start/profile/integration).",
-        "type": "string",
-        "airbyte_secret": true,
-        "order": 2
-      }
-    }
-  }
-}
diff --git a/airbyte-integrations/connectors/destination-cumulio/destination_cumulio/writer.py b/airbyte-integrations/connectors/destination-cumulio/destination_cumulio/writer.py
deleted file mode 100644
index 93c8d05ee761..000000000000
--- a/airbyte-integrations/connectors/destination-cumulio/destination_cumulio/writer.py
+++ /dev/null
@@ -1,205 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
-
-
-import json
-from logging import Logger
-from typing import Any, Mapping
-
-from airbyte_cdk.models import ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, DestinationSyncMode
-from destination_cumulio.client import CumulioClient
-
-
-def _convert_airbyte_configured_stream_into_headers_dict(
-    configured_stream: ConfiguredAirbyteStream,
-):
-    """Return a dict of column names and types based on the configured Airbyte stream.
-    Note that the Airbyte types are currently not used due to Cumul.io's Data API Service not supporting specifying column types.
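    (Illustrative example: a schema property {"created_at": {"type": "string", "airbyte-type": "timestamp_with_timezone"}}
    maps to {"created_at": {"airbyte-type": "timestamp_with_timezone"}}, while {"name": {"type": "string"}} falls back to
    {"name": {"airbyte-type": "string"}}.)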
- """ - column_headers = {} - for column_header in configured_stream.stream.json_schema["properties"]: - if "airbyte-type" in configured_stream.stream.json_schema["properties"][column_header]: - column_headers[column_header] = { - "airbyte-type": configured_stream.stream.json_schema["properties"][column_header]["airbyte-type"] - } - else: - column_headers[column_header] = {"airbyte-type": configured_stream.stream.json_schema["properties"][column_header]["type"]} - return column_headers - - -class CumulioWriter: - # Cumul.io's Data API service has a limit of pushing 10 000 data points (i.e. rows) in a single request. - # (see note here: https://developer.cumul.io/?shell#data_create) - FLUSH_INTERVAL = 10000 - - def __init__( - self, - config: Mapping[str, Any], - configured_catalog: ConfiguredAirbyteCatalog, - logger: Logger, - ): - """Create a single Cumul.io Client and a dict of writers. - The Cumul.io Client will be used to send API requests to Cumul.io's API. - The writers dict will contain one element for each configured_stream in the connection. - Each of these dicts have a stream-specific configuration and write buffer. - """ - self.logger = logger - self.client = CumulioClient(config, logger) - self.writers = self._create_writers(configured_catalog) - - def queue_write_operation(self, stream_name: str, data: Mapping): - """Queue data in a specific writer buffer. - It flushes the buffer in case it has reached the flush interval. - """ - cumulio_data = self.transform_data(stream_name, data) - self.writers[stream_name]["write_buffer"].append(cumulio_data) - if len(self.writers[stream_name]["write_buffer"]) == self.FLUSH_INTERVAL: - self.flush(stream_name) - - def flush_all(self): - """Flush all writer buffers.""" - for stream_name in self.writers: - self.flush(stream_name) - - def flush(self, stream_name: str): - """Write a batch of data from the write buffer using the Cumul.io client.""" - self.client.batch_write( - stream_name, - self.writers[stream_name]["write_buffer"], - [column_header["name"] for column_header in self.writers[stream_name]["column_headers"]], - self.writers[stream_name]["is_in_overwrite_sync_mode"], - self.writers[stream_name]["is_first_batch"], - self.writers[stream_name]["update_metadata"], - ) - self.writers[stream_name]["write_buffer"].clear() - if self.writers[stream_name]["is_first_batch"]: - self.writers[stream_name]["is_first_batch"] = False - - def transform_data(self, stream_name: str, airbyte_data: Mapping) -> list[Any]: - """Transform Airbyte data (one row) into Cumul.io's expected data format (a list in the appropriate order). - If data for a specific column is not included in the Airbyte data, the value will be None. - If data for a specific column in the Airbyte data is not recognized, it will be ignored as extraneous. - (see here: https://docs.airbyte.com/understanding-airbyte/airbyte-protocol/#output-4) - """ - try: - self.writers[stream_name] - except KeyError: - raise Exception(f"The stream {stream_name} is not defined in the configured_catalog and won't thus be streamed.") - - data: list[Any] = [None for i in range(len(self.writers[stream_name]["column_headers"]))] - for column in airbyte_data: - unknown_data = True - index: int = 0 - for column_header in self.writers[stream_name]["column_headers"]: - if column_header["name"] == column: - unknown_data = False - # Cumul.io doesn't support storing or querying nested (list, dict) or boolean data. 
- # we'll stringify this data via json.dumps - if ( - isinstance(airbyte_data[column], list) - or isinstance(airbyte_data[column], dict) - or isinstance(airbyte_data[column], bool) - ): - data[index] = json.dumps(airbyte_data[column]) - else: - data[index] = airbyte_data[column] - index += 1 - if unknown_data: - self.logger.debug( - f"The value with name {column} has not been defined in the ConfiguredAirbyteStream and will thus be " - f"ignored as extraneous." - ) - return data - - def delete_stream_entries(self, stream_name: str): - """Set a "replace" tag on a dataset to ensure all existing data will be replaced upon next synchronization.""" - return self.client.set_replace_tag_on_dataset(stream_name) - - def _create_writers(self, configured_catalog: ConfiguredAirbyteCatalog): - """Return a set of writers, one for each stream in the configured_catalog. - This method will also merge the Cumul.io columns for the stream's dataset, if existing.""" - writers = {} - for configured_stream in configured_catalog.streams: - result = self._merge_cumulio_and_airbyte_column_headers(configured_stream) - writers[configured_stream.stream.name] = { - "write_buffer": [], - "column_headers": result["sorted_column_headers"], - "is_in_overwrite_sync_mode": configured_stream.destination_sync_mode == DestinationSyncMode.overwrite, - "is_first_batch": True, - "update_metadata": result["update_metadata"], - } - return writers - - def _merge_cumulio_and_airbyte_column_headers(self, configured_stream: ConfiguredAirbyteStream): - """Merge columns known by Airbyte and Cumul.io. - - If the dataset does not yet exist in Cumul.io (i.e. the first sync), the columns order will be based on "for el in dict" order. - - Upon next synchronizations, the dataset exists in Cumul.io. Its column order will be used to send data in the corresponding order. - - If a new column is added to the source table (i.e. this column doesn't exist yet in Cumul.io), - it will be added at the end of the dataset's columns upon next synchronization. - - If an existing column is removed from the source: - 1. If the next synchronization for this stream runs in "overwrite" mode (or a "replace" tag is set), the Cumul.io dataset will - no longer contain the original column. - 2. If the next synchronization for this stream runs in "append" mode, the Cumul.io dataset will - contain empty values for the non-existing columns for all appended rows. - Note that Airbyte recommends a reset upon changes to source schema(s). In that case, the first batch will be synced - using the "overwrite" mode (due to setting a reset tag on the dataset, see delete_stream_entries implementation). 
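        (Illustrative example: if the Cumul.io dataset currently orders its columns as [a, b] and the configured stream
        defines properties a, b and c, the merged result is [a, b, c]: c is appended after the existing columns and
        update_metadata is returned as True so that the new column is created upon the next data push.)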
-        """
-        cumulio_column_headers = self.client.get_ordered_columns(configured_stream.stream.name)
-        airbyte_column_headers = _convert_airbyte_configured_stream_into_headers_dict(configured_stream)
-
-        update_metadata = False
-
-        merged_column_headers = []
-        new_column_count = 0
-        for airbyte_column_header in airbyte_column_headers:
-            merged_column_header = {
-                "name": airbyte_column_header,
-                "airbyte-type": airbyte_column_headers[airbyte_column_header]["airbyte-type"],
-            }
-
-            try:
-                # Add an order based on the order of the column in the Cumul.io dataset
-                merged_column_header["order"] = cumulio_column_headers.index(airbyte_column_header)
-            except ValueError:
-                # Add an appropriate order to ensure the column appears at the end of the data
-                new_column_count += 1
-                merged_column_header["order"] = len(cumulio_column_headers) + new_column_count
-
-            merged_column_headers.append(merged_column_header)
-
-        sorted_column_headers = sorted(merged_column_headers, key=lambda x: x["order"])
-        if new_column_count > 0:
-            update_metadata = True
-
-            if len(cumulio_column_headers) > 0:
-                self.logger.info(
-                    f"One or more columns defined in stream {configured_stream.stream.name} are not yet present in Cumul.io, "
-                    f"and will be added upon next successful synchronization."
-                )
-            else:
-                self.logger.info(
-                    f"The dataset for stream {configured_stream.stream.name} doesn't seem to exist in Cumul.io. "
-                    f"The next sync for this stream will create it."
-                )
-        elif not update_metadata:
-            # Validate whether all columns in Cumul.io are still part of the configured Airbyte catalog definition.
-            for cumulio_column_header in cumulio_column_headers:
-                try:
-                    # Try to find the Cumul.io column header in the Airbyte columns
-                    airbyte_column_headers[cumulio_column_header]
-                except KeyError:
-                    # Cumul.io's column hasn't been found, so we'll need to update the dataset's metadata upon next sync.
-                    if configured_stream.destination_sync_mode == DestinationSyncMode.overwrite:
-                        self.logger.info(
-                            f"The source column {cumulio_column_header} in Cumul.io is no longer present in the configured "
-                            f"stream {configured_stream.stream.name} (i.e. in the source). As the stream synchronization is "
-                            f"in overwrite mode, the existing column in Cumul.io will be deleted upon next sync. Check "
-                            f"carefully whether this column is used in any existing Cumul.io dashboards!"
- ) - update_metadata = True - - return { - "sorted_column_headers": sorted_column_headers, - "update_metadata": update_metadata, - } diff --git a/airbyte-integrations/connectors/destination-cumulio/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/destination-cumulio/integration_tests/configured_catalog.json deleted file mode 100644 index 844c37fea8f6..000000000000 --- a/airbyte-integrations/connectors/destination-cumulio/integration_tests/configured_catalog.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "streams": [ - { - "stream": { - "name": "cumulio_example_table", - "supported_sync_modes": ["full_refresh"], - "source_defined_cursor": false, - "json_schema": { - "type": "object", - "properties": { - "hierarchy_column": { - "type": "string" - }, - "numeric_column": { - "type": "number" - }, - "datetime_column": { - "type": "string", - "format": "date-time", - "airbyte_type": "timestamp_with_timezone" - } - } - } - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - } - ] -} diff --git a/airbyte-integrations/connectors/destination-cumulio/integration_tests/integration_test.py b/airbyte-integrations/connectors/destination-cumulio/integration_tests/integration_test.py deleted file mode 100644 index 545241d463e7..000000000000 --- a/airbyte-integrations/connectors/destination-cumulio/integration_tests/integration_test.py +++ /dev/null @@ -1,276 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import json -import time -from logging import Logger, getLogger -from typing import Any, Dict, Mapping - -import pytest -from airbyte_cdk.models import ( - AirbyteMessage, - AirbyteRecordMessage, - AirbyteStateMessage, - AirbyteStream, - ConfiguredAirbyteCatalog, - ConfiguredAirbyteStream, - DestinationSyncMode, - Status, - SyncMode, - Type, -) -from destination_cumulio import DestinationCumulio -from destination_cumulio.client import CumulioClient - - -@pytest.fixture(name="logger") -def logger_fixture() -> Logger: - return getLogger("airbyte") - - -@pytest.fixture(name="config") -def config_fixture() -> Mapping[str, Any]: - with open("secrets/config.json", "r") as f: - return json.loads(f.read()) - - -@pytest.fixture(name="configured_catalog") -def configured_catalog_fixture() -> ConfiguredAirbyteCatalog: - stream_schema = { - "type": "object", - "properties": { - "string_col": {"type": "str"}, - "int_col": {"type": "integer"}, - "obj_col": {"type": "object"}, - "arr_col": {"type": "array"}, - }, - } - - append_stream = ConfiguredAirbyteStream( - stream=AirbyteStream( - name="append_integration_test_stream", - json_schema=stream_schema, - supported_sync_modes=[SyncMode.incremental], - ), - sync_mode=SyncMode.incremental, - destination_sync_mode=DestinationSyncMode.append, - ) - - overwrite_stream = ConfiguredAirbyteStream( - stream=AirbyteStream( - name="overwrite_integration_test_stream", - json_schema=stream_schema, - supported_sync_modes=[SyncMode.incremental], - ), - sync_mode=SyncMode.incremental, - destination_sync_mode=DestinationSyncMode.overwrite, - ) - - return ConfiguredAirbyteCatalog(streams=[append_stream, overwrite_stream]) - - -@pytest.fixture(autouse=True) -def delete_datasets(config: Mapping, configured_catalog: ConfiguredAirbyteCatalog, logger: Logger): - cumulio_client = CumulioClient(config, logger) - for stream in configured_catalog.streams: - dataset = cumulio_client.get_dataset_and_columns_from_stream_name(stream.stream.name) - if dataset: - logger.info( - f"Existing integration test dataset found. 
Will delete Cumul.io dataset for integration test stream {stream.stream.name}." - ) - try: - cumulio_client.client.delete("securable", dataset["id"]) - except Exception as e: - logger.info( - f"The following exception occurred when trying to delete the dataset " - f"for integration test stream {stream.stream.name}: {e}" - ) - - -def test_check_valid_config(config: Mapping, logger: Logger): - outcome = DestinationCumulio().check(logger, config) - assert outcome.status == Status.SUCCEEDED - - -def test_check_incomplete_config(logger: Logger): - outcome = DestinationCumulio().check(logger, {"api_host": "https://api.cumul.io"}) - assert outcome.status == Status.FAILED - - -def test_check_invalid_config(logger: Logger): - outcome = DestinationCumulio().check( - logger, - { - "api_host": ".invalid.url", - "api_key": "invalid_key", - "api_token": "invalid_token", - }, - ) - assert outcome.status == Status.FAILED - - -def _state(data: Dict[str, Any]) -> AirbyteMessage: - return AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(data=data)) - - -def _record(stream_name: str, str_value: str, int_value: int, obj_value: dict, arr_value: list) -> AirbyteMessage: - return AirbyteMessage( - type=Type.RECORD, - record=AirbyteRecordMessage( - stream=stream_name, - data={ - "string_col": str_value, - "int_col": int_value, - "obj_col": obj_value, - "arr_col": arr_value, - }, - emitted_at=0, - ), - ) - - -def _retrieve_all_records(cumulio_client, stream_name): - dataset_and_columns = cumulio_client.get_dataset_and_columns_from_stream_name(stream_name) - # Wait 5 seconds before trying to retrieve the data to ensure it can be properly retrieved - time.sleep(5) - if dataset_and_columns is not None: - ordered_columns = cumulio_client.get_ordered_columns(stream_name) - dimension_columns = list( - map( - lambda x, y: { - "dataset_id": dataset_and_columns["id"], - "column_id": y["id"], - }, - ordered_columns, - dataset_and_columns["columns"], - ) - ) - int_col_ind = ordered_columns.index("int_col") - - raw_data_query = { - "dimensions": dimension_columns, - "options": {"rollup_data": False}, - "order": [ - { - "dataset_id": dataset_and_columns["id"], - "column_id": dataset_and_columns["columns"][int_col_ind]["id"], - "order": "asc", - } - ], - } - raw_data = cumulio_client.client.get("data", raw_data_query) - airbyte_data_to_return = [] - for row in raw_data["data"]: - airbyte_data_row = {} - for col_ind, column in enumerate(dataset_and_columns["columns"]): - if isinstance(row[col_ind], dict): - airbyte_data_row[column["source_name"]] = row[col_ind]["id"] - else: - airbyte_data_row[column["source_name"]] = row[col_ind] - airbyte_data_to_return.append( - AirbyteMessage( - type=Type.RECORD, - record=AirbyteRecordMessage(stream=stream_name, data=airbyte_data_row, emitted_at=0), - ) - ) - return airbyte_data_to_return - return None - - -def test_write_append( - config: Mapping, - configured_catalog: ConfiguredAirbyteCatalog, - logger: Logger, -): - """ - This test verifies that: - - Writing a stream in "append" mode appends new records while preserving existing data. - - The correct state message is output by the connector at the end of the sync. - - Object and Array data is appropriately stringified in Cumul.io. 
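    (For example, an object value {"test": 1} is expected to arrive in Cumul.io as the JSON string
    produced by json.dumps, i.e. '{"test": 1}'.)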
-    """
-    stream_name = configured_catalog.streams[0].stream.name
-    destination = DestinationCumulio()
-
-    state_message = _state({"state": "3"})
-    record_chunk_1 = [_record(stream_name, "test-" + str(i), i, {"test": i}, ["test", i]) for i in range(1, 3)]
-
-    output_states_1 = list(destination.write(config, configured_catalog, [*record_chunk_1, state_message]))
-    assert [state_message] == output_states_1
-
-    record_chunk_2 = [_record(stream_name, "test-" + str(i), i, {"test": i}, ["test", i]) for i in range(3, 5)]
-
-    output_states_2 = list(destination.write(config, configured_catalog, [*record_chunk_2, state_message]))
-    assert [state_message] == output_states_2
-
-    cumulio_client = CumulioClient(config, logger)
-
-    records_in_destination = _retrieve_all_records(cumulio_client, stream_name)
-
-    expected_records = [
-        AirbyteMessage(
-            type=Type.RECORD,
-            record=AirbyteRecordMessage(
-                stream=stream_name,
-                data={
-                    "string_col": "test-" + str(i),
-                    "int_col": i,
-                    "obj_col": json.dumps({"test": i}),
-                    "arr_col": json.dumps(["test", i]),
-                },
-                emitted_at=0,
-            ),
-        )
-        for i in range(1, 5)
-    ]
-
-    assert expected_records == records_in_destination
-
-
-def test_write_overwrite(
-    config: Mapping[str, Any],
-    configured_catalog: ConfiguredAirbyteCatalog,
-    logger: Logger,
-):
-    """
-    This test verifies that:
-    - Writing a stream in "overwrite" mode overwrites all existing data.
-    - The correct state message is output by the connector at the end of the sync.
-    - Object and Array data is appropriately stringified in Cumul.io.
-    """
-    stream_name = configured_catalog.streams[1].stream.name
-    destination = DestinationCumulio()
-
-    state_message = _state({"state": "3"})
-    record_chunk_1 = [_record(stream_name, "oldtest-" + str(i), i, {"oldtest": i}, ["oldtest", i]) for i in range(1, 3)]
-
-    output_states_1 = list(destination.write(config, configured_catalog, [*record_chunk_1, state_message]))
-    assert [state_message] == output_states_1
-
-    record_chunk_2 = [_record(stream_name, "newtest-" + str(i), i, {"newtest": i}, ["newtest", i]) for i in range(1, 3)]
-
-    output_states_2 = list(destination.write(config, configured_catalog, [*record_chunk_2, state_message]))
-    assert [state_message] == output_states_2
-
-    cumulio_client = CumulioClient(config, logger)
-
-    records_in_destination = _retrieve_all_records(cumulio_client, stream_name)
-
-    expected_records = [
-        AirbyteMessage(
-            type=Type.RECORD,
-            record=AirbyteRecordMessage(
-                stream=stream_name,
-                data={
-                    "string_col": "newtest-" + str(i),
-                    "int_col": i,
-                    "obj_col": json.dumps({"newtest": i}),
-                    "arr_col": json.dumps(["newtest", i]),
-                },
-                emitted_at=0,
-            ),
-        )
-        for i in range(1, 3)
-    ]
-
-    assert expected_records == records_in_destination
diff --git a/airbyte-integrations/connectors/destination-cumulio/integration_tests/sample_config.json b/airbyte-integrations/connectors/destination-cumulio/integration_tests/sample_config.json
deleted file mode 100644
index 2a1ca74c862b..000000000000
--- a/airbyte-integrations/connectors/destination-cumulio/integration_tests/sample_config.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-  "api_host": "https://api.cumul.io",
-  "api_key": "CUMULIO_API_KEY",
-  "api_token": "CUMULIO_API_TOKEN"
-}
diff --git a/airbyte-integrations/connectors/destination-cumulio/main.py b/airbyte-integrations/connectors/destination-cumulio/main.py
deleted file mode 100644
index 3ad0d7112206..000000000000
--- a/airbyte-integrations/connectors/destination-cumulio/main.py
+++ /dev/null
@@ -1,11 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all 
rights reserved. -# - - -import sys - -from destination_cumulio import DestinationCumulio - -if __name__ == "__main__": - DestinationCumulio().run(sys.argv[1:]) diff --git a/airbyte-integrations/connectors/destination-cumulio/requirements.txt b/airbyte-integrations/connectors/destination-cumulio/requirements.txt deleted file mode 100644 index d6e1198b1ab1..000000000000 --- a/airbyte-integrations/connectors/destination-cumulio/requirements.txt +++ /dev/null @@ -1 +0,0 @@ --e . diff --git a/airbyte-integrations/connectors/destination-cumulio/setup.py b/airbyte-integrations/connectors/destination-cumulio/setup.py deleted file mode 100644 index e613da7bbdb4..000000000000 --- a/airbyte-integrations/connectors/destination-cumulio/setup.py +++ /dev/null @@ -1,23 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = ["airbyte-cdk", "cumulio"] - -TEST_REQUIREMENTS = ["pytest~=6.2"] - -setup( - name="destination_cumulio", - description="Airbyte destination connector implementation for Cumul.io.", - author="Cumul.io", - author_email="support@cumul.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json"]}, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/destination-cumulio/unit_tests/test_client.py b/airbyte-integrations/connectors/destination-cumulio/unit_tests/test_client.py deleted file mode 100644 index 258e8ff2a578..000000000000 --- a/airbyte-integrations/connectors/destination-cumulio/unit_tests/test_client.py +++ /dev/null @@ -1,629 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from typing import Any, Mapping -from unittest.mock import ANY, MagicMock, patch - -import pytest -from destination_cumulio.client import CumulioClient - -# "# type: ignore" was added in several places to avoid mypy complaining about patching functions with MagicMock - - -@pytest.fixture(name="logger") -def logger_fixture() -> MagicMock: - return MagicMock() - - -@pytest.fixture(name="cumulio_client") -def cumulio_client_fixture(logger: MagicMock) -> CumulioClient: - # Create a mock configuration dictionary - config = { - "api_key": "123456", - "api_token": "abcdef", - "api_host": "https://api.cumul.io", - } - # Initialize a CumulioClient object with the mock configuration for the Cumulio class - with patch("destination_cumulio.client.Cumulio", MagicMock()): - return CumulioClient(config, logger) - - -@pytest.fixture(name="dummy_data") -def dummy_data_fixture() -> Mapping[str, Any]: - return { - "data": [ - [ - "Text value 1", - 1, - "2022-01-01T00:00:00.000Z", - ], - ["Text value 2", 2, "2022-02-01T00:00:00.000Z"], - ["Text value 3", 3, "2022-03-01T00:00:00.000Z"], - ], - "columns": ["Text column", "Numeric column", "Datetime column"], - } - - -# tests for batch_write method - - -def test_batch_write_append_empty_write_buffer(cumulio_client: CumulioClient): - cumulio_client._get_dataset_id_from_stream_name = MagicMock(return_value="dataset_id") # type: ignore - cumulio_client._push_batch_to_new_dataset = MagicMock() # type: ignore - cumulio_client._push_batch_to_existing_dataset = MagicMock() # type: ignore - - cumulio_client.batch_write( - stream_name="test-stream", - write_buffer=[], - column_headers=["test-column"], - is_in_overwrite_sync_mode=False, - is_first_batch=True, - update_metadata=True, - ) - - cumulio_client._get_dataset_id_from_stream_name.assert_not_called() - 
cumulio_client._push_batch_to_new_dataset.assert_not_called() - cumulio_client._push_batch_to_existing_dataset.assert_not_called() - - cumulio_client.batch_write( - stream_name="test-stream", - write_buffer=[[]], - column_headers=["test-column"], - is_in_overwrite_sync_mode=False, - is_first_batch=True, - update_metadata=True, - ) - - cumulio_client._get_dataset_id_from_stream_name.assert_not_called() - cumulio_client._push_batch_to_new_dataset.assert_not_called() - cumulio_client._push_batch_to_existing_dataset.assert_not_called() - - -def test_batch_write_append_no_existing_dataset(cumulio_client: CumulioClient, dummy_data: Mapping[str, Any]): - cumulio_client.client.get = MagicMock(return_value={"count": 0, "Rows": []}) - cumulio_client._push_batch_to_new_dataset = MagicMock() # type: ignore - cumulio_client._push_batch_to_existing_dataset = MagicMock() # type: ignore - - stream_name = "test-stream" - - cumulio_client.batch_write( - stream_name=stream_name, - write_buffer=dummy_data["data"], - column_headers=dummy_data["columns"], - is_in_overwrite_sync_mode=False, - is_first_batch=True, - update_metadata=True, - ) - - expected_properties = { - "where": {"type": "dataset"}, - "attributes": ["id", "name"], - "include": [ - { - "model": "Tag", - "where": {"tag": cumulio_client.TAG_PREFIX + stream_name}, - "attributes": ["id", "tag"], - "jointype": "inner", - } - ], - } - - cumulio_client.client.get.assert_called_once_with("securable", expected_properties) - - cumulio_client._push_batch_to_existing_dataset.assert_not_called() - - cumulio_client._push_batch_to_new_dataset.assert_called_once_with(stream_name, dummy_data["data"], dummy_data["columns"]) - - -def test_batch_write_existing_dataset_no_first_batch_replace(cumulio_client: CumulioClient, dummy_data: Mapping[str, Any]): - cumulio_client._get_dataset_id_from_stream_name = MagicMock(return_value="dataset_id") # type: ignore - cumulio_client._push_batch_to_new_dataset = MagicMock() # type: ignore - cumulio_client._push_batch_to_existing_dataset = MagicMock() # type: ignore - cumulio_client._dataset_contains_replace_tag = MagicMock(return_value=False) # type: ignore - - stream_name = "test-stream" - - cumulio_client.batch_write( - stream_name=stream_name, - write_buffer=dummy_data["data"], - column_headers=dummy_data["columns"], - is_in_overwrite_sync_mode=False, - is_first_batch=True, - update_metadata=True, - ) - cumulio_client._push_batch_to_new_dataset.assert_not_called() - cumulio_client._dataset_contains_replace_tag.assert_called_once_with("dataset_id") - cumulio_client._push_batch_to_existing_dataset.assert_called_once_with( - "dataset_id", dummy_data["data"], dummy_data["columns"], False, True - ) - - -def test_batch_write_existing_dataset_first_batch_replace_overwrite_mode(cumulio_client: CumulioClient, dummy_data: Mapping[str, Any]): - cumulio_client._get_dataset_id_from_stream_name = MagicMock(return_value="dataset_id") # type: ignore - cumulio_client._push_batch_to_new_dataset = MagicMock() # type: ignore - cumulio_client._push_batch_to_existing_dataset = MagicMock() # type: ignore - cumulio_client._dataset_contains_replace_tag = MagicMock(return_value=False) # type: ignore - - stream_name = "test-stream" - - cumulio_client.batch_write( - stream_name=stream_name, - write_buffer=dummy_data["data"], - column_headers=dummy_data["columns"], - is_in_overwrite_sync_mode=True, - is_first_batch=True, - update_metadata=True, - ) - cumulio_client._push_batch_to_new_dataset.assert_not_called() - 
cumulio_client._dataset_contains_replace_tag.assert_called_once_with("dataset_id") - cumulio_client._push_batch_to_existing_dataset.assert_called_once_with( - "dataset_id", dummy_data["data"], dummy_data["columns"], True, True - ) - - -def test_batch_write_existing_dataset_first_batch_replace_tag(cumulio_client: CumulioClient, dummy_data: Mapping[str, Any]): - cumulio_client._get_dataset_id_from_stream_name = MagicMock(return_value="dataset_id") # type: ignore - cumulio_client._push_batch_to_new_dataset = MagicMock() # type: ignore - cumulio_client._push_batch_to_existing_dataset = MagicMock() # type: ignore - cumulio_client._dataset_contains_replace_tag = MagicMock(return_value=True) # type: ignore - - stream_name = "test-stream" - - cumulio_client.batch_write( - stream_name=stream_name, - write_buffer=dummy_data["data"], - column_headers=dummy_data["columns"], - is_in_overwrite_sync_mode=False, - is_first_batch=True, - update_metadata=True, - ) - cumulio_client._push_batch_to_new_dataset.assert_not_called() - cumulio_client._dataset_contains_replace_tag.assert_called_once_with("dataset_id") - cumulio_client._push_batch_to_existing_dataset.assert_called_once_with( - "dataset_id", dummy_data["data"], dummy_data["columns"], True, True - ) - - -def test_batch_write_existing_dataset_non_first_batch(cumulio_client: CumulioClient, dummy_data: Mapping[str, Any]): - cumulio_client._get_dataset_id_from_stream_name = MagicMock(return_value="dataset_id") # type: ignore - cumulio_client._push_batch_to_new_dataset = MagicMock() # type: ignore - cumulio_client._push_batch_to_existing_dataset = MagicMock() # type: ignore - cumulio_client._dataset_contains_replace_tag = MagicMock(return_value=True) # type: ignore - - stream_name = "test-stream" - - cumulio_client.batch_write( - stream_name=stream_name, - write_buffer=dummy_data["data"], - column_headers=dummy_data["columns"], - is_in_overwrite_sync_mode=True, - is_first_batch=False, - update_metadata=True, - ) - cumulio_client._push_batch_to_new_dataset.assert_not_called() - cumulio_client._dataset_contains_replace_tag.assert_called_once_with("dataset_id") - cumulio_client._push_batch_to_existing_dataset.assert_called_once_with( - "dataset_id", dummy_data["data"], dummy_data["columns"], False, True - ) - - -# tests for test_api_token method - - -def test_api_token_unknown_combination(cumulio_client: CumulioClient): - """ "Test that the test_api_token method correctly throws an error upon an invalid combination""" - cumulio_client.client.get = MagicMock(return_value={"count": 0}) - with pytest.raises(Exception): - cumulio_client.test_api_token() - - -def test_api_token_api_call(cumulio_client: CumulioClient): - """ "Test that the test_api_token method makes an API request to the authorization endpoint""" - cumulio_client.client.get = MagicMock(return_value={"count": 1}) - cumulio_client.test_api_token() - cumulio_client.client.get.assert_called_with("authorization", {"where": {"type": "api"}}) - - -def test_test_data_push_method(cumulio_client: CumulioClient, dummy_data: Mapping[str, Any]): - """ "Test that the test_data_push method deletes the dataset afterwards""" - cumulio_client.batch_write = MagicMock() # type: ignore - cumulio_client.delete_dataset = MagicMock() # type: ignore - - stream_name = "test-stream" - - cumulio_client.test_data_push(stream_name, dummy_data["data"], dummy_data["columns"]) - - cumulio_client.delete_dataset.assert_called_once_with("test-stream") - - -# tests for delete_dataset method - - -def 
test_delete_dataset_no_dataset_found(cumulio_client: CumulioClient): - cumulio_client.client.delete = MagicMock() - cumulio_client._get_dataset_id_from_stream_name = MagicMock(return_value=None) # type: ignore - - cumulio_client.delete_dataset("stream_name") - - # assert that the _get_dataset_id_from_stream_name method was called once with the correct arguments - cumulio_client._get_dataset_id_from_stream_name.assert_called_once_with("stream_name") - - # assert that the client.delete method is not called as no dataset was found - cumulio_client.client.delete.assert_not_called() - - -def test_delete_dataset_dataset_found(cumulio_client: CumulioClient): - cumulio_client.client.delete = MagicMock() - cumulio_client._get_dataset_id_from_stream_name = MagicMock( # type: ignore - return_value="dataset_id" - ) # type: ignore - - cumulio_client.delete_dataset("stream_name") - - # assert that the _get_dataset_id_from_stream_name method was called once with the correct arguments - cumulio_client._get_dataset_id_from_stream_name.assert_called_once_with("stream_name") - - # assert that the client.delete method was called once with the correct arguments - cumulio_client.client.delete.assert_called_once_with("securable", "dataset_id") - - -# tests for get_ordered_columns method - - -def test_get_ordered_columns_dataset_not_created(cumulio_client: CumulioClient): - cumulio_client.get_dataset_and_columns_from_stream_name = MagicMock(return_value=None) # type: ignore - result = cumulio_client.get_ordered_columns("stream_name") - assert result == [] - - -def test_get_ordered_columns_same_order(cumulio_client: CumulioClient): - cumulio_dataset_and_columns = { - "id": "dataset_id", - "columns": [ - {"source_name": "column1", "order": 2}, - {"source_name": "column2", "order": 1}, - ], - } - cumulio_client.get_dataset_and_columns_from_stream_name = MagicMock(return_value=cumulio_dataset_and_columns) # type: ignore - result = cumulio_client.get_ordered_columns("stream_name") - assert result == ["column2", "column1"] - - -# tests for _push_batch_to_new_dataset method - - -def test_push_batch_to_new_dataset(cumulio_client: CumulioClient, dummy_data: Mapping[str, Any]): - cumulio_client.client.create = MagicMock(return_value={"rows": [{"id": "new_dataset_id"}]}) - cumulio_client._associate_tag_dataset_id = MagicMock() # type: ignore - - stream_name = "test_stream" - - expected_request_properties = { - "type": "create", - "data": dummy_data["data"], - "options": { - "header": dummy_data["columns"], - "update_metadata": True, - "name": {"en": cumulio_client.INITIAL_DATASET_NAME_PREFIX + stream_name}, - }, - } - cumulio_client._push_batch_to_new_dataset(stream_name, dummy_data["data"], dummy_data["columns"]) - cumulio_client.client.create.assert_called_once_with("data", expected_request_properties) - cumulio_client._associate_tag_dataset_id.assert_called_once_with(stream_name, "new_dataset_id") - - -def test_push_batch_to_new_dataset_all_retries_error(cumulio_client: CumulioClient, dummy_data: Mapping[str, Any]): - cumulio_client.client.create = MagicMock(side_effect=RuntimeError("Internal Server Error")) - stream_name = "test_stream" - - with patch("destination_cumulio.client.time", MagicMock()): - with pytest.raises(Exception): - cumulio_client._push_batch_to_new_dataset(stream_name, dummy_data["data"], dummy_data["columns"]) - - -def test_push_batch_to_new_dataset_first_try_fails(cumulio_client: CumulioClient, dummy_data: Mapping[str, Any]): - effects = iter([RuntimeError("Internal Server Error")]) - - def 
side_effect(*_): - try: - raise next(effects) - except StopIteration: - return {"rows": [{"id": "new_dataset_id"}]} - - cumulio_client.client.create = MagicMock(side_effect=side_effect) - cumulio_client._associate_tag_dataset_id = MagicMock() # type: ignore - - stream_name = "test_stream" - - expected_request_properties = { - "type": "create", - "data": dummy_data["data"], - "options": { - "header": dummy_data["columns"], - "update_metadata": True, - "name": {"en": cumulio_client.INITIAL_DATASET_NAME_PREFIX + stream_name}, - }, - } - - with patch("destination_cumulio.client.time", MagicMock()): - cumulio_client._push_batch_to_new_dataset(stream_name, dummy_data["data"], dummy_data["columns"]) - cumulio_client.client.create.assert_called_with("data", expected_request_properties) - - assert cumulio_client.client.create.call_count == 2 - - cumulio_client._associate_tag_dataset_id.assert_called_once_with(stream_name, "new_dataset_id") - - -# tests for _push_batch_to_existing_dataset method - - -def test_push_batch_to_existing_dataset_all_retries_error(cumulio_client: CumulioClient, dummy_data: Mapping[str, Any]): - cumulio_client.client.create = MagicMock(side_effect=RuntimeError("Internal Server Error")) - cumulio_client._remove_replace_tag_dataset_id_association = MagicMock() # type: ignore - - dataset_id = "dataset_id" - - with patch("destination_cumulio.client.time", MagicMock()): - with pytest.raises(Exception): - cumulio_client._push_batch_to_existing_dataset(dataset_id, dummy_data["data"], dummy_data["columns"], False, True) - - -def test_push_batch_to_existing_dataset_first_try_fails(cumulio_client: CumulioClient, dummy_data: Mapping[str, Any]): - effects = iter([RuntimeError("Internal Server Error")]) - - def side_effect(*_): - try: - raise next(effects) - except StopIteration: - return None - - cumulio_client.client.create = MagicMock(side_effect=side_effect) - cumulio_client._remove_replace_tag_dataset_id_association = MagicMock() # type: ignore - - dataset_id = "dataset_id" - - expected_request_properties = { - "type": "append", - "data": dummy_data["data"], - "securable_id": dataset_id, - "options": { - "header": dummy_data["columns"], - "update_metadata": True, - }, - } - - with patch("destination_cumulio.client.time", MagicMock()): - cumulio_client._push_batch_to_existing_dataset(dataset_id, dummy_data["data"], dummy_data["columns"], False, True) - cumulio_client.client.create.assert_called_with("data", expected_request_properties) - - assert cumulio_client.client.create.call_count == 2 - - cumulio_client._remove_replace_tag_dataset_id_association.assert_not_called() - - -def test_push_batch_to_existing_dataset_no_first_batch_replace(cumulio_client: CumulioClient, dummy_data: Mapping[str, Any]): - cumulio_client.client.create = MagicMock() - cumulio_client._remove_replace_tag_dataset_id_association = MagicMock() # type: ignore - - dataset_id = "dataset_id" - - expected_request_properties = { - "type": "append", - "data": dummy_data["data"], - "securable_id": dataset_id, - "options": { - "header": dummy_data["columns"], - "update_metadata": True, - }, - } - - cumulio_client._push_batch_to_existing_dataset(dataset_id, dummy_data["data"], dummy_data["columns"], False, True) - cumulio_client.client.create.assert_called_once_with("data", expected_request_properties) - cumulio_client._remove_replace_tag_dataset_id_association.assert_not_called() - - -def test_push_batch_to_existing_dataset_first_batch_replace(cumulio_client: CumulioClient, dummy_data: Mapping[str, Any]): - 
-def test_push_batch_to_existing_dataset_first_batch_replace(cumulio_client: CumulioClient, dummy_data: Mapping[str, Any]):
-    cumulio_client.client.create = MagicMock()
-    cumulio_client._remove_replace_tag_dataset_id_association = MagicMock()  # type: ignore
-
-    dataset_id = "dataset_id"
-
-    expected_request_properties = {
-        "type": "replace",
-        "data": dummy_data["data"],
-        "securable_id": dataset_id,
-        "options": {
-            "header": dummy_data["columns"],
-            "update_metadata": True,
-        },
-    }
-
-    cumulio_client._push_batch_to_existing_dataset(dataset_id, dummy_data["data"], dummy_data["columns"], True, True)
-    cumulio_client.client.create.assert_called_once_with("data", expected_request_properties)
-    cumulio_client._remove_replace_tag_dataset_id_association.assert_called_once_with(dataset_id)
-
-
-# tests for get_dataset_and_columns_from_stream_name method
-
-
-def test_get_dataset_and_columns_from_stream_name_no_dataset(
-    cumulio_client: CumulioClient,
-):
-    cumulio_dataset_and_columns_result = {"count": 0, "rows": []}
-
-    # Test when no dataset is found
-    cumulio_client.client.get = MagicMock(return_value=cumulio_dataset_and_columns_result)
-    result = cumulio_client.get_dataset_and_columns_from_stream_name("test_stream")
-    assert result is None
-
-
-def test_get_dataset_and_columns_from_stream_name_single_existing_dataset(
-    cumulio_client: CumulioClient,
-):
-    cumulio_dataset_and_columns_result: Mapping[str, Any] = {
-        "count": 1,
-        "rows": [
-            {
-                "id": "dataset_id",
-                "columns": [
-                    {"source_name": "column1", "order": 2},
-                    {"source_name": "column2", "order": 1},
-                ],
-            }
-        ],
-    }
-    # Test when dataset is found
-    cumulio_client.client.get = MagicMock(return_value=cumulio_dataset_and_columns_result)
-    result = cumulio_client.get_dataset_and_columns_from_stream_name("test_stream")
-    assert result["id"] == cumulio_dataset_and_columns_result["rows"][0]["id"]
-    assert result["columns"] == cumulio_dataset_and_columns_result["rows"][0]["columns"]
-
-
-def test_get_dataset_and_columns_from_stream_name_multiple_existing_datasets(
-    cumulio_client: CumulioClient,
-):
-    """Tests whether an exception is thrown when multiple datasets are returned for a stream name"""
-    cumulio_dataset_and_columns_result = {
-        "count": 2,
-        "rows": [
-            {
-                "id": "dataset_id_1",
-                "columns": [
-                    {"source_name": "column1", "order": 2},
-                    {"source_name": "column2", "order": 1},
-                ],
-            },
-            {
-                "id": "dataset_id_2",
-                "columns": [
-                    {"source_name": "column1", "order": 1},
-                    {"source_name": "column2", "order": 2},
-                ],
-            },
-        ],
-    }
-    # Test when multiple datasets are found
-    cumulio_client.client.get = MagicMock(return_value=cumulio_dataset_and_columns_result)
-    with pytest.raises(Exception):
-        cumulio_client.get_dataset_and_columns_from_stream_name("test_stream")
-
-
-# tests for the set_replace_tag_on_dataset method
-
-
-def test_set_replace_tag_on_dataset_no_dataset_found(cumulio_client: CumulioClient):
-    cumulio_client._get_dataset_id_from_stream_name = MagicMock(return_value=None)  # type: ignore
-    cumulio_client._associate_tag_dataset_id = MagicMock()  # type: ignore
-
-    cumulio_client.set_replace_tag_on_dataset("stream_name")
-
-    cumulio_client._get_dataset_id_from_stream_name.assert_called_once_with("stream_name")
-    cumulio_client._associate_tag_dataset_id.assert_not_called()
-
-
-def test_set_replace_tag_on_dataset_existing_dataset(cumulio_client: CumulioClient):
-    cumulio_client._get_dataset_id_from_stream_name = MagicMock(return_value="dataset_id")  # type: ignore
-    cumulio_client._associate_tag_dataset_id = MagicMock()  # type: ignore
-
-    cumulio_client.set_replace_tag_on_dataset("stream_name")
-
cumulio_client._get_dataset_id_from_stream_name.assert_called_once_with("stream_name") - cumulio_client._associate_tag_dataset_id.assert_called_once_with(cumulio_client.REPLACE_TAG, "dataset_id") - - -# tests for _dataset_contains_replace_tag method - - -def test_dataset_contains_replace_tag(cumulio_client: CumulioClient): - dataset_id = "123" - cumulio_client.client.get = MagicMock(return_value={"count": 1}) - assert cumulio_client._dataset_contains_replace_tag(dataset_id) is True - - -def test_dataset_does_not_contain_replace_tag(cumulio_client: CumulioClient): - dataset_id = "123" - cumulio_client.client.get = MagicMock(return_value={"count": 0}) - assert cumulio_client._dataset_contains_replace_tag(dataset_id) is False - - -# tests for _get_dataset_id_from_stream_name method - - -def test_get_dataset_id_from_stream_name_no_dataset(cumulio_client: CumulioClient): - cumulio_client.client.get.return_value = {"count": 0, "rows": []} - dataset_id = cumulio_client._get_dataset_id_from_stream_name("test_stream") - assert dataset_id is None - - -def test_get_dataset_id_from_stream_name_single_dataset(cumulio_client: CumulioClient): - cumulio_client.client.get.return_value = { - "count": 1, - "rows": [{"id": "dataset_id", "name": "Test dataset"}], - } - dataset_id = cumulio_client._get_dataset_id_from_stream_name("test_stream") - assert dataset_id == "dataset_id" - - -def test_get_dataset_id_from_stream_name_multiple_datasets( - cumulio_client: CumulioClient, -): - """Tests whether an exception is thrown when multiple datasets are returned for a stream name""" - cumulio_client.client.get.return_value = { - "count": 2, - "rows": [ - {"id": "dataset_id_1", "name": "Test dataset 1"}, - {"id": "dataset_id_2", "name": "Test dataset 2"}, - ], - } - with pytest.raises(Exception): - cumulio_client._get_dataset_id_from_stream_name("test_stream") - - -# tests for _associate_tag_dataset_id method - - -def test_associate_tag_dataset_id_no_tag_found(cumulio_client: CumulioClient): - cumulio_client._get_tag_id = MagicMock(return_value=None) # type: ignore - cumulio_client._create_and_associate_stream_name_tag_with_dataset_id = MagicMock() # type: ignore - cumulio_client._associate_tag_with_dataset_id = MagicMock() # type: ignore - - cumulio_client._associate_tag_dataset_id("test_stream", "test_dataset_id") - - cumulio_client._create_and_associate_stream_name_tag_with_dataset_id.assert_called_once_with("test_stream", "test_dataset_id") - cumulio_client._associate_tag_with_dataset_id.assert_not_called() - - -def test_associate_tag_dataset_id_tag_found(cumulio_client: CumulioClient): - cumulio_client._get_tag_id = MagicMock(return_value="tag_id") # type: ignore - cumulio_client._create_and_associate_stream_name_tag_with_dataset_id = MagicMock() # type: ignore - cumulio_client._associate_tag_with_dataset_id = MagicMock() # type: ignore - - cumulio_client._associate_tag_dataset_id("test_stream", "test_dataset_id") - - cumulio_client._associate_tag_with_dataset_id.assert_called_once_with("tag_id", "test_dataset_id") - cumulio_client._create_and_associate_stream_name_tag_with_dataset_id.assert_not_called() - - -# tests for _get_tag_id method - - -def test_get_tag_id_no_tag_found(cumulio_client: CumulioClient): - tag_api_response = {"count": 0, "rows": []} - cumulio_client.client.get = MagicMock(return_value=tag_api_response) - - result = cumulio_client._get_tag_id("test_stream") - - cumulio_client.client.get.assert_called_once_with("tag", ANY) - assert result is None - - -def 
test_get_tag_id_tag_found(cumulio_client: CumulioClient): - tag_api_response: Mapping[str, Any] = {"count": 1, "rows": [{"id": "test_tag_id"}]} - cumulio_client.client.get = MagicMock(return_value=tag_api_response) - - result = cumulio_client._get_tag_id("test_stream") - - cumulio_client.client.get.assert_called_once_with("tag", ANY) - assert result == tag_api_response["rows"][0]["id"] diff --git a/airbyte-integrations/connectors/destination-cumulio/unit_tests/test_destination.py b/airbyte-integrations/connectors/destination-cumulio/unit_tests/test_destination.py deleted file mode 100644 index 4805fb51ecf5..000000000000 --- a/airbyte-integrations/connectors/destination-cumulio/unit_tests/test_destination.py +++ /dev/null @@ -1,155 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from datetime import datetime -from logging import Logger, getLogger -from typing import Any, Mapping -from unittest.mock import MagicMock, call, patch - -import pytest -from airbyte_cdk.models import ( - AirbyteMessage, - AirbyteRecordMessage, - AirbyteStateMessage, - AirbyteStream, - ConfiguredAirbyteCatalog, - ConfiguredAirbyteStream, - DestinationSyncMode, - SyncMode, - Type, -) -from destination_cumulio.destination import DestinationCumulio - - -@pytest.fixture(name="logger") -def logger_fixture() -> Logger: - return getLogger("airbyte") - - -@pytest.fixture(name="config") -def config_fixture() -> Mapping[str, Any]: - return { - "api_key": "123abc", - "api_token": "456def", - "api_host": "https://api.cumul.io", - } - - -@pytest.fixture(name="configured_catalog") -def configured_catalog_fixture() -> ConfiguredAirbyteCatalog: - stream_schema = { - "type": "object", - "properties": { - "string_column": {"type": "integer"}, - "int_column": {"type": "integer"}, - }, - } - - append_stream = ConfiguredAirbyteStream( - stream=AirbyteStream( - name="append_stream", - json_schema=stream_schema, - supported_sync_modes=[SyncMode.incremental], - ), - sync_mode=SyncMode.incremental, - destination_sync_mode=DestinationSyncMode.append, - ) - - overwrite_stream = ConfiguredAirbyteStream( - stream=AirbyteStream( - name="overwrite_stream", - json_schema=stream_schema, - supported_sync_modes=[SyncMode.incremental], - ), - sync_mode=SyncMode.incremental, - destination_sync_mode=DestinationSyncMode.overwrite, - ) - - return ConfiguredAirbyteCatalog(streams=[append_stream, overwrite_stream]) - - -@pytest.fixture(name="airbyte_message_1") -def airbyte_message_1_fixture() -> AirbyteMessage: - return AirbyteMessage( - type=Type.RECORD, - record=AirbyteRecordMessage( - stream="append_stream", - data={"string_column": "value_1", "int_column": 1}, - emitted_at=int(datetime.now().timestamp()) * 1000, - ), - ) - - -@pytest.fixture(name="airbyte_message_2") -def airbyte_message_2_fixture() -> AirbyteMessage: - return AirbyteMessage( - type=Type.RECORD, - record=AirbyteRecordMessage( - stream="overwrite_stream", - data={"string_column": "value_2", "int_column": 2}, - emitted_at=int(datetime.now().timestamp()) * 1000, - ), - ) - - -@pytest.fixture(name="airbyte_state_message") -def airbyte_state_message_fixture() -> AirbyteMessage: - return AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(data={})) - - -def test_check(config: Mapping[str, Any], logger: MagicMock): - with patch("destination_cumulio.destination.CumulioClient") as cumulio_client: - destination_cumulio = DestinationCumulio() - destination_cumulio.check(logger, config) - assert cumulio_client.mock_calls == [ - call(config, logger), - 
call().test_api_token(), - ] - - -def test_write_no_input_messages( - config: Mapping[str, Any], - configured_catalog: ConfiguredAirbyteCatalog, - airbyte_message_1: AirbyteMessage, - airbyte_message_2: AirbyteMessage, - airbyte_state_message: AirbyteMessage, - logger: MagicMock, -): - with patch("destination_cumulio.destination.CumulioWriter") as cumulio_writer: - destination_cumulio = DestinationCumulio() - - input_messages = [airbyte_state_message] - result = list(destination_cumulio.write(config, configured_catalog, input_messages)) - assert result == [airbyte_state_message] - - assert cumulio_writer.mock_calls == [ - call(config, configured_catalog, logger), - call().delete_stream_entries("overwrite_stream"), - call().flush_all(), # The first flush_all is called before yielding the state message - call().flush_all(), # The second flush_all is called after going through all input messages - ] - - -def test_write( - config: Mapping[str, Any], - configured_catalog: ConfiguredAirbyteCatalog, - airbyte_message_1: AirbyteMessage, - airbyte_message_2: AirbyteMessage, - airbyte_state_message: AirbyteMessage, - logger: MagicMock, -): - with patch("destination_cumulio.destination.CumulioWriter") as cumulio_writer: - input_messages = [airbyte_message_1, airbyte_message_2, airbyte_state_message] - destination_cumulio = DestinationCumulio() - result = list(destination_cumulio.write(config, configured_catalog, input_messages)) - assert result == [airbyte_state_message] - assert cumulio_writer.mock_calls == [ - call(config, configured_catalog, logger), - call().delete_stream_entries("overwrite_stream"), - call().queue_write_operation("append_stream", {"string_column": "value_1", "int_column": 1}), - call().queue_write_operation("overwrite_stream", {"string_column": "value_2", "int_column": 2}), - call().flush_all(), # The first flush_all is called before yielding the state message - call().flush_all(), # The second flush_all is called after going through all input messages - ] diff --git a/airbyte-integrations/connectors/destination-cumulio/unit_tests/test_writer.py b/airbyte-integrations/connectors/destination-cumulio/unit_tests/test_writer.py deleted file mode 100644 index ac921c7ef5c4..000000000000 --- a/airbyte-integrations/connectors/destination-cumulio/unit_tests/test_writer.py +++ /dev/null @@ -1,512 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - - -import unittest -from typing import Any, Mapping -from unittest.mock import MagicMock, patch - -import pytest -from airbyte_cdk.models import AirbyteStream, ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, DestinationSyncMode, SyncMode -from destination_cumulio.writer import CumulioWriter - - -@pytest.fixture(name="logger") -def logger_fixture() -> MagicMock: - return MagicMock() - - -@pytest.fixture(name="config") -def config_fixture() -> Mapping[str, Any]: - return { - "api_key": "123abc", - "api_token": "456def", - "api_host": "https://api.cumul.io", - } - - -@pytest.fixture(name="configured_catalog") -def configured_catalog_fixture() -> ConfiguredAirbyteCatalog: - orders_stream_schema = { - "type": "object", - "properties": { - "order_id": {"type": "integer"}, - "amount": {"type": "integer"}, - "customer_id": {"type": "string"}, - }, - } - products_stream_schema = { - "type": "object", - "properties": {"product_id": {"type": "integer"}}, - } - - orders_append_stream = ConfiguredAirbyteStream( - stream=AirbyteStream( - name="orders", - json_schema=orders_stream_schema, - supported_sync_modes=[SyncMode.incremental], - ), - sync_mode=SyncMode.incremental, - destination_sync_mode=DestinationSyncMode.append, - ) - - products_overwrite_stream = ConfiguredAirbyteStream( - stream=AirbyteStream( - name="products", - json_schema=products_stream_schema, - supported_sync_modes=[SyncMode.incremental], - ), - sync_mode=SyncMode.incremental, - destination_sync_mode=DestinationSyncMode.overwrite, - ) - - return ConfiguredAirbyteCatalog(streams=[orders_append_stream, products_overwrite_stream]) - - -@pytest.fixture(name="writer") -def writer_no_existing_cumulio_columns( - config: Mapping[str, Any], - configured_catalog: ConfiguredAirbyteCatalog, - logger: MagicMock, -) -> CumulioWriter: - """Returns a CumulioWriter using MagicMock, and mocking the return_value of all used CumulioClient methods.""" - with patch("destination_cumulio.writer.CumulioClient", MagicMock()) as cumulio_client_mock: - # Mock get_ordered_columns to return no existing Cumul.io columns (dataset hasn't been created yet --> first sync) - cumulio_client_mock.return_value.get_ordered_columns.return_value = [] - # cumulio_client_mock.return_value.batch_write.return_value = None - # cumulio_client_mock.return_value.set_replace_tag_on_dataset.return_value = None - return CumulioWriter(config, configured_catalog, logger) - - -def test_small_enough_data_point_limit(writer: CumulioWriter): - """Tests whether the FLUSH_INTERVAL variable is smaller than the maximum amount of data points Cumul.io supports.""" - assert writer.FLUSH_INTERVAL <= 10000 - - -def test_init(writer: CumulioWriter): - """Tests whether CumulioWriter is correctly initialized for streams with no known Cumulio dataset (i.e. 
first sync for each stream).""" - - # Assert each stream is correctly initializing writers - assert "orders" in writer.writers - assert "products" in writer.writers - - # Assert each stream is correctly initializing empty write buffer - assert len(writer.writers["orders"]["write_buffer"]) == 0 - assert len(writer.writers["products"]["write_buffer"]) == 0 - - # Assert each stream is correctly initializing is_in_overwrite_sync_mode - assert writer.writers["orders"]["is_in_overwrite_sync_mode"] is False - assert writer.writers["products"]["is_in_overwrite_sync_mode"] is True - - # Assert each stream is correctly initializing is_first_batch to True - assert writer.writers["orders"]["is_first_batch"] is True - assert writer.writers["products"]["is_first_batch"] is True - - # Assert each stream is correctly initializing update_metadata (due to no columns from Cumul.io in this writer, both are True) - assert writer.writers["orders"]["update_metadata"] is True - assert writer.writers["products"]["update_metadata"] is True - - -def test_transform_data(writer: CumulioWriter): - case = unittest.TestCase() - - data = {"order_id": 1, "amount": 100.0, "customer_id": "cust_1"} - transformed_data = writer.transform_data("orders", data) - case.assertCountEqual(transformed_data, ["cust_1", 1, 100.0]) - - -def test_transform_data_missing_data(writer: CumulioWriter): - case = unittest.TestCase() - - missing_data = {"order_id": 1, "customer_id": "cust_1"} - transformed_data = writer.transform_data("orders", missing_data) - case.assertCountEqual(transformed_data, ["cust_1", 1, None]) - - -def test_transform_data_additional_data(writer: CumulioWriter): - case = unittest.TestCase() - - additional_data = { - "order_id": 1, - "amount": 100.0, - "customer_id": "cust_1", - "custmer_name": "Customer 1", - } - transformed_data = writer.transform_data("orders", additional_data) - case.assertCountEqual(transformed_data, ["cust_1", 1, 100.0]) - - -def test_transform_data_bool_data(writer: CumulioWriter): - case = unittest.TestCase() - - bool_data = {"order_id": 1, "amount": 100.0, "customer_id": True} - transformed_data = writer.transform_data("orders", bool_data) - case.assertCountEqual(transformed_data, ["true", 1, 100.0]) - - -def test_transform_data_dict_data(writer: CumulioWriter): - case = unittest.TestCase() - - dict_data = {"order_id": 1, "amount": 100.0, "customer_id": {"key": "value"}} - transformed_data = writer.transform_data("orders", dict_data) - case.assertCountEqual(transformed_data, ['{"key": "value"}', 1, 100.0]) - - -def test_transform_data_arr_data(writer: CumulioWriter): - case = unittest.TestCase() - - arr_data = {"order_id": 1, "amount": 100.0, "customer_id": ["test1", "test2"]} - transformed_data = writer.transform_data("orders", arr_data) - case.assertCountEqual(transformed_data, ['["test1", "test2"]', 1, 100.0]) - - -def test_queue_write_operation(writer: CumulioWriter): - # Set flush interval to max value to avoid flushing data - writer.FLUSH_INTERVAL = 10000 - - writer.client.batch_write = MagicMock() # type: ignore - - case = unittest.TestCase() - - order_data = {"order_id": 1, "amount": 100.0, "customer_id": "customer_1"} - writer.queue_write_operation("orders", order_data) - - # Assert that write_buffer from the orders stream contains a single value - assert len(writer.writers["orders"]["write_buffer"]) == 1 - case.assertCountEqual(writer.writers["orders"]["write_buffer"][0], ["customer_1", 1, 100.0]) - - -def test_queue_write_operation_two_streams(writer: CumulioWriter): - # Set flush 
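The `transform_data` tests above pin down how non-scalar values are coerced before upload. A condensed sketch of those rules, inferred from the assertions rather than copied from the connector:

```python
import json

def coerce_value(value):
    # Booleans become lowercase strings, dicts and lists are JSON-encoded,
    # and plain scalars pass through untouched. Missing columns are filled
    # with None, as the missing-data test above asserts.
    if isinstance(value, bool):
        return "true" if value else "false"
    if isinstance(value, (dict, list)):
        return json.dumps(value)
    return value
```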
-def test_queue_write_operation_two_streams(writer: CumulioWriter):
-    # Set flush interval to max value to avoid flushing data
-    writer.FLUSH_INTERVAL = 10000
-
-    writer.client.batch_write = MagicMock()  # type: ignore
-
-    order_data = {"order_id": 1, "amount": 100.0, "customer_id": "customer_1"}
-    writer.queue_write_operation("orders", order_data)
-
-    # Assert that write_buffer from the orders stream contains a single value
-    assert len(writer.writers["orders"]["write_buffer"]) == 1
-
-    product_data = {"product_id": 1}
-    writer.queue_write_operation("products", product_data)
-
-    # Assert that the orders write_buffer isn't influenced by write operations from the products stream
-    assert len(writer.writers["orders"]["write_buffer"]) == 1
-
-    # Assert that write_buffer from the products stream contains a single value
-    assert len(writer.writers["products"]["write_buffer"]) == 1
-    assert writer.writers["products"]["write_buffer"] == [[1]]
-
-    product_data = {"product_id": 2}
-    writer.queue_write_operation("products", product_data)
-    # Assert that write_buffer from the products stream contains two values
-    assert writer.writers["products"]["write_buffer"] == [[1], [2]]
-
-
-def test_queue_write_operation_non_existing_stream(writer: CumulioWriter):
-    # Set flush interval to max value to avoid flushing data
-    writer.FLUSH_INTERVAL = 10000
-
-    writer.client.batch_write = MagicMock()  # type: ignore
-
-    with pytest.raises(Exception):
-        # Assert that an Exception is thrown upon trying to write to a non-existing stream
-        writer.queue_write_operation("non_existing_stream", {"column": "value"})
-
-
-def test_flush(writer: CumulioWriter):
-    writer.client.batch_write = MagicMock()  # type: ignore
-
-    writer.writers["orders"]["write_buffer"] = [["customer_1", 1, 100.0]]
-    writer.flush("orders")
-    assert writer.writers["orders"]["write_buffer"] == []
-
-
-def test_queue_write_flush_operation(writer: CumulioWriter):
-    # Set flush interval to 2 to cause a flush after the second row has been added to the buffer
-    writer.FLUSH_INTERVAL = 2
-
-    writer.client.batch_write = MagicMock()  # type: ignore
-
-    product_data = {"product_id": 1}
-    writer.queue_write_operation("products", product_data)
-    assert writer.writers["products"]["write_buffer"] == [[1]]
-
-    product_data = {"product_id": 2}
-    writer.queue_write_operation("products", product_data)
-    assert writer.writers["products"]["write_buffer"] == []
-    assert writer.writers["products"]["is_first_batch"] is False
-
-    product_data = {"product_id": 3}
-    writer.queue_write_operation("products", product_data)
-    assert writer.writers["products"]["write_buffer"] == [[3]]
-
-
-def test_flush_all(writer: CumulioWriter):
-    writer.client.batch_write = MagicMock()  # type: ignore
-
-    writer.writers["orders"]["write_buffer"] = [["cust_1", 1, 100.0]]
-    writer.writers["products"]["write_buffer"] = [["cust_1", 1, 100.0]]
-    writer.flush_all()
-    assert writer.writers["orders"]["write_buffer"] == []
-    assert writer.writers["products"]["write_buffer"] == []
-
-
-def test_delete_stream_entries(writer: CumulioWriter):
-    writer.client.set_replace_tag_on_dataset = MagicMock()  # type: ignore
-    writer.delete_stream_entries("stream_name")
-    writer.client.set_replace_tag_on_dataset.assert_called_once_with("stream_name")
-
-
-def _get_cumulio_and_merged_columns(writer: CumulioWriter) -> Mapping[str, Any]:
-    if len(writer.writers) == 0:
-        raise Exception("No streams defined for writer")
-
-    result = {}
-
-    for stream_name in writer.writers:
-        cumulio_columns = writer.client.get_ordered_columns(stream_name)
-        merged_columns = writer.writers[stream_name]["column_headers"]
-        result[stream_name] = {
-            "cumulio_columns": cumulio_columns,
-            "merged_columns": merged_columns,
-        }
-    return result
-
-
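The flush tests above all revolve around one invariant: rows are buffered per stream and written out once `FLUSH_INTERVAL` rows have accumulated. A self-contained sketch of that pattern (illustrative class, not the real `CumulioWriter`):

```python
class BufferedStreamWriter:
    FLUSH_INTERVAL = 10000  # the tests assert this stays within Cumul.io's per-request data-point limit

    def __init__(self, client):
        self.client = client
        self.buffers = {}

    def queue(self, stream_name, row):
        # Buffer per stream; flush automatically once the interval is reached.
        buffer = self.buffers.setdefault(stream_name, [])
        buffer.append(row)
        if len(buffer) >= self.FLUSH_INTERVAL:
            self.client.batch_write(stream_name, buffer)
            self.buffers[stream_name] = []
```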
-@pytest.fixture
-def writer_existing_cumulio_columns(
-    config: Mapping[str, Any],
-    configured_catalog: ConfiguredAirbyteCatalog,
-    logger: MagicMock,
-) -> CumulioWriter:
-    """Returns a CumulioWriter whose mocked Airbyte stream catalog contains the same columns as those existing in Cumul.io."""
-    existing_cumulio_columns = {}
-    for configured_stream in configured_catalog.streams:
-        existing_cumulio_columns[configured_stream.stream.name] = [
-            column_name for column_name in configured_stream.stream.json_schema["properties"]
-        ]
-
-    def get_existing_cumulio_columns(stream_name):
-        return existing_cumulio_columns[stream_name]
-
-    with patch("destination_cumulio.writer.CumulioClient", MagicMock()) as cumulio_client_mock:
-        # Mock get_ordered_columns to return existing_cumulio_columns
-        cumulio_client_mock.return_value.get_ordered_columns = MagicMock(side_effect=get_existing_cumulio_columns)
-        return CumulioWriter(config, configured_catalog, logger)
-
-
-def test_init_existing_cumulio_columns(writer_existing_cumulio_columns: CumulioWriter):
-    """Tests whether each stream is correctly initializing update_metadata.
-    Due to identical columns in Cumul.io for this writer, both are False.
-    """
-    assert writer_existing_cumulio_columns.writers["orders"]["update_metadata"] is False
-    assert writer_existing_cumulio_columns.writers["products"]["update_metadata"] is False
-
-
-def test_equal_cumulio_and_merged_columns(
-    writer_existing_cumulio_columns: CumulioWriter,
-):
-    result = _get_cumulio_and_merged_columns(writer_existing_cumulio_columns)
-
-    for stream_name in result:
-        for index, column in enumerate(result[stream_name]["merged_columns"]):
-            # Assert that merged_columns are in the same order as columns defined on Cumul.io's side.
-            assert result[stream_name]["cumulio_columns"][index] == column["name"]
-
-
-def test_queue_write_operation_with_correct_data_order(
-    writer_existing_cumulio_columns: CumulioWriter,
-):
-    writer_existing_cumulio_columns.client.batch_write = MagicMock()  # type: ignore
-
-    result = _get_cumulio_and_merged_columns(writer_existing_cumulio_columns)
-    # Set flush interval to max value to avoid flushing data
-    writer_existing_cumulio_columns.FLUSH_INTERVAL = 10000
-
-    order_data = {"order_id": 1, "amount": 100.0, "customer_id": "cust_1"}
-    writer_existing_cumulio_columns.queue_write_operation("orders", order_data)
-    expected_data = []
-    for column in result["orders"]["merged_columns"]:
-        expected_data.append(order_data[column["name"]])
-    assert writer_existing_cumulio_columns.writers["orders"]["write_buffer"][0] == expected_data
-
-
-@pytest.fixture(name="configured_catalog_with_new_column")
-def configured_catalog_with_new_column_fixture() -> ConfiguredAirbyteCatalog:
-    """Creates a ConfiguredAirbyteCatalog that will be used to mock a new column."""
-    # The stream should have at least 2 schema properties (i.e. columns) defined.
- orders_stream_schema = { - "type": "object", - "properties": { - "order_id": {"type": "integer"}, - "amount": {"type": "integer"}, - "customer_id": {"type": "string"}, - "customer_name": {"type": "string"}, - }, - } - - orders_append_stream = ConfiguredAirbyteStream( - stream=AirbyteStream( - name="orders_append", - json_schema=orders_stream_schema, - supported_sync_modes=[SyncMode.incremental], - ), - sync_mode=SyncMode.incremental, - destination_sync_mode=DestinationSyncMode.append, - ) - - orders_overwrite_stream = ConfiguredAirbyteStream( - stream=AirbyteStream( - name="orders_overwrite", - json_schema=orders_stream_schema, - supported_sync_modes=[SyncMode.incremental], - ), - sync_mode=SyncMode.incremental, - destination_sync_mode=DestinationSyncMode.overwrite, - ) - - return ConfiguredAirbyteCatalog(streams=[orders_append_stream, orders_overwrite_stream]) - - -@pytest.fixture -def writer_new_airbyte_column( - config: Mapping[str, Any], - configured_catalog_with_new_column: ConfiguredAirbyteCatalog, - logger: MagicMock, -) -> CumulioWriter: - """This will return a CumulioWriter that mocks airbyte stream catalogs that contains one column that does not exist in Cumul.io.""" - existing_cumulio_columns = {} - for configured_stream in configured_catalog_with_new_column.streams: - columns = [column_name for column_name in configured_stream.stream.json_schema["properties"]] - # get rid of the second element to mimic a new column being defined in configured_stream - del columns[1] - existing_cumulio_columns[configured_stream.stream.name] = columns - - def get_existing_cumulio_columns(stream_name): - return existing_cumulio_columns[stream_name] - - with patch("destination_cumulio.writer.CumulioClient", MagicMock()) as cumulio_client_mock: - # Mock get_ordered_columns to return existing_cumulio_columns (which does not include one column defined in configured stream) - cumulio_client_mock.return_value.get_ordered_columns = MagicMock(side_effect=get_existing_cumulio_columns) - cumulio_client_mock.return_value.batch_writer.return_value = None - cumulio_client_mock.return_value.set_replace_tag_on_dataset.return_value = None - return CumulioWriter(config, configured_catalog_with_new_column, logger) - - -def test_init_new_airbyte_column(writer_new_airbyte_column: CumulioWriter): - """Tests whether each stream is correctly initializing update_metadata (due to new Column in Airbyte for this writer, both are True)""" - assert writer_new_airbyte_column.writers["orders_append"]["update_metadata"] is True - assert writer_new_airbyte_column.writers["orders_overwrite"]["update_metadata"] is True - - -def test_new_column_update_metadata(writer_new_airbyte_column: CumulioWriter): - """Tests whether Airbyte streams with at least one new column defined results in update_metadata, - to inform Cumul.io about new column data being pushed.""" - for stream_name in writer_new_airbyte_column.writers: - assert writer_new_airbyte_column.writers[stream_name]["update_metadata"] is True - - -def test_new_column_appended(writer_new_airbyte_column: CumulioWriter): - """Tests whether the Airbyte streams with one new column appends it at the end of the column list""" - result = _get_cumulio_and_merged_columns(writer_new_airbyte_column) - for stream_name in result: - assert len(result[stream_name]["merged_columns"]) == len(result[stream_name]["cumulio_columns"]) + 1 - for index, column in enumerate(result[stream_name]["cumulio_columns"]): - # Assert that merged_columns are in same order as columns defined on Cumul.io's side. 
- assert result[stream_name]["merged_columns"][index]["name"] == column - with pytest.raises(Exception): - # Test whether last element of merged_columns is the column that is not defined on Cumul.io's end. - result[stream_name]["cumulio_columns"].index(result[stream_name]["merged_columns"][-1]["name"]) - - -@pytest.fixture(name="configured_catalog_with_deleted_column") -def configured_catalog_with_deleted_column_fixture() -> ConfiguredAirbyteCatalog: - """Creates a ConfiguredAirbyteCatalog that will be used to mock a deleted column.""" - orders_stream_schema = { - "type": "object", - "properties": {"order_id": {"type": "integer"}, "amount": {"type": "integer"}}, - } - - orders_append_stream = ConfiguredAirbyteStream( - stream=AirbyteStream( - name="orders_append", - json_schema=orders_stream_schema, - supported_sync_modes=[SyncMode.incremental], - ), - sync_mode=SyncMode.incremental, - destination_sync_mode=DestinationSyncMode.append, - ) - - orders_overwrite_stream = ConfiguredAirbyteStream( - stream=AirbyteStream( - name="orders_overwrite", - json_schema=orders_stream_schema, - supported_sync_modes=[SyncMode.incremental], - ), - sync_mode=SyncMode.incremental, - destination_sync_mode=DestinationSyncMode.overwrite, - ) - - return ConfiguredAirbyteCatalog(streams=[orders_append_stream, orders_overwrite_stream]) - - -@pytest.fixture -def writer_deleted_airbyte_column( - config: Mapping[str, Any], - configured_catalog_with_deleted_column: ConfiguredAirbyteCatalog, - logger: MagicMock, -) -> CumulioWriter: - """This will return a CumulioWriter that mocks airbyte stream catalogs that doesn't contain one column that does exist in Cumul.io.""" - existing_cumulio_columns = {} - for configured_stream in configured_catalog_with_deleted_column.streams: - columns = [column_name for column_name in configured_stream.stream.json_schema["properties"]] - # Add customer_name column as second element to mimic a deleted column being defined in configured_stream - columns.insert(1, "customer_name") - existing_cumulio_columns[configured_stream.stream.name] = columns - - def get_existing_cumulio_columns(stream_name): - return existing_cumulio_columns[stream_name] - - with patch("destination_cumulio.writer.CumulioClient", MagicMock()) as cumulio_client_mock: - # Mock get_ordered_columns to return existing_cumulio_columns (which does not include one column defined in configured stream) - cumulio_client_mock.return_value.get_ordered_columns = MagicMock(side_effect=get_existing_cumulio_columns) - cumulio_client_mock.return_value.batch_writer.return_value = None - cumulio_client_mock.return_value.set_replace_tag_on_dataset.return_value = None - return CumulioWriter(config, configured_catalog_with_deleted_column, logger) - - -def test_init_deleted_airbyte_column(writer_deleted_airbyte_column: CumulioWriter): - """Assert each stream is correctly initializing update_metadata. 
-    Due to a deleted column in Airbyte for this writer:
-    - the update_metadata property for the orders_append stream is set to False, as it's in append mode and thus should keep the existing structure
-    - the update_metadata property for the orders_overwrite stream is set to True, as it's in overwrite mode
-    """
-    assert writer_deleted_airbyte_column.writers["orders_append"]["update_metadata"] is False
-    assert writer_deleted_airbyte_column.writers["orders_overwrite"]["update_metadata"] is True
-
-
-def test_deleted_column_update_metadata(writer_deleted_airbyte_column: CumulioWriter):
-    """Tests whether Airbyte streams that do not contain a column defined on Cumul.io's side result in update_metadata for only
-    overwrite streams (to inform Cumul.io about new column data being pushed)."""
-    assert writer_deleted_airbyte_column.writers["orders_append"]["update_metadata"] is False
-    assert writer_deleted_airbyte_column.writers["orders_overwrite"]["update_metadata"] is True
-
-
-def test_merged_columns_order_for_deleted_column(
-    writer_deleted_airbyte_column: CumulioWriter,
-):
-    """Tests whether Airbyte streams that do not contain a column defined on Cumul.io's side still put the other columns in
-    the right order."""
-    result = _get_cumulio_and_merged_columns(writer_deleted_airbyte_column)
-    for stream_name in result:
-        # Test whether merged_columns contains one less element
-        assert len(result[stream_name]["merged_columns"]) == len(result[stream_name]["cumulio_columns"]) - 1
-
-        cumulio_columns_without_deleted = [
-            column_name for column_name in result[stream_name]["cumulio_columns"] if column_name != "customer_name"
-        ]
-        # Test whether the elements, without the deleted column, are equal and in the same position
-        assert cumulio_columns_without_deleted == [column["name"] for column in result[stream_name]["merged_columns"]]
diff --git a/airbyte-integrations/connectors/destination-databend/.dockerignore b/airbyte-integrations/connectors/destination-databend/.dockerignore
deleted file mode 100644
index 57f4cf36c057..000000000000
--- a/airbyte-integrations/connectors/destination-databend/.dockerignore
+++ /dev/null
@@ -1,5 +0,0 @@
-*
-!Dockerfile
-!main.py
-!destination_databend
-!setup.py
diff --git a/airbyte-integrations/connectors/destination-databend/Dockerfile b/airbyte-integrations/connectors/destination-databend/Dockerfile
deleted file mode 100644
index df2af6877d65..000000000000
--- a/airbyte-integrations/connectors/destination-databend/Dockerfile
+++ /dev/null
@@ -1,38 +0,0 @@
-FROM python:3.9.11-alpine3.15 as base
-
-# build and load all requirements
-FROM base as builder
-WORKDIR /airbyte/integration_code
-
-# upgrade pip to the latest version
-RUN apk --no-cache upgrade \
-    && pip install --upgrade pip \
-    && apk --no-cache add tzdata build-base
-
-
-COPY setup.py ./
-# install necessary packages to a temporary folder
-RUN pip install --prefix=/install .
-
-# build a clean environment
-FROM base
-WORKDIR /airbyte/integration_code
-
-# copy all loaded and built libraries to a pure basic image
-COPY --from=builder /install /usr/local
-# add default timezone settings
-COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime
-RUN echo "Etc/UTC" > /etc/timezone
-
-# bash is installed for more convenient debugging.
-RUN apk --no-cache add bash - -# copy payload code only -COPY main.py ./ -COPY destination_databend ./destination_databend - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=0.1.2 -LABEL io.airbyte.name=airbyte/destination-databend diff --git a/airbyte-integrations/connectors/destination-databend/README.md b/airbyte-integrations/connectors/destination-databend/README.md deleted file mode 100644 index 9b50cd9ffbfe..000000000000 --- a/airbyte-integrations/connectors/destination-databend/README.md +++ /dev/null @@ -1,99 +0,0 @@ -# Databend Destination - -This is the repository for the Databend destination connector, written in Python. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/destinations/databend). - -## Local development - -### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** - -#### Minimum Python version required `= 3.7.0` - -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` - -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt -``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. - -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. - -#### Create credentials -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/destinations/databend) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `destination_databend/spec.json` file. -Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. - -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `destination databend test creds` -and place them into `secrets/config.json`. - -### Locally running the connector -``` -python main.py spec -python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json -``` - -### Locally running the connector docker image - - -#### Build -**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** -```bash -airbyte-ci connectors --name=destination-databend build -``` - -An image will be built with the tag `airbyte/destination-databend:dev`. - -**Via `docker build`:** -```bash -docker build -t airbyte/destination-databend:dev . 
-```
-
-#### Run
-Then run any of the connector commands as follows:
-```
-docker run --rm airbyte/destination-databend:dev spec
-docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-databend:dev check --config /secrets/config.json
-# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages
-cat messages.jsonl | docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-databend:dev write --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
-```
-
-## Testing
-You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md):
-```bash
-airbyte-ci connectors --name=destination-databend test
-```
-
-### Customizing acceptance Tests
-Customize the `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information.
-If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`.
-
-## Dependency Management
-All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
-We split dependencies between two groups, dependencies that are:
-* required for your connector to work go in the `MAIN_REQUIREMENTS` list.
-* required for testing go in the `TEST_REQUIREMENTS` list.
-
-### Publishing a new version of the connector
-You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what?
-1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-databend test`
-2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors).
-3. Make sure the `metadata.yaml` content is up to date.
-4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/destinations/databend.md`).
-5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention).
-6. Pat yourself on the back for being an awesome contributor.
-7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
-
diff --git a/airbyte-integrations/connectors/destination-databend/destination_databend/__init__.py b/airbyte-integrations/connectors/destination-databend/destination_databend/__init__.py
deleted file mode 100644
index 5be40696a9e5..000000000000
--- a/airbyte-integrations/connectors/destination-databend/destination_databend/__init__.py
+++ /dev/null
@@ -1,8 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-# - - -from .destination import DestinationDatabend - -__all__ = ["DestinationDatabend"] diff --git a/airbyte-integrations/connectors/destination-databend/destination_databend/__init__.pyc b/airbyte-integrations/connectors/destination-databend/destination_databend/__init__.pyc deleted file mode 100644 index 4538a0add1a44b3d66bbe4f5a67b65c59b0c7072..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 307 zcmYjNK~BRk5L_n+P!SRmXYRQKf1pAfdgcJ)!e!;gR#=+aD7y&d#8>zuA7D}yTFW~- zo*io}*FTqw@6A`&(SB9v_b9yqL~KU`(bmz6BNr>`!77S1JvAqAhyye;*fn64)FcO! zl2^gPI_3BXnI4*Ywp;+>!Ll3K8ARqV49pEE9Y7}>{j;V~{cFgIdXwaPvG%i!8}&iP z^VEgPQi8lo2%*< D6Y@_X diff --git a/airbyte-integrations/connectors/destination-databend/destination_databend/client.py b/airbyte-integrations/connectors/destination-databend/destination_databend/client.py deleted file mode 100644 index 1764093aa094..000000000000 --- a/airbyte-integrations/connectors/destination-databend/destination_databend/client.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from databend_sqlalchemy import connector - - -class DatabendClient: - def __init__(self, host: str, port: int, database: str, table: str, username: str, password: str = None): - self.host = host - self.port = port - self.database = database - self.table = table - self.username = username - self.password = password - - def open(self): - handle = connector.connect(f"https://{self.username}:{self.password}@{self.host}:{self.port}/{self.database}").cursor() - - return handle diff --git a/airbyte-integrations/connectors/destination-databend/destination_databend/destination.py b/airbyte-integrations/connectors/destination-databend/destination_databend/destination.py deleted file mode 100644 index 365575ec1e99..000000000000 --- a/airbyte-integrations/connectors/destination-databend/destination_databend/destination.py +++ /dev/null @@ -1,89 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import json -from datetime import datetime -from logging import getLogger -from typing import Any, Iterable, Mapping -from uuid import uuid4 - -from airbyte_cdk import AirbyteLogger -from airbyte_cdk.destinations import Destination -from airbyte_cdk.models import AirbyteConnectionStatus, AirbyteMessage, ConfiguredAirbyteCatalog, DestinationSyncMode, Status, Type -from destination_databend.client import DatabendClient - -from .writer import create_databend_wirter - -logger = getLogger("airbyte") - - -class DestinationDatabend(Destination): - def write( - self, config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog, input_messages: Iterable[AirbyteMessage] - ) -> Iterable[AirbyteMessage]: - - """ - TODO - Reads the input stream of messages, config, and catalog to write data to the destination. - - This method returns an iterable (typically a generator of AirbyteMessages via yield) containing state messages received - in the input message stream. Outputting a state message means that every AirbyteRecordMessage which came before it has been - successfully persisted to the destination. This is used to ensure fault tolerance in the case that a sync fails before fully completing, - then the source is given the last state message output from this method as the starting point of the next sync. 
-
-        :param config: dict of JSON configuration matching the configuration declared in spec.json
-        :param configured_catalog: The Configured Catalog describing the schema of the data being received and how it should be persisted in the
-        destination
-        :param input_messages: The stream of input messages received from the source
-        :return: Iterable of AirbyteStateMessages wrapped in AirbyteMessage structs
-        """
-        streams = {s.stream.name for s in configured_catalog.streams}
-        client = DatabendClient(**config)
-
-        writer = create_databend_wirter(client, logger)
-
-        for configured_stream in configured_catalog.streams:
-            if configured_stream.destination_sync_mode == DestinationSyncMode.overwrite:
-                writer.delete_table(configured_stream.stream.name)
-                logger.info(f"Stream {configured_stream.stream.name} is wiped.")
-            writer.create_raw_table(configured_stream.stream.name)
-
-        for message in input_messages:
-            if message.type == Type.STATE:
-                yield message
-            elif message.type == Type.RECORD:
-                data = message.record.data
-                stream = message.record.stream
-                # Skip unselected streams
-                if stream not in streams:
-                    logger.debug(f"Stream {stream} was not present in configured streams, skipping")
-                    continue
-                writer.queue_write_data(stream, str(uuid4()), datetime.now(), json.dumps(data))
-
-        # Flush any leftover messages
-        writer.flush()
-
-    def check(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> AirbyteConnectionStatus:
-        """
-        Tests if the input configuration can be used to successfully connect to the destination with the needed permissions
-        e.g: if a provided API token or password can be used to connect and write to the destination.
-
-        :param logger: Logging object to display debug/info/error to the logs
-        (logs will not be accessible via airbyte UI if they are not passed to this logger)
-        :param config: Json object containing the configuration of this destination, content of this json is as specified in
-        the properties of the spec.json file
-
-        :return: AirbyteConnectionStatus indicating a Success or Failure
-        """
-        try:
-            client = DatabendClient(**config)
-            cursor = client.open()
-            cursor.execute("DROP TABLE IF EXISTS test")
-            cursor.execute("CREATE TABLE if not exists test (x Int32,y VARCHAR)")
-            cursor.execute("INSERT INTO test (x,y) VALUES (%,%)", [1, "yy", 2, "xx"])
-            cursor.execute("DROP TABLE IF EXISTS test")
-            return AirbyteConnectionStatus(status=Status.SUCCEEDED)
-        except Exception as e:
-            return AirbyteConnectionStatus(status=Status.FAILED, message=f"An exception occurred: {repr(e)}")
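The `spec.json` below declares the connection fields this destination expects. For reference, a config dict that satisfies the required fields (`host`, `username`, `database`) could look like the following; all values are placeholders, not working credentials:

```python
# Hypothetical config matching the spec.json below; values are placeholders.
config = {
    "host": "example-host.databend.com",  # placeholder hostname
    "port": 443,                          # default per the spec
    "database": "default",
    "table": "default",
    "username": "user",                   # placeholder
    "password": "secret",                 # placeholder, marked airbyte_secret in the spec
}
```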
"maximum": 65536, - "default": 443, - "examples": ["443"], - "order": 2 - }, - "database": { - "title": "DB Name", - "description": "Name of the database.", - "type": "string", - "order": 3 - }, - "table": { - "title": "Default Table", - "description": "The default table was written to.", - "type": "string", - "examples": ["default"], - "default": "default", - "order": 4 - }, - "username": { - "title": "User", - "description": "Username to use to access the database.", - "type": "string", - "order": 5 - }, - "password": { - "title": "Password", - "description": "Password associated with the username.", - "type": "string", - "airbyte_secret": true, - "order": 6 - } - } - } -} diff --git a/airbyte-integrations/connectors/destination-databend/destination_databend/writer.py b/airbyte-integrations/connectors/destination-databend/destination_databend/writer.py deleted file mode 100644 index 006ff960b14e..000000000000 --- a/airbyte-integrations/connectors/destination-databend/destination_databend/writer.py +++ /dev/null @@ -1,134 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from collections import defaultdict -from datetime import datetime -from itertools import chain - -from airbyte_cdk import AirbyteLogger -from airbyte_cdk.models import AirbyteConnectionStatus, Status -from destination_databend.client import DatabendClient - - -class DatabendWriter: - """ - Base class for shared writer logic. - """ - - flush_interval = 1000 - - def __init__(self, client: DatabendClient) -> None: - """ - :param client: Databend SDK connection class with established connection - to the databse. - """ - try: - # open a cursor and do some work with it - self.client = client - self.cursor = client.open() - self._buffer = defaultdict(list) - self._values = 0 - except Exception as e: - # handle the exception - raise AirbyteConnectionStatus(status=Status.FAILED, message=f"An exception occurred: {repr(e)}") - finally: - # close the cursor - self.cursor.close() - - def delete_table(self, name: str) -> None: - """ - Delete the resulting table. - Primarily used in Overwrite strategy to clean up previous data. - - :param name: table name to delete. - """ - self.cursor.execute(f"DROP TABLE IF EXISTS _airbyte_raw_{name}") - - def create_raw_table(self, name: str): - """ - Create the resulting _airbyte_raw table. - - :param name: table name to create. - """ - query = f""" - CREATE TABLE IF NOT EXISTS _airbyte_raw_{name} ( - _airbyte_ab_id TEXT, - _airbyte_emitted_at TIMESTAMP, - _airbyte_data TEXT - ) - """ - cursor = self.cursor - cursor.execute(query) - - def queue_write_data(self, stream_name: str, id: str, time: datetime, record: str) -> None: - """ - Queue up data in a buffer in memory before writing to the database. - When flush_interval is reached data is persisted. - - :param stream_name: name of the stream for which the data corresponds. - :param id: unique identifier of this data row. - :param time: time of writing. - :param record: string representation of the json data payload. - """ - self._buffer[stream_name].append((id, time, record)) - self._values += 1 - if self._values == self.flush_interval: - self._flush() - - def _flush(self): - """ - Stub for the intermediate data flush that's triggered during the - buffering operation. - """ - raise NotImplementedError() - - def flush(self): - """ - Stub for the data flush at the end of writing operation. - """ - raise NotImplementedError() - - -class DatabendSQLWriter(DatabendWriter): - """ - Data writer using the SQL writing strategy. 
Data is buffered in memory - and flushed using INSERT INTO SQL statement. - """ - - flush_interval = 1000 - - def __init__(self, client: DatabendClient) -> None: - """ - :param client: Databend SDK connection class with established connection - to the databse. - """ - super().__init__(client) - - def _flush(self) -> None: - """ - Intermediate data flush that's triggered during the - buffering operation. Writes data stored in memory via SQL commands. - databend connector insert into table using stage - """ - cursor = self.cursor - # id, written_at, data - for table, data in self._buffer.items(): - cursor.execute( - f"INSERT INTO _airbyte_raw_{table} (_airbyte_ab_id,_airbyte_emitted_at,_airbyte_data) VALUES (%, %, %)", - list(chain.from_iterable(data)), - ) - self._buffer.clear() - self._values = 0 - - def flush(self) -> None: - """ - Final data flush after all data has been written to memory. - """ - self._flush() - - -def create_databend_wirter(client: DatabendClient, logger: AirbyteLogger) -> DatabendWriter: - logger.info("Using the SQL writing strategy") - writer = DatabendSQLWriter(client) - return writer diff --git a/airbyte-integrations/connectors/destination-databend/integration_tests/integration_test.py b/airbyte-integrations/connectors/destination-databend/integration_tests/integration_test.py deleted file mode 100644 index a40494c4e048..000000000000 --- a/airbyte-integrations/connectors/destination-databend/integration_tests/integration_test.py +++ /dev/null @@ -1,159 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import json -import logging -from typing import Any, Dict, List, Mapping - -import pytest -from airbyte_cdk.models import ( - AirbyteMessage, - AirbyteRecordMessage, - AirbyteStateMessage, - AirbyteStream, - ConfiguredAirbyteCatalog, - ConfiguredAirbyteStream, - DestinationSyncMode, - Status, - SyncMode, - Type, -) -from destination_databend import DestinationDatabend -from destination_databend.client import DatabendClient - - -@pytest.fixture(name="databendConfig") -def config_fixture() -> Mapping[str, Any]: - with open("secrets/config.json", "r") as f: - return json.loads(f.read()) - - -@pytest.fixture(name="configured_catalog") -def configured_catalog_fixture() -> ConfiguredAirbyteCatalog: - stream_schema = {"type": "object", "properties": {"string_col": {"type": "str"}, "int_col": {"type": "integer"}}} - - append_stream = ConfiguredAirbyteStream( - stream=AirbyteStream(name="append_stream", json_schema=stream_schema, supported_sync_modes=[SyncMode.incremental]), - sync_mode=SyncMode.incremental, - destination_sync_mode=DestinationSyncMode.append, - ) - - overwrite_stream = ConfiguredAirbyteStream( - stream=AirbyteStream(name="overwrite_stream", json_schema=stream_schema, supported_sync_modes=[SyncMode.incremental]), - sync_mode=SyncMode.incremental, - destination_sync_mode=DestinationSyncMode.overwrite, - ) - - return ConfiguredAirbyteCatalog(streams=[append_stream, overwrite_stream]) - - -@pytest.fixture(autouse=True) -def teardown(databendConfig: Mapping): - yield - client = DatabendClient(**databendConfig) - cursor = client.open() - cursor.close() - - -@pytest.fixture(name="client") -def client_fixture(databendConfig) -> DatabendClient: - return DatabendClient(**databendConfig) - - -def test_check_valid_config(databendConfig: Mapping): - outcome = DestinationDatabend().check(logging.getLogger("airbyte"), databendConfig) - assert outcome.status == Status.SUCCEEDED - - -def test_check_invalid_config(): - outcome = 
DestinationDatabend().check(logging.getLogger("airbyte"), {"bucket_id": "not_a_real_id"}) - assert outcome.status == Status.FAILED - - -def _state(data: Dict[str, Any]) -> AirbyteMessage: - return AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(data=data)) - - -def _record(stream: str, str_value: str, int_value: int) -> AirbyteMessage: - return AirbyteMessage( - type=Type.RECORD, record=AirbyteRecordMessage(stream=stream, data={"str_col": str_value, "int_col": int_value}, emitted_at=0) - ) - - -def retrieve_records(stream_name: str, client: DatabendClient) -> List[AirbyteRecordMessage]: - cursor = client.open() - cursor.execute(f"select * from _airbyte_raw_{stream_name}") - all_records = cursor.fetchall() - out = [] - for record in all_records: - # key = record[0] - # stream = key.split("__ab__")[0] - value = json.loads(record[2]) - out.append(_record(stream_name, value["str_col"], value["int_col"])) - return out - - -def retrieve_all_records(client: DatabendClient) -> List[AirbyteRecordMessage]: - """retrieves and formats all records in databend as Airbyte messages""" - overwrite_stream = "overwrite_stream" - append_stream = "append_stream" - overwrite_out = retrieve_records(overwrite_stream, client) - append_out = retrieve_records(append_stream, client) - return overwrite_out + append_out - - -def test_write(databendConfig: Mapping, configured_catalog: ConfiguredAirbyteCatalog, client: DatabendClient): - """ - This test verifies that: - 1. writing a stream in "overwrite" mode overwrites any existing data for that stream - 2. writing a stream in "append" mode appends new records without deleting the old ones - 3. The correct state message is output by the connector at the end of the sync - """ - append_stream, overwrite_stream = configured_catalog.streams[0].stream.name, configured_catalog.streams[1].stream.name - first_state_message = _state({"state": "1"}) - first_record_chunk = [_record(append_stream, str(i), i) for i in range(5)] + [_record(overwrite_stream, str(i), i) for i in range(5)] - - second_state_message = _state({"state": "2"}) - second_record_chunk = [_record(append_stream, str(i), i) for i in range(5, 10)] + [ - _record(overwrite_stream, str(i), i) for i in range(5, 10) - ] - - destination = DestinationDatabend() - - expected_states = [first_state_message, second_state_message] - output_states = list( - destination.write( - databendConfig, configured_catalog, [*first_record_chunk, first_state_message, *second_record_chunk, second_state_message] - ) - ) - assert expected_states == output_states, "Checkpoint state messages were expected from the destination" - - expected_records = [_record(append_stream, str(i), i) for i in range(10)] + [_record(overwrite_stream, str(i), i) for i in range(10)] - records_in_destination = retrieve_all_records(client) - assert len(expected_records) == len(records_in_destination), "Records in destination should match records expected" - - # After this sync we expect the append stream to have 15 messages and the overwrite stream to have 5 - third_state_message = _state({"state": "3"}) - third_record_chunk = [_record(append_stream, str(i), i) for i in range(10, 15)] + [ - _record(overwrite_stream, str(i), i) for i in range(10, 15) - ] - - output_states = list(destination.write(databendConfig, configured_catalog, [*third_record_chunk, third_state_message])) - assert [third_state_message] == output_states - - records_in_destination = retrieve_all_records(client) - expected_records = [_record(append_stream, str(i), i) for i in range(15)] + [ 
- _record(overwrite_stream, str(i), i) for i in range(10, 15) - ] - assert len(expected_records) == len(records_in_destination) - - tear_down(client) - - -def tear_down(client: DatabendClient): - overwrite_stream = "overwrite_stream" - append_stream = "append_stream" - cursor = client.open() - cursor.execute(f"DROP table _airbyte_raw_{overwrite_stream}") - cursor.execute(f"DROP table _airbyte_raw_{append_stream}") diff --git a/airbyte-integrations/connectors/destination-databend/integration_tests/sample_config.json b/airbyte-integrations/connectors/destination-databend/integration_tests/sample_config.json deleted file mode 100644 index 62c0cdb78b7f..000000000000 --- a/airbyte-integrations/connectors/destination-databend/integration_tests/sample_config.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "protocol": "https", - "host": "tnc7yee14--xxxx.ch.datafusecloud.com", - "port": 443, - "username": "username", - "password": "password", - "database": "default", - "table": "default" -} diff --git a/airbyte-integrations/connectors/destination-databend/main.py b/airbyte-integrations/connectors/destination-databend/main.py deleted file mode 100644 index 7482c00577de..000000000000 --- a/airbyte-integrations/connectors/destination-databend/main.py +++ /dev/null @@ -1,11 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import sys - -from destination_databend import DestinationDatabend - -if __name__ == "__main__": - DestinationDatabend().run(sys.argv[1:]) diff --git a/airbyte-integrations/connectors/destination-databend/requirements.txt b/airbyte-integrations/connectors/destination-databend/requirements.txt deleted file mode 100644 index d6e1198b1ab1..000000000000 --- a/airbyte-integrations/connectors/destination-databend/requirements.txt +++ /dev/null @@ -1 +0,0 @@ --e . diff --git a/airbyte-integrations/connectors/destination-databend/setup.py b/airbyte-integrations/connectors/destination-databend/setup.py deleted file mode 100644 index 49878e343bd6..000000000000 --- a/airbyte-integrations/connectors/destination-databend/setup.py +++ /dev/null @@ -1,22 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = ["airbyte-cdk", "requests", "databend-sqlalchemy==0.1.6"] - -TEST_REQUIREMENTS = ["pytest~=6.1"] -setup( - name="destination_databend", - description="Destination implementation for Databend.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json"]}, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/destination-databend/unit_tests/test_databend_destination.py b/airbyte-integrations/connectors/destination-databend/unit_tests/test_databend_destination.py deleted file mode 100644 index e5a7c7e6d7d6..000000000000 --- a/airbyte-integrations/connectors/destination-databend/unit_tests/test_databend_destination.py +++ /dev/null @@ -1,161 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-#
-
-from datetime import datetime
-from typing import Any, Dict
-from unittest.mock import AsyncMock, MagicMock, call, patch
-
-from airbyte_cdk.models import (
-    AirbyteMessage,
-    AirbyteRecordMessage,
-    AirbyteStream,
-    ConfiguredAirbyteCatalog,
-    ConfiguredAirbyteStream,
-    DestinationSyncMode,
-    SyncMode,
-    Type,
-)
-from destination_databend.destination import DatabendClient, DestinationDatabend
-from pytest import fixture
-
-
-@fixture
-def logger() -> MagicMock:
-    return MagicMock()
-
-
-@fixture
-def config() -> Dict[str, Any]:
-    args = {
-        "database": "default",
-        "username": "root",
-        "password": "root",
-        "host": "localhost",
-        "port": 8081,
-        "table": "default",
-    }
-    return args
-
-
-@fixture(name="mock_connection")
-def async_connection_cursor_mock():
-    connection = MagicMock()
-    cursor = AsyncMock()
-    connection.cursor.return_value = cursor
-    return connection, cursor
-
-
-@fixture
-def configured_stream1() -> ConfiguredAirbyteStream:
-    return ConfiguredAirbyteStream(
-        stream=AirbyteStream(
-            name="table1",
-            json_schema={
-                "type": "object",
-                "properties": {"col1": {"type": "string"}, "col2": {"type": "integer"}},
-            },
-            supported_sync_modes=[SyncMode.incremental],
-        ),
-        sync_mode=SyncMode.incremental,
-        destination_sync_mode=DestinationSyncMode.append,
-    )
-
-
-@fixture
-def configured_stream2() -> ConfiguredAirbyteStream:
-    return ConfiguredAirbyteStream(
-        stream=AirbyteStream(
-            name="table2",
-            json_schema={
-                "type": "object",
-                "properties": {"col1": {"type": "string"}, "col2": {"type": "integer"}},
-            },
-            supported_sync_modes=[SyncMode.incremental],
-        ),
-        sync_mode=SyncMode.incremental,
-        destination_sync_mode=DestinationSyncMode.append,
-    )
-
-
-@fixture
-def airbyte_message1() -> AirbyteMessage:
-    return AirbyteMessage(
-        type=Type.RECORD,
-        record=AirbyteRecordMessage(
-            stream="table1",
-            data={"key1": "value1", "key2": 2},
-            emitted_at=int(datetime.now().timestamp()) * 1000,
-        ),
-    )
-
-
-@fixture
-def airbyte_message2() -> AirbyteMessage:
-    return AirbyteMessage(
-        type=Type.RECORD,
-        record=AirbyteRecordMessage(
-            stream="table2",
-            data={"key1": "value2", "key2": 3},
-            emitted_at=int(datetime.now().timestamp()) * 1000,
-        ),
-    )
-
-
-@fixture
-def airbyte_state_message() -> AirbyteMessage:
-    return AirbyteMessage(type=Type.STATE)
-
-
-@patch("destination_databend.client.DatabendClient", MagicMock())
-def test_connection(config: Dict[str, Any], logger: MagicMock) -> None:
-    # The client should be constructible from the config alone; no logger is required.
-    DatabendClient(**config)
-
-
-@patch("destination_databend.writer.DatabendSQLWriter")
-@patch("destination_databend.client.DatabendClient")
-def test_sql_write_append(
-    mock_connection: MagicMock,
-    mock_writer: MagicMock,
-    config: Dict[str, Any],
-    configured_stream1: ConfiguredAirbyteStream,
-    configured_stream2: ConfiguredAirbyteStream,
-    airbyte_message1: AirbyteMessage,
-    airbyte_message2: AirbyteMessage,
-    airbyte_state_message: AirbyteMessage,
-) -> None:
-    catalog = ConfiguredAirbyteCatalog(streams=[configured_stream1, configured_stream2])
-
-    destination = DestinationDatabend()
-    result = destination.write(config, catalog, [airbyte_message1, airbyte_state_message, airbyte_message2])
-
-    assert list(result) == [airbyte_state_message]
-    mock_writer.return_value.delete_table.assert_not_called()
-    assert mock_writer.return_value.create_raw_table.mock_calls == [call(mock_connection, "table1"), call(mock_connection, "table2")]
-    assert len(mock_writer.return_value.queue_write_data.mock_calls) == 2
-    mock_writer.return_value.flush.assert_called_once()
-
-
-@patch("destination_databend.writer.DatabendSQLWriter") -@patch("destination_databend.client.DatabendClient") -def test_sql_write_overwrite( - mock_connection: MagicMock, - mock_writer: MagicMock, - config: Dict[str, str], - configured_stream1: ConfiguredAirbyteStream, - configured_stream2: ConfiguredAirbyteStream, - airbyte_message1: AirbyteMessage, - airbyte_message2: AirbyteMessage, - airbyte_state_message: AirbyteMessage, -): - # Overwrite triggers a delete - configured_stream1.destination_sync_mode = DestinationSyncMode.overwrite - catalog = ConfiguredAirbyteCatalog(streams=[configured_stream1, configured_stream2]) - - destination = DestinationDatabend() - result = destination.write(config, catalog, [airbyte_message1, airbyte_state_message, airbyte_message2]) - - assert list(result) == [airbyte_state_message] - mock_writer.return_value.delete_table.assert_called_once_with("table1") - mock_writer.return_value.create_raw_table.mock_calls = [call(mock_connection, "table1"), call(mock_connection, "table2")] diff --git a/airbyte-integrations/connectors/destination-databend/unit_tests/test_writer.py b/airbyte-integrations/connectors/destination-databend/unit_tests/test_writer.py deleted file mode 100644 index 0b68b113c2ab..000000000000 --- a/airbyte-integrations/connectors/destination-databend/unit_tests/test_writer.py +++ /dev/null @@ -1,46 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from typing import Any, Union -from unittest.mock import MagicMock - -from destination_databend.writer import DatabendSQLWriter -from pytest import fixture, mark - - -@fixture -def client() -> MagicMock: - return MagicMock() - - -@fixture -def sql_writer(client: MagicMock) -> DatabendSQLWriter: - return DatabendSQLWriter(client) - - -def test_sql_default(sql_writer: DatabendSQLWriter) -> None: - assert len(sql_writer._buffer) == 0 - assert sql_writer.flush_interval == 1000 - - -@mark.parametrize("writer", ["sql_writer"]) -def test_sql_create(client: MagicMock, writer: Union[DatabendSQLWriter], request: Any) -> None: - writer = request.getfixturevalue(writer) - writer.create_raw_table("dummy") - - -def test_data_buffering(sql_writer: DatabendSQLWriter) -> None: - sql_writer.queue_write_data("dummy", "id1", 20200101, '{"key": "value"}') - sql_writer._buffer["dummy"][0] == ("id1", 20200101, '{"key": "value"}') - assert len(sql_writer._buffer["dummy"]) == 1 - assert len(sql_writer._buffer.keys()) == 1 - sql_writer.queue_write_data("dummy", "id2", 20200102, '{"key2": "value2"}') - sql_writer._buffer["dummy"][0] == ("id2", 20200102, '{"key2": "value2"}') - assert len(sql_writer._buffer["dummy"]) == 2 - assert len(sql_writer._buffer.keys()) == 1 - sql_writer.queue_write_data("dummy2", "id3", 20200103, '{"key3": "value3"}') - sql_writer._buffer["dummy"][0] == ("id3", 20200103, '{"key3": "value3"}') - assert len(sql_writer._buffer["dummy"]) == 2 - assert len(sql_writer._buffer["dummy2"]) == 1 - assert len(sql_writer._buffer.keys()) == 2 diff --git a/airbyte-integrations/connectors/destination-doris/README.md b/airbyte-integrations/connectors/destination-doris/README.md deleted file mode 100644 index b67c3bd50347..000000000000 --- a/airbyte-integrations/connectors/destination-doris/README.md +++ /dev/null @@ -1,72 +0,0 @@ -# Destination Doris - -This is the repository for the Doris destination connector in Java. -For information about how to use this connector within Airbyte, see [the User Documentation](https://docs.airbyte.io/integrations/destinations/doris). 
-
-## Local development
-
-#### Building via Gradle
-From the Airbyte repository root, run:
-```
-./gradlew :airbyte-integrations:connectors:destination-doris:build
-```
-
-#### Create credentials
-**If you are a community contributor**, generate the necessary credentials and place them in `secrets/config.json` conforming to the spec file in `src/main/resources/spec.json` (a hypothetical sample is shown at the end of this README).
-Note that the `secrets` directory is git-ignored by default, so there is no danger of accidentally checking in sensitive information.
-
-**If you are an Airbyte core member**, follow the [instructions](https://docs.airbyte.io/connector-development#using-credentials-in-ci) to set up the credentials.
-
-### Locally running the connector docker image
-
-#### Build
-Build the connector image via Gradle:
-
-```
-./gradlew :airbyte-integrations:connectors:destination-doris:buildConnectorImage
-```
-Once built, the docker image name and tag on your host will be `airbyte/destination-doris:dev`.
-The image is built from the connector's Dockerfile.
-
-#### Run
-Then run any of the connector commands as follows:
-```
-docker run --rm airbyte/destination-doris:dev spec
-docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-doris:dev check --config /secrets/config.json
-docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-doris:dev discover --config /secrets/config.json
-docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-doris:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
-```
-
-## Testing
-We use `JUnit` for Java tests.
-
-### Unit and Integration Tests
-Place unit tests under `src/test/java/io/airbyte/integrations/destination/doris`.
-
-#### Acceptance Tests
-Airbyte has a standard test suite that all destination connectors must pass. Implement the `TODO`s in
-`src/test-integration/java/io/airbyte/integrations/destination/doris/DorisDestinationAcceptanceTest.java`.
-
-### Using gradle to run tests
-All commands should be run from the Airbyte project root.
-To run unit tests:
-```
-./gradlew :airbyte-integrations:connectors:destination-doris:unitTest
-```
-To run acceptance and custom integration tests:
-```
-./gradlew :airbyte-integrations:connectors:destination-doris:integrationTest
-```
-
-## Dependency Management
-
-### Publishing a new version of the connector
-You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what?
-1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-doris test`
-2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors).
-3. Make sure the `metadata.yaml` content is up to date.
-4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/destinations/doris.md`).
-5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention).
-6. Pat yourself on the back for being an awesome contributor.
-7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
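-
-## Sample configuration
-For reference, a `secrets/config.json` matching the fields in `src/main/resources/spec.json` could look like the following. This is a hypothetical sketch: the host, database, and credentials are placeholders, and the two ports are the spec defaults.
-```
-{
-  "host": "doris-fe.example.com",
-  "httpport": 8030,
-  "queryport": 9030,
-  "database": "default",
-  "username": "username",
-  "password": "password"
-}
-```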
-
diff --git a/airbyte-integrations/connectors/destination-doris/bootstrap.md b/airbyte-integrations/connectors/destination-doris/bootstrap.md
deleted file mode 100644
index 30f9d07820e3..000000000000
--- a/airbyte-integrations/connectors/destination-doris/bootstrap.md
+++ /dev/null
@@ -1,29 +0,0 @@
-# Doris destination
-
-
-The Doris destination uses the MySQL protocol (JDBC) and Doris Stream Load to exchange data:
-
-1. JDBC is used to manage the table structure and to execute the CREATE TABLE statement before data import.
-2. Stream Load is a synchronous import method based on HTTP/HTTPS. The Doris destination first pre-writes a CSV file and then writes it to Doris with a Stream Load transaction operation.
-
-## Introduction to Apache Doris
-
-Apache Doris is a high-performance, real-time analytical database based on an MPP architecture, known for its speed and ease of use. It returns query results over massive data sets within sub-second response times and supports both high-concurrency point queries and high-throughput complex analysis. This makes Apache Doris well suited to report analysis, ad-hoc queries, unified data warehousing, data lake query acceleration, and similar scenarios; on top of it, users can build applications such as user behavior analysis, A/B test platforms, log retrieval analysis, user profiling, and order analysis.
-[https://doris.apache.org/docs/summary/basic-summary](https://doris.apache.org/docs/summary/basic-summary)
-
-
-## Technical Overview
-Apache Doris has a very simple architecture, with only two types of processes:
-
-- **Frontend (FE):** mainly responsible for handling user requests, query parsing and planning, metadata management, and node management.
-- **Backend (BE):** mainly responsible for data storage and query plan execution.
-
-Both process types scale horizontally: a single cluster can support hundreds of machines and tens of petabytes of storage. Through consistency protocols, the two process types provide highly available services and highly reliable data. This tightly integrated architecture greatly reduces the operation and maintenance cost of a distributed system.
-
-Apache Doris speaks the MySQL protocol, is highly compatible with the MySQL dialect, and supports standard SQL. Users can access Doris through various client tools, and it connects seamlessly with BI tools.
-
-[Stream load](https://doris.apache.org/docs/data-operate/import/import-way/stream-load-manual/) is a synchronous import method: users import local files or data streams into Doris by sending HTTP requests. Stream Load executes the import synchronously and returns the result, so the caller can determine whether the import succeeded directly from the response body. Stream Load is mainly suitable for programmatically importing local files or data streams; a minimal request sketch is shown at the end of this document.
-
-Every Doris import job, whether a batch import via Stream Load or a single import via an INSERT statement, is a complete transaction. The import transaction guarantees that a batch of data takes effect atomically; there is no partial write.
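-
-## Stream Load request sketch
-The following is a minimal, hypothetical sketch of a single Stream Load request, written with the same Apache HttpClient library the connector itself uses. The FE host, port, database, table, credentials, and label are placeholder assumptions, and error handling is omitted; real usage should follow the Stream Load manual linked above.
-```
-import java.nio.charset.StandardCharsets;
-import java.util.Base64;
-import org.apache.http.client.methods.HttpPut;
-import org.apache.http.entity.StringEntity;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.impl.client.DefaultRedirectStrategy;
-import org.apache.http.impl.client.HttpClients;
-import org.apache.http.util.EntityUtils;
-
-public class StreamLoadSketch {
-
-  public static void main(String[] args) throws Exception {
-    // Endpoint shape: http://{fe_host}:{http_port}/api/{db}/{table}/_stream_load (placeholders here).
-    HttpPut put = new HttpPut("http://doris-fe.example.com:8030/api/default/my_table/_stream_load");
-    String auth = Base64.getEncoder().encodeToString("username:password".getBytes(StandardCharsets.UTF_8));
-    put.setHeader("Authorization", "Basic " + auth);
-    put.setHeader("Expect", "100-continue");
-    put.setHeader("label", "airbyte_doris_example_0001"); // a unique label makes retried loads idempotent
-    put.setHeader("two_phase_commit", "true"); // pre-commit now; commit or abort later via _stream_load_2pc
-    put.setHeader("column_separator", "\t");
-    put.setEntity(new StringEntity("id-1\t1700000000000\t{\"key\": \"value\"}", StandardCharsets.UTF_8));
-
-    // The FE redirects the PUT to a BE node, so the client must be willing to follow redirects for PUT.
-    try (CloseableHttpClient client = HttpClients.custom()
-        .setRedirectStrategy(new DefaultRedirectStrategy() {
-
-          @Override
-          protected boolean isRedirectable(String method) {
-            return true;
-          }
-
-        })
-        .build()) {
-      // The synchronous response body reports Status, TxnId, loaded row counts, and an error URL if any.
-      System.out.println(EntityUtils.toString(client.execute(put).getEntity()));
-    }
-  }
-
-}
-```
-With `two_phase_commit` enabled, the returned `TxnId` is later committed or aborted through the `_stream_load_2pc` endpoint, which is exactly the lifecycle the connector's `DorisStreamLoad` class implements.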
\ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-doris/build.gradle b/airbyte-integrations/connectors/destination-doris/build.gradle deleted file mode 100644 index 1fe67aaf8ff2..000000000000 --- a/airbyte-integrations/connectors/destination-doris/build.gradle +++ /dev/null @@ -1,28 +0,0 @@ -plugins { - id 'application' - id 'airbyte-java-connector' -} - -airbyteJavaConnector { - cdkVersionRequired = '0.2.0' - features = ['db-destinations'] - useLocalCdk = false -} - -//remove once upgrading the CDK version to 0.4.x or later -java { - compileJava { - options.compilerArgs.remove("-Werror") - } -} - -airbyteJavaConnector.addCdkDependencies() - -application { - mainClass = 'io.airbyte.integrations.destination.doris.DorisDestination' -} - -dependencies { - implementation 'org.apache.commons:commons-csv:1.4' - implementation group: 'mysql', name: 'mysql-connector-java', version: '8.0.16' -} diff --git a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisConnectionOptions.java b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisConnectionOptions.java deleted file mode 100644 index 74450137d2f4..000000000000 --- a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisConnectionOptions.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.doris; - -import com.fasterxml.jackson.databind.JsonNode; - -public class DorisConnectionOptions { - - private String db; - private static String DB_KEY = "database"; - private String table; - private static final String TABLE_KEY = "table"; - - private String user; - private static final String USER_KEY = "username"; - - private String pwd; - private static final String PWD_KEY = "password"; - - private String feHost; - private static final String FE_HOST_KEY = "host"; - - private Integer feHttpPort; - private static final String FE_HTTP_PORT_KEY = "httpport"; - - private Integer feQueryPort; - private static final String FE_QUERY_PORT_KEY = "queryport"; - - public static DorisConnectionOptions getDorisConnection(final JsonNode config, String table) { - return new DorisConnectionOptions( - config.get(DB_KEY).asText(), - table, - config.get(USER_KEY).asText(), - config.get(PWD_KEY) == null ? 
"" : config.get(PWD_KEY).asText(), - config.get(FE_HOST_KEY).asText(), - config.get(FE_HTTP_PORT_KEY).asInt(8030), - config.get(FE_QUERY_PORT_KEY).asInt(9030)); - - } - - public DorisConnectionOptions(String db, String table, String user, String pwd, String feHost, Integer feHttpPort, Integer feQueryPort) { - this.db = db; - this.table = table; - this.user = user; - this.pwd = pwd; - this.feHost = feHost; - this.feHttpPort = feHttpPort; - this.feQueryPort = feQueryPort; - } - - public String getDb() { - return db; - } - - public String getTable() { - return table; - } - - public String getUser() { - return user; - } - - public String getPwd() { - return pwd; - } - - public String getFeHost() { - return feHost; - } - - public Integer getFeHttpPort() { - return feHttpPort; - } - - public String getHttpHostPort() { - return feHost + ":" + feHttpPort; - } - - public String getQueryHostPort() { - return feHost + ":" + feHttpPort; - } - - public Integer getFeQueryPort() { - return feQueryPort; - } - - @Override - public String toString() { - return "DorisConnectionOptions{" + - "db='" + db + '\'' + - ", table='" + table + '\'' + - ", user='" + user + '\'' + - ", pwd='" + pwd + '\'' + - ", feHost='" + feHost + '\'' + - ", feHttpPort=" + feHttpPort + - ", feQueryPort=" + feQueryPort + - '}'; - } - -} diff --git a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisConsumer.java b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisConsumer.java deleted file mode 100644 index db64c82b2a97..000000000000 --- a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisConsumer.java +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */
-
-package io.airbyte.integrations.destination.doris;
-
-import com.fasterxml.jackson.core.io.JsonStringEncoder;
-import io.airbyte.cdk.integrations.base.CommitOnStateAirbyteMessageConsumer;
-import io.airbyte.commons.json.Jsons;
-import io.airbyte.protocol.models.v0.AirbyteMessage;
-import io.airbyte.protocol.models.v0.AirbyteRecordMessage;
-import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.util.Map;
-import java.util.UUID;
-import java.util.function.Consumer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class DorisConsumer extends CommitOnStateAirbyteMessageConsumer {
-
-  private static final Logger LOGGER = LoggerFactory.getLogger(DorisConsumer.class);
-
-  private final ConfiguredAirbyteCatalog catalog;
-  private final Map<String, DorisWriteConfig> writeConfigs;
-
-  private JsonStringEncoder jsonEncoder;
-
-  public DorisConsumer(
-                       final Map<String, DorisWriteConfig> writeConfigs,
-                       final ConfiguredAirbyteCatalog catalog,
-                       final Consumer<AirbyteMessage> outputRecordCollector) {
-    super(outputRecordCollector);
-    jsonEncoder = JsonStringEncoder.getInstance();
-    this.catalog = catalog;
-    this.writeConfigs = writeConfigs;
-    LOGGER.info("initializing DorisConsumer.");
-  }
-
-  @Override
-  public void commit() throws Exception {
-    for (final DorisWriteConfig writeConfig : writeConfigs.values()) {
-      writeConfig.getWriter().flush();
-    }
-  }
-
-  @Override
-  protected void startTracked() throws Exception {}
-
-  @Override
-  protected void acceptTracked(AirbyteMessage msg) throws Exception {
-    if (msg.getType() != AirbyteMessage.Type.RECORD) {
-      return;
-    }
-    final AirbyteRecordMessage recordMessage = msg.getRecord();
-    if (!writeConfigs.containsKey(recordMessage.getStream())) {
-      throw new IllegalArgumentException(
-          String.format("Message contained record from a stream that was not in the catalog. \ncatalog: %s , \nmessage: %s",
-              Jsons.serialize(catalog), Jsons.serialize(recordMessage)));
-    }
-
-    writeConfigs.get(recordMessage.getStream()).getWriter().printRecord(
-        UUID.randomUUID(),
-        // new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS").format(new Date(recordMessage.getEmittedAt())),
-        recordMessage.getEmittedAt(),
-        new String(jsonEncoder.quoteAsString(Jsons.serialize(recordMessage.getData()))));
-
-  }
-
-  @Override
-  protected void close(boolean hasFailed) throws Exception {
-    LOGGER.info("finalizing DorisConsumer");
-    for (final Map.Entry<String, DorisWriteConfig> entries : writeConfigs.entrySet()) {
-      try {
-        entries.getValue().getWriter().flush();
-        entries.getValue().getWriter().close();
-      } catch (final Exception e) {
-        hasFailed = true;
-        LOGGER.error("failed to close writer for: {}", entries.getKey());
-      }
-    }
-
-    try {
-      for (final DorisWriteConfig value : writeConfigs.values()) {
-        value.getDorisStreamLoad().firstCommit();
-      }
-    } catch (final Exception e) {
-      hasFailed = true;
-      final String message = "Failed to pre-commit Doris in destination: ";
-      LOGGER.error(message + e.getMessage());
-      for (final DorisWriteConfig value : writeConfigs.values()) {
-        if (value.getDorisStreamLoad().getTxnID() > 0)
-          value.getDorisStreamLoad().abortTransaction();
-      }
-    }
-
-    try {
-      if (!hasFailed) {
-        for (final DorisWriteConfig writeConfig : writeConfigs.values()) {
-          if (writeConfig.getDorisStreamLoad().getTxnID() > 0) {
-            writeConfig.getDorisStreamLoad().commitTransaction();
-            LOGGER.info(String.format("stream load commit (TxnID: %s) succeeded", writeConfig.getDorisStreamLoad().getTxnID()));
-          }
-        }
-      } else {
-        final String message = "Failed to commit Doris in destination";
-        LOGGER.error(message);
-        for (final DorisWriteConfig writeConfig : writeConfigs.values()) {
-          if (writeConfig.getDorisStreamLoad().getTxnID() > 0)
-            writeConfig.getDorisStreamLoad().abortTransaction();
-        }
-        throw new IOException(message);
-      }
-    } finally {
-      for (final DorisWriteConfig writeConfig : writeConfigs.values()) {
-        Files.deleteIfExists(writeConfig.getDorisStreamLoad().getPath());
-        writeConfig.getDorisStreamLoad().close();
-      }
-    }
-
-  }
-
-}
diff --git a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisDestination.java b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisDestination.java
deleted file mode 100644
index 12fd21b26134..000000000000
--- a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisDestination.java
+++ /dev/null
@@ -1,164 +0,0 @@
-/*
- * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */ - -package io.airbyte.integrations.destination.doris; - -import static io.airbyte.integrations.destination.doris.DorisStreamLoad.CSV_COLUMN_SEPARATOR; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.base.Preconditions; -import io.airbyte.cdk.integrations.BaseConnector; -import io.airbyte.cdk.integrations.base.AirbyteMessageConsumer; -import io.airbyte.cdk.integrations.base.Destination; -import io.airbyte.cdk.integrations.base.IntegrationRunner; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus.Status; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.DestinationSyncMode; -import java.io.FileWriter; -import java.io.IOException; -import java.nio.charset.Charset; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.sql.*; -import java.util.*; -import java.util.function.Consumer; -import org.apache.commons.csv.CSVFormat; -import org.apache.commons.csv.CSVPrinter; -import org.apache.commons.io.FileUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class DorisDestination extends BaseConnector implements Destination { - - private static final Logger LOGGER = LoggerFactory.getLogger(DorisDestination.class); - private static final StandardNameTransformer namingResolver = new StandardNameTransformer(); - private static Connection conn = null; - private static HttpUtil http = new HttpUtil(); - static final String DESTINATION_TEMP_PATH_FIELD = "destination_temp_path"; - private static final String JDBC_DRIVER = "com.mysql.cj.jdbc.Driver"; - private static final String DB_URL_PATTERN = "jdbc:mysql://%s:%d/%s?rewriteBatchedStatements=true&useUnicode=true&characterEncoding=utf8"; - - public static void main(String[] args) throws Exception { - new IntegrationRunner(new DorisDestination()).run(args); - } - - @Override - public AirbyteConnectionStatus check(JsonNode config) { - try { - Preconditions.checkNotNull(config); - FileUtils.forceMkdir(getTempPathDir(config).toFile()); - checkDorisAndConnect(config); - } catch (final Exception e) { - return new AirbyteConnectionStatus().withStatus(Status.FAILED).withMessage(e.getMessage()); - } - return new AirbyteConnectionStatus().withStatus(Status.SUCCEEDED); - } - - @Override - public AirbyteMessageConsumer getConsumer(JsonNode config, - ConfiguredAirbyteCatalog configuredCatalog, - Consumer outputRecordCollector) - throws IOException, SQLException { - final Map writeConfigs = new HashMap<>(); - - try { - final Path destinationDir = getTempPathDir(config); - FileUtils.forceMkdir(destinationDir.toFile()); - for (ConfiguredAirbyteStream stream : configuredCatalog.getStreams()) { - - final DestinationSyncMode syncMode = stream.getDestinationSyncMode(); - if (syncMode == null) { - throw new IllegalStateException("Undefined destination sync mode"); - } - - final String streamName = stream.getStream().getName(); - final String tableName = namingResolver.getIdentifier(streamName); - final String tmpTableName = namingResolver.getTmpTableName(streamName); - final Path tmpPath = destinationDir.resolve(tmpTableName + ".csv"); - if (conn == null) - checkDorisAndConnect(config); - Statement stmt = conn.createStatement(); - 
stmt.execute(createTableQuery(tableName));
-        if (syncMode == DestinationSyncMode.OVERWRITE) {
-          stmt.execute(truncateTable(tableName));
-        }
-        CSVFormat csvFormat = CSVFormat.DEFAULT
-            .withSkipHeaderRecord()
-            .withDelimiter(CSV_COLUMN_SEPARATOR)
-            .withQuote(null)
-            .withHeader(
-                JavaBaseConstants.COLUMN_NAME_AB_ID,
-                JavaBaseConstants.COLUMN_NAME_EMITTED_AT,
-                JavaBaseConstants.COLUMN_NAME_DATA);
-        final FileWriter fileWriter = new FileWriter(tmpPath.toFile(), Charset.defaultCharset(), false);
-        final CSVPrinter printer = new CSVPrinter(fileWriter, csvFormat);
-        DorisStreamLoad dorisStreamLoad = new DorisStreamLoad(
-            tmpPath,
-            DorisConnectionOptions.getDorisConnection(config, tableName),
-            new DorisLabelInfo("airbyte_doris", tableName, true),
-            http.getClient(),
-            JavaBaseConstants.COLUMN_NAME_AB_ID,
-            JavaBaseConstants.COLUMN_NAME_EMITTED_AT,
-            JavaBaseConstants.COLUMN_NAME_DATA);
-        writeConfigs.put(streamName, new DorisWriteConfig(dorisStreamLoad, printer, csvFormat));
-      }
-    } catch (SQLException | ClassNotFoundException e) {
-      LOGGER.error("Exception while creating Doris destination table: ", e);
-      throw new SQLException(e);
-    } catch (IOException e) {
-      LOGGER.error("Exception while handling temporary CSV files: ", e);
-      throw new IOException(e);
-    } finally {
-      if (conn != null)
-        conn.close();
-    }
-    return new DorisConsumer(writeConfigs, configuredCatalog, outputRecordCollector);
-  }
-
-  protected void checkDorisAndConnect(JsonNode config) throws ClassNotFoundException, SQLException {
-    DorisConnectionOptions dorisConnection = DorisConnectionOptions.getDorisConnection(config, "");
-    String dbUrl = String.format(DB_URL_PATTERN, dorisConnection.getFeHost(), dorisConnection.getFeQueryPort(), dorisConnection.getDb());
-    Class.forName(JDBC_DRIVER);
-    conn = DriverManager.getConnection(dbUrl, dorisConnection.getUser(), dorisConnection.getPwd());
-  }
-
-  protected String createTableQuery(String tableName) {
-    String s = "CREATE TABLE IF NOT EXISTS `" + tableName + "` ( \n"
-        + "`" + JavaBaseConstants.COLUMN_NAME_AB_ID + "` varchar(40),\n"
-        + "`" + JavaBaseConstants.COLUMN_NAME_EMITTED_AT + "` BIGINT,\n"
-        + "`" + JavaBaseConstants.COLUMN_NAME_DATA + "` String)\n"
-        + "DUPLICATE KEY(`" + JavaBaseConstants.COLUMN_NAME_AB_ID + "`,`" + JavaBaseConstants.COLUMN_NAME_EMITTED_AT + "`) \n"
-        + "DISTRIBUTED BY HASH(`" + JavaBaseConstants.COLUMN_NAME_AB_ID + "`) BUCKETS 16 \n"
-        + "PROPERTIES ( \n"
-        + "\"replication_allocation\" = \"tag.location.default: 1\" \n"
-        + ");";
-    LOGGER.info("create doris table SQL : \n " + s);
-    return s;
-  }
-
-  protected String truncateTable(String tableName) {
-    String s = "TRUNCATE TABLE `" + tableName + "`;";
-    LOGGER.info("truncate doris table SQL : \n " + s);
-    return s;
-  }
-
-  protected Path getTempPathDir(final JsonNode config) {
-    // Read the configured temp path; fall back to the field name as a directory when it is unset.
-    final JsonNode tempPath = config.get(DESTINATION_TEMP_PATH_FIELD);
-    Path path = Paths.get(tempPath != null ? tempPath.asText() : DESTINATION_TEMP_PATH_FIELD);
-    Preconditions.checkNotNull(path);
-    if (!path.startsWith("/code/local")) {
-      path = Path.of("/local", path.toString());
-    }
-    final Path normalizePath = path.normalize();
-    if (!normalizePath.startsWith("/local")) {
-      throw new IllegalArgumentException("Stream Load destination temp file should be inside the /local directory");
-    }
-    return normalizePath;
-  }
-
-}
diff --git a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisLabelInfo.java b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisLabelInfo.java
deleted file mode 100644
index 19182ee8cfb0..000000000000
--- a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisLabelInfo.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.doris; - -import java.util.UUID; - -public class DorisLabelInfo { - - private String prefix; - - private String table; - - private boolean enable2PC; - - public DorisLabelInfo(String labelPrefix, String table, boolean enable2PC) { - this.prefix = labelPrefix; - this.table = table; - this.enable2PC = enable2PC; - } - - public String label() { - return prefix + "_" + table + "_" + UUID.randomUUID() + System.currentTimeMillis(); - } - - public String label(long chkId) { - return prefix + "_" + chkId; - } - -} diff --git a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisStreamLoad.java b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisStreamLoad.java deleted file mode 100644 index 92051a999046..000000000000 --- a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisStreamLoad.java +++ /dev/null @@ -1,235 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.doris; - -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Preconditions; -import io.airbyte.integrations.destination.doris.exception.DorisRuntimeException; -import io.airbyte.integrations.destination.doris.exception.StreamLoadException; -import java.io.FileInputStream; -import java.io.IOException; -import java.nio.file.Path; -import java.util.*; -import java.util.concurrent.Future; -import java.util.regex.Matcher; -import java.util.regex.Pattern; -import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.http.client.methods.HttpPut; -import org.apache.http.entity.InputStreamEntity; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.util.EntityUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class DorisStreamLoad { - - private static final Logger LOGGER = LoggerFactory.getLogger(DorisStreamLoad.class); - private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); - public static final Pattern LABEL_EXIST_PATTERN = - Pattern.compile("errCode = 2, detailMessage = Label \\[(.*)\\] " + - "has already been used, relate to txn \\[(\\d+)\\]"); - public static final Pattern COMMITTED_PATTERN = - Pattern.compile("errCode = 2, detailMessage = transaction \\[(\\d+)\\] " + - "is already \\b(COMMITTED|committed|VISIBLE|visible)\\b, not pre-committed."); - private final DorisLabelInfo dorisLabelInfo; - private static final String LOAD_FIRST_URL_PATTERN = "http://%s/api/%s/%s/_stream_load"; - private static final String LOAD_SECOND_URL_PATTERN = "http://%s/api/%s/_stream_load_2pc"; - private static final String LINE_DELIMITER_DEFAULT = "\n"; - public static final Character CSV_COLUMN_SEPARATOR = '\t'; - - private final String hostPort; - private final String loadUrlStr; - private final String secondUrlStr; - private final String user; - private final String passwd; - private final boolean enable2PC; - private final Properties streamLoadProp; - private final Integer maxRetry; - private Long txnID = 0L; - private final Path path; - private Future 
pendingLoadFuture;
-  private final CloseableHttpClient httpClient;
-  public static final String SUCCESS = "Success";
-  public static final String PUBLISH_TIMEOUT = "Publish Timeout";
-  private static final List<String> DORIS_SUCCESS_STATUS = new ArrayList<>(Arrays.asList(SUCCESS, PUBLISH_TIMEOUT));
-  public static final String FAIL = "Fail";
-
-  public DorisStreamLoad(
-                         Path path,
-                         DorisConnectionOptions dorisOptions,
-                         DorisLabelInfo dorisLabelInfo,
-                         CloseableHttpClient httpClient,
-                         String... head) {
-    this.hostPort = dorisOptions.getHttpHostPort();
-    String db = dorisOptions.getDb();
-    this.user = dorisOptions.getUser();
-    this.passwd = dorisOptions.getPwd();
-    this.dorisLabelInfo = dorisLabelInfo;
-    this.loadUrlStr = String.format(LOAD_FIRST_URL_PATTERN, hostPort, db, dorisOptions.getTable());
-    this.secondUrlStr = String.format(LOAD_SECOND_URL_PATTERN, hostPort, db);
-    this.enable2PC = true;
-
-    StringBuilder stringBuilder = new StringBuilder();
-    for (String s : head) {
-      if (!stringBuilder.isEmpty())
-        stringBuilder.append(",");
-      stringBuilder.append(s);
-    }
-    this.streamLoadProp = new Properties();
-    streamLoadProp.setProperty("column_separator", CSV_COLUMN_SEPARATOR.toString());
-    streamLoadProp.setProperty("columns", stringBuilder.toString());
-    this.maxRetry = 3;
-    this.path = path;
-    this.httpClient = httpClient;
-  }
-
-  public Long getTxnID() {
-    return txnID;
-  }
-
-  public void firstCommit() throws Exception {
-    Path pathChecked = Preconditions.checkNotNull(path, "stream load temp CSV file path is null.");
-    String label = dorisLabelInfo.label();
-    LOGGER.info("preCommit label {}.", label);
-    StreamLoadRespContent respContent = null;
-    try {
-
-      InputStreamEntity entity = new InputStreamEntity(new FileInputStream(pathChecked.toFile()));
-      StreamLoadHttpPutBuilder builder = StreamLoadHttpPutBuilder.builder();
-      builder.setUrl(loadUrlStr)
-          .baseAuth(user, passwd)
-          .addCommonHeader()
-          .enable2PC(enable2PC)
-          .setLabel(label)
-          .setEntity(entity)
-          .addProperties(streamLoadProp);
-      HttpPut build = builder.build();
-      respContent = handlePreCommitResponse(httpClient.execute(build));
-      Preconditions.checkState("true".equals(respContent.getTwoPhaseCommit()));
-      if (!DORIS_SUCCESS_STATUS.contains(respContent.getStatus())) {
-        String errMsg = String.format("stream load error: %s, see more in %s", respContent.getMessage(), respContent.getErrorURL());
-        throw new DorisRuntimeException(errMsg);
-      } else {
-        String commitType = enable2PC ? "preCommit" : "commit";
"preCommit" : "commit"; - LOGGER.info("{} for label {} finished: {}", commitType, label, respContent.toString()); - } - } catch (Exception e) { - LOGGER.warn("failed to stream load data", e); - throw e; - } - this.txnID = respContent.getTxnId(); - } - - // commit - public void commitTransaction() throws IOException { - int statusCode = -1; - String reasonPhrase = null; - int retry = 0; - CloseableHttpResponse response = null; - StreamLoadHttpPutBuilder putBuilder = StreamLoadHttpPutBuilder.builder(); - putBuilder.setUrl(secondUrlStr) - .baseAuth(user, passwd) - .addCommonHeader() - .addTxnId(txnID) - .setEmptyEntity() - .commit(); - while (retry++ < maxRetry) { - - try { - response = httpClient.execute(putBuilder.build()); - } catch (IOException e) { - LOGGER.warn("try commit failed with {} times", retry + 1); - continue; - } - statusCode = response.getStatusLine().getStatusCode(); - reasonPhrase = response.getStatusLine().getReasonPhrase(); - if (statusCode != 200) { - LOGGER.warn("commit transaction failed with {}, reason {}", hostPort, reasonPhrase); - } else { - LOGGER.info("commit transaction successes , response: {}", response.getStatusLine().toString()); - break; - } - } - - if (statusCode != 200) { - throw new DorisRuntimeException("stream load error: " + reasonPhrase); - } - - ObjectMapper mapper = new ObjectMapper(); - if (response.getEntity() != null) { - String loadResult = EntityUtils.toString(response.getEntity()); - Map res = mapper.readValue(loadResult, new TypeReference>() {}); - Matcher matcher = COMMITTED_PATTERN.matcher(res.get("msg")); - if (res.get("status").equals(FAIL) && !matcher.matches()) { - throw new DorisRuntimeException("Commit failed " + loadResult); - } else { - LOGGER.info("load result {}", loadResult); - } - } - } - - // abort - public void abortTransaction() throws Exception { - StreamLoadHttpPutBuilder builder = StreamLoadHttpPutBuilder.builder(); - builder.setUrl(secondUrlStr) - .baseAuth(user, passwd) - .addCommonHeader() - .addTxnId(txnID) - .setEmptyEntity() - .abort(); - CloseableHttpResponse response = httpClient.execute(builder.build()); - - int statusCode = response.getStatusLine().getStatusCode(); - if (statusCode != 200 || response.getEntity() == null) { - LOGGER.warn("abort transaction response: " + response.getStatusLine().toString()); - throw new DorisRuntimeException("Failed abort transaction:" + txnID + ", with url " + secondUrlStr); - } else { - LOGGER.info("abort transaction response: " + response.getStatusLine().toString()); - } - - ObjectMapper mapper = new ObjectMapper(); - String loadResult = EntityUtils.toString(response.getEntity()); - Map res = mapper.readValue(loadResult, new TypeReference>() {}); - if (FAIL.equals(res.get("status"))) { - LOGGER.warn("Fail to abort transaction. 
error: {}", res.get("msg")); - } - } - - private StreamLoadRespContent stopLoad() throws IOException { - LOGGER.info("stream load stopped."); - Preconditions.checkState(pendingLoadFuture != null); - try { - return handlePreCommitResponse(pendingLoadFuture.get()); - } catch (Exception e) { - throw new DorisRuntimeException(e); - } - } - - public StreamLoadRespContent handlePreCommitResponse(CloseableHttpResponse response) throws Exception { - final int statusCode = response.getStatusLine().getStatusCode(); - if (statusCode == 200 && response.getEntity() != null) { - String loadResult = EntityUtils.toString(response.getEntity()); - LOGGER.info("load Result {}", loadResult); - return OBJECT_MAPPER.readValue(loadResult, StreamLoadRespContent.class); - } - throw new StreamLoadException("stream load response error: " + response.getStatusLine().toString()); - } - - public Path getPath() { - return path; - } - - public void close() throws IOException { - if (null != httpClient) { - try { - httpClient.close(); - } catch (IOException e) { - throw new IOException("Closing httpClient failed.", e); - } - } - } - -} diff --git a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisWriteConfig.java b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisWriteConfig.java deleted file mode 100644 index 2d0afa160406..000000000000 --- a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisWriteConfig.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.doris; - -import org.apache.commons.csv.CSVFormat; -import org.apache.commons.csv.CSVPrinter; - -public class DorisWriteConfig { - - private final DorisStreamLoad dorisStreamLoad; - private final CSVPrinter writer; - private final CSVFormat format; - - public DorisWriteConfig(DorisStreamLoad dorisStreamLoad, CSVPrinter writer, CSVFormat format) { - this.dorisStreamLoad = dorisStreamLoad; - this.writer = writer; - this.format = format; - } - - public DorisStreamLoad getDorisStreamLoad() { - return dorisStreamLoad; - } - - public CSVFormat getFormat() { - return format; - } - - public CSVPrinter getWriter() { - return writer; - } - -} diff --git a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/HttpUtil.java b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/HttpUtil.java deleted file mode 100644 index 2bf0b61d5088..000000000000 --- a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/HttpUtil.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */
-
-package io.airbyte.integrations.destination.doris;
-
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.impl.client.DefaultRedirectStrategy;
-import org.apache.http.impl.client.HttpClientBuilder;
-import org.apache.http.impl.client.HttpClients;
-
-public class HttpUtil {
-
-  private final HttpClientBuilder httpClientBuilder =
-      HttpClients
-          .custom()
-          .setRedirectStrategy(new DefaultRedirectStrategy() {
-
-            @Override
-            protected boolean isRedirectable(String method) {
-              return true;
-            }
-
-          });
-
-  public CloseableHttpClient getClient() {
-    return httpClientBuilder.build();
-  }
-
-}
diff --git a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/StreamLoadHttpPutBuilder.java b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/StreamLoadHttpPutBuilder.java
deleted file mode 100644
index 103924e182c1..000000000000
--- a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/StreamLoadHttpPutBuilder.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.integrations.destination.doris;
-
-import com.google.common.base.Preconditions;
-import java.nio.charset.StandardCharsets;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Properties;
-import org.apache.commons.codec.binary.Base64;
-import org.apache.http.HttpEntity;
-import org.apache.http.HttpHeaders;
-import org.apache.http.client.methods.HttpPut;
-import org.apache.http.entity.StringEntity;
-
-public class StreamLoadHttpPutBuilder {
-
-  String url;
-
-  Map<String, String> prop;
-
-  HttpEntity httpEntity;
-
-  private StreamLoadHttpPutBuilder() {
-    this.prop = new HashMap<>();
-  }
-
-  public static StreamLoadHttpPutBuilder builder() {
-    return new StreamLoadHttpPutBuilder();
-  }
-
-  public StreamLoadHttpPutBuilder setUrl(String url) {
-    this.url = url;
-    return this;
-  }
-
-  // Clients should set the Expect header to 100-continue; in some error scenarios this avoids unnecessary data transfer.
-  public StreamLoadHttpPutBuilder addCommonHeader() {
-    prop.put(HttpHeaders.EXPECT, "100-continue");
-    return this;
-  }
-
-  public StreamLoadHttpPutBuilder enable2PC(Boolean bool) {
-    prop.put("two_phase_commit", bool.toString());
-    return this;
-  }
-
-  public StreamLoadHttpPutBuilder baseAuth(String user, String password) {
-    byte[] encoded = Base64.encodeBase64(user.concat(":").concat(password).getBytes(StandardCharsets.UTF_8));
-    prop.put(HttpHeaders.AUTHORIZATION, "Basic " + new String(encoded, StandardCharsets.UTF_8));
-    return this;
-  }
-
-  public StreamLoadHttpPutBuilder addTxnId(long txnID) {
-    prop.put("txn_id", String.valueOf(txnID));
-    return this;
-  }
-
-  public StreamLoadHttpPutBuilder commit() {
-    prop.put("txn_operation", "commit");
-    return this;
-  }
-
-  public StreamLoadHttpPutBuilder abort() {
-    prop.put("txn_operation", "abort");
-    return this;
-  }
-
-  public StreamLoadHttpPutBuilder setEntity(HttpEntity httpEntity) {
-    this.httpEntity = httpEntity;
-    return this;
-  }
-
-  public StreamLoadHttpPutBuilder setEmptyEntity() {
-    try {
-      this.httpEntity = new StringEntity("");
-    } catch (Exception e) {
-      throw new IllegalArgumentException(e);
-    }
-    return this;
-  }
-
-  public StreamLoadHttpPutBuilder addProperties(Properties properties) {
-    properties.forEach((key, value) -> prop.put(String.valueOf(key), String.valueOf(value)));
-    return this;
-  }
-
-  public StreamLoadHttpPutBuilder setLabel(String label) {
-
prop.put("label", label); - return this; - } - - public HttpPut build() { - Preconditions.checkNotNull(url); - Preconditions.checkNotNull(httpEntity); - HttpPut put = new HttpPut(url); - prop.forEach(put::setHeader); - put.setEntity(httpEntity); - return put; - } - -} diff --git a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/StreamLoadRespContent.java b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/StreamLoadRespContent.java deleted file mode 100644 index 16eaed478180..000000000000 --- a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/StreamLoadRespContent.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.doris; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; - -@JsonIgnoreProperties(ignoreUnknown = true) -public class StreamLoadRespContent { - - @JsonProperty(value = "TxnId") - private long TxnId; - - @JsonProperty(value = "Label") - private String Label; - - @JsonProperty(value = "Status") - private String Status; - - @JsonProperty(value = "TwoPhaseCommit") - private String TwoPhaseCommit; - - @JsonProperty(value = "ExistingJobStatus") - private String ExistingJobStatus; - - @JsonProperty(value = "Message") - private String Message; - - @JsonProperty(value = "NumberTotalRows") - private long NumberTotalRows; - - @JsonProperty(value = "NumberLoadedRows") - private long NumberLoadedRows; - - @JsonProperty(value = "NumberFilteredRows") - private int NumberFilteredRows; - - @JsonProperty(value = "NumberUnselectedRows") - private int NumberUnselectedRows; - - @JsonProperty(value = "LoadBytes") - private long LoadBytes; - - @JsonProperty(value = "LoadTimeMs") - private int LoadTimeMs; - - @JsonProperty(value = "BeginTxnTimeMs") - private int BeginTxnTimeMs; - - @JsonProperty(value = "StreamLoadPutTimeMs") - private int StreamLoadPutTimeMs; - - @JsonProperty(value = "ReadDataTimeMs") - private int ReadDataTimeMs; - - @JsonProperty(value = "WriteDataTimeMs") - private int WriteDataTimeMs; - - @JsonProperty(value = "CommitAndPublishTimeMs") - private int CommitAndPublishTimeMs; - - @JsonProperty(value = "ErrorURL") - private String ErrorURL; - - public long getTxnId() { - return TxnId; - } - - public String getStatus() { - return Status; - } - - public String getTwoPhaseCommit() { - return TwoPhaseCommit; - } - - public String getMessage() { - return Message; - } - - public String getExistingJobStatus() { - return ExistingJobStatus; - } - - @Override - public String toString() { - ObjectMapper mapper = new ObjectMapper(); - try { - return mapper.writeValueAsString(this); - } catch (JsonProcessingException e) { - return ""; - } - } - - public String getErrorURL() { - return ErrorURL; - } - -} diff --git a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/exception/DorisException.java b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/exception/DorisException.java deleted file mode 100644 index c416bd64cef2..000000000000 --- 
a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/exception/DorisException.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.doris.exception; - -public class DorisException extends Exception { - - public DorisException() { - super(); - } - - public DorisException(String message) { - super(message); - } - - public DorisException(String message, Throwable cause) { - super(message, cause); - } - - public DorisException(Throwable cause) { - super(cause); - } - - protected DorisException(String message, - Throwable cause, - boolean enableSuppression, - boolean writableStackTrace) { - super(message, cause, enableSuppression, writableStackTrace); - } - -} diff --git a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/exception/DorisRuntimeException.java b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/exception/DorisRuntimeException.java deleted file mode 100644 index b7496070bc84..000000000000 --- a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/exception/DorisRuntimeException.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.doris.exception; - -/** - * Doris runtime exception. - */ -public class DorisRuntimeException extends RuntimeException { - - public DorisRuntimeException() { - super(); - } - - public DorisRuntimeException(String message) { - super(message); - } - - public DorisRuntimeException(String message, Throwable cause) { - super(message, cause); - } - - public DorisRuntimeException(Throwable cause) { - super(cause); - } - - protected DorisRuntimeException(String message, - Throwable cause, - boolean enableSuppression, - boolean writableStackTrace) { - super(message, cause, enableSuppression, writableStackTrace); - } - -} diff --git a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/exception/IllegalArgumentException.java b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/exception/IllegalArgumentException.java deleted file mode 100644 index bc0995d53f86..000000000000 --- a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/exception/IllegalArgumentException.java +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.doris.exception; - -public class IllegalArgumentException extends DorisException { - - public IllegalArgumentException(String msg, Throwable cause) { - super(msg, cause); - } - - public IllegalArgumentException(String arg, String value) { - super("argument '" + arg + "' is illegal, value is '" + value + "'."); - } - -} diff --git a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/exception/StreamLoadException.java b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/exception/StreamLoadException.java deleted file mode 100644 index 50d012f20e4a..000000000000 --- a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/exception/StreamLoadException.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.doris.exception; - -public class StreamLoadException extends Exception { - - public StreamLoadException() { - super(); - } - - public StreamLoadException(String message) { - super(message); - } - - public StreamLoadException(String message, Throwable cause) { - super(message, cause); - } - - public StreamLoadException(Throwable cause) { - super(cause); - } - - protected StreamLoadException(String message, - Throwable cause, - boolean enableSuppression, - boolean writableStackTrace) { - super(message, cause, enableSuppression, writableStackTrace); - } - -} diff --git a/airbyte-integrations/connectors/destination-doris/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-doris/src/main/resources/spec.json deleted file mode 100644 index 42cddd0a4780..000000000000 --- a/airbyte-integrations/connectors/destination-doris/src/main/resources/spec.json +++ /dev/null @@ -1,60 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.io/integrations/destinations/doris", - "supportsIncremental": false, - "supportsNormalization": false, - "supportsDBT": false, - "supported_destination_sync_modes": ["append", "overwrite"], - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Doris Destination Spec", - "type": "object", - "required": ["host", "httpport", "queryport", "username", "database"], - "properties": { - "host": { - "title": "Host", - "description": "Hostname of the database", - "type": "string", - "order": 0 - }, - "httpport": { - "title": "HttpPort", - "description": "Http Port of the database.", - "type": "integer", - "minimum": 0, - "maximum": 65536, - "default": 8030, - "examples": ["8030"], - "order": 1 - }, - "queryport": { - "title": "QueryPort", - "description": "Query(SQL) Port of the database.", - "type": "integer", - "minimum": 0, - "maximum": 65536, - "default": 9030, - "examples": ["9030"], - "order": 2 - }, - "database": { - "title": "DataBase Name", - "description": "Name of the database.", - "type": "string", - "order": 3 - }, - "username": { - "title": "UserName", - "description": "Username to use to access the database.", - "type": "string", - "order": 4 - }, - "password": { - "title": "Password", - "description": "Password associated with the username.", - "type": "string", - "airbyte_secret": true, - "order": 5 - } - } - } -} diff --git a/airbyte-integrations/connectors/destination-doris/src/test-integration/java/io/airbyte/integrations/destination/doris/DorisDestinationAcceptanceTest.java 
b/airbyte-integrations/connectors/destination-doris/src/test-integration/java/io/airbyte/integrations/destination/doris/DorisDestinationAcceptanceTest.java deleted file mode 100644 index b2e8dddf8037..000000000000 --- a/airbyte-integrations/connectors/destination-doris/src/test-integration/java/io/airbyte/integrations/destination/doris/DorisDestinationAcceptanceTest.java +++ /dev/null @@ -1,128 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.doris; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; -import io.airbyte.cdk.integrations.standardtest.destination.DestinationAcceptanceTest; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.json.Jsons; -import java.io.IOException; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.sql.*; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import org.apache.commons.lang3.StringEscapeUtils; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class DorisDestinationAcceptanceTest extends DestinationAcceptanceTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(DorisDestinationAcceptanceTest.class); - - private JsonNode configJson; - - private static final Path RELATIVE_PATH = Path.of("integration_test/test"); - - private static final String JDBC_DRIVER = "com.mysql.cj.jdbc.Driver"; - private static final String DB_URL_PATTERN = "jdbc:mysql://%s:%d?rewriteBatchedStatements=true&useSSL=true&useUnicode=true&characterEncoding=utf8"; - private static final int PORT = 8211; - private static Connection conn = null; - - private static final StandardNameTransformer namingResolver = new StandardNameTransformer(); - - @Override - protected String getImageName() { - return "airbyte/destination-doris:dev"; - } - - @BeforeAll - public static void getConnect() { - final JsonNode config = Jsons.deserialize(IOs.readFile(Paths.get("../../../secrets/config.json"))); - final String dbUrl = String.format(DB_URL_PATTERN, config.get("host").asText(), PORT); - try { - Class.forName(JDBC_DRIVER); - conn = - DriverManager.getConnection(dbUrl, config.get("username").asText(), config.get("password") == null ? 
"" : config.get("password").asText()); - } catch (final Exception e) { - e.printStackTrace(); - } - - } - - @AfterAll - public static void closeConnect() throws SQLException { - if (conn != null) { - conn.close(); - } - } - - @Override - protected JsonNode getConfig() { - // TODO: Generate the configuration JSON file to be used for running the destination during the test - // configJson can either be static and read from secrets/config.json directly - // or created in the setup method - configJson = Jsons.deserialize(IOs.readFile(Paths.get("../../../secrets/config.json"))); - return configJson; - } - - @Override - protected JsonNode getFailCheckConfig() { - // TODO return an invalid config which, when used to run the connector's check connection operation, - // should result in a failed connection check - return null; - } - - @Override - protected List retrieveRecords(final TestDestinationEnv testEnv, - final String streamName, - final String namespace, - final JsonNode streamSchema) - throws IOException, SQLException { - // TODO Implement this method to retrieve records which written to the destination by the connector. - // Records returned from this method will be compared against records provided to the connector - // to verify they were written correctly - - final String tableName = namingResolver.getIdentifier(streamName); - - final String query = String.format( - "SELECT * FROM %s.%s ORDER BY %s ASC;", configJson.get("database").asText(), tableName, - JavaBaseConstants.COLUMN_NAME_EMITTED_AT); - final PreparedStatement stmt = conn.prepareStatement(query); - final ResultSet resultSet = stmt.executeQuery(); - - final List res = new ArrayList<>(); - while (resultSet.next()) { - final String sss = resultSet.getString(JavaBaseConstants.COLUMN_NAME_DATA); - res.add(Jsons.deserialize(StringEscapeUtils.unescapeJava(sss))); - } - stmt.close(); - return res; - } - - @Override - protected void setup(final TestDestinationEnv testEnv, final HashSet TEST_SCHEMAS) { - // TODO Implement this method to run any setup actions needed before every test case - } - - @Override - protected void tearDown(final TestDestinationEnv testEnv) { - // TODO Implement this method to run any cleanup actions needed after every test case - } - - public void testLineBreakCharacters() { - // overrides test with a no-op until we handle full UTF-8 in the destination - } - - public void testSecondSync() throws Exception { - // PubSub cannot overwrite messages, its always append only - } - -} diff --git a/airbyte-integrations/connectors/destination-doris/src/test/java/io/airbyte/integrations/destination/doris/DorisDestinationTest.java b/airbyte-integrations/connectors/destination-doris/src/test/java/io/airbyte/integrations/destination/doris/DorisDestinationTest.java deleted file mode 100644 index d98a37bf711f..000000000000 --- a/airbyte-integrations/connectors/destination-doris/src/test/java/io/airbyte/integrations/destination/doris/DorisDestinationTest.java +++ /dev/null @@ -1,172 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.doris; - -import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.*; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import io.airbyte.cdk.integrations.base.AirbyteMessageConsumer; -import io.airbyte.cdk.integrations.base.Destination; -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus.Status; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.CatalogHelpers; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConnectorSpecification; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.time.Instant; -import java.util.Collections; -import java.util.Set; -import java.util.stream.Collectors; -import org.apache.commons.io.FileUtils; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class DorisDestinationTest { - - private static final Instant NOW = Instant.now(); - private static final Path TEST_ROOT = Path.of("/tmp/airbyte_tests"); - private static final String USERS_STREAM_NAME = "users"; - private static final String TASKS_STREAM_NAME = "tasks"; - private static final String USERS_FILE = new StandardNameTransformer().getRawTableName(USERS_STREAM_NAME) + ".csv"; - private static final String TASKS_FILE = new StandardNameTransformer().getRawTableName(TASKS_STREAM_NAME) + ".csv";; - private static final AirbyteMessage MESSAGE_USERS1 = new AirbyteMessage().withType(AirbyteMessage.Type.RECORD) - .withRecord(new AirbyteRecordMessage().withStream(USERS_STREAM_NAME) - .withData(Jsons.jsonNode(ImmutableMap.builder().put("name", "john").put("id", "10").build())) - .withEmittedAt(NOW.toEpochMilli())); - private static final AirbyteMessage MESSAGE_USERS2 = new AirbyteMessage().withType(AirbyteMessage.Type.RECORD) - .withRecord(new AirbyteRecordMessage().withStream(USERS_STREAM_NAME) - .withData(Jsons.jsonNode(ImmutableMap.builder().put("name", "susan").put("id", "30").build())) - .withEmittedAt(NOW.toEpochMilli())); - private static final AirbyteMessage MESSAGE_TASKS1 = new AirbyteMessage().withType(AirbyteMessage.Type.RECORD) - .withRecord(new AirbyteRecordMessage().withStream(TASKS_STREAM_NAME) - .withData(Jsons.jsonNode(ImmutableMap.builder().put("goal", "game").build())) - .withEmittedAt(NOW.toEpochMilli())); - private static final AirbyteMessage MESSAGE_TASKS2 = new AirbyteMessage().withType(AirbyteMessage.Type.RECORD) - .withRecord(new AirbyteRecordMessage().withStream(TASKS_STREAM_NAME) - .withData(Jsons.jsonNode(ImmutableMap.builder().put("goal", "code").build())) - .withEmittedAt(NOW.toEpochMilli())); - private static final AirbyteMessage MESSAGE_STATE = new AirbyteMessage().withType(AirbyteMessage.Type.STATE) - .withState(new 
AirbyteStateMessage().withData(Jsons.jsonNode(ImmutableMap.builder().put("checkpoint", "now!").build()))); - - private static final ConfiguredAirbyteCatalog CATALOG = new ConfiguredAirbyteCatalog().withStreams(Lists.newArrayList( - CatalogHelpers.createConfiguredAirbyteStream(USERS_STREAM_NAME, null, Field.of("name", JsonSchemaType.STRING), - Field.of("id", JsonSchemaType.STRING)), - CatalogHelpers.createConfiguredAirbyteStream(TASKS_STREAM_NAME, null, Field.of("goal", JsonSchemaType.STRING)))); - - private Path destinationPath; - private JsonNode config; - - @BeforeEach - void setup() throws IOException { - destinationPath = Files.createTempDirectory(Files.createDirectories(TEST_ROOT), "test"); - config = Jsons.deserialize(IOs.readFile(Paths.get("../../../secrets/config.json"))); - } - - private DorisDestination getDestination() { - final DorisDestination result = spy(DorisDestination.class); - doReturn(destinationPath).when(result).getTempPathDir(any()); - return result; - } - - @Test - void testSpec() throws Exception { - final ConnectorSpecification actual = getDestination().spec(); - final String resourceString = MoreResources.readResource("spec.json"); - final ConnectorSpecification expected = Jsons.deserialize(resourceString, ConnectorSpecification.class); - - assertEquals(expected, actual); - } - - @Test - void testCheckSuccess() { - final AirbyteConnectionStatus actual = getDestination().check(config); - final AirbyteConnectionStatus expected = new AirbyteConnectionStatus().withStatus(Status.SUCCEEDED); - assertEquals(expected, actual); - } - - @Test - void testCheckFailure() throws IOException { - final Path looksLikeADirectoryButIsAFile = destinationPath.resolve("file"); - FileUtils.touch(looksLikeADirectoryButIsAFile.toFile()); - final DorisDestination destination = spy(DorisDestination.class); - doReturn(looksLikeADirectoryButIsAFile).when(destination).getTempPathDir(any()); - // final JsonNode config = - // Jsons.jsonNode(ImmutableMap.of(DorisDestination.DESTINATION_TEMP_PATH_FIELD, - // looksLikeADirectoryButIsAFile.toString())); - final AirbyteConnectionStatus actual = destination.check(config); - final AirbyteConnectionStatus expected = new AirbyteConnectionStatus().withStatus(Status.FAILED); - - // the message includes the random file path, so just verify it exists and then remove it when we do - // rest of the comparison. - assertNotNull(actual.getMessage()); - actual.setMessage(null); - assertEquals(expected, actual); - } - - @Test - void testCheckInvalidDestinationFolder() { - // final Path relativePath = Path.of("../tmp/conf.d/"); - // final JsonNode config = - // Jsons.jsonNode(ImmutableMap.of(DorisDestination.DESTINATION_TEMP_PATH_FIELD, - // relativePath.toString())); - final AirbyteConnectionStatus actual = new DorisDestination().check(config); - final AirbyteConnectionStatus expected = new AirbyteConnectionStatus().withStatus(Status.FAILED); - // the message includes the random file path, so just verify it exists and then remove it when we do - // rest of the comparison. 
- assertNotNull(actual.getMessage()); - actual.setMessage(null); - assertEquals(expected, actual); - } - - @Test - void testWriteSuccess() throws Exception { - DorisDestination destination = getDestination(); - destination.check(config); - final AirbyteMessageConsumer consumer = destination.getConsumer(config, CATALOG, Destination::defaultOutputRecordCollector); - consumer.accept(MESSAGE_USERS1); - consumer.accept(MESSAGE_TASKS1); - consumer.accept(MESSAGE_USERS2); - consumer.accept(MESSAGE_TASKS2); - consumer.accept(MESSAGE_STATE); - consumer.close(); - - } - - @SuppressWarnings("ResultOfMethodCallIgnored") - @Test - void testWriteFailure() throws Exception { - // hack to force an exception to be thrown from within the consumer. - final AirbyteMessage spiedMessage = spy(MESSAGE_USERS1); - doThrow(new RuntimeException()).when(spiedMessage).getRecord(); - DorisDestination destination = getDestination(); - destination.check(config); - final AirbyteMessageConsumer consumer = spy(destination.getConsumer(config, CATALOG, Destination::defaultOutputRecordCollector)); - - assertThrows(RuntimeException.class, () -> consumer.accept(spiedMessage)); - consumer.accept(MESSAGE_USERS2); - assertThrows(IOException.class, consumer::close); - - // verify tmp files are cleaned up and no files are output at all - final Set<String> actualFilenames = Files.list(destinationPath).map(Path::getFileName).map(Path::toString).collect(Collectors.toSet()); - assertEquals(Collections.emptySet(), actualFilenames); - } - -} diff --git a/airbyte-integrations/connectors/destination-exasol/README.md b/airbyte-integrations/connectors/destination-exasol/README.md deleted file mode 100644 index 8651db3ec762..000000000000 --- a/airbyte-integrations/connectors/destination-exasol/README.md +++ /dev/null @@ -1,71 +0,0 @@ -# Destination Exasol - -This is the repository for the Exasol destination connector in Java. -For information about how to use this connector within Airbyte, see [the User Documentation](https://docs.airbyte.io/integrations/destinations/exasol). - -## Local development - -#### Building via Gradle -From the Airbyte repository root, run: -``` -./gradlew :airbyte-integrations:connectors:destination-exasol:build -``` - -#### Create credentials -**If you are a community contributor**, generate the necessary credentials and place them in `secrets/config.json` conforming to the spec file in `src/main/resources/spec.json`. -Note that the `secrets` directory is git-ignored by default, so there is no danger of accidentally checking in sensitive information. - -**If you are an Airbyte core member**, follow the [instructions](https://docs.airbyte.io/connector-development#using-credentials-in-ci) to set up the credentials. - -### Locally running the connector docker image - -#### Build -Build the connector image via Gradle: - -``` -./gradlew :airbyte-integrations:connectors:destination-exasol:buildConnectorImage -``` -Once built, the docker image name and tag on your host will be `airbyte/destination-exasol:dev`.
- -#### Run -Then run any of the connector commands as follows: -``` -docker run --rm airbyte/destination-exasol:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-exasol:dev check --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-exasol:dev discover --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-exasol:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` - -## Testing -The connector uses `JUnit` for Java tests. - -### Unit and Integration Tests -Place unit tests under `src/test/io/airbyte/integrations/destinations/exasol`. - -#### Acceptance Tests -Airbyte has a standard test suite that all destination connectors must pass. - -### Using Gradle to run tests -All commands should be run from the airbyte project root. -To run unit tests: -``` -./gradlew :airbyte-integrations:connectors:destination-exasol:unitTest -``` -To run acceptance and custom integration tests: -``` -./gradlew :airbyte-integrations:connectors:destination-exasol:integrationTest -``` - -## Dependency Management - -### Publishing a new version of the connector -You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? -1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-exasol test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). -3. Make sure the `metadata.yaml` content is up to date. -4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/destinations/exasol.md`). -5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). -6. Pat yourself on the back for being an awesome contributor. -7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. - diff --git a/airbyte-integrations/connectors/destination-exasol/bootstrap.md b/airbyte-integrations/connectors/destination-exasol/bootstrap.md deleted file mode 100644 index f3342f5024f4..000000000000 --- a/airbyte-integrations/connectors/destination-exasol/bootstrap.md +++ /dev/null @@ -1,19 +0,0 @@ -# Exasol - -## Overview - -Exasol is the in-memory database built for analytics. - -## Endpoints - -The destination-exasol connector uses the official [Exasol JDBC driver](https://docs.exasol.com/db/latest/connect_exasol/drivers/jdbc.htm). - -## Quick Notes - -- TLS connections are used by default. If the Exasol database uses a self-signed certificate, specify the certificate fingerprint; a sketch for computing it follows below.
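For reference, the fingerprint of a self-signed certificate can be computed ahead of time and pasted into the connector config. A minimal Python sketch using only the standard library (the host name is a placeholder; 8563 is the default port from the connector spec in this patch, and it assumes Exasol expects the hex-encoded SHA-256 digest of the server certificate):

```python
import hashlib
import ssl

def exasol_certificate_fingerprint(host: str, port: int = 8563) -> str:
    # Fetch the server certificate without validating it (it may be
    # self-signed), then hash its DER encoding.
    pem_cert = ssl.get_server_certificate((host, port))
    der_cert = ssl.PEM_cert_to_DER_cert(pem_cert)
    return hashlib.sha256(der_cert).hexdigest().upper()

# "exasol.example.com" is a hypothetical host used for illustration only.
print(exasol_certificate_fingerprint("exasol.example.com"))
```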
- -## Reference - -- [Exasol homepage](https://www.exasol.com/) -- [Exasol documentation](https://docs.exasol.com/db/latest/home.htm) -- [Exasol JDBC driver documentation](https://docs.exasol.com/db/latest/connect_exasol/drivers/jdbc.htm) diff --git a/airbyte-integrations/connectors/destination-exasol/build.gradle b/airbyte-integrations/connectors/destination-exasol/build.gradle deleted file mode 100644 index 3380731e417d..000000000000 --- a/airbyte-integrations/connectors/destination-exasol/build.gradle +++ /dev/null @@ -1,37 +0,0 @@ -plugins { - id 'application' - id 'airbyte-java-connector' -} - -airbyteJavaConnector { - cdkVersionRequired = '0.2.0' - features = ['db-destinations'] - useLocalCdk = false -} - -//remove once upgrading the CDK version to 0.4.x or later -java { - compileJava { - options.compilerArgs.remove("-Werror") - } -} - -airbyteJavaConnector.addCdkDependencies() - -application { - mainClass = 'io.airbyte.integrations.destination.exasol.ExasolDestination' -} - -dependencies { - - implementation 'com.exasol:exasol-jdbc:7.1.17' - - testImplementation 'org.hamcrest:hamcrest-all:1.3' - - // Explicitly upgrade testcontainers to avoid java.lang.NoSuchMethodError: - // 'org.testcontainers.containers.GenericContainer com.exasol.containers.ExasolContainer.withCopyToContainer(org.testcontainers.images.builder.Transferable, java.lang.String)' - testImplementation 'org.testcontainers:testcontainers:1.17.6' - - integrationTestJavaImplementation 'com.exasol:exasol-testcontainers:6.5.0' - integrationTestJavaImplementation 'org.testcontainers:testcontainers:1.17.6' -} diff --git a/airbyte-integrations/connectors/destination-exasol/src/main/java/io/airbyte/integrations/destination/exasol/ExasolDestination.java b/airbyte-integrations/connectors/destination-exasol/src/main/java/io/airbyte/integrations/destination/exasol/ExasolDestination.java deleted file mode 100644 index 8145c85c2444..000000000000 --- a/airbyte-integrations/connectors/destination-exasol/src/main/java/io/airbyte/integrations/destination/exasol/ExasolDestination.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.exasol; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.cdk.db.factory.DatabaseDriver; -import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.cdk.integrations.base.Destination; -import io.airbyte.cdk.integrations.base.IntegrationRunner; -import io.airbyte.cdk.integrations.destination.jdbc.AbstractJdbcDestination; -import io.airbyte.commons.json.Jsons; -import java.util.HashMap; -import java.util.Map; - -public class ExasolDestination extends AbstractJdbcDestination implements Destination { - - public static final String DRIVER_CLASS = DatabaseDriver.EXASOL.getDriverClassName(); - - public ExasolDestination() { - super(DRIVER_CLASS, new ExasolSQLNameTransformer(), new ExasolSqlOperations()); - } - - public static void main(String[] args) throws Exception { - new IntegrationRunner(new ExasolDestination()).run(args); - } - - @Override - public JsonNode toJdbcConfig(final JsonNode config) { - final String jdbcUrl = String.format(DatabaseDriver.EXASOL.getUrlFormatString(), - config.get(JdbcUtils.HOST_KEY).asText(), config.get(JdbcUtils.PORT_KEY).asInt()); - - final ImmutableMap.Builder configBuilder = ImmutableMap.builder() - .put(JdbcUtils.USERNAME_KEY, config.get(JdbcUtils.USERNAME_KEY).asText()) - .put(JdbcUtils.JDBC_URL_KEY, jdbcUrl) - .put("schema", config.get(JdbcUtils.SCHEMA_KEY).asText()); - - if (config.has(JdbcUtils.PASSWORD_KEY)) { - configBuilder.put(JdbcUtils.PASSWORD_KEY, config.get(JdbcUtils.PASSWORD_KEY).asText()); - } - - if (config.has(JdbcUtils.JDBC_URL_PARAMS_KEY)) { - configBuilder.put(JdbcUtils.JDBC_URL_PARAMS_KEY, config.get(JdbcUtils.JDBC_URL_PARAMS_KEY).asText()); - } - - return Jsons.jsonNode(configBuilder.build()); - } - - @Override - protected Map getDefaultConnectionProperties(final JsonNode config) { - Map properties = new HashMap<>(); - properties.put("autocommit", "0"); - if (config.has("certificateFingerprint")) { - properties.put("fingerprint", config.get("certificateFingerprint").asText()); - } - return properties; - } - -} diff --git a/airbyte-integrations/connectors/destination-exasol/src/main/java/io/airbyte/integrations/destination/exasol/ExasolSQLNameTransformer.java b/airbyte-integrations/connectors/destination-exasol/src/main/java/io/airbyte/integrations/destination/exasol/ExasolSQLNameTransformer.java deleted file mode 100644 index 8fd3caf20a75..000000000000 --- a/airbyte-integrations/connectors/destination-exasol/src/main/java/io/airbyte/integrations/destination/exasol/ExasolSQLNameTransformer.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.exasol; - -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; -import io.airbyte.commons.text.Names; - -public class ExasolSQLNameTransformer extends StandardNameTransformer { - - @Override - public String applyDefaultCase(final String input) { - return input.toUpperCase(); - } - - @Override - public String getRawTableName(final String streamName) { - // Exasol identifiers starting with _ must be quoted - return Names.doubleQuote(super.getRawTableName(streamName)); - } - - @Override - public String getTmpTableName(final String streamName) { - // Exasol identifiers starting with _ must be quoted - return Names.doubleQuote(super.getTmpTableName(streamName)); - } - - @Override - public String convertStreamName(final String input) { - // Sometimes the stream name is already quoted, so remove quotes before converting. - // Exasol identifiers starting with _ must be quoted. - return Names.doubleQuote(super.convertStreamName(unquote(input))); - } - - private static String unquote(final String input) { - String result = input; - if (result.startsWith("\"")) { - result = result.substring(1); - } - if (result.endsWith("\"")) { - result = result.substring(0, result.length() - 1); - } - return result; - } - -} diff --git a/airbyte-integrations/connectors/destination-exasol/src/main/java/io/airbyte/integrations/destination/exasol/ExasolSqlOperations.java b/airbyte-integrations/connectors/destination-exasol/src/main/java/io/airbyte/integrations/destination/exasol/ExasolSqlOperations.java deleted file mode 100644 index e0353bd414b9..000000000000 --- a/airbyte-integrations/connectors/destination-exasol/src/main/java/io/airbyte/integrations/destination/exasol/ExasolSqlOperations.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.exasol; - -import io.airbyte.cdk.db.jdbc.JdbcDatabase; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.cdk.integrations.destination.jdbc.JdbcSqlOperations; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.List; - -public class ExasolSqlOperations extends JdbcSqlOperations { - - public static final String COLUMN_NAME_AB_ID = - "\"" + JavaBaseConstants.COLUMN_NAME_AB_ID.toUpperCase() + "\""; - public static final String COLUMN_NAME_DATA = - "\"" + JavaBaseConstants.COLUMN_NAME_DATA.toUpperCase() + "\""; - public static final String COLUMN_NAME_EMITTED_AT = - "\"" + JavaBaseConstants.COLUMN_NAME_EMITTED_AT.toUpperCase() + "\""; - - @Override - public String createTableQuery(final JdbcDatabase database, final String schemaName, final String tableName) { - String query = String.format(""" - CREATE TABLE IF NOT EXISTS %s.%s ( - %s VARCHAR(64), - %s VARCHAR(2000000), - %s TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - PRIMARY KEY(%s) - )""", - schemaName, tableName, - ExasolSqlOperations.COLUMN_NAME_AB_ID, - ExasolSqlOperations.COLUMN_NAME_DATA, - ExasolSqlOperations.COLUMN_NAME_EMITTED_AT, - ExasolSqlOperations.COLUMN_NAME_AB_ID); - LOGGER.info("Create table query: {}", query); - return query; - } - - @Override - public void executeTransaction(final JdbcDatabase database, final List queries) throws Exception { - database.executeWithinTransaction(queries); - } - - @Override - protected void insertRecordsInternal(JdbcDatabase database, List records, String schemaName, String tableName) - throws Exception { - if (records.isEmpty()) { - return; - } - Path tmpFile = createBatchFile(tableName, records); - try { - String importStatement = String.format(""" - IMPORT INTO %s.%s - FROM LOCAL CSV FILE '%s' - ROW SEPARATOR = 'CRLF' - COLUMN SEPARATOR = ','""", schemaName, tableName, tmpFile.toAbsolutePath()); - LOGGER.info("IMPORT statement: {}", importStatement); - database.execute(connection -> connection.createStatement().execute(importStatement)); - } finally { - Files.delete(tmpFile); - } - } - - private Path createBatchFile(String tableName, List records) throws Exception { - Path tmpFile = Files.createTempFile(tableName + "-", ".tmp"); - writeBatchToFile(tmpFile.toFile(), records); - return tmpFile; - } - -} diff --git a/airbyte-integrations/connectors/destination-exasol/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-exasol/src/main/resources/spec.json deleted file mode 100644 index 865270c7b853..000000000000 --- a/airbyte-integrations/connectors/destination-exasol/src/main/resources/spec.json +++ /dev/null @@ -1,64 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/destinations/exasol", - "supportsIncremental": true, - "supportsNormalization": false, - "supportsDBT": false, - "supported_destination_sync_modes": ["overwrite", "append"], - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Exasol Destination Spec", - "type": "object", - "required": ["host", "port", "username", "schema"], - "additionalProperties": true, - "properties": { - "host": { - "title": "Host", - "description": "Hostname of the database.", - "type": "string", - "order": 0 - }, - "port": { - "title": "Port", - "description": "Port of the database.", - "type": "integer", - "minimum": 0, - "maximum": 65536, - "default": 8563, - "examples": ["8563"], - "order": 1 - }, - 
"certificateFingerprint": { - "title": "Certificate Fingerprint", - "description": "Fingerprint of the Exasol server's TLS certificate", - "type": "string", - "examples": ["ABC123..."], - "order": 2 - }, - "username": { - "title": "User", - "description": "Username to use to access the database.", - "type": "string", - "order": 3 - }, - "password": { - "title": "Password", - "description": "Password associated with the username.", - "type": "string", - "airbyte_secret": true, - "order": 4 - }, - "schema": { - "title": "Schema Name", - "description": "Schema Name", - "type": "string", - "order": 5 - }, - "jdbc_url_params": { - "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol ';'. (example: key1=value1;key2=value2;key3=value3).", - "title": "JDBC URL Params", - "type": "string", - "order": 6 - } - } - } -} diff --git a/airbyte-integrations/connectors/destination-exasol/src/test-integration/java/io/airbyte/integrations/destination/exasol/ExasolDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-exasol/src/test-integration/java/io/airbyte/integrations/destination/exasol/ExasolDestinationAcceptanceTest.java deleted file mode 100644 index 8fd01ec062dc..000000000000 --- a/airbyte-integrations/connectors/destination-exasol/src/test-integration/java/io/airbyte/integrations/destination/exasol/ExasolDestinationAcceptanceTest.java +++ /dev/null @@ -1,150 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.exasol; - -import com.exasol.containers.ExasolContainer; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.cdk.db.Database; -import io.airbyte.cdk.db.factory.DSLContextFactory; -import io.airbyte.cdk.db.factory.DatabaseDriver; -import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.cdk.integrations.destination.NamingConventionTransformer; -import io.airbyte.cdk.integrations.standardtest.destination.JdbcDestinationAcceptanceTest; -import io.airbyte.commons.json.Jsons; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; -import org.jooq.DSLContext; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class ExasolDestinationAcceptanceTest extends JdbcDestinationAcceptanceTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(ExasolDestinationAcceptanceTest.class); - - private static final ExasolContainer> EXASOL = new ExasolContainer<>() - .withReuse(true); - - private final NamingConventionTransformer namingResolver = new ExasolSQLNameTransformer(); - private static JsonNode config; - - @BeforeAll - static void startExasolContainer() { - EXASOL.start(); - config = createExasolConfig(EXASOL); - } - - private static JsonNode createExasolConfig(final ExasolContainer> exasol) { - return Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, exasol.getHost()) - .put(JdbcUtils.PORT_KEY, exasol.getFirstMappedDatabasePort()) - .put("certificateFingerprint", exasol.getTlsCertificateFingerprint().orElseThrow()) - .put(JdbcUtils.USERNAME_KEY, exasol.getUsername()) - 
.put(JdbcUtils.PASSWORD_KEY, exasol.getPassword()) - .put(JdbcUtils.SCHEMA_KEY, "TEST") - .build()); - } - - @AfterAll - static void stopExasolContainer() { - EXASOL.stop(); - } - - @Override - protected String getImageName() { - return "airbyte/destination-exasol:dev"; - } - - @Override - protected JsonNode getConfig() { - return Jsons.clone(config); - } - - @Override - protected JsonNode getFailCheckConfig() { - final JsonNode clone = Jsons.clone(getConfig()); - ((ObjectNode) clone).put(JdbcUtils.PASSWORD_KEY, "wrong password"); - return clone; - } - - @Override - protected boolean supportBasicDataTypeTest() { - return true; - } - - @Override - protected boolean supportArrayDataTypeTest() { - return true; - } - - @Override - protected boolean supportObjectDataTypeTest() { - return true; - } - - @Override - protected boolean implementsNamespaces() { - return true; - } - - @Override - protected List retrieveRecords(final TestDestinationEnv testEnv, - final String streamName, - final String namespace, - final JsonNode streamSchema) - throws SQLException { - return retrieveRecordsFromTable(namingResolver.getRawTableName(streamName), "\"" + namespace + "\"") - .stream() - .map(r -> r.get(JavaBaseConstants.COLUMN_NAME_DATA.toUpperCase())) - .map(node -> Jsons.deserialize(node.asText())) - .collect(Collectors.toList()); - } - - private List retrieveRecordsFromTable(final String tableName, final String schemaName) throws SQLException { - final String query = String.format("SELECT * FROM %s.%s ORDER BY %s ASC", schemaName, tableName, ExasolSqlOperations.COLUMN_NAME_EMITTED_AT); - LOGGER.info("Retrieving records using query {}", query); - try (final DSLContext dslContext = getDSLContext(config)) { - final List result = new Database(dslContext) - .query(ctx -> new ArrayList<>(ctx.fetch(query))); - return result - .stream() - .map(r -> r.formatJSON(JdbcUtils.getDefaultJSONFormat())) - .map(Jsons::deserialize) - .collect(Collectors.toList()); - } - } - - private static DSLContext getDSLContext(final JsonNode config) { - final String jdbcUrl = - String.format(DatabaseDriver.EXASOL.getUrlFormatString(), config.get(JdbcUtils.HOST_KEY).asText(), config.get(JdbcUtils.PORT_KEY).asInt()); - final Map jdbcConnectionProperties = Map.of("fingerprint", config.get("certificateFingerprint").asText()); - return DSLContextFactory.create( - config.get(JdbcUtils.USERNAME_KEY).asText(), - config.get(JdbcUtils.PASSWORD_KEY).asText(), - DatabaseDriver.EXASOL.getDriverClassName(), - jdbcUrl, - null, - jdbcConnectionProperties); - } - - @Override - protected void setup(final TestDestinationEnv testEnv, final HashSet TEST_SCHEMAS) { - // Nothing to do - } - - @Override - protected void tearDown(final TestDestinationEnv testEnv) { - EXASOL.purgeDatabase(); - } - -} diff --git a/airbyte-integrations/connectors/destination-exasol/src/test-integration/java/io/airbyte/integrations/destination/exasol/ExasolSqlOperationsAcceptanceTest.java b/airbyte-integrations/connectors/destination-exasol/src/test-integration/java/io/airbyte/integrations/destination/exasol/ExasolSqlOperationsAcceptanceTest.java deleted file mode 100644 index dd32fea81ee6..000000000000 --- a/airbyte-integrations/connectors/destination-exasol/src/test-integration/java/io/airbyte/integrations/destination/exasol/ExasolSqlOperationsAcceptanceTest.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.exasol; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.*; -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import com.exasol.containers.ExasolContainer; -import io.airbyte.cdk.db.factory.DataSourceFactory; -import io.airbyte.cdk.db.jdbc.DefaultJdbcDatabase; -import io.airbyte.cdk.db.jdbc.JdbcDatabase; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.SQLSyntaxErrorException; -import java.util.Arrays; -import javax.sql.DataSource; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class ExasolSqlOperationsAcceptanceTest { - - private static final ExasolContainer> EXASOL = new ExasolContainer<>() - .withReuse(true); - private ExasolSqlOperations operations; - - @BeforeAll - static void startExasolContainer() { - EXASOL.start(); - } - - @AfterAll - static void stopExasolContainer() { - EXASOL.stop(); - } - - @BeforeEach - void setup() { - this.operations = new ExasolSqlOperations(); - EXASOL.purgeDatabase(); - } - - @Test - void executeTransactionEmptyList() { - assertDoesNotThrow(() -> executeTransaction()); - } - - @Test - void executeTransactionSingleStatementSuccess() throws Exception { - executeTransaction("CREATE SCHEMA TESTING_SCHEMA"); - assertSchemaExists("TESTING_SCHEMA", true); - } - - @Test - void executeTransactionTowStatementsSuccess() throws Exception { - executeTransaction("CREATE SCHEMA TESTING_SCHEMA", "CREATE TABLE TESTING_TABLE (C1 VARCHAR(5))"); - assertSchemaExists("TESTING_SCHEMA", true); - assertTableExists("TESTING_SCHEMA", "TESTING_TABLE"); - } - - @Test - void executeTransactionTwoStatementsFailure() throws Exception { - assertThrows(SQLSyntaxErrorException.class, () -> executeTransaction("CREATE SCHEMA TESTING_SCHEMA", "INVALID STATEMENT")); - assertSchemaExists("TESTING_SCHEMA", false); - } - - private static void assertSchemaExists(String schemaName, boolean exists) throws SQLException { - try (ResultSet rs = EXASOL.createConnection().getMetaData().getSchemas(null, schemaName)) { - assertThat("Schema exists", rs.next(), equalTo(exists)); - } - } - - private static void assertTableExists(String schemaName, String tableName) throws SQLException { - try (ResultSet rs = EXASOL.createConnection().getMetaData().getTables(null, schemaName, tableName, null)) { - assertThat("Table exists", rs.next(), equalTo(true)); - } - } - - private void executeTransaction(String... 
statements) throws Exception { - this.operations.executeTransaction(createDatabase(), Arrays.asList(statements)); - } - - private JdbcDatabase createDatabase() { - DataSource dataSource = DataSourceFactory.create(EXASOL.getUsername(), EXASOL.getPassword(), ExasolDestination.DRIVER_CLASS, EXASOL.getJdbcUrl()); - return new DefaultJdbcDatabase(dataSource); - } - -} diff --git a/airbyte-integrations/connectors/destination-exasol/src/test/java/io/airbyte/integrations/destination/exasol/ExasolDestinationTest.java b/airbyte-integrations/connectors/destination-exasol/src/test/java/io/airbyte/integrations/destination/exasol/ExasolDestinationTest.java deleted file mode 100644 index 79789c1232eb..000000000000 --- a/airbyte-integrations/connectors/destination-exasol/src/test/java/io/airbyte/integrations/destination/exasol/ExasolDestinationTest.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.exasol; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.*; -import static org.junit.jupiter.api.Assertions.*; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.map.MoreMaps; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.testcontainers.shaded.com.google.common.collect.ImmutableMap; - -class ExasolDestinationTest { - - private ExasolDestination destination; - - @BeforeEach - void setup() { - destination = new ExasolDestination(); - } - - private JsonNode createConfig() { - return createConfig(new HashMap<>()); - } - - private JsonNode createConfig(final Map additionalConfigs) { - return Jsons.jsonNode(MoreMaps.merge(baseParameters(), additionalConfigs)); - } - - private Map baseParameters() { - return ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, "localhost") - .put(JdbcUtils.PORT_KEY, "8563") - .put(JdbcUtils.USERNAME_KEY, "sys") - .put(JdbcUtils.SCHEMA_KEY, "mySchema") - .build(); - } - - @Test - void toJdbcConfigDefault() { - var result = destination.toJdbcConfig(createConfig()); - assertAll( - () -> assertThat(result.size(), equalTo(3)), - () -> assertThat(result.get(JdbcUtils.USERNAME_KEY).asText(), equalTo("sys")), - () -> assertThat(result.get(JdbcUtils.JDBC_URL_KEY).asText(), equalTo("jdbc:exa:localhost:8563")), - () -> assertThat(result.get(JdbcUtils.SCHEMA_KEY).asText(), equalTo("mySchema"))); - } - - @Test - void toJdbcConfigWithPassword() { - var result = destination.toJdbcConfig(createConfig(Map.of(JdbcUtils.PASSWORD_KEY, "exasol"))); - assertAll( - () -> assertThat(result.size(), equalTo(4)), - () -> assertThat(result.get(JdbcUtils.PASSWORD_KEY).asText(), equalTo("exasol"))); - } - - @Test - void toJdbcConfigWithJdbcUrlParameters() { - var result = destination.toJdbcConfig(createConfig(Map.of(JdbcUtils.JDBC_URL_PARAMS_KEY, "param=value"))); - assertAll( - () -> assertThat(result.size(), equalTo(4)), - () -> assertThat(result.get(JdbcUtils.JDBC_URL_PARAMS_KEY).asText(), equalTo("param=value"))); - } - - @Test - void getDefaultConnectionProperties() { - var result = destination.getDefaultConnectionProperties(createConfig()); - assertThat(result, equalTo(Map.of("autocommit", "0"))); - } - - @Test - void getDefaultConnectionPropertiesWithFingerprint() { - var result = destination.getDefaultConnectionProperties(createConfig(Map.of("certificateFingerprint", 
"ABC"))); - assertThat(result, equalTo(Map.of("fingerprint", "ABC", "autocommit", "0"))); - } - -} diff --git a/airbyte-integrations/connectors/destination-exasol/src/test/java/io/airbyte/integrations/destination/exasol/ExasolSQLNameTransformerTest.java b/airbyte-integrations/connectors/destination-exasol/src/test/java/io/airbyte/integrations/destination/exasol/ExasolSQLNameTransformerTest.java deleted file mode 100644 index e5dd08d17c9a..000000000000 --- a/airbyte-integrations/connectors/destination-exasol/src/test/java/io/airbyte/integrations/destination/exasol/ExasolSQLNameTransformerTest.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.exasol; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.*; -import static org.junit.jupiter.api.Assertions.*; - -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.CsvSource; - -class ExasolSQLNameTransformerTest { - - private ExasolSQLNameTransformer transformer; - - @BeforeEach - void setUp() { - transformer = new ExasolSQLNameTransformer(); - } - - @ParameterizedTest - @CsvSource({"text, TEXT", "Text, TEXT", "TEXT, TEXT", "_äöüß, _ÄÖÜSS"}) - void applyDefaultCase(String input, String expectedOutput) { - assertEquals(expectedOutput, transformer.applyDefaultCase(input)); - } - - @ParameterizedTest - @CsvSource({"stream, \"_airbyte_raw_stream\"", - "Stream, \"_airbyte_raw_Stream\"", - "stream*, \"_airbyte_raw_stream_\"", - "äöü, \"_airbyte_raw_aou\""}) - void getRawTableName(String streamName, String expectedTableName) { - assertEquals(expectedTableName, transformer.getRawTableName(streamName)); - } - - @Test - void getTmpTableNamePrefixSuffix() { - String tmpTableName = transformer.getTmpTableName("stream"); - assertThat(tmpTableName, allOf( - startsWith("\"_airbyte_tmp_"), - endsWith("_stream\""))); - } - - @Test - void getTmpTableNameDifferentForEachCall() { - String name1 = transformer.getTmpTableName("stream"); - String name2 = transformer.getTmpTableName("stream"); - assertThat(name1, not(equalTo(name2))); - } - - @ParameterizedTest - @CsvSource({"stream, stream", - "Stream, Stream", - "STREAM, STREAM", - "stream*, stream_", - "_stream_, _stream_", - "äöü, aou", - "\"stream, stream", - "stream\", stream", - "\"stream\", stream",}) - void convertStreamName(String streamName, String expectedTableName) { - assertThat(transformer.convertStreamName(streamName), equalTo("\"" + expectedTableName + "\"")); - } - -} diff --git a/airbyte-integrations/connectors/destination-firebolt/Dockerfile b/airbyte-integrations/connectors/destination-firebolt/Dockerfile deleted file mode 100644 index 01a8aed15fc1..000000000000 --- a/airbyte-integrations/connectors/destination-firebolt/Dockerfile +++ /dev/null @@ -1,29 +0,0 @@ -FROM python:3.9-slim as base - -# build and load all requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -COPY setup.py ./ -# install necessary packages to a temporary folder -RUN pip3 install --prefix=/install --no-cache-dir . 
- -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# copy payload code only -COPY main.py ./ -COPY destination_firebolt ./destination_firebolt - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python3", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=0.1.0 -LABEL io.airbyte.name=airbyte/destination-firebolt diff --git a/airbyte-integrations/connectors/destination-firebolt/README.md b/airbyte-integrations/connectors/destination-firebolt/README.md deleted file mode 100644 index d19fb11dc8a0..000000000000 --- a/airbyte-integrations/connectors/destination-firebolt/README.md +++ /dev/null @@ -1,99 +0,0 @@ -# Firebolt Destination - -This is the repository for the Firebolt destination connector, written in Python. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/destinations/firebolt). - -## Local development - -### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** - -#### Minimum Python version required `= 3.7.0` - -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` - -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt -``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. - -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. - -#### Create credentials -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/destinations/firebolt) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `destination_firebolt/spec.json` file. -Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. - -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `destination firebolt test creds` -and place them into `secrets/config.json`. 
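The spec in `destination_firebolt/spec.json` (included later in this patch) requires `username`, `password` and `database`; the other fields are optional. A hypothetical starting point for `secrets/config.json`, written here as a small Python helper so every placeholder is explicit:

```python
import json
from pathlib import Path

# All values below are placeholders; replace them with real credentials.
config = {
    "username": "user@example.com",   # Firebolt login email
    "password": "REPLACE_ME",
    "database": "my_database",
    "engine": "my_engine",            # optional: engine name or URL
    # "SQL" loads in small batches; "S3" needs bucket/region/key fields.
    "loading_method": {"method": "SQL"},
}

Path("secrets").mkdir(exist_ok=True)
Path("secrets/config.json").write_text(json.dumps(config, indent=2))
```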
- -### Locally running the connector -``` -python main.py spec -python main.py check --config secrets/config.json -# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages -cat integration_tests/messages.jsonl | python main.py write --config secrets/config_sql.json --catalog integration_tests/configured_catalog.json -``` - -### Locally running the connector docker image - - -#### Build -**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** -```bash -airbyte-ci connectors --name=destination-firebolt build -``` - -An image will be built with the tag `airbyte/destination-firebolt:dev`. - -**Via `docker build`:** -```bash -docker build -t airbyte/destination-firebolt:dev . -``` - -#### Run -Then run any of the connector commands as follows: -``` -docker run --rm airbyte/destination-firebolt:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-firebolt:dev check --config /secrets/config.json -# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages -cat integration_tests/messages.jsonl | docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-firebolt:dev write --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` - -## Testing -You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): -```bash -airbyte-ci connectors --name=destination-firebolt test -``` - -### Customizing Acceptance Tests -Customize the `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. -If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`. - -## Dependency Management -All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. -We split dependencies between two groups, dependencies that are: -* required for your connector to work go in the `MAIN_REQUIREMENTS` list. -* required for testing go in the `TEST_REQUIREMENTS` list. - -### Publishing a new version of the connector -You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? -1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-firebolt test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). -3. Make sure the `metadata.yaml` content is up to date. -4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/destinations/firebolt.md`). -5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). -6. Pat yourself on the back for being an awesome contributor. -7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
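The bootstrap notes below describe the connector's PEP-249 usage. A minimal sketch of that flow, mirroring the `check()` implementation further down in this patch (credentials are placeholders; the `connect` and `UsernamePassword` imports match what `destination.py` uses, so this assumes a comparable firebolt-sdk version):

```python
from firebolt.client.auth import UsernamePassword
from firebolt.db import connect

# Connect to a named engine and verify connectivity with a trivial query.
connection = connect(
    database="my_database",
    auth=UsernamePassword("user@example.com", "REPLACE_ME"),
    engine_name="my_engine",
)
with connection:
    with connection.cursor() as cursor:
        cursor.execute("SELECT 1")
        print(cursor.fetchall())
```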
- diff --git a/airbyte-integrations/connectors/destination-firebolt/bootstrap.md b/airbyte-integrations/connectors/destination-firebolt/bootstrap.md deleted file mode 100644 index dade5200d2d5..000000000000 --- a/airbyte-integrations/connectors/destination-firebolt/bootstrap.md +++ /dev/null @@ -1,22 +0,0 @@ -# Firebolt Destination - -## Overview - -Firebolt is a cloud data warehouse purpose-built to provide sub-second analytics performance on massive, terabyte-scale data sets. - -Firebolt has two main concepts: Databases, which denote the storage of data, and Engines, which describe the compute layer on top of a Database. - -Firebolt has three types of tables: External, Fact and Dimension. External tables represent a raw file structure in storage. Dimension tables are optimised for fetching and store data on each node in an Engine. Fact tables are similar to Dimension, but they shard the data across the nodes. The usual workload is to write source data into a set of files on S3, wrap them with an External table and write this data to a fetch-optimised Fact or Dimension table. - -## Connector - -Firebolt is a data warehouse, so the most efficient way to write data into it would be in bulk. The Firebolt connector offers two ways of writing data: SQL and S3. SQL transfers data in small batches and is most useful for prototyping. S3 buffers data on Amazon S3 storage and persists the data to Firebolt at the end of execution. The latter is the most efficient way of loading data, but it requires AWS S3 access. - -This connector uses [firebolt-sdk](https://pypi.org/project/firebolt-sdk/), which is a [PEP-249](https://peps.python.org/pep-0249/) DB API implementation. -A `Connection` object is used to connect to a specified Engine, which runs subsequent queries against the data stored in the Database using the `Cursor` object. -[Pyarrow](https://pypi.org/project/pyarrow/) is used to efficiently store and upload data to S3. - -## Notes - -* Integration testing requires the user to have a running engine. Spinning up an engine can take a while so this ensures a faster iteration on the connector. -* S3 is generally the faster writing strategy and should be preferred. \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/__init__.py b/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/__init__.py deleted file mode 100644 index 90396b049287..000000000000 --- a/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright (c) 2021 Airbyte, Inc., all rights reserved. -# - - -from .destination import DestinationFirebolt - -__all__ = ["DestinationFirebolt"] diff --git a/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/destination.py b/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/destination.py deleted file mode 100644 index 5b169f094237..000000000000 --- a/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/destination.py +++ /dev/null @@ -1,128 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-# - -import json -from datetime import datetime -from logging import getLogger -from typing import Any, Dict, Iterable, Mapping, Optional -from uuid import uuid4 - -from airbyte_cdk import AirbyteLogger -from airbyte_cdk.destinations import Destination -from airbyte_cdk.models import AirbyteConnectionStatus, AirbyteMessage, ConfiguredAirbyteCatalog, DestinationSyncMode, Status, Type -from firebolt.client import DEFAULT_API_URL -from firebolt.client.auth import UsernamePassword -from firebolt.db import Connection, connect - -from .writer import create_firebolt_wirter - -logger = getLogger("airbyte") - - -def parse_config(config: json, logger: Optional[AirbyteLogger] = None) -> Dict[str, Any]: - """ - Convert dict of config values to firebolt.db.Connection arguments - :param config: json-compatible dict of settings - :param logger: AirbyteLogger instance to print logs. - :return: dictionary of firebolt.db.Connection-compatible kwargs - """ - connection_args = { - "database": config["database"], - "auth": UsernamePassword(config["username"], config["password"]), - "api_endpoint": config.get("host", DEFAULT_API_URL), - "account_name": config.get("account"), - } - # engine can be a name or a full URL of a cluster - engine = config.get("engine") - if engine: - if "." in engine: - connection_args["engine_url"] = engine - else: - connection_args["engine_name"] = engine - elif logger: - logger.info("Engine parameter was not provided. Connecting to the default engine.") - return connection_args - - -def establish_connection(config: json, logger: Optional[AirbyteLogger] = None) -> Connection: - """ - Creates a connection to Firebolt database using the parameters provided. - :param config: Json object containing db credentials. - :param logger: AirbyteLogger instance to print logs. - :return: PEP-249 compliant database Connection object. - """ - logger.debug("Connecting to Firebolt.") if logger else None - connection = connect(**parse_config(config, logger)) - logger.debug("Connection to Firebolt established.") if logger else None - return connection - - -class DestinationFirebolt(Destination): - def write( - self, config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog, input_messages: Iterable[AirbyteMessage] - ) -> Iterable[AirbyteMessage]: - - """ - Reads the input stream of messages, config, and catalog to write data to the destination. - - This method returns an iterable (typically a generator of AirbyteMessages via yield) containing state messages received - in the input message stream. Outputting a state message means that every AirbyteRecordMessage which came before it has been - successfully persisted to the destination. This is used to ensure fault tolerance in the case that a sync fails before fully completing, - then the source is given the last state message output from this method as the starting point of the next sync. 
- - :param config: dict of JSON configuration matching the configuration declared in spec.json - :param configured_catalog: The Configured Catalog describing the schema of the data being received and how it should be persisted in the - destination - :param input_messages: The stream of input messages received from the source - :return: Iterable of AirbyteStateMessages wrapped in AirbyteMessage structs - """ - streams = {s.stream.name for s in configured_catalog.streams} - - with establish_connection(config) as connection: - writer = create_firebolt_wirter(connection, config, logger) - - for configured_stream in configured_catalog.streams: - if configured_stream.destination_sync_mode == DestinationSyncMode.overwrite: - writer.delete_table(configured_stream.stream.name) - logger.info(f"Stream {configured_stream.stream.name} is wiped.") - writer.create_raw_table(configured_stream.stream.name) - - for message in input_messages: - if message.type == Type.STATE: - yield message - elif message.type == Type.RECORD: - data = message.record.data - stream = message.record.stream - # Skip unselected streams - if stream not in streams: - logger.debug(f"Stream {stream} was not present in configured streams, skipping") - continue - writer.queue_write_data(stream, str(uuid4()), datetime.now(), json.dumps(data)) - - # Flush any leftover messages - writer.flush() - - def check(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> AirbyteConnectionStatus: - """ - Tests if the input configuration can be used to successfully connect to the destination with the needed permissions, - e.g., if a provided API token or password can be used to connect and write to the destination. - - :param logger: Logging object to display debug/info/error to the logs - (logs will not be accessible via airbyte UI if they are not passed to this logger) - :param config: Json object containing the configuration of this destination, content of this json is as specified in - the properties of the spec.json file - - :return: AirbyteConnectionStatus indicating a Success or Failure - """ - try: - with establish_connection(config, logger) as connection: - # We can only verify correctness of connection parameters on execution - with connection.cursor() as cursor: - cursor.execute("SELECT 1") - # Test access to the bucket, if S3 strategy is used - create_firebolt_wirter(connection, config, logger) - - return AirbyteConnectionStatus(status=Status.SUCCEEDED) - except Exception as e: - return AirbyteConnectionStatus(status=Status.FAILED, message=f"An exception occurred: {repr(e)}") diff --git a/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/spec.json b/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/spec.json deleted file mode 100644 index a0263800bf39..000000000000 --- a/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/spec.json +++ /dev/null @@ -1,109 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/destinations/firebolt", - "supported_destination_sync_modes": ["overwrite", "append"], - "supportsIncremental": true, - "supportsDBT": true, - "supportsNormalization": false, - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Firebolt Spec", - "type": "object", - "required": ["username", "password", "database"], - "additionalProperties": false, - "properties": { - "username": { - "type": "string", - "title": "Username", - "description": "Firebolt email address you use to log in.", - "examples": ["username@email.com"], - "order": 0 - }, - "password": { - "type": "string", - "title": "Password", - "description": "Firebolt password.", - "airbyte_secret": true, - "order": 1 - }, - "account": { - "type": "string", - "title": "Account", - "description": "Firebolt account to log in to." - }, - "host": { - "type": "string", - "title": "Host", - "description": "The host name of your Firebolt database.", - "examples": ["api.app.firebolt.io"] - }, - "database": { - "type": "string", - "title": "Database", - "description": "The database to connect to." - }, - "engine": { - "type": "string", - "title": "Engine", - "description": "Engine name or URL to connect to." - }, - "loading_method": { - "type": "object", - "title": "Loading Method", - "description": "Loading method used to select the way data will be uploaded to Firebolt.", - "oneOf": [ - { - "title": "SQL Inserts", - "additionalProperties": false, - "required": ["method"], - "properties": { - "method": { - "type": "string", - "const": "SQL" - } - } - }, - { - "title": "External Table via S3", - "additionalProperties": false, - "required": [ - "method", - "s3_bucket", - "s3_region", - "aws_key_id", - "aws_key_secret" - ], - "properties": { - "method": { - "type": "string", - "const": "S3" - }, - "s3_bucket": { - "type": "string", - "title": "S3 bucket name", - "description": "The name of the S3 bucket." - }, - "s3_region": { - "type": "string", - "title": "S3 region name", - "description": "Region name of the S3 bucket.", - "examples": ["us-east-1"] - }, - "aws_key_id": { - "type": "string", - "title": "AWS Key ID", - "airbyte_secret": true, - "description": "AWS access key granting read and write access to S3." - }, - "aws_key_secret": { - "type": "string", - "title": "AWS Key Secret", - "airbyte_secret": true, - "description": "Corresponding secret part of the AWS Key." - } - } - } - ] - } - } - } -} diff --git a/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/writer.py b/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/writer.py deleted file mode 100644 index 6935fef35f0b..000000000000 --- a/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/writer.py +++ /dev/null @@ -1,235 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import json -from collections import defaultdict -from datetime import datetime -from time import time -from uuid import uuid4 - -import pyarrow as pa -import pyarrow.parquet as pq -from airbyte_cdk import AirbyteLogger -from firebolt.db import Connection -from pyarrow import fs - - -class FireboltWriter: - """ - Base class for shared writer logic. - """ - - flush_interval = 1000 - - def __init__(self, connection: Connection) -> None: - """ - :param connection: Firebolt SDK connection class with established connection - to the database. - """ - self.connection = connection - self._buffer = defaultdict(list) - self._values = 0 - - def delete_table(self, name: str) -> None: - """ - Delete the resulting table. - Primarily used in Overwrite strategy to clean up previous data. - - :param name: table name to delete. - """ - cursor = self.connection.cursor() - cursor.execute(f"DROP TABLE IF EXISTS _airbyte_raw_{name}") - - def create_raw_table(self, name: str): - """ - Create the resulting _airbyte_raw table. - - :param name: table name to create.
- """ - query = f""" - CREATE FACT TABLE IF NOT EXISTS _airbyte_raw_{name} ( - _airbyte_ab_id TEXT, - _airbyte_emitted_at TIMESTAMP, - _airbyte_data TEXT - ) - PRIMARY INDEX _airbyte_ab_id - """ - cursor = self.connection.cursor() - cursor.execute(query) - - def queue_write_data(self, stream_name: str, id: str, time: datetime, record: str) -> None: - """ - Queue up data in a buffer in memory before writing to the database. - When flush_interval is reached data is persisted. - - :param stream_name: name of the stream for which the data corresponds. - :param id: unique identifier of this data row. - :param time: time of writing. - :param record: string representation of the json data payload. - """ - self._buffer[stream_name].append((id, time, record)) - self._values += 1 - if self._values == self.flush_interval: - self._flush() - - def _flush(self): - """ - Stub for the intermediate data flush that's triggered during the - buffering operation. - """ - raise NotImplementedError() - - def flush(self): - """ - Stub for the data flush at the end of writing operation. - """ - raise NotImplementedError() - - -class FireboltS3Writer(FireboltWriter): - """ - Data writer using the S3 strategy. Data is buffered in memory - before being flushed to S3 in .parquet format. At the end of - the operation data is written to Firebolt databse from S3, allowing - greater ingestion speed. - """ - - flush_interval = 100000 - - def __init__(self, connection: Connection, s3_bucket: str, access_key: str, secret_key: str, s3_region: str) -> None: - """ - :param connection: Firebolt SDK connection class with established connection - to the databse. - :param s3_bucket: Intermediate bucket to store the data files before writing them to Firebolt. - Has to be created and accessible. - :param access_key: AWS Access Key ID that has read/write/delete permissions on the files in the bucket. - :param secret_key: Corresponding AWS Secret Key. - :param s3_region: S3 region. Best to keep this the same as Firebolt database region. Default us-east-1. - """ - super().__init__(connection) - self.key_id = access_key - self.secret_key = secret_key - self.s3_bucket = s3_bucket - self._updated_tables = set() - self.unique_dir = f"{int(time())}_{uuid4()}" - self.fs = fs.S3FileSystem(access_key=access_key, secret_key=secret_key, region=s3_region) - - def _flush(self) -> None: - """ - Intermediate data flush that's triggered during the - buffering operation. Uploads data stored in memory to the S3. - """ - for table, data in self._buffer.items(): - key_list, ts_list, payload = zip(*data) - upload_data = [pa.array(key_list), pa.array(ts_list), pa.array(payload)] - pa_table = pa.table(upload_data, names=["_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_data"]) - pq.write_to_dataset(table=pa_table, root_path=f"{self.s3_bucket}/airbyte_output/{self.unique_dir}/{table}", filesystem=self.fs) - # Update tables - self._updated_tables.update(self._buffer.keys()) - self._buffer.clear() - self._values = 0 - - def flush(self) -> None: - """ - Flush any leftover data after ingestion and write from S3 to Firebolt. - Intermediate data on S3 and External Table will be deleted after write is complete. - """ - self._flush() - for table in self._updated_tables: - self.create_raw_table(table) - self.create_external_table(table) - self.ingest_data(table) - self.cleanup(table) - - def create_external_table(self, name: str) -> None: - """ - Create Firebolt External Table to interface with the files on S3. 
- - :param name: Stream name from which the table name is derived. - """ - query = f""" - CREATE EXTERNAL TABLE IF NOT EXISTS ex_airbyte_raw_{name} ( - _airbyte_ab_id TEXT, - _airbyte_emitted_at TIMESTAMP, - _airbyte_data TEXT - ) - URL = ? - CREDENTIALS = ( AWS_KEY_ID = ? AWS_SECRET_KEY = ? ) - OBJECT_PATTERN = '*.parquet' - TYPE = (PARQUET); - """ - cursor = self.connection.cursor() - cursor.execute(query, parameters=(f"s3://{self.s3_bucket}/airbyte_output/{self.unique_dir}/{name}", self.key_id, self.secret_key)) - - def ingest_data(self, name: str) -> None: - """ - Write data from the External Table to the _airbyte_raw table, effectively - persisting the data in Firebolt. - - :param name: Stream name from which the table name is derived. - """ - query = f"INSERT INTO _airbyte_raw_{name} SELECT * FROM ex_airbyte_raw_{name}" - cursor = self.connection.cursor() - cursor.execute(query) - - def cleanup(self, name: str) -> None: - """ - Clean up the intermediary External table and wipe the S3 folder. - - :param name: Stream name from which the table name is derived. - """ - cursor = self.connection.cursor() - cursor.execute(f"DROP TABLE IF EXISTS ex_airbyte_raw_{name}") - self.fs.delete_dir_contents(f"{self.s3_bucket}/airbyte_output/{self.unique_dir}/{name}") - - -class FireboltSQLWriter(FireboltWriter): - """ - Data writer using the SQL writing strategy. Data is buffered in memory - and flushed using an INSERT INTO SQL statement. This is a less efficient strategy, - better suited for testing and small data sets. - """ - - flush_interval = 1000 - - def __init__(self, connection: Connection) -> None: - """ - :param connection: Firebolt SDK connection class with established connection - to the database. - """ - super().__init__(connection) - - def _flush(self) -> None: - """ - Intermediate data flush that's triggered during the - buffering operation. Writes data stored in memory via SQL commands. - """ - cursor = self.connection.cursor() - # id, written_at, data - for table, data in self._buffer.items(): - cursor.executemany(f"INSERT INTO _airbyte_raw_{table} VALUES (?, ?, ?)", parameters_seq=data) - self._buffer.clear() - self._values = 0 - - def flush(self) -> None: - """ - Final data flush after all data has been written to memory.
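To visualize what `FireboltSQLWriter._flush()` sends to the database: each buffered entry is an `(id, written_at, json_payload)` tuple, so a single `executemany()` per stream inserts the whole batch. A sketch only; the cursor call is shown commented out since it needs a live connection:

```python
from datetime import datetime
from uuid import uuid4

# Shape of the buffered rows handed to executemany(), one batch per stream.
rows = [
    (str(uuid4()), datetime.now(), '{"key": "value1"}'),
    (str(uuid4()), datetime.now(), '{"key": "value2"}'),
]
# cursor.executemany("INSERT INTO _airbyte_raw_my_stream VALUES (?, ?, ?)", parameters_seq=rows)
```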
- """ - self._flush() - - -def create_firebolt_wirter(connection: Connection, config: json, logger: AirbyteLogger) -> FireboltWriter: - if config["loading_method"]["method"] == "S3": - logger.info("Using the S3 writing strategy") - writer = FireboltS3Writer( - connection, - config["loading_method"]["s3_bucket"], - config["loading_method"]["aws_key_id"], - config["loading_method"]["aws_key_secret"], - config["loading_method"]["s3_region"], - ) - else: - logger.info("Using the SQL writing strategy") - writer = FireboltSQLWriter(connection) - return writer diff --git a/airbyte-integrations/connectors/destination-firebolt/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/destination-firebolt/integration_tests/configured_catalog.json deleted file mode 100644 index 7715d5bb6ff0..000000000000 --- a/airbyte-integrations/connectors/destination-firebolt/integration_tests/configured_catalog.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "streams": [ - { - "stream": { - "name": "airbyte_acceptance_table", - "supported_sync_modes": ["full_refresh"], - "source_defined_cursor": false, - "json_schema": { - "type": "object", - "properties": { - "column1": { - "type": "string" - }, - "column2": { - "type": "number" - }, - "column3": { - "type": "string", - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - }, - "column4": { - "type": "number" - }, - "column5": { - "type": "array", - "items": { - "type": "integer" - } - } - } - } - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - } - ] -} diff --git a/airbyte-integrations/connectors/destination-firebolt/integration_tests/integration_test.py b/airbyte-integrations/connectors/destination-firebolt/integration_tests/integration_test.py deleted file mode 100644 index 872db32c3821..000000000000 --- a/airbyte-integrations/connectors/destination-firebolt/integration_tests/integration_test.py +++ /dev/null @@ -1,147 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - -import random -import string -from datetime import datetime -from json import dumps, load -from typing import Dict -from unittest.mock import MagicMock - -from airbyte_cdk.models import AirbyteMessage, AirbyteRecordMessage, Status, Type -from airbyte_cdk.models.airbyte_protocol import ( - AirbyteStream, - ConfiguredAirbyteCatalog, - ConfiguredAirbyteStream, - DestinationSyncMode, - SyncMode, -) -from destination_firebolt.destination import DestinationFirebolt, establish_connection -from firebolt.common.exception import FireboltError -from pytest import fixture, mark, raises - - -@fixture(scope="module") -def config() -> Dict[str, str]: - with open( - "secrets/config.json", - ) as f: - yield load(f) - - -@fixture(scope="module") -def test_table_name() -> str: - letters = string.ascii_lowercase - rnd_string = "".join(random.choice(letters) for _ in range(10)) - return f"airbyte_integration_{rnd_string}" - - -@fixture -def cleanup(config: Dict[str, str], test_table_name: str): - yield - with establish_connection(config, MagicMock()) as connection: - with connection.cursor() as cursor: - cursor.execute(f"DROP TABLE IF EXISTS _airbyte_raw_{test_table_name}") - cursor.execute(f"DROP TABLE IF EXISTS ex_airbyte_raw_{test_table_name}") - - -@fixture -def table_schema() -> dict: - schema = { - "type": "object", - "properties": { - "column1": {"type": ["null", "string"]}, - }, - } - return schema - - -@fixture -def configured_catalogue(test_table_name: str, table_schema: dict) -> ConfiguredAirbyteCatalog: - append_stream = ConfiguredAirbyteStream( - stream=AirbyteStream(name=test_table_name, json_schema=table_schema, supported_sync_modes=[SyncMode.incremental]), - sync_mode=SyncMode.incremental, - destination_sync_mode=DestinationSyncMode.append, - ) - return ConfiguredAirbyteCatalog(streams=[append_stream]) - - -@fixture(scope="module") -def invalid_config() -> Dict[str, str]: - with open( - "integration_tests/invalid_config.json", - ) as f: - yield load(f) - - -@fixture(scope="module") -def invalid_config_s3() -> Dict[str, str]: - with open( - "integration_tests/invalid_config_s3.json", - ) as f: - yield load(f) - - -@fixture -def airbyte_message1(test_table_name: str): - return AirbyteMessage( - type=Type.RECORD, - record=AirbyteRecordMessage( - stream=test_table_name, - data={"key1": "value1", "key2": 2}, - emitted_at=int(datetime.now().timestamp()) * 1000, - ), - ) - - -@fixture -def airbyte_message2(test_table_name: str): - return AirbyteMessage( - type=Type.RECORD, - record=AirbyteRecordMessage( - stream=test_table_name, - data={"key1": "value2", "key2": 3}, - emitted_at=int(datetime.now().timestamp()) * 1000, - ), - ) - - -@mark.parametrize("config", ["invalid_config", "invalid_config_s3"]) -def test_check_fails(config, request): - # resolve the fixture whose name is passed in by the parametrize marker - config = request.getfixturevalue(config) - destination = DestinationFirebolt() - status = destination.check(logger=MagicMock(), config=config) - assert status.status == Status.FAILED - - -def test_check_succeeds(config, request): - destination = DestinationFirebolt() - status = destination.check(logger=MagicMock(), config=config) - assert status.status == Status.SUCCEEDED - - -def test_write( - config: Dict[str, str], - configured_catalogue: ConfiguredAirbyteCatalog, - airbyte_message1: AirbyteMessage, - airbyte_message2: AirbyteMessage, - test_table_name: str, - cleanup, - request, -): - destination = DestinationFirebolt() - generator = destination.write(config, configured_catalogue, [airbyte_message1, airbyte_message2]) - result = list(generator) - assert len(result) == 0 - with
establish_connection(config, MagicMock()) as connection: - with connection.cursor() as cursor: - cursor.execute( - f"SELECT _airbyte_ab_id, _airbyte_emitted_at, _airbyte_data FROM _airbyte_raw_{test_table_name} ORDER BY _airbyte_data" - ) - result = cursor.fetchall() - # Make sure no temporary tables present - with raises(FireboltError): - cursor.execute(f"SELECT TOP 0 * FROM ex_airbyte_raw_{test_table_name}") - assert len(result) == 2 - assert result[0][2] == dumps(airbyte_message1.record.data) - assert result[1][2] == dumps(airbyte_message2.record.data) diff --git a/airbyte-integrations/connectors/destination-firebolt/integration_tests/invalid_config.json b/airbyte-integrations/connectors/destination-firebolt/integration_tests/invalid_config.json deleted file mode 100644 index f8251d5271fb..000000000000 --- a/airbyte-integrations/connectors/destination-firebolt/integration_tests/invalid_config.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "username": "xxx", - "password": "xxx", - "database": "non_existing_database_name", - "engine": "database_name_Analytics", - "loading_method": { - "method": "SQL" - } -} diff --git a/airbyte-integrations/connectors/destination-firebolt/integration_tests/invalid_config_s3.json b/airbyte-integrations/connectors/destination-firebolt/integration_tests/invalid_config_s3.json deleted file mode 100644 index 2ab29e87dfe5..000000000000 --- a/airbyte-integrations/connectors/destination-firebolt/integration_tests/invalid_config_s3.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "username": "xxx", - "password": "xxx", - "database": "non_existing_database_name", - "engine": "database_name_Analytics", - "loading_method": { - "method": "S3", - "s3_bucket": "sample_bucket", - "s3_region": "us-east-1", - "aws_key_id": "yyy", - "aws_key_secret": "yyy" - } -} diff --git a/airbyte-integrations/connectors/destination-firebolt/integration_tests/messages.jsonl b/airbyte-integrations/connectors/destination-firebolt/integration_tests/messages.jsonl deleted file mode 100644 index ab871c15bb02..000000000000 --- a/airbyte-integrations/connectors/destination-firebolt/integration_tests/messages.jsonl +++ /dev/null @@ -1,2 +0,0 @@ -{"type": "RECORD", "record": {"stream": "airbyte_acceptance_table", "data": {"column1": "my_value", "column2": 221, "column3": "2021-01-01T20:10:22", "column4": 1.214, "column5": [1,2,3]}, "emitted_at": 1626172757000}} -{"type": "RECORD", "record": {"stream": "airbyte_acceptance_table", "data": {"column1": "my_value2", "column2": 222, "column3": "2021-01-02T22:10:22", "column5": [1,2,null]}, "emitted_at": 1626172757000}} diff --git a/airbyte-integrations/connectors/destination-firebolt/main.py b/airbyte-integrations/connectors/destination-firebolt/main.py deleted file mode 100644 index 1b173be0c2b3..000000000000 --- a/airbyte-integrations/connectors/destination-firebolt/main.py +++ /dev/null @@ -1,11 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import sys - -from destination_firebolt import DestinationFirebolt - -if __name__ == "__main__": - DestinationFirebolt().run(sys.argv[1:]) diff --git a/airbyte-integrations/connectors/destination-firebolt/requirements.txt b/airbyte-integrations/connectors/destination-firebolt/requirements.txt deleted file mode 100644 index d6e1198b1ab1..000000000000 --- a/airbyte-integrations/connectors/destination-firebolt/requirements.txt +++ /dev/null @@ -1 +0,0 @@ --e . 
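For local experimentation, a hypothetical direct invocation of the entrypoint shown in `main.py` above; Airbyte passes the command and file paths as argv, while RECORD messages (for example the contents of `integration_tests/messages.jsonl`) arrive on stdin:

```python
import sys

from destination_firebolt import DestinationFirebolt

# Simulate how Airbyte launches the connector; records are piped on stdin.
sys.argv = [
    "main.py",
    "write",
    "--config", "secrets/config.json",
    "--catalog", "integration_tests/configured_catalog.json",
]
DestinationFirebolt().run(sys.argv[1:])
```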
diff --git a/airbyte-integrations/connectors/destination-firebolt/setup.py b/airbyte-integrations/connectors/destination-firebolt/setup.py deleted file mode 100644 index a2597d9160af..000000000000 --- a/airbyte-integrations/connectors/destination-firebolt/setup.py +++ /dev/null @@ -1,23 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = ["airbyte-cdk", "firebolt-sdk>=0.8.0", "pyarrow"] - -TEST_REQUIREMENTS = ["pytest~=6.1"] - -setup( - name="destination_firebolt", - description="Destination implementation for Firebolt.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json"]}, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/destination-firebolt/unit_tests/test_firebolt_destination.py b/airbyte-integrations/connectors/destination-firebolt/unit_tests/test_firebolt_destination.py deleted file mode 100644 index 8d70a1060b5a..000000000000 --- a/airbyte-integrations/connectors/destination-firebolt/unit_tests/test_firebolt_destination.py +++ /dev/null @@ -1,241 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from datetime import datetime -from typing import Any, Dict -from unittest.mock import MagicMock, call, patch - -from airbyte_cdk.models import ( - AirbyteMessage, - AirbyteRecordMessage, - AirbyteStream, - ConfiguredAirbyteCatalog, - ConfiguredAirbyteStream, - DestinationSyncMode, - Status, - SyncMode, - Type, -) -from destination_firebolt.destination import DestinationFirebolt, establish_connection, parse_config -from pytest import fixture - - -@fixture(params=["my_engine", "my_engine.api.firebolt.io"]) -def config(request: Any) -> Dict[str, str]: - args = { - "database": "my_database", - "username": "my_username", - "password": "my_password", - "engine": request.param, - "loading_method": { - "method": "SQL", - }, - } - return args - - -@fixture -def config_external_table() -> Dict[str, str]: - args = { - "database": "my_database", - "username": "my_username", - "password": "my_password", - "engine": "my_engine", - "loading_method": { - "method": "S3", - "s3_bucket": "my_bucket", - "s3_region": "us-east-1", - "aws_key_id": "aws_key", - "aws_key_secret": "aws_secret", - }, - } - return args - - -@fixture -def config_no_engine() -> Dict[str, str]: - args = { - "database": "my_database", - "username": "my_username", - "password": "my_password", - } - return args - - -@fixture -def logger() -> MagicMock: - return MagicMock() - - -@fixture -def configured_stream1() -> ConfiguredAirbyteStream: - return ConfiguredAirbyteStream( - stream=AirbyteStream( - name="table1", - json_schema={ - "type": "object", - "properties": {"col1": {"type": "string"}, "col2": {"type": "integer"}}, - }, - supported_sync_modes=[SyncMode.incremental], - ), - sync_mode=SyncMode.incremental, - destination_sync_mode=DestinationSyncMode.append, - ) - - -@fixture -def configured_stream2() -> ConfiguredAirbyteStream: - return ConfiguredAirbyteStream( - stream=AirbyteStream( - name="table2", - json_schema={ - "type": "object", - "properties": {"col1": {"type": "string"}, "col2": {"type": "integer"}}, - }, - supported_sync_modes=[SyncMode.incremental], - ), - sync_mode=SyncMode.incremental, - destination_sync_mode=DestinationSyncMode.append, - ) - - -@fixture -def airbyte_message1() -> AirbyteMessage: - return AirbyteMessage( - type=Type.RECORD, - 
record=AirbyteRecordMessage( - stream="table1", - data={"key1": "value1", "key2": 2}, - emitted_at=int(datetime.now().timestamp()) * 1000, - ), - ) - - -@fixture -def airbyte_message2() -> AirbyteMessage: - return AirbyteMessage( - type=Type.RECORD, - record=AirbyteRecordMessage( - stream="table2", - data={"key1": "value2", "key2": 3}, - emitted_at=int(datetime.now().timestamp()) * 1000, - ), - ) - - -@fixture -def airbyte_state_message() -> AirbyteMessage: - return AirbyteMessage(type=Type.STATE) - - -def test_parse_config(config: Dict[str, str]): - config["engine"] = "override_engine" - result = parse_config(config) - assert result["database"] == "my_database" - assert result["engine_name"] == "override_engine" - assert result["auth"].username == "my_username" - assert result["auth"].password == "my_password" - config["engine"] = "override_engine.api.firebolt.io" - result = parse_config(config) - assert result["engine_url"] == "override_engine.api.firebolt.io" - - -@patch("destination_firebolt.destination.connect", MagicMock()) -def test_connection(config: Dict[str, str], config_no_engine: Dict[str, str], logger: MagicMock) -> None: - establish_connection(config, logger) - logger.reset_mock() - establish_connection(config_no_engine, logger) - assert any(["default engine" in msg.args[0] for msg in logger.info.mock_calls]), "No message on using default engine" - # Check no log object - establish_connection(config) - - -@patch("destination_firebolt.writer.FireboltS3Writer") -@patch("destination_firebolt.destination.connect") -def test_check( - mock_connection: MagicMock, mock_writer: MagicMock, config: Dict[str, str], config_external_table: Dict[str, str], logger: MagicMock -): - destination = DestinationFirebolt() - status = destination.check(logger, config) - assert status.status == Status.SUCCEEDED - mock_writer.assert_not_called() - status = destination.check(logger, config_external_table) - assert status.status == Status.SUCCEEDED - mock_writer.assert_called_once() - mock_connection().__enter__().cursor().__enter__().execute.side_effect = Exception("my exception") - status = destination.check(logger, config) - assert status.status == Status.FAILED - - -@patch("destination_firebolt.writer.FireboltSQLWriter") -@patch("destination_firebolt.destination.establish_connection") -def test_sql_write_append( - mock_connection: MagicMock, - mock_writer: MagicMock, - config: Dict[str, str], - configured_stream1: ConfiguredAirbyteStream, - configured_stream2: ConfiguredAirbyteStream, - airbyte_message1: AirbyteMessage, - airbyte_message2: AirbyteMessage, - airbyte_state_message: AirbyteMessage, -) -> None: - catalog = ConfiguredAirbyteCatalog(streams=[configured_stream1, configured_stream2]) - - destination = DestinationFirebolt() - result = destination.write(config, catalog, [airbyte_message1, airbyte_state_message, airbyte_message2]) - - assert list(result) == [airbyte_state_message] - mock_writer.return_value.delete_table.assert_not_called() - # create_raw_table is called once per configured stream, with the stream name only - assert mock_writer.return_value.create_raw_table.mock_calls == [call("table1"), call("table2")] - assert len(mock_writer.return_value.queue_write_data.mock_calls) == 2 - mock_writer.return_value.flush.assert_called_once() - - -@patch("destination_firebolt.writer.FireboltS3Writer") -@patch("destination_firebolt.writer.FireboltSQLWriter") -@patch("destination_firebolt.destination.establish_connection") -def test_sql_write_overwrite( - mock_connection: MagicMock, - mock_writer: MagicMock, - mock_s3_writer: MagicMock, - config: Dict[str, str], - configured_stream1: ConfiguredAirbyteStream, - configured_stream2: ConfiguredAirbyteStream, - airbyte_message1: AirbyteMessage, - airbyte_message2: AirbyteMessage, - airbyte_state_message: AirbyteMessage, -): - # Overwrite triggers a delete - configured_stream1.destination_sync_mode = DestinationSyncMode.overwrite - catalog = ConfiguredAirbyteCatalog(streams=[configured_stream1, configured_stream2]) - - destination = DestinationFirebolt() - result = destination.write(config, catalog, [airbyte_message1, airbyte_state_message, airbyte_message2]) - - mock_s3_writer.assert_not_called() - assert list(result) == [airbyte_state_message] - mock_writer.return_value.delete_table.assert_called_once_with("table1") - # create_raw_table is called once per configured stream, with the stream name only - assert mock_writer.return_value.create_raw_table.mock_calls == [call("table1"), call("table2")] - - -@patch("destination_firebolt.writer.FireboltS3Writer") -@patch("destination_firebolt.writer.FireboltSQLWriter") -@patch("destination_firebolt.destination.establish_connection", MagicMock()) -def test_s3_write( - mock_sql_writer: MagicMock, - mock_s3_writer: MagicMock, - config_external_table: Dict[str, str], - configured_stream1: ConfiguredAirbyteStream, - configured_stream2: ConfiguredAirbyteStream, - airbyte_message1: AirbyteMessage, - airbyte_message2: AirbyteMessage, - airbyte_state_message: AirbyteMessage, -): - catalog = ConfiguredAirbyteCatalog(streams=[configured_stream1, configured_stream2]) - - destination = DestinationFirebolt() - result = destination.write(config_external_table, catalog, [airbyte_message1, airbyte_state_message, airbyte_message2]) - assert list(result) == [airbyte_state_message] - mock_sql_writer.assert_not_called() - mock_s3_writer.assert_called_once() diff --git a/airbyte-integrations/connectors/destination-firebolt/unit_tests/test_writer.py b/airbyte-integrations/connectors/destination-firebolt/unit_tests/test_writer.py deleted file mode 100644 index 6ca5b69c7f24..000000000000 --- a/airbyte-integrations/connectors/destination-firebolt/unit_tests/test_writer.py +++ /dev/null @@ -1,156 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-# - -from typing import Any, Union -from unittest.mock import ANY, MagicMock, call, patch - -from destination_firebolt.writer import FireboltS3Writer, FireboltSQLWriter -from pytest import fixture, mark - - -@fixture -def connection() -> MagicMock: - return MagicMock() - - -@fixture -def sql_writer(connection: MagicMock) -> FireboltSQLWriter: - return FireboltSQLWriter(connection) - - -@fixture -@patch("destination_firebolt.writer.time", MagicMock(return_value=111)) -@patch("destination_firebolt.writer.uuid4", MagicMock(return_value="dummy-uuid")) -def s3_writer(connection: MagicMock) -> FireboltS3Writer: - # Make sure S3FileSystem mock is reset each time - with patch("destination_firebolt.writer.fs.S3FileSystem", MagicMock()): - return FireboltS3Writer(connection, "dummy_bucket", "access_key", "secret_key", "us-east-1") - - -def test_sql_default(sql_writer: FireboltSQLWriter) -> None: - assert len(sql_writer._buffer) == 0 - assert sql_writer.flush_interval == 1000 - - -@mark.parametrize("writer", ["sql_writer", "s3_writer"]) -def test_sql_create(connection: MagicMock, writer: Union[FireboltSQLWriter, FireboltS3Writer], request: Any) -> None: - writer = request.getfixturevalue(writer) - expected_query = """ - CREATE FACT TABLE IF NOT EXISTS _airbyte_raw_dummy ( - _airbyte_ab_id TEXT, - _airbyte_emitted_at TIMESTAMP, - _airbyte_data TEXT - ) - PRIMARY INDEX _airbyte_ab_id - """ - writer.create_raw_table("dummy") - connection.cursor.return_value.execute.assert_called_once_with(expected_query) - - -def test_data_buffering(sql_writer: FireboltSQLWriter) -> None: - sql_writer.queue_write_data("dummy", "id1", 20200101, '{"key": "value"}') - assert sql_writer._buffer["dummy"][0] == ("id1", 20200101, '{"key": "value"}') - assert len(sql_writer._buffer["dummy"]) == 1 - assert len(sql_writer._buffer.keys()) == 1 - sql_writer.queue_write_data("dummy", "id2", 20200102, '{"key2": "value2"}') - assert sql_writer._buffer["dummy"][1] == ("id2", 20200102, '{"key2": "value2"}') - assert len(sql_writer._buffer["dummy"]) == 2 - assert len(sql_writer._buffer.keys()) == 1 - sql_writer.queue_write_data("dummy2", "id3", 20200103, '{"key3": "value3"}') - assert sql_writer._buffer["dummy2"][0] == ("id3", 20200103, '{"key3": "value3"}') - assert len(sql_writer._buffer["dummy"]) == 2 - assert len(sql_writer._buffer["dummy2"]) == 1 - assert len(sql_writer._buffer.keys()) == 2 - - -def test_data_auto_flush_one_table(connection: MagicMock, sql_writer: FireboltSQLWriter) -> None: - sql_writer.flush_interval = 2 - sql_writer.queue_write_data("dummy", "id1", 20200101, '{"key": "value"}') - connection.cursor.return_value.executemany.assert_not_called() - assert sql_writer._values == 1 - sql_writer.queue_write_data("dummy", "id1", 20200101, '{"key": "value"}') - connection.cursor.return_value.executemany.assert_called_once() - assert len(sql_writer._buffer.keys()) == 0 - assert sql_writer._values == 0 - sql_writer.queue_write_data("dummy", "id1", 20200101, '{"key": "value"}') - assert len(sql_writer._buffer.keys()) == 1 - - -def test_data_auto_flush_multi_tables(connection: MagicMock, sql_writer: FireboltSQLWriter) -> None: - sql_writer.flush_interval = 2 - sql_writer.queue_write_data("dummy", "id1", 20200101, '{"key": "value"}') - connection.cursor.return_value.executemany.assert_not_called() - assert sql_writer._values == 1 - sql_writer.queue_write_data("dummy2", "id1", 20200101, '{"key": "value"}') - assert len(connection.cursor.return_value.executemany.mock_calls) == 2 - assert len(sql_writer._buffer.keys()) == 0 - assert sql_writer._values
== 0 - - -def test_s3_default(s3_writer: FireboltS3Writer) -> None: - assert s3_writer.flush_interval == 100000 - assert s3_writer._values == 0 - assert len(s3_writer._buffer.keys()) == 0 - - -def test_s3_delete_tables(connection: MagicMock, s3_writer: FireboltS3Writer) -> None: - expected_sql = "DROP TABLE IF EXISTS _airbyte_raw_dummy" - s3_writer.delete_table("dummy") - connection.cursor.return_value.execute.assert_called_once_with(expected_sql) - - -@patch("pyarrow.parquet.write_to_dataset") -def test_s3_data_auto_flush_one_table(mock_write: MagicMock, s3_writer: FireboltS3Writer) -> None: - s3_writer.flush_interval = 2 - s3_writer.queue_write_data("dummy", "id1", 20200101, '{"key": "value"}') - mock_write.assert_not_called() - assert s3_writer._values == 1 - s3_writer.queue_write_data("dummy", "id1", 20200101, '{"key": "value"}') - mock_write.assert_called_once_with(table=ANY, root_path="dummy_bucket/airbyte_output/111_dummy-uuid/dummy", filesystem=s3_writer.fs) - assert len(s3_writer._buffer.keys()) == 0 - assert s3_writer._values == 0 - assert s3_writer._updated_tables == set(["dummy"]) - mock_write.reset_mock() - s3_writer.queue_write_data("dummy", "id1", 20200101, '{"key": "value"}') - mock_write.assert_not_called() - assert len(s3_writer._buffer.keys()) == 1 - assert s3_writer._updated_tables == set(["dummy"]) - - -@patch("pyarrow.parquet.write_to_dataset") -def test_s3_data_auto_flush_multi_tables(mock_write: MagicMock, s3_writer: FireboltS3Writer) -> None: - s3_writer.flush_interval = 2 - s3_writer.queue_write_data("dummy", "id1", 20200101, '{"key": "value"}') - mock_write.assert_not_called() - assert s3_writer._values == 1 - s3_writer.queue_write_data("dummy2", "id1", 20200101, '{"key": "value"}') - assert mock_write.mock_calls == [ - call(table=ANY, root_path="dummy_bucket/airbyte_output/111_dummy-uuid/dummy", filesystem=s3_writer.fs), - call(table=ANY, root_path="dummy_bucket/airbyte_output/111_dummy-uuid/dummy2", filesystem=s3_writer.fs), - ] - assert len(s3_writer._buffer.keys()) == 0 - assert s3_writer._values == 0 - assert s3_writer._updated_tables == set(["dummy", "dummy2"]) - - -def test_s3_final_flush(connection: MagicMock, s3_writer: FireboltS3Writer) -> None: - s3_writer._updated_tables = set(["dummy", "dummy2"]) - s3_writer.flush() - assert len(connection.cursor.return_value.execute.mock_calls) == 8 - expected_url1 = "s3://dummy_bucket/airbyte_output/111_dummy-uuid/dummy" - expected_url2 = "s3://dummy_bucket/airbyte_output/111_dummy-uuid/dummy2" - connection.cursor.return_value.execute.assert_any_call(ANY, parameters=(expected_url1, "access_key", "secret_key")) - connection.cursor.return_value.execute.assert_any_call(ANY, parameters=(expected_url2, "access_key", "secret_key")) - expected_query1 = "INSERT INTO _airbyte_raw_dummy SELECT * FROM ex_airbyte_raw_dummy" - expected_query2 = "INSERT INTO _airbyte_raw_dummy2 SELECT * FROM ex_airbyte_raw_dummy2" - connection.cursor.return_value.execute.assert_any_call(expected_query1) - connection.cursor.return_value.execute.assert_any_call(expected_query2) - - -def test_s3_cleanup(connection: MagicMock, s3_writer: FireboltS3Writer) -> None: - expected_sql = "DROP TABLE IF EXISTS ex_airbyte_raw_my_table" - bucket_path = "dummy_bucket/airbyte_output/111_dummy-uuid/my_table" - s3_writer.cleanup("my_table") - connection.cursor.return_value.execute.assert_called_once_with(expected_sql) - s3_writer.fs.delete_dir_contents.assert_called_once_with(bucket_path) diff --git a/airbyte-integrations/connectors/destination-keen/build.gradle 
b/airbyte-integrations/connectors/destination-keen/build.gradle deleted file mode 100644 index 777118dbb370..000000000000 --- a/airbyte-integrations/connectors/destination-keen/build.gradle +++ /dev/null @@ -1,33 +0,0 @@ -plugins { - id 'application' - id 'airbyte-java-connector' -} - -airbyteJavaConnector { - cdkVersionRequired = '0.2.0' - features = ['db-destinations'] - useLocalCdk = false -} - -// remove once upgrading the CDK version to 0.4.x or later -java { - compileJava { - options.compilerArgs.remove("-Werror") - } -} - -airbyteJavaConnector.addCdkDependencies() - -application { - mainClass = 'io.airbyte.integrations.destination.keen.KeenDestination' - applicationDefaultJvmArgs = ['-XX:+ExitOnOutOfMemoryError', '-XX:MaxRAMPercentage=75.0'] -} - -dependencies { - - implementation 'org.apache.kafka:kafka-clients:2.8.0' - implementation 'com.joestelmach:natty:0.11' - - // TODO: remove this dependency - implementation libs.google.cloud.storage -} diff --git a/airbyte-integrations/connectors/destination-keen/src/main/java/io/airbyte/integrations/destination/keen/KeenCharactersStripper.java b/airbyte-integrations/connectors/destination-keen/src/main/java/io/airbyte/integrations/destination/keen/KeenCharactersStripper.java deleted file mode 100644 index 6886629c491b..000000000000 --- a/airbyte-integrations/connectors/destination-keen/src/main/java/io/airbyte/integrations/destination/keen/KeenCharactersStripper.java +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.keen; - -import org.apache.commons.lang3.StringUtils; - -public class KeenCharactersStripper { - - // Keen collection names can't contain some special characters, such as non-ASCII accented characters, - // while Kafka topic names can't contain a different set of special characters, except for -._ - // and whitespace characters - public static String stripSpecialCharactersFromStreamName(final String streamName) { - return StringUtils.stripAccents(streamName).replaceAll("[^A-Za-z0-9 -._]", ""); - } - -} diff --git a/airbyte-integrations/connectors/destination-keen/src/main/java/io/airbyte/integrations/destination/keen/KeenDestination.java b/airbyte-integrations/connectors/destination-keen/src/main/java/io/airbyte/integrations/destination/keen/KeenDestination.java deleted file mode 100644 index 6e9f94df646e..000000000000 --- a/airbyte-integrations/connectors/destination-keen/src/main/java/io/airbyte/integrations/destination/keen/KeenDestination.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */ - -package io.airbyte.integrations.destination.keen; - -import static org.apache.kafka.clients.CommonClientConfigs.SECURITY_PROTOCOL_CONFIG; -import static org.apache.kafka.clients.producer.ProducerConfig.ACKS_CONFIG; -import static org.apache.kafka.clients.producer.ProducerConfig.BOOTSTRAP_SERVERS_CONFIG; -import static org.apache.kafka.clients.producer.ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG; -import static org.apache.kafka.clients.producer.ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG; -import static org.apache.kafka.common.config.SaslConfigs.SASL_JAAS_CONFIG; -import static org.apache.kafka.common.config.SaslConfigs.SASL_MECHANISM; -import static org.apache.kafka.common.security.auth.SecurityProtocol.SASL_SSL; -import static org.apache.kafka.common.security.plain.internals.PlainSaslServer.PLAIN_MECHANISM; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.BaseConnector; -import io.airbyte.cdk.integrations.base.AirbyteMessageConsumer; -import io.airbyte.cdk.integrations.base.Destination; -import io.airbyte.cdk.integrations.base.IntegrationRunner; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus.Status; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.util.Properties; -import java.util.function.Consumer; -import org.apache.kafka.clients.producer.KafkaProducer; -import org.apache.kafka.common.serialization.StringSerializer; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class KeenDestination extends BaseConnector implements Destination { - - private static final Logger LOGGER = LoggerFactory.getLogger(KeenDestination.class); - private static final String KAFKA_BROKER = "b1.kafka-in.keen.io:9092,b2.kafka-in.keen.io:9092,b3.kafka-in.keen.io:9092"; - - static final String KEEN_BASE_API_PATH = "https://api.keen.io/3.0"; - static final String CONFIG_PROJECT_ID = "project_id"; - static final String CONFIG_API_KEY = "api_key"; - static final String INFER_TIMESTAMP = "infer_timestamp"; - - @Override - public AirbyteConnectionStatus check(final JsonNode config) { - try { - final String projectId = config.get(CONFIG_PROJECT_ID).textValue(); - final String apiKey = config.get(CONFIG_API_KEY).textValue(); - final KafkaProducer producer = KafkaProducerFactory.create(projectId, apiKey); - - // throws an AuthenticationException if authentication fails - producer.partitionsFor("ANYTHING"); - - return new AirbyteConnectionStatus().withStatus(Status.SUCCEEDED); - } catch (final Exception e) { - return new AirbyteConnectionStatus().withStatus(Status.FAILED); - } - } - - @Override - public AirbyteMessageConsumer getConsumer(final JsonNode config, - final ConfiguredAirbyteCatalog catalog, - final Consumer outputRecordCollector) - throws Exception { - return new KeenRecordsConsumer(config, catalog, outputRecordCollector); - } - - public static void main(final String[] args) throws Exception { - final Destination destination = new KeenDestination(); - LOGGER.info("starting destination: {}", KeenDestination.class); - new IntegrationRunner(destination).run(args); - LOGGER.info("completed destination: {}", KeenDestination.class); - } - - public static class KafkaProducerFactory { - - public static KafkaProducer create(final String projectId, final String apiKey) { - final String jaasConfig = String.format("org.apache.kafka.common.security.plain.PlainLoginModule " + - "required username=\"%s\" 
password=\"%s\";", projectId, apiKey); - - final Properties props = new Properties(); - props.put(BOOTSTRAP_SERVERS_CONFIG, KAFKA_BROKER); - props.put(SECURITY_PROTOCOL_CONFIG, SASL_SSL.name()); - props.put(SASL_MECHANISM, PLAIN_MECHANISM); - props.put(SASL_JAAS_CONFIG, jaasConfig); - props.put(ACKS_CONFIG, "all"); - props.put(KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); - props.put(VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); - return new KafkaProducer<>(props); - } - - } - -} diff --git a/airbyte-integrations/connectors/destination-keen/src/main/java/io/airbyte/integrations/destination/keen/KeenHttpClient.java b/airbyte-integrations/connectors/destination-keen/src/main/java/io/airbyte/integrations/destination/keen/KeenHttpClient.java deleted file mode 100644 index f94f663f99af..000000000000 --- a/airbyte-integrations/connectors/destination-keen/src/main/java/io/airbyte/integrations/destination/keen/KeenHttpClient.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.keen; - -import static io.airbyte.integrations.destination.keen.KeenDestination.KEEN_BASE_API_PATH; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ArrayNode; -import java.io.IOException; -import java.net.URI; -import java.net.http.HttpClient; -import java.net.http.HttpRequest; -import java.net.http.HttpResponse; -import java.net.http.HttpResponse.BodyHandlers; -import java.time.Duration; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class KeenHttpClient { - - private static final Logger LOGGER = LoggerFactory.getLogger(KeenHttpClient.class); - private static final String keenBaseApiPath = "https://api.keen.io/3.0"; - private static final int MINUTE_MILLIS = 1000 * 60; - final HttpClient httpClient = HttpClient.newHttpClient(); - final ObjectMapper objectMapper = new ObjectMapper(); - - public void eraseStream(final String streamToDelete, final String projectId, final String apiKey) - throws IOException, InterruptedException { - eraseStream(streamToDelete, projectId, apiKey, false); - } - - public void eraseStream(final String streamToDelete, final String projectId, final String apiKey, final boolean retried) - throws IOException, InterruptedException { - - final URI deleteUri = URI.create(String.format( - KEEN_BASE_API_PATH + "/projects/%s/events/%s", - projectId, streamToDelete)); - - final HttpRequest request = HttpRequest.newBuilder() - .uri(deleteUri) - .timeout(Duration.ofSeconds(30)) - .header("Authorization", apiKey) - .header("Content-Type", "application/json") - .DELETE() - .build(); - - final HttpResponse response = httpClient.send(request, BodyHandlers.ofString()); - - if (response.statusCode() != 204) { - if (response.statusCode() == 429 && !retried) { - LOGGER.info("Deletes limit exceeded. Sleeping 60 seconds."); - Thread.sleep(MINUTE_MILLIS); - eraseStream(streamToDelete, projectId, apiKey, true); - } else { - throw new IllegalStateException(String.format("Could not erase data from stream designed for overriding: " - + "%s. 
Error message: %s", streamToDelete, response.body())); - } - } - } - - public ArrayNode extract(final String streamName, final String projectId, final String apiKey) - throws IOException, InterruptedException { - final URI extractionUri = URI.create(String.format( - keenBaseApiPath + "/projects/%s/queries/extraction" + - "?api_key=%s&timeframe=this_7_years&event_collection=%s", - projectId, apiKey, streamName)); - - final HttpRequest request = HttpRequest.newBuilder() - .uri(extractionUri) - .timeout(Duration.ofSeconds(30)) - .header("Content-Type", "application/json") - .build(); - - final HttpResponse response = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); - - if (response.statusCode() != 200) { - throw new IllegalStateException("Server did not return successful response: " + response.body()); - } - - return (ArrayNode) objectMapper.readTree(response.body()).get("result"); - } - -} diff --git a/airbyte-integrations/connectors/destination-keen/src/main/java/io/airbyte/integrations/destination/keen/KeenRecordsConsumer.java b/airbyte-integrations/connectors/destination-keen/src/main/java/io/airbyte/integrations/destination/keen/KeenRecordsConsumer.java deleted file mode 100644 index 62fef10b6141..000000000000 --- a/airbyte-integrations/connectors/destination-keen/src/main/java/io/airbyte/integrations/destination/keen/KeenRecordsConsumer.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.keen; - -import static io.airbyte.integrations.destination.keen.KeenDestination.CONFIG_API_KEY; -import static io.airbyte.integrations.destination.keen.KeenDestination.CONFIG_PROJECT_ID; -import static io.airbyte.integrations.destination.keen.KeenDestination.INFER_TIMESTAMP; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.base.FailureTrackingAirbyteMessageConsumer; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.DestinationSyncMode; -import java.io.IOException; -import java.util.List; -import java.util.Optional; -import java.util.Set; -import java.util.function.Consumer; -import java.util.stream.Collectors; -import org.apache.kafka.clients.producer.KafkaProducer; -import org.apache.kafka.clients.producer.ProducerRecord; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class KeenRecordsConsumer extends FailureTrackingAirbyteMessageConsumer { - - private static final Logger LOGGER = LoggerFactory.getLogger(KeenRecordsConsumer.class); - - private final JsonNode config; - private final ConfiguredAirbyteCatalog catalog; - private final Consumer outputRecordCollector; - - private KeenTimestampService timestampService; - private String projectId; - private String apiKey; - private KafkaProducer kafkaProducer; - private Set streamNames; - - public KeenRecordsConsumer(final JsonNode config, - final ConfiguredAirbyteCatalog catalog, - final Consumer outputRecordCollector) { - this.config = config; - this.catalog = catalog; - this.outputRecordCollector = outputRecordCollector; - this.kafkaProducer = null; - this.streamNames = Set.of(); - LOGGER.info("initializing 
consumer."); - } - - @Override - protected void startTracked() throws IOException, InterruptedException { - projectId = config.get(CONFIG_PROJECT_ID).textValue(); - apiKey = config.get(CONFIG_API_KEY).textValue(); - final boolean timestampInferenceEnabled = Optional.ofNullable(config.get(INFER_TIMESTAMP)) - .map(JsonNode::booleanValue) - .orElse(true); - this.kafkaProducer = KeenDestination.KafkaProducerFactory.create(projectId, apiKey); - this.streamNames = getStrippedStreamNames(); - this.timestampService = new KeenTimestampService(this.catalog, timestampInferenceEnabled); - eraseOverwriteStreams(); - } - - @Override - protected void acceptTracked(final AirbyteMessage msg) { - if (msg.getType() == Type.STATE) { - outputRecordCollector.accept(msg); - return; - } else if (msg.getType() != Type.RECORD) { - return; - } - - final String streamName = getStreamName(msg.getRecord()); - final JsonNode data = this.timestampService.injectTimestamp(msg.getRecord()); - - kafkaProducer.send(new ProducerRecord<>(streamName, data.toString())); - } - - private Set getStrippedStreamNames() { - return catalog.getStreams() - .stream() - .map(ConfiguredAirbyteStream::getStream) - .map(AirbyteStream::getName) - .map(KeenCharactersStripper::stripSpecialCharactersFromStreamName) - .collect(Collectors.toSet()); - } - - private void eraseOverwriteStreams() throws IOException, InterruptedException { - final KeenHttpClient keenHttpClient = new KeenHttpClient(); - LOGGER.info("erasing streams with override options selected."); - - final List streamsToDelete = this.catalog.getStreams().stream() - .filter(stream -> stream.getDestinationSyncMode() == DestinationSyncMode.OVERWRITE) - .map(stream -> KeenCharactersStripper.stripSpecialCharactersFromStreamName(stream.getStream().getName())) - .collect(Collectors.toList()); - - for (final String streamToDelete : streamsToDelete) { - LOGGER.info("erasing stream " + streamToDelete); - keenHttpClient.eraseStream(streamToDelete, projectId, apiKey); - } - } - - private String getStreamName(final AirbyteRecordMessage recordMessage) { - String streamName = recordMessage.getStream(); - if (streamNames.contains(streamName)) { - return streamName; - } - streamName = KeenCharactersStripper.stripSpecialCharactersFromStreamName(streamName); - if (!streamNames.contains(streamName)) { - throw new IllegalArgumentException( - String.format( - "Message contained record from a stream that was not in the catalog. \ncatalog: %s , \nmessage: %s", - Jsons.serialize(catalog), Jsons.serialize(recordMessage))); - } - return streamName; - } - - @Override - protected void close(final boolean hasFailed) { - kafkaProducer.flush(); - kafkaProducer.close(); - } - -} diff --git a/airbyte-integrations/connectors/destination-keen/src/main/java/io/airbyte/integrations/destination/keen/KeenTimestampService.java b/airbyte-integrations/connectors/destination-keen/src/main/java/io/airbyte/integrations/destination/keen/KeenTimestampService.java deleted file mode 100644 index 43dfb127e3f9..000000000000 --- a/airbyte-integrations/connectors/destination-keen/src/main/java/io/airbyte/integrations/destination/keen/KeenTimestampService.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.keen; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.JsonNodeFactory; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.joestelmach.natty.Parser; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.time.Instant; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; -import org.apache.commons.lang3.tuple.Pair; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * This class is used for timestamp inference. Keen leverages time-related data for its - * analytics, so it's important to have timestamp values for historical data where possible. If a stream - * contains a cursor field, its value is used as the timestamp, provided it can be parsed. - */ -public class KeenTimestampService { - - private static final Logger LOGGER = LoggerFactory.getLogger(KeenTimestampService.class); - - private static final long SECONDS_FROM_EPOCH_THRESHOLD = 1_000_000_000L; - - private static final long MILLIS_FROM_EPOCH_THRESHOLD = 10_000_000_000L; - - // Map containing stream names paired with their cursor fields - private Map> streamCursorFields; - private final Parser parser; - private final boolean timestampInferenceEnabled; - - public KeenTimestampService(final ConfiguredAirbyteCatalog catalog, final boolean timestampInferenceEnabled) { - this.streamCursorFields = new HashMap<>(); - this.parser = new Parser(); - this.timestampInferenceEnabled = timestampInferenceEnabled; - - if (timestampInferenceEnabled) { - LOGGER.info("Initializing KeenTimestampService, finding cursor fields."); - streamCursorFields = catalog.getStreams() - .stream() - .filter(stream -> stream.getCursorField().size() > 0) - .map(s -> Pair.of(s.getStream().getName(), s.getCursorField())) - .collect(Collectors.toMap(Pair::getKey, Pair::getValue)); - } - } - - /** - * Tries to inject a keen.timestamp field into the given message data. If the stream contains a cursor - * field, an attempt is made to parse its value into a timestamp. If this fails, the stream is removed - * from the timestamp-parsable stream map, so parsing is not attempted for future messages in the same - * stream. If parsing succeeds, the keen.timestamp field is added as a JSON node to the message data and - * the whole data is returned. Otherwise, keen.timestamp is set to the emittedAt value - * - * @param message AirbyteRecordMessage containing record data - * @return Record data together with keen.timestamp field - */ - public JsonNode injectTimestamp(final AirbyteRecordMessage message) { - final String streamName = message.getStream(); - final List cursorField = streamCursorFields.get(streamName); - final JsonNode data = message.getData(); - if (timestampInferenceEnabled && cursorField != null) { - try { - final String timestamp = parseTimestamp(cursorField, data); - injectTimestamp(data, timestamp); - } catch (final Exception e) { - // If parsing the timestamp has failed, remove the stream from the timestamp-parsable stream map, - // so it won't be parsed for future messages.
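The numeric branch of this inference can be sketched as follows; the thresholds match the constants defined above, and where this sketch raises, the connector instead falls back to parsing the cursor as a date string with natty:

```python
from datetime import datetime, timezone

SECONDS_FROM_EPOCH_THRESHOLD = 1_000_000_000   # below this: treated as an ordered id
MILLIS_FROM_EPOCH_THRESHOLD = 10_000_000_000   # above this: treated as epoch milliseconds


def infer_timestamp(cursor_value: int) -> str:
    """Sketch of the numeric branch of parseTimestamp()/dateFromNumber()."""
    if cursor_value < SECONDS_FROM_EPOCH_THRESHOLD:
        # The connector treats small numbers as ordered ids, not timestamps,
        # and falls back to string parsing instead of raising.
        raise ValueError("not an epoch timestamp")
    if cursor_value > MILLIS_FROM_EPOCH_THRESHOLD:
        return datetime.fromtimestamp(cursor_value / 1000, tz=timezone.utc).isoformat()
    return datetime.fromtimestamp(cursor_value, tz=timezone.utc).isoformat()


print(infer_timestamp(1_600_000_000))      # interpreted as epoch seconds
print(infer_timestamp(1_600_000_000_000))  # interpreted as epoch milliseconds
```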
-        LOGGER.info("Unable to parse cursor field: {} into a keen.timestamp", cursorField);
-        streamCursorFields.remove(streamName);
-        injectTimestamp(data, Instant.ofEpochMilli(message.getEmittedAt()).toString());
-      }
-    } else {
-      injectTimestamp(data, Instant.ofEpochMilli(message.getEmittedAt()).toString());
-    }
-    return data;
-  }
-
-  private void injectTimestamp(final JsonNode data, final String timestamp) {
-    final ObjectNode root = ((ObjectNode) data);
-    root.set("keen", JsonNodeFactory.instance.objectNode().put("timestamp", timestamp));
-  }
-
-  private String parseTimestamp(final List<String> cursorField, final JsonNode data) {
-    final JsonNode timestamp = getNestedNode(data, cursorField);
-    final long numberTimestamp = timestamp.asLong();
-    // if the cursor value is below the given threshold, assume it's not an epoch timestamp but an ordered id
-    if (numberTimestamp >= SECONDS_FROM_EPOCH_THRESHOLD) {
-      return dateFromNumber(numberTimestamp);
-    }
-    // if timestamp is 0, then parsing it to long failed - let's try with String now
-    if (numberTimestamp == 0) {
-      return parser
-          .parse(timestamp.asText())
-          .get(0).getDates()
-          .get(0)
-          .toInstant()
-          .toString();
-    }
-    throw new IllegalStateException();
-  }
-
-  private String dateFromNumber(final Long timestamp) {
-    // if the cursor value is above the given threshold, assume it's a Unix timestamp in milliseconds
-    if (timestamp > MILLIS_FROM_EPOCH_THRESHOLD) {
-      return Instant.ofEpochMilli(timestamp).toString();
-    }
-    return Instant.ofEpochSecond(timestamp).toString();
-  }
-
-  private static JsonNode getNestedNode(final JsonNode data, final List<String> fieldNames) {
-    return fieldNames.stream().reduce(data, JsonNode::get, (first, second) -> second);
-  }
-
-  public Map<String, List<String>> getStreamCursorFields() {
-    return streamCursorFields;
-  }
-
-}
diff --git a/airbyte-integrations/connectors/destination-keen/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-keen/src/main/resources/spec.json
deleted file mode 100644
index 084eb3f2a1a4..000000000000
--- a/airbyte-integrations/connectors/destination-keen/src/main/resources/spec.json
+++ /dev/null
@@ -1,35 +0,0 @@
-{
-  "documentationUrl": "https://docs.airbyte.com/integrations/destinations/keen",
-  "supportsIncremental": true,
-  "supportsNormalization": false,
-  "supportsDBT": false,
-  "supported_destination_sync_modes": ["overwrite", "append"],
-  "connectionSpecification": {
-    "$schema": "http://json-schema.org/draft-07/schema#",
-    "title": "Keen Spec",
-    "type": "object",
-    "required": ["project_id", "api_key"],
-    "additionalProperties": false,
-    "properties": {
-      "project_id": {
-        "description": "To get the Keen Project ID, navigate to the Access tab from the left-hand side panel and check the Project Details section.",
-        "title": "Project ID",
-        "type": "string",
-        "examples": ["58b4acc22ba938934e888322e"]
-      },
-      "api_key": {
-        "title": "API Key",
-        "description": "To get the Keen Master API Key, navigate to the Access tab from the left-hand side panel and check the Project Details section.",
-        "type": "string",
-        "examples": ["ABCDEFGHIJKLMNOPRSTUWXYZ"],
-        "airbyte_secret": true
-      },
-      "infer_timestamp": {
-        "title": "Infer Timestamp",
-        "description": "Allow the connector to guess the keen.timestamp value based on the streamed data.",
-        "type": "boolean",
-        "default": true
-      }
-    }
-  }
-}
diff --git a/airbyte-integrations/connectors/destination-keen/src/test-integration/java/io/airbyte/integrations/destination/keen/KeenDestinationTest.java
b/airbyte-integrations/connectors/destination-keen/src/test-integration/java/io/airbyte/integrations/destination/keen/KeenDestinationTest.java deleted file mode 100644 index 42f0242110ff..000000000000 --- a/airbyte-integrations/connectors/destination-keen/src/test-integration/java/io/airbyte/integrations/destination/keen/KeenDestinationTest.java +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.keen; - -import static io.airbyte.integrations.destination.keen.KeenDestination.CONFIG_API_KEY; -import static io.airbyte.integrations.destination.keen.KeenDestination.CONFIG_PROJECT_ID; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.google.api.client.util.Lists; -import io.airbyte.cdk.integrations.standardtest.destination.DestinationAcceptanceTest; -import io.airbyte.cdk.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; -import io.airbyte.cdk.integrations.standardtest.destination.comparator.TestDataComparator; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Comparator; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.stream.Collectors; - -public class KeenDestinationTest extends DestinationAcceptanceTest { - - private static final String SECRET_FILE_PATH = "secrets/config.json"; - - private final KeenHttpClient keenHttpClient = new KeenHttpClient(); - private final Set collectionsToDelete = new HashSet<>(); - - private String projectId; - private String apiKey; - private JsonNode configJson; - - @Override - protected String getImageName() { - return "airbyte/destination-keen:dev"; - } - - @Override - protected JsonNode getConfig() throws Exception { - return configJson; - } - - @Override - protected TestDataComparator getTestDataComparator() { - return new AdvancedTestDataComparator(); - } - - @Override - protected boolean supportBasicDataTypeTest() { - return true; - } - - @Override - protected boolean supportArrayDataTypeTest() { - return true; - } - - @Override - protected boolean supportObjectDataTypeTest() { - return true; - } - - @Override - protected JsonNode getFailCheckConfig() throws Exception { - ((ObjectNode) configJson).put(CONFIG_PROJECT_ID, "fake"); - ((ObjectNode) configJson).put(CONFIG_API_KEY, "fake"); - - return configJson; - } - - protected JsonNode getBaseConfigJson() { - return Jsons.deserialize(IOs.readFile(Path.of(SECRET_FILE_PATH))); - } - - @Override - protected List retrieveRecords(final TestDestinationEnv testEnv, - final String streamName, - final String namespace, - final JsonNode streamSchema) - throws Exception { - final String accentStrippedStreamName = KeenCharactersStripper.stripSpecialCharactersFromStreamName(streamName); - collectionsToDelete.add(accentStrippedStreamName); - - final ArrayNode array = keenHttpClient.extract(accentStrippedStreamName, projectId, apiKey); - return Lists.newArrayList(array.elements()).stream() - .sorted(Comparator.comparing(o -> o.get("keen").get("timestamp").textValue())) - .map(node -> (JsonNode) ((ObjectNode) node).without("keen")) - .collect(Collectors.toList()); - } - - @Override - protected void setup(final 
TestDestinationEnv testEnv, final HashSet<String> TEST_SCHEMAS) throws Exception {
-    if (!Files.exists(Path.of(SECRET_FILE_PATH))) {
-      throw new IllegalStateException(
-          "Must provide path to a file containing Keen account credentials: Project ID and Master API Key. " +
-              "By default {module-root}/" + SECRET_FILE_PATH);
-    }
-    configJson = getBaseConfigJson();
-    projectId = configJson.get(CONFIG_PROJECT_ID).asText();
-    apiKey = configJson.get(CONFIG_API_KEY).asText();
-
-  }
-
-  @Override
-  protected void tearDown(final TestDestinationEnv testEnv) throws Exception {
-    for (final String keenCollection : collectionsToDelete) {
-      keenHttpClient.eraseStream(keenCollection, projectId, apiKey);
-    }
-    collectionsToDelete.clear();
-  }
-
-  @Override
-  protected void runSyncAndVerifyStateOutput(final JsonNode config,
-                                             final List<AirbyteMessage> messages,
-                                             final ConfiguredAirbyteCatalog catalog,
-                                             final boolean runNormalization)
-      throws Exception {
-    super.runSyncAndVerifyStateOutput(config, messages, catalog, runNormalization);
-    Thread.sleep(10000);
-  }
-
-}
diff --git a/airbyte-integrations/connectors/destination-keen/src/test/java/io/airbyte/integrations/destination/keen/KeenRecordConsumerTest.java b/airbyte-integrations/connectors/destination-keen/src/test/java/io/airbyte/integrations/destination/keen/KeenRecordConsumerTest.java
deleted file mode 100644
index a6dd7852b1f6..000000000000
--- a/airbyte-integrations/connectors/destination-keen/src/test/java/io/airbyte/integrations/destination/keen/KeenRecordConsumerTest.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.integrations.destination.keen;
-
-import static io.airbyte.integrations.destination.keen.KeenDestination.CONFIG_API_KEY;
-import static io.airbyte.integrations.destination.keen.KeenDestination.CONFIG_PROJECT_ID;
-
-import com.fasterxml.jackson.databind.JsonNode;
-import com.google.common.collect.ImmutableMap;
-import io.airbyte.cdk.integrations.base.FailureTrackingAirbyteMessageConsumer;
-import io.airbyte.cdk.integrations.standardtest.destination.PerStreamStateMessageTest;
-import io.airbyte.commons.json.Jsons;
-import io.airbyte.protocol.models.Field;
-import io.airbyte.protocol.models.JsonSchemaType;
-import io.airbyte.protocol.models.v0.AirbyteMessage;
-import io.airbyte.protocol.models.v0.CatalogHelpers;
-import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog;
-import java.util.List;
-import java.util.function.Consumer;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.DisplayName;
-import org.junit.jupiter.api.extension.ExtendWith;
-import org.mockito.Mock;
-import org.mockito.junit.jupiter.MockitoExtension;
-
-@DisplayName("KeenRecordConsumer")
-@ExtendWith(MockitoExtension.class)
-public class KeenRecordConsumerTest extends PerStreamStateMessageTest {
-
-  private static final String SCHEMA_NAME = "public";
-  private static final String STREAM_NAME = "id_and_name";
-
-  private static final ConfiguredAirbyteCatalog CATALOG = new ConfiguredAirbyteCatalog().withStreams(List.of(
-      CatalogHelpers.createConfiguredAirbyteStream(
-          STREAM_NAME,
-          SCHEMA_NAME,
-          Field.of("id", JsonSchemaType.NUMBER),
-          Field.of("name", JsonSchemaType.STRING))));
-  @Mock
-  private Consumer<AirbyteMessage> outputRecordCollector;
-
-  private KeenRecordsConsumer consumer;
-
-  @BeforeEach
-  public void init() {
-    final JsonNode config = Jsons.jsonNode(ImmutableMap.builder()
-        .put(CONFIG_PROJECT_ID, "test_project")
-        .put(CONFIG_API_KEY, "test_apikey")
-        .build());
-    consumer = new
KeenRecordsConsumer(config, CATALOG, outputRecordCollector); - } - - @Override - protected Consumer getMockedConsumer() { - return outputRecordCollector; - } - - @Override - protected FailureTrackingAirbyteMessageConsumer getMessageConsumer() { - return consumer; - } - -} diff --git a/airbyte-integrations/connectors/destination-keen/src/test/java/io/airbyte/integrations/destination/keen/KeenTimestampServiceTest.java b/airbyte-integrations/connectors/destination-keen/src/test/java/io/airbyte/integrations/destination/keen/KeenTimestampServiceTest.java deleted file mode 100644 index 421f0fe1cd18..000000000000 --- a/airbyte-integrations/connectors/destination-keen/src/test/java/io/airbyte/integrations/destination/keen/KeenTimestampServiceTest.java +++ /dev/null @@ -1,200 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.keen; - -import static java.util.Map.entry; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.protocol.models.v0.AirbyteCatalog; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -public class KeenTimestampServiceTest { - - private final ObjectMapper objectMapper = new ObjectMapper(); - - @Test - void shouldInitializeCursorFieldsFromCatalog() throws IOException { - final ConfiguredAirbyteCatalog configuredCatalog = readConfiguredCatalogFromFile("cursors_catalog.json"); - - final Map> expectedCursorFieldsMap = Map.ofEntries( - entry("StringTypeStream1", List.of("property1")), - entry("StringTypeStream2", List.of("property1")), - entry("StringTypeStream3", List.of("property1")), - entry("NumberTypeStream1", List.of("property1")), - entry("NumberTypeStream2", List.of("property1")), - entry("ArrayTypeStream1", List.of("property1")), - entry("ArrayTypeStream2", List.of("property1")), - entry("ArrayTypeStream3", List.of("property1")), - entry("NestedCursorStream", List.of("property1", "inside"))); - - final KeenTimestampService keenTimestampService = new KeenTimestampService(configuredCatalog, true); - - final Map> cursorFieldMap = keenTimestampService.getStreamCursorFields(); - Assertions.assertEquals(expectedCursorFieldsMap, cursorFieldMap); - } - - @Test - void shouldInjectTimestampWhenCursorIsValidString() throws IOException { - final ConfiguredAirbyteCatalog configuredCatalog = readConfiguredCatalogFromFile("string_cursor_catalog.json"); - - final KeenTimestampService keenTimestampService = new KeenTimestampService(configuredCatalog, true); - - final AirbyteMessage message = buildMessageWithCursorValue(configuredCatalog, "1999/12/15 14:44 utc"); - final JsonNode expectedJson = buildExpectedJsonWithTimestamp("\"1999/12/15 14:44 utc\"", "1999-12-15T14:44:00Z"); - final JsonNode jsonNode = keenTimestampService.injectTimestamp(message.getRecord()); - - Assertions.assertEquals(jsonNode, expectedJson); - } - - @Test - 
void shouldInjectNumberTimestampWhenTimestampIsSeconds() throws IOException { - final ConfiguredAirbyteCatalog configuredCatalog = readConfiguredCatalogFromFile("number_cursor_catalog.json"); - final KeenTimestampService keenTimestampService = new KeenTimestampService(configuredCatalog, true); - - final int secondsCursor = 1628080068; - final AirbyteMessage message = buildMessageWithCursorValue(configuredCatalog, secondsCursor); - final JsonNode expectedJson = buildExpectedJsonWithTimestamp(secondsCursor, "2021-08-04T12:27:48Z"); - final JsonNode jsonNode = keenTimestampService.injectTimestamp(message.getRecord()); - - Assertions.assertEquals(jsonNode, expectedJson); - } - - @Test - void shouldInjectNumberTimestampWhenTimestampIsMillis() throws IOException { - final ConfiguredAirbyteCatalog configuredCatalog = readConfiguredCatalogFromFile("number_cursor_catalog.json"); - final KeenTimestampService keenTimestampService = new KeenTimestampService(configuredCatalog, true); - - final long millisCursor = 1628081113151L; - final AirbyteMessage message = buildMessageWithCursorValue(configuredCatalog, millisCursor); - final JsonNode expectedJson = buildExpectedJsonWithTimestamp(millisCursor, "2021-08-04T12:45:13.151Z"); - final JsonNode jsonNode = keenTimestampService.injectTimestamp(message.getRecord()); - - Assertions.assertEquals(jsonNode, expectedJson); - } - - @Test - void shouldInjectEmittedAtWhenCursorNumberValueIsTooLow() throws IOException { - final ConfiguredAirbyteCatalog configuredCatalog = readConfiguredCatalogFromFile("number_cursor_catalog.json"); - final KeenTimestampService keenTimestampService = new KeenTimestampService(configuredCatalog, true); - - final int notUnixTimestampCursor = 250_000; - final AirbyteMessage message = buildMessageWithCursorValue(configuredCatalog, notUnixTimestampCursor); - - // 2020-10-14T01:09:49.200Z is hardcoded emitted at - final JsonNode expectedJson = buildExpectedJsonWithTimestamp(notUnixTimestampCursor, "2020-10-14T01:09:49.200Z"); - - final JsonNode jsonNode = keenTimestampService.injectTimestamp(message.getRecord()); - - Assertions.assertEquals(jsonNode, expectedJson); - } - - @Test - void shouldInjectEmittedAtWhenCursorIsUnparsableAndRemoveFieldFromMap() throws IOException { - final ConfiguredAirbyteCatalog configuredCatalog = readConfiguredCatalogFromFile("string_cursor_catalog.json"); - - final KeenTimestampService keenTimestampService = new KeenTimestampService(configuredCatalog, true); - - final Map> cursorFieldMap = keenTimestampService.getStreamCursorFields(); - Assertions.assertEquals(cursorFieldMap.size(), 1); - - final AirbyteMessage message = buildMessageWithCursorValue(configuredCatalog, "some_text"); - - // 2020-10-14T01:09:49.200Z is hardcoded emitted at - final JsonNode expectedJson = buildExpectedJsonWithTimestamp("\"some_text\"", "2020-10-14T01:09:49.200Z"); - - final JsonNode jsonNode = keenTimestampService.injectTimestamp(message.getRecord()); - - Assertions.assertEquals(jsonNode, expectedJson); - Assertions.assertEquals(cursorFieldMap.size(), 0); - } - - @Test - void shouldInjectEmittedAtWhenCursorIsValidAndInferenceIsDisabled() throws IOException { - final ConfiguredAirbyteCatalog configuredCatalog = readConfiguredCatalogFromFile("number_cursor_catalog.json"); - final KeenTimestampService keenTimestampService = new KeenTimestampService(configuredCatalog, false); - - final int secondsCursor = 1628080068; - final AirbyteMessage message = buildMessageWithCursorValue(configuredCatalog, secondsCursor); - - // 
2020-10-14T01:09:49.200Z is hardcoded emitted at - final JsonNode expectedJson = buildExpectedJsonWithTimestamp(secondsCursor, "2020-10-14T01:09:49.200Z"); - final JsonNode jsonNode = keenTimestampService.injectTimestamp(message.getRecord()); - - Assertions.assertEquals(jsonNode, expectedJson); - } - - @Test - void shouldInjectTimestampWhenCursorIsNestedField() throws IOException { - final ConfiguredAirbyteCatalog configuredCatalog = readConfiguredCatalogFromFile("nested_cursor_catalog.json"); - final KeenTimestampService keenTimestampService = new KeenTimestampService(configuredCatalog, true); - - final int secondsCursor = 1628080068; - final AirbyteMessage message = buildMessageWithCursorValue(configuredCatalog, - ImmutableMap.builder().put("nestedProperty", secondsCursor).build()); - - final String nestedJson = String.format("{\"nestedProperty\": %s}", secondsCursor); - - final JsonNode expectedJson = buildExpectedJsonWithTimestamp(nestedJson, "2021-08-04T12:27:48Z"); - final JsonNode jsonNode = keenTimestampService.injectTimestamp(message.getRecord()); - - Assertions.assertEquals(jsonNode, expectedJson); - } - - private AirbyteMessage buildMessageWithCursorValue(final ConfiguredAirbyteCatalog configuredCatalog, final T cursorValue) { - return new AirbyteMessage() - .withType(AirbyteMessage.Type.RECORD) - .withRecord(new AirbyteRecordMessage() - .withStream(configuredCatalog.getStreams().get(0).getStream().getName()) - .withEmittedAt(1602637789200L) - .withData(Jsons.jsonNode(ImmutableMap.builder() - .put("cursorProperty", cursorValue) - .put("otherProperty", "something") - .build()))); - } - - private JsonNode buildExpectedJsonWithTimestamp(final T value, final String parsedTimestamp) - throws JsonProcessingException { - return objectMapper.readTree( - String.format( - "{" + - "\"cursorProperty\": %s," + - "\"otherProperty\": \"something\"," + - "\"keen\" : { \"timestamp\": \"%s\"}" + - "}", - value, parsedTimestamp)); - } - - private ConfiguredAirbyteCatalog readConfiguredCatalogFromFile(final String fileName) - throws IOException { - final AirbyteCatalog catalog = Jsons.deserialize(MoreResources.readResource(fileName), AirbyteCatalog.class); - return new ConfiguredAirbyteCatalog() - .withStreams(catalog.getStreams() - .stream() - .map(this::toConfiguredStreamWithCursors) - .collect(Collectors.toList())); - } - - public ConfiguredAirbyteStream toConfiguredStreamWithCursors(final AirbyteStream stream) { - return new ConfiguredAirbyteStream() - .withStream(stream) - .withCursorField(stream.getDefaultCursorField()); - } - -} diff --git a/airbyte-integrations/connectors/destination-keen/src/test/resources/cursors_catalog.json b/airbyte-integrations/connectors/destination-keen/src/test/resources/cursors_catalog.json deleted file mode 100644 index df442b54ebfe..000000000000 --- a/airbyte-integrations/connectors/destination-keen/src/test/resources/cursors_catalog.json +++ /dev/null @@ -1,124 +0,0 @@ -{ - "streams": [ - { - "name": "StreamWithoutCursors", - "json_schema": { - "properties": { - "property1": { - "type": "string" - } - } - } - }, - { - "name": "StringTypeStream1", - "source_defined_cursor": true, - "default_cursor_field": ["property1"], - "json_schema": { - "properties": { - "property1": { - "type": "string" - } - } - } - }, - { - "name": "StringTypeStream2", - "source_defined_cursor": true, - "default_cursor_field": ["property1"], - "json_schema": { - "properties": { - "property1": { - "type": "varchar" - } - } - } - }, - { - "name": "StringTypeStream3", - 
"source_defined_cursor": true, - "default_cursor_field": ["property1"], - "json_schema": { - "properties": { - "property1": { - "type": "time" - } - } - } - }, - { - "name": "NumberTypeStream1", - "source_defined_cursor": true, - "default_cursor_field": ["property1"], - "json_schema": { - "properties": { - "property1": { - "type": "number" - } - } - } - }, - { - "name": "NumberTypeStream2", - "source_defined_cursor": true, - "default_cursor_field": ["property1"], - "json_schema": { - "properties": { - "property1": { - "type": "integer" - } - } - } - }, - { - "name": "ArrayTypeStream1", - "source_defined_cursor": true, - "default_cursor_field": ["property1"], - "json_schema": { - "properties": { - "property1": { - "type": ["null", "integer"] - } - } - } - }, - { - "name": "ArrayTypeStream2", - "source_defined_cursor": true, - "default_cursor_field": ["property1"], - "json_schema": { - "properties": { - "property1": { - "type": [null, "integer"] - } - } - } - }, - { - "name": "ArrayTypeStream3", - "source_defined_cursor": true, - "default_cursor_field": ["property1"], - "json_schema": { - "properties": { - "property1": { - "type": ["anything", "integer", "anything"] - } - } - } - }, - { - "name": "NestedCursorStream", - "source_defined_cursor": true, - "default_cursor_field": ["property1", "inside"], - "json_schema": { - "properties": { - "property1": { - "inside": { - "type": "number" - } - } - } - } - } - ] -} diff --git a/airbyte-integrations/connectors/destination-keen/src/test/resources/nested_cursor_catalog.json b/airbyte-integrations/connectors/destination-keen/src/test/resources/nested_cursor_catalog.json deleted file mode 100644 index 0c81f13e382b..000000000000 --- a/airbyte-integrations/connectors/destination-keen/src/test/resources/nested_cursor_catalog.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "streams": [ - { - "name": "NumberTypeStream", - "source_defined_cursor": true, - "default_cursor_field": ["cursorProperty", "nestedProperty"], - "json_schema": { - "properties": { - "cursorProperty": { - "nestedProperty": { - "type": "number" - } - }, - "otherProperty": { - "type": "string" - } - } - } - } - ] -} diff --git a/airbyte-integrations/connectors/destination-keen/src/test/resources/number_cursor_catalog.json b/airbyte-integrations/connectors/destination-keen/src/test/resources/number_cursor_catalog.json deleted file mode 100644 index 3b7deae9c1c5..000000000000 --- a/airbyte-integrations/connectors/destination-keen/src/test/resources/number_cursor_catalog.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "streams": [ - { - "name": "NumberTypeStream", - "source_defined_cursor": true, - "default_cursor_field": ["cursorProperty"], - "json_schema": { - "properties": { - "cursorProperty": { - "type": "number" - }, - "otherProperty": { - "type": "string" - } - } - } - } - ] -} diff --git a/airbyte-integrations/connectors/destination-keen/src/test/resources/string_cursor_catalog.json b/airbyte-integrations/connectors/destination-keen/src/test/resources/string_cursor_catalog.json deleted file mode 100644 index 834e1e8989c2..000000000000 --- a/airbyte-integrations/connectors/destination-keen/src/test/resources/string_cursor_catalog.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "streams": [ - { - "name": "StringTypeStream", - "source_defined_cursor": true, - "default_cursor_field": ["cursorProperty"], - "json_schema": { - "properties": { - "cursorProperty": { - "type": "string" - }, - "otherProperty": { - "type": "string" - } - } - } - } - ] -} diff --git 
a/airbyte-integrations/connectors/destination-kinesis/README.md b/airbyte-integrations/connectors/destination-kinesis/README.md
deleted file mode 100644
index d0647dfb2ef8..000000000000
--- a/airbyte-integrations/connectors/destination-kinesis/README.md
+++ /dev/null
@@ -1,72 +0,0 @@
-# Destination Kinesis
-
-This is the repository for the Kinesis destination connector in Java.
-For information about how to use this connector within Airbyte, see [the User Documentation](https://docs.airbyte.io/integrations/destinations/kinesis).
-
-## Local development
-
-#### Building via Gradle
-From the Airbyte repository root, run:
-```
-./gradlew :airbyte-integrations:connectors:destination-kinesis:build
-```
-
-#### Create credentials
-**If you are a community contributor**, generate the necessary credentials and place them in `secrets/config.json` conforming to the spec file in `src/main/resources/spec.json`.
-Note that the `secrets` directory is git-ignored by default, so there is no danger of accidentally checking in sensitive information.
-
-**If you are an Airbyte core member**, follow the [instructions](https://docs.airbyte.io/connector-development#using-credentials-in-ci) to set up the credentials.
-
-### Locally running the connector docker image
-
-#### Build
-Build the connector image via Gradle:
-
-```
-./gradlew :airbyte-integrations:connectors:destination-kinesis:buildConnectorImage
-```
-Once built, the docker image name and tag on your host will be `airbyte/destination-kinesis:dev`.
-
-#### Run
-Then run any of the connector commands as follows:
-```
-docker run --rm airbyte/destination-kinesis:dev spec
-docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-kinesis:dev check --config /secrets/config.json
-docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-kinesis:dev discover --config /secrets/config.json
-docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-kinesis:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
-```
-
-## Testing
-We use `JUnit` for Java tests.
-
-### Unit and Integration Tests
-Place unit tests under `src/test/io/airbyte/integrations/destinations/kinesis`.
-
-#### Acceptance Tests
-Airbyte has a standard test suite that all destination connectors must pass. Implement the `TODO`s in
-`src/test-integration/java/io/airbyte/integrations/destinations/kinesis/KinesisDestinationAcceptanceTest.java`.
-
-### Using Gradle to run tests
-All commands should be run from the Airbyte project root.
-To run unit tests:
-```
-./gradlew :airbyte-integrations:connectors:destination-kinesis:unitTest
-```
-To run acceptance and custom integration tests:
-```
-./gradlew :airbyte-integrations:connectors:destination-kinesis:integrationTest
-```
-
-## Dependency Management
-
-### Publishing a new version of the connector
-You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what?
-1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-kinesis test`
-2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors).
-3. Make sure the `metadata.yaml` content is up to date.
-4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/destinations/kinesis.md`).
-5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention).
-6. Pat yourself on the back for being an awesome contributor.
-7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
-
diff --git a/airbyte-integrations/connectors/destination-kinesis/bootstrap.md b/airbyte-integrations/connectors/destination-kinesis/bootstrap.md
deleted file mode 100644
index 7ad2fc6f1c0d..000000000000
--- a/airbyte-integrations/connectors/destination-kinesis/bootstrap.md
+++ /dev/null
@@ -1,22 +0,0 @@
-# Kinesis Destination
-
-Amazon Kinesis makes it easy to collect, process, and analyze real-time streaming data so you can get timely insights and react quickly to new information. Amazon Kinesis offers key capabilities to cost-effectively process streaming data at any scale, along with the flexibility to choose the tools that best suit the requirements of your application.
-You can use Kinesis Data Streams for rapid and continuous data intake and aggregation. The type of data used can include IT infrastructure log data, application logs, social media, market data feeds, and web clickstream data. Because the response time for the data intake and processing is in real time, the processing is typically lightweight.
-[Read more about Amazon Kinesis](https://aws.amazon.com/kinesis/)
-
-This connector maps an incoming Airbyte namespace and stream to a different Kinesis stream created and configured with the provided shard count. The connector
-supports the `append` sync mode, which enables records to be streamed directly to an existing Kinesis stream.
-
-The implementation uses the [Kinesis](https://docs.aws.amazon.com/sdk-for-java/latest/developer-guide/examples-kinesis.html) AWS SDK for Java v2 to access the Kinesis service.
-[KinesisStream](./src/main/java/io/airbyte/integrations/destination/kinesis/KinesisStream.java) is the main entrypoint for communicating with Kinesis and providing the needed functionality. Internally it uses a KinesisClient retrieved from the
-[KinesisClientPool](./src/main/java/io/airbyte/integrations/destination/kinesis/KinesisClientPool.java). Records retrieved from the Kinesis stream are mapped to
-[KinesisRecord](./src/main/java/io/airbyte/integrations/destination/kinesis/KinesisRecord.java). Buffering of records is also supported, which should increase performance and throughput by sending the records through a single HTTP request.
-
-The [KinesisMessageConsumer](./src/main/java/io/airbyte/integrations/destination/kinesis/KinesisMessageConsumer.java)
-class contains the logic for handling Airbyte messages, creating the needed Kinesis streams and streaming the received data.
-
-## Development
-
-See the [KinesisStream](./src/main/java/io/airbyte/integrations/destination/kinesis/KinesisStream.java) class for how to use the Kinesis client to access the Kinesis service.
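-
-A minimal sketch of the buffered `putRecords` pattern described above, assuming placeholder stream, key, and payload values, and using only SDK calls that already appear in this connector:
-
-```java
-import java.util.List;
-import software.amazon.awssdk.core.SdkBytes;
-import software.amazon.awssdk.regions.Region;
-import software.amazon.awssdk.services.kinesis.KinesisClient;
-import software.amazon.awssdk.services.kinesis.model.PutRecordsRequestEntry;
-
-public class KinesisPutExample {
-
-  public static void main(String[] args) {
-    // Build a client for the target region; credentials come from the default provider chain here.
-    try (KinesisClient client = KinesisClient.builder().region(Region.US_WEST_1).build()) {
-      // Buffer entries, each with a partition key that determines the destination shard.
-      List<PutRecordsRequestEntry> entries = List.of(
-          PutRecordsRequestEntry.builder()
-              .partitionKey("partition-key-1")
-              .data(SdkBytes.fromUtf8String("{\"greeting\":\"hello\"}"))
-              .build());
-      // Send the whole buffer in a single putRecords call, as KinesisStream.flush does.
-      client.putRecords(b -> b.streamName("example_stream").records(entries));
-    }
-  }
-}
-```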
-
-If you want to learn more, read the [AWS docs](https://docs.aws.amazon.com/sdk-for-java/latest/developer-guide/examples-kinesis.html)
\ No newline at end of file
diff --git a/airbyte-integrations/connectors/destination-kinesis/build.gradle b/airbyte-integrations/connectors/destination-kinesis/build.gradle
deleted file mode 100644
index 3abe284a89a8..000000000000
--- a/airbyte-integrations/connectors/destination-kinesis/build.gradle
+++ /dev/null
@@ -1,37 +0,0 @@
-plugins {
-  id 'application'
-  id 'airbyte-java-connector'
-}
-
-airbyteJavaConnector {
-  cdkVersionRequired = '0.2.0'
-  features = ['db-destinations']
-  useLocalCdk = false
-}
-
-//remove once upgrading the CDK version to 0.4.x or later
-java {
-  compileJava {
-    options.compilerArgs.remove("-Werror")
-  }
-}
-
-airbyteJavaConnector.addCdkDependencies()
-
-application {
-  mainClass = 'io.airbyte.integrations.destination.kinesis.KinesisDestination'
-  applicationDefaultJvmArgs = ['-XX:+ExitOnOutOfMemoryError', '-XX:MaxRAMPercentage=75.0']
-}
-
-def kinesisVersion = '2.17.75'
-def testContainersVersion = '1.16.2'
-def assertVersion = '3.21.0'
-
-dependencies {
-
-  // https://mvnrepository.com/artifact/software.amazon.awssdk/kinesis
-  implementation "software.amazon.awssdk:kinesis:${kinesisVersion}"
-
-  testImplementation "org.assertj:assertj-core:${assertVersion}"
-  testImplementation "org.testcontainers:localstack:${testContainersVersion}"
-}
diff --git a/airbyte-integrations/connectors/destination-kinesis/docker-compose.yml b/airbyte-integrations/connectors/destination-kinesis/docker-compose.yml
deleted file mode 100644
index 64bafc3c5a95..000000000000
--- a/airbyte-integrations/connectors/destination-kinesis/docker-compose.yml
+++ /dev/null
@@ -1,14 +0,0 @@
-version: "3.7"
-
-services:
-  kinesis:
-    image: localstack/localstack:0.12.20
-    ports:
-      - "4566:4566"
-    environment:
-      - "SERVICES=kinesis"
-      - "HOSTNAME=localhost"
-      - "KINESIS_LATENCY=200"
-      - "KINESIS_SHARD_LIMIT=500"
-#      - "AWS_ACCESS_KEY_ID="
-#      - "AWS_SECRET_ACCESS_KEY="
diff --git a/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisClientPool.java b/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisClientPool.java
deleted file mode 100644
index db424ba39f25..000000000000
--- a/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisClientPool.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.integrations.destination.kinesis;
-
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.atomic.AtomicInteger;
-import software.amazon.awssdk.services.kinesis.KinesisClient;
-
-/**
- * KinesisClientPool class for managing a pool of Kinesis clients with different configurations.
- */
-public class KinesisClientPool {
-
-  private static final ConcurrentHashMap<KinesisConfig, Tuple<KinesisClient, AtomicInteger>> clients;
-
-  static {
-    clients = new ConcurrentHashMap<>();
-  }
-
-  private KinesisClientPool() {
-
-  }
-
-  /**
-   * Initializes a Kinesis client for accessing Kinesis. If there is already an existing client with
-   * the provided configuration, it returns the existing one and increases its usage count; if not,
-   * it returns a new one.
-   *
-   * @param kinesisConfig used to configure the Kinesis client.
-   * @return KinesisClient which can be used to access Kinesis.
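-   * <p>
-   * A minimal usage sketch (hypothetical config value): acquire a client, use it, and return it to
-   * the pool so it can be closed once no longer used:
-   *
-   * <pre>{@code
-   * KinesisClient client = KinesisClientPool.initClient(kinesisConfig);
-   * // ... use the client ...
-   * KinesisClientPool.closeClient(kinesisConfig);
-   * }</pre>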
- */ - public static KinesisClient initClient(KinesisConfig kinesisConfig) { - var cachedClient = clients.get(kinesisConfig); - if (cachedClient != null) { - cachedClient.value2().incrementAndGet(); - return cachedClient.value1(); - } else { - var client = KinesisUtils.buildKinesisClient(kinesisConfig); - clients.put(kinesisConfig, Tuple.of(client, new AtomicInteger(1))); - return client; - } - } - - /** - * Returns a Kinesis client to the pool. If the client is no longer used by any other external - * instances it will be closed and removed from the map, if not only its usage count will be - * decreased. - * - * @param kinesisConfig that was used to configure the Kinesis client. - */ - public static void closeClient(KinesisConfig kinesisConfig) { - var cachedClient = clients.get(kinesisConfig); - if (cachedClient == null) { - throw new IllegalStateException("No session for the provided config"); - } - int count = cachedClient.value2().decrementAndGet(); - if (count < 1) { - cachedClient.value1().close(); - clients.remove(kinesisConfig); - } - } - -} diff --git a/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisConfig.java b/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisConfig.java deleted file mode 100644 index 674fb336eab3..000000000000 --- a/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisConfig.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.kinesis; - -import com.fasterxml.jackson.databind.JsonNode; -import java.net.URI; -import java.net.URISyntaxException; -import java.util.Objects; - -/* - * KinesisConfig class for storing immutable configuration for Kinesis. - */ -public class KinesisConfig { - - private final URI endpoint; - - private final String region; - - private final int shardCount; - - private final String accessKey; - - private final String privateKey; - - private final int bufferSize; - - public KinesisConfig(URI endpoint, - String region, - int shardCount, - String accessKey, - String privateKey, - int bufferSize) { - this.endpoint = endpoint; - this.region = region; - this.shardCount = shardCount; - this.accessKey = accessKey; - this.privateKey = privateKey; - this.bufferSize = bufferSize; - } - - public KinesisConfig(JsonNode jsonNode) { - String strend = jsonNode.get("endpoint").asText(); - try { - this.endpoint = strend != null && !strend.isBlank() ? 
new URI(strend) : null; - } catch (URISyntaxException e) { - throw new UncheckedURISyntaxException(e); - } - this.region = jsonNode.get("region").asText(); - this.shardCount = jsonNode.get("shardCount").asInt(5); - this.accessKey = jsonNode.get("accessKey").asText(); - this.privateKey = jsonNode.get("privateKey").asText(); - this.bufferSize = jsonNode.get("bufferSize").asInt(100); - } - - public URI getEndpoint() { - return endpoint; - } - - public String getRegion() { - return region; - } - - public int getShardCount() { - return shardCount; - } - - public String getAccessKey() { - return accessKey; - } - - public String getPrivateKey() { - return privateKey; - } - - public int getBufferSize() { - return bufferSize; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - KinesisConfig that = (KinesisConfig) o; - return Objects.equals(endpoint, that.endpoint) && Objects.equals(region, that.region) && - accessKey.equals(that.accessKey) && privateKey.equals(that.privateKey); - } - - @Override - public int hashCode() { - return Objects.hash(endpoint, region, accessKey, privateKey); - } - - static class UncheckedURISyntaxException extends RuntimeException { - - public UncheckedURISyntaxException(Throwable cause) { - super(cause); - } - - } - -} diff --git a/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisDestination.java b/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisDestination.java deleted file mode 100644 index 1c4cdd16ff5f..000000000000 --- a/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisDestination.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.kinesis; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.BaseConnector; -import io.airbyte.cdk.integrations.base.AirbyteMessageConsumer; -import io.airbyte.cdk.integrations.base.Destination; -import io.airbyte.cdk.integrations.base.IntegrationRunner; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.util.function.Consumer; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * KinesisDestination class for configuring Kinesis as an Airbyte destination. - */ -public class KinesisDestination extends BaseConnector implements Destination { - - private static final Logger LOGGER = LoggerFactory.getLogger(KinesisDestination.class); - - public static void main(String[] args) throws Exception { - new IntegrationRunner(new KinesisDestination()).run(args); - } - - /** - * Check Kinesis connection status with the provided Json configuration. - * - * @param config json configuration for connecting to Kinesis - * @return AirbyteConnectionStatus status of the connection result. 
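-   * <p>
-   * A brief usage sketch ({@code config} here stands for a parsed connector configuration and is a
-   * hypothetical value for illustration):
-   *
-   * <pre>{@code
-   * AirbyteConnectionStatus status = new KinesisDestination().check(config);
-   * // SUCCEEDED when a test stream can be created and written to, FAILED otherwise
-   * }</pre>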
- */ - @Override - public AirbyteConnectionStatus check(final JsonNode config) { - KinesisStream kinesisStream = null; - var streamName = "test_stream"; - try { - var kinesisConfig = new KinesisConfig(config); - kinesisStream = new KinesisStream(kinesisConfig); - kinesisStream.createStream(streamName); - var partitionKey = KinesisUtils.buildPartitionKey(); - kinesisStream.putRecord(streamName, partitionKey, "{}", e -> {}); - return new AirbyteConnectionStatus().withStatus(AirbyteConnectionStatus.Status.SUCCEEDED); - } catch (Exception e) { - LOGGER.error("Error while trying to connect to Kinesis: ", e); - return new AirbyteConnectionStatus().withStatus(AirbyteConnectionStatus.Status.FAILED); - } finally { - if (kinesisStream != null) { - try { - kinesisStream.flush(e -> {}); - kinesisStream.deleteStream(streamName); - } catch (Exception e) { - LOGGER.error("Error while deleting kinesis stream: ", e); - } - kinesisStream.close(); - } - } - } - - /** - * Returns an Airbyte message consumer which can be used to handle the incoming Airbyte messages. - * - * @param config json configuration for connecting to Kinesis - * @param configuredCatalog of the incoming stream. - * @param outputRecordCollector state collector. - * @return KinesisMessageConsumer for consuming Airbyte messages and streaming them to Kinesis. - */ - @Override - public AirbyteMessageConsumer getConsumer(final JsonNode config, - final ConfiguredAirbyteCatalog configuredCatalog, - final Consumer outputRecordCollector) { - final KinesisStream kinesisStream = new KinesisStream(new KinesisConfig(config)); - return new KinesisMessageConsumer(configuredCatalog, kinesisStream, outputRecordCollector); - } - -} diff --git a/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisMessageConsumer.java b/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisMessageConsumer.java deleted file mode 100644 index 071db77d34d3..000000000000 --- a/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisMessageConsumer.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.kinesis; - -import io.airbyte.cdk.integrations.base.FailureTrackingAirbyteMessageConsumer; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.time.Instant; -import java.util.Map; -import java.util.function.Consumer; -import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * KinesisMessageConsumer class for handling incoming Airbyte messages. 
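- * <p>
- * Lifecycle sketch, as wired in this connector: {@code startTracked()} creates the destination
- * streams, {@code acceptTracked(message)} forwards STATE messages and buffers RECORD messages via
- * {@code KinesisStream.putRecord}, and {@code close(hasFailed)} flushes the buffer and returns the
- * client to the pool.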
- */ -public class KinesisMessageConsumer extends FailureTrackingAirbyteMessageConsumer { - - private static final Logger LOGGER = LoggerFactory.getLogger(KinesisMessageConsumer.class); - - private final Consumer outputRecordCollector; - - private final KinesisStream kinesisStream; - - private final Map kinesisStreams; - - public KinesisMessageConsumer(final ConfiguredAirbyteCatalog configuredCatalog, - final KinesisStream kinesisStream, - final Consumer outputRecordCollector) { - this.outputRecordCollector = outputRecordCollector; - this.kinesisStream = kinesisStream; - var nameTransformer = new KinesisNameTransformer(); - this.kinesisStreams = configuredCatalog.getStreams().stream() - .collect(Collectors.toUnmodifiableMap( - AirbyteStreamNameNamespacePair::fromConfiguredAirbyteSteam, - k -> new KinesisStreamConfig( - nameTransformer.streamName(k.getStream().getNamespace(), k.getStream().getName()), - k.getDestinationSyncMode()))); - } - - /** - * Start tracking the incoming Airbyte streams by creating the needed Kinesis streams. - */ - @Override - protected void startTracked() { - kinesisStreams.forEach((k, v) -> kinesisStream.createStream(v.getStreamName())); - } - - /** - * Handle an incoming Airbyte message by serializing it to the appropriate Kinesis structure and - * sending it to the stream. - * - * @param message received from the Airbyte source. - */ - @Override - protected void acceptTracked(final AirbyteMessage message) { - if (message.getType() == AirbyteMessage.Type.RECORD) { - var messageRecord = message.getRecord(); - - var streamConfig = - kinesisStreams.get(AirbyteStreamNameNamespacePair.fromRecordMessage(messageRecord)); - - if (streamConfig == null) { - throw new IllegalArgumentException("Unrecognized destination stream"); - } - - var partitionKey = KinesisUtils.buildPartitionKey(); - - var data = Jsons.jsonNode(Map.of( - KinesisRecord.COLUMN_NAME_AB_ID, partitionKey, - KinesisRecord.COLUMN_NAME_DATA, Jsons.serialize(messageRecord.getData()), - KinesisRecord.COLUMN_NAME_EMITTED_AT, Instant.now())); - - var streamName = streamConfig.getStreamName(); - kinesisStream.putRecord(streamName, partitionKey, Jsons.serialize(data), e -> { - LOGGER.error("Error while streaming data to Kinesis", e); - // throw exception and end sync? - }); - } else if (message.getType() == AirbyteMessage.Type.STATE) { - outputRecordCollector.accept(message); - } else { - LOGGER.warn("Unsupported airbyte message type: {}", message.getType()); - } - } - - /** - * Flush the Kinesis stream if there are any remaining messages to be sent and close the client as a - * terminal operation. - * - * @param hasFailed flag for indicating if the operation has failed. - */ - @Override - protected void close(final boolean hasFailed) { - try { - if (!hasFailed) { - kinesisStream.flush(e -> { - LOGGER.error("Error while streaming data to Kinesis", e); - }); - } - } finally { - kinesisStream.close(); - } - } - -} diff --git a/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisNameTransformer.java b/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisNameTransformer.java deleted file mode 100644 index f9d002099b85..000000000000 --- a/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisNameTransformer.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.kinesis; - -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; - -/** - * KinesisNameTransformer class for creating Kinesis stream names. - */ -class KinesisNameTransformer extends StandardNameTransformer { - - /** - * Create Kinesis destination stream name by combining the incoming namespace and stream - * - * @param namespace of the source data - * @param stream of the source data - */ - String streamName(String namespace, String stream) { - namespace = namespace != null ? namespace : ""; - var streamName = namespace + "_" + stream; - streamName = super.convertStreamName(streamName); - // max char length for kinesis stream name is 128 - return streamName.length() > 128 ? streamName.substring(0, 128) : streamName; - } - -} diff --git a/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisRecord.java b/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisRecord.java deleted file mode 100644 index 27ee0ecb8ab5..000000000000 --- a/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisRecord.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.kinesis; - -import com.fasterxml.jackson.annotation.JsonProperty; -import java.time.Instant; -import java.util.UUID; - -/** - * KinesisRecord class for mapping records in the Kinesis stream. - */ -public class KinesisRecord { - - public static final String COLUMN_NAME_AB_ID = "_airbyte_ab_id"; - public static final String COLUMN_NAME_DATA = "_airbyte_data"; - public static final String COLUMN_NAME_EMITTED_AT = "_airbyte_emitted_at"; - - @JsonProperty(COLUMN_NAME_AB_ID) - private UUID id; - - @JsonProperty(COLUMN_NAME_DATA) - private String data; - - @JsonProperty(COLUMN_NAME_EMITTED_AT) - private Instant timestamp; - - public KinesisRecord() { - - } - - public KinesisRecord(UUID id, String data, Instant timestamp) { - this.id = id; - this.data = data; - this.timestamp = timestamp; - } - - public static KinesisRecord of(UUID id, String data, Instant timestamp) { - return new KinesisRecord(id, data, timestamp); - } - - public UUID getId() { - return id; - } - - public String getData() { - return data; - } - - public Instant getTimestamp() { - return timestamp; - } - - @Override - public String toString() { - return "KinesisRecord{" + - "id=" + id + - ", data='" + data + '\'' + - ", timestamp=" + timestamp + - '}'; - } - -} diff --git a/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisStream.java b/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisStream.java deleted file mode 100644 index 6faa753a1a40..000000000000 --- a/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisStream.java +++ /dev/null @@ -1,193 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.kinesis; - -import io.airbyte.commons.json.Jsons; -import java.io.Closeable; -import java.util.ArrayList; -import java.util.List; -import java.util.function.Consumer; -import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import software.amazon.awssdk.core.BytesWrapper; -import software.amazon.awssdk.core.SdkBytes; -import software.amazon.awssdk.services.kinesis.KinesisClient; -import software.amazon.awssdk.services.kinesis.model.DescribeStreamResponse; -import software.amazon.awssdk.services.kinesis.model.PutRecordsRequestEntry; -import software.amazon.awssdk.services.kinesis.model.Record; -import software.amazon.awssdk.services.kinesis.model.ResourceInUseException; -import software.amazon.awssdk.services.kinesis.model.ResourceNotFoundException; -import software.amazon.awssdk.services.kinesis.model.Shard; -import software.amazon.awssdk.services.kinesis.model.ShardIteratorType; -import software.amazon.awssdk.services.kinesis.model.StreamStatus; - -/** - * KinesisStream class for performing various operations on a Kinesis stream. - */ -public class KinesisStream implements Closeable { - - private static final Logger LOGGER = LoggerFactory.getLogger(KinesisStream.class); - - private final KinesisClient kinesisClient; - - private final KinesisConfig kinesisConfig; - - private final int bufferSize; - - // k:v tuples of > - private final List>> buffer; - - public KinesisStream(KinesisConfig kinesisConfig) { - this.kinesisConfig = kinesisConfig; - this.kinesisClient = KinesisClientPool.initClient(kinesisConfig); - this.bufferSize = kinesisConfig.getBufferSize(); - this.buffer = new ArrayList<>(bufferSize); - } - - /** - * Creates a stream specified via its name and with the provided shard count. The method will block - * and retry every 2s until it verifies that the stream is active and can be written to. If the - * stream is already created it will only wait until it is active. - * - * @param streamName name of the stream to be created. - */ - public void createStream(String streamName) { - try { - kinesisClient.createStream(b -> b.streamName(streamName).shardCount(kinesisConfig.getShardCount())); - } catch (ResourceInUseException e) { - LOGGER.info("Stream with name {} has already been created", streamName); - } - // block/wait until stream is active - for (;;) { - DescribeStreamResponse describeStream = kinesisClient.describeStream(b -> b.streamName(streamName)); - if (describeStream.streamDescription().streamStatus() == StreamStatus.ACTIVE) { - return; - } - try { - Thread.sleep(2000); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - throw KinesisUtils.buildKinesisException("Thread interrupted while waiting for stream to be active", e); - } - } - } - - /** - * Delete the stream specified via its name. The method will block and retry every 2s until it - * verifies that the stream is deleted by receiving the appropriate exception. - * - * @param streamName name of the stream to be deleted. 
- */ - public void deleteStream(String streamName) { - kinesisClient.deleteStream(b -> b.streamName(streamName)); - // block/wait until stream is deleted - for (;;) { - try { - kinesisClient.describeStream(b -> b.streamName(streamName)); - Thread.sleep(2000); - } catch (ResourceNotFoundException e) { - return; - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - throw KinesisUtils.buildKinesisException("Thread interrupted while waiting for stream to be deleted", e); - } - } - } - - /** - * Deletes all streams in the Kinesis service, waiting/blocking until all of them are deleted. - */ - public void deleteAllStreams() { - kinesisClient.listStreams().streamNames().forEach(this::deleteStream); - } - - /** - * Sends a record to the Kinesis stream specified via its name. To improve performance the records - * are buffered until the buffer limit is reached after which they are flushed to its destination - * stream. - * - * @param streamName name of the stream where the record should be sent - * @param partitionKey to determine the destination shard - * @param data actual data to be streamed - * @param exceptionConsumer for handling errors related to flushing data per stream - */ - public void putRecord(String streamName, String partitionKey, String data, Consumer exceptionConsumer) { - buffer.add(Tuple.of(streamName, Tuple.of(partitionKey, data))); - if (buffer.size() == bufferSize) { - flush(exceptionConsumer); - } - } - - /** - * Iterates over all the shards for a given streams and retrieves the records which are combined and - * deserialized to a {@link io.airbyte.integrations.destination.kinesis.KinesisRecord} objects. - * - * @param streamName from where to retrieve the records. - * @return List of KinesisRecord objects retrieved from the stream. - */ - public List getRecords(String streamName) { - DescribeStreamResponse describeStream; - List shards = new ArrayList<>(); - do { - - describeStream = kinesisClient.describeStream(b -> b.streamName(streamName)); - - shards.addAll(describeStream.streamDescription().shards()); - - } while (describeStream.streamDescription().hasMoreShards()); - - // iterate over stream shards and retrieve records - return shards.stream() - .map(Shard::shardId) - .map(sh -> kinesisClient.getShardIterator(b -> b.streamName(streamName) - .shardIteratorType(ShardIteratorType.TRIM_HORIZON) - .shardId(sh)) - .shardIterator()) - .flatMap(it -> kinesisClient.getRecords(b -> b.shardIterator(it)).records().stream()) - .map(Record::data) - .map(BytesWrapper::asUtf8String) - .map(str -> Jsons.deserialize(str, KinesisRecord.class)) - .collect(Collectors.toList()); - } - - /** - * Flush all records previously buffered to increase throughput and performance. Records are grouped - * by stream name and are sent for each stream separately. - * - * @param exceptionConsumer for handling errors related to flushing data per stream, rethrowing an - * exception in the consumer will stop the sync and clear the cache - */ - public void flush(Consumer exceptionConsumer) { - try { - buffer.stream() - .collect(Collectors.groupingBy(Tuple::value1, Collectors.mapping(Tuple::value2, Collectors.toList()))) - .forEach((k, v) -> { - var records = v.stream().map(entry -> PutRecordsRequestEntry.builder() - // partition key used to determine stream shard. 
- .partitionKey(entry.value1()) - .data(SdkBytes.fromUtf8String(entry.value2())) - .build()) - .collect(Collectors.toList()); - try { - kinesisClient.putRecords(b -> b.streamName(k).records(records)); - } catch (Exception e) { - exceptionConsumer.accept(e); - } - }); - } finally { - buffer.clear(); - } - } - - /** - * Return the kinesis client to the pool to be closed if no longer used. - */ - @Override - public void close() { - KinesisClientPool.closeClient(kinesisConfig); - } - -} diff --git a/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisStreamConfig.java b/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisStreamConfig.java deleted file mode 100644 index 868ef0cfaa8f..000000000000 --- a/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisStreamConfig.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.kinesis; - -import io.airbyte.protocol.models.v0.DestinationSyncMode; - -/** - * KinesisStreamConfig class for storing configuration data for every stream. - */ -public class KinesisStreamConfig { - - private final String streamName; - - private final DestinationSyncMode destinationSyncMode; - - public KinesisStreamConfig(String streamName, DestinationSyncMode destinationSyncMode) { - this.streamName = streamName; - this.destinationSyncMode = destinationSyncMode; - } - - public String getStreamName() { - return streamName; - } - - public DestinationSyncMode getDestinationSyncMode() { - return destinationSyncMode; - } - - @Override - public String toString() { - return "KinesisStreamConfig{" + - "streamName='" + streamName + '\'' + - ", destinationSyncMode=" + destinationSyncMode + - '}'; - } - -} diff --git a/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisUtils.java b/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisUtils.java deleted file mode 100644 index c002f74a2eb3..000000000000 --- a/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisUtils.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.kinesis; - -import java.util.UUID; -import software.amazon.awssdk.auth.credentials.AwsBasicCredentials; -import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider; -import software.amazon.awssdk.regions.Region; -import software.amazon.awssdk.services.kinesis.KinesisClient; -import software.amazon.awssdk.services.kinesis.model.KinesisException; - -/** - * KinesisUtils class providing utility methods for various Kinesis functionalities. - */ -public class KinesisUtils { - - private KinesisUtils() { - - } - - /** - * Configures and returns a Kinesis client with the provided configuration. - * - * @param kinesisConfig used to configure the Kinesis client. - * @return KinesisClient which can be used to access Kinesis. 
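-   * <p>
-   * A minimal sketch with hypothetical localstack values matching this connector's
-   * docker-compose.yml (a blank endpoint would target the real AWS region instead):
-   *
-   * <pre>{@code
-   * KinesisConfig config = new KinesisConfig(
-   *     new URI("http://localhost:4566"), "us-east-1", 5, "test", "test", 100);
-   * KinesisClient client = KinesisUtils.buildKinesisClient(config);
-   * }</pre>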
- */
-  static KinesisClient buildKinesisClient(KinesisConfig kinesisConfig) {
-    var kinesisClientBuilder = KinesisClient.builder();
-
-    // configure access credentials
-    kinesisClientBuilder.credentialsProvider(StaticCredentialsProvider.create(
-        AwsBasicCredentials.create(kinesisConfig.getAccessKey(), kinesisConfig.getPrivateKey())));
-
-    if (kinesisConfig.getRegion() != null && !kinesisConfig.getRegion().isBlank()) {
-      // configure access region
-      kinesisClientBuilder.region(Region.of(kinesisConfig.getRegion()));
-    }
-
-    if (kinesisConfig.getEndpoint() != null) {
-      // configure access endpoint
-      kinesisClientBuilder.endpointOverride(kinesisConfig.getEndpoint());
-    }
-
-    return kinesisClientBuilder.build();
-  }
-
-  /**
-   * Build a Kinesis exception with the provided message and cause.
-   *
-   * @param message of the exception
-   * @param cause of the exception
-   * @return KinesisException to be thrown
-   */
-  static KinesisException buildKinesisException(String message, Throwable cause) {
-    return (KinesisException) KinesisException.builder()
-        .message(message)
-        .cause(cause)
-        .build();
-  }
-
-  /**
-   * Create a random UUID which can be used as a partition key for streaming data.
-   *
-   * @return String partition key for distributing data across shards.
-   */
-  static String buildPartitionKey() {
-    return UUID.randomUUID().toString();
-  }
-
-}
diff --git a/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/Tuple.java b/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/Tuple.java
deleted file mode 100644
index 3a3a853776b9..000000000000
--- a/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/Tuple.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.integrations.destination.kinesis;
-
-/**
- * Tuple class for wrapping a pair of objects.
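- * For example, {@code Tuple.of(streamName, Tuple.of(partitionKey, data))} is how
- * {@link KinesisStream#putRecord} buffers a partition key and its payload under a stream name.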
- */
-public class Tuple<V1, V2> {
-
-  private final V1 value1;
-
-  private final V2 value2;
-
-  public Tuple(V1 value1, V2 value2) {
-    this.value1 = value1;
-    this.value2 = value2;
-  }
-
-  public static <V1, V2> Tuple<V1, V2> of(V1 value1, V2 value2) {
-    return new Tuple<>(value1, value2);
-  }
-
-  public V1 value1() {
-    return value1;
-  }
-
-  public V2 value2() {
-    return value2;
-  }
-
-  @Override
-  public String toString() {
-    return "Tuple{" +
-        "value1=" + value1 +
-        ", value2=" + value2 +
-        '}';
-  }
-
-}
diff --git a/airbyte-integrations/connectors/destination-kinesis/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-kinesis/src/main/resources/spec.json
deleted file mode 100644
index 3667ed0ea0b6..000000000000
--- a/airbyte-integrations/connectors/destination-kinesis/src/main/resources/spec.json
+++ /dev/null
@@ -1,67 +0,0 @@
-{
-  "documentationUrl": "https://docs.airbyte.com/integrations/destinations/kinesis",
-  "supportsIncremental": true,
-  "supportsNormalization": false,
-  "supportsDBT": false,
-  "supported_destination_sync_modes": ["append"],
-  "connectionSpecification": {
-    "$schema": "http://json-schema.org/draft-07/schema#",
-    "title": "Kinesis Destination Spec",
-    "type": "object",
-    "required": [
-      "endpoint",
-      "region",
-      "shardCount",
-      "accessKey",
-      "privateKey",
-      "bufferSize"
-    ],
-    "additionalProperties": true,
-    "properties": {
-      "endpoint": {
-        "title": "Endpoint",
-        "description": "AWS Kinesis endpoint.",
-        "type": "string",
-        "examples": ["kinesis.us-west-1.amazonaws.com"],
-        "order": 0
-      },
-      "region": {
-        "title": "Region",
-        "description": "AWS region. Your account determines the Regions that are available to you.",
-        "type": "string",
-        "examples": ["us-west-1"],
-        "order": 1
-      },
-      "shardCount": {
-        "title": "Shard Count",
-        "description": "Number of shards to which the data should be streamed.",
-        "type": "integer",
-        "default": 5,
-        "order": 2
-      },
-      "accessKey": {
-        "title": "Access Key",
-        "description": "Generate the AWS Access Key for current user.",
-        "airbyte_secret": true,
-        "type": "string",
-        "order": 3
-      },
-      "privateKey": {
-        "title": "Private Key",
-        "description": "The AWS Private Key - a string of numbers and letters that are unique for each account, also known as a \"recovery phrase\".",
-        "airbyte_secret": true,
-        "type": "string",
-        "order": 4
-      },
-      "bufferSize": {
-        "title": "Buffer Size",
-        "description": "Buffer size for storing kinesis records before being batch streamed.",
-        "type": "integer",
-        "minimum": 1,
-        "maximum": 500,
-        "default": 100,
-        "order": 5
-      }
-    }
-  }
-}
diff --git a/airbyte-integrations/connectors/destination-kinesis/src/test-integration/java/io/airbyte/integrations/destination/kinesis/KinesisContainerInitializr.java b/airbyte-integrations/connectors/destination-kinesis/src/test-integration/java/io/airbyte/integrations/destination/kinesis/KinesisContainerInitializr.java
deleted file mode 100644
index 1c98d558d9c8..000000000000
--- a/airbyte-integrations/connectors/destination-kinesis/src/test-integration/java/io/airbyte/integrations/destination/kinesis/KinesisContainerInitializr.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */ - -package io.airbyte.integrations.destination.kinesis; - -import java.net.URI; -import org.testcontainers.containers.localstack.LocalStackContainer; -import org.testcontainers.utility.DockerImageName; - -public class KinesisContainerInitializr { - - private static KinesisContainer kinesisContainer; - - private KinesisContainerInitializr() { - - } - - static KinesisContainer initContainer() { - if (kinesisContainer == null) { - kinesisContainer = KinesisContainer.createContainer(); - } - kinesisContainer.start(); - return kinesisContainer; - } - - static class KinesisContainer extends LocalStackContainer { - - private KinesisContainer() { - super(DockerImageName.parse("localstack/localstack:0.12.20")); - } - - static KinesisContainer createContainer() { - return (KinesisContainer) new KinesisContainer() - .withServices(Service.KINESIS) - // lower kinesis response latency to 200 ms to speed up tests - .withEnv("KINESIS_LATENCY", "200") - // increase default shard limit - .withEnv("KINESIS_SHARD_LIMIT", "500"); - } - - URI getEndpointOverride() { - return super.getEndpointOverride(LocalStackContainer.Service.KINESIS); - } - - } - -} diff --git a/airbyte-integrations/connectors/destination-kinesis/src/test-integration/java/io/airbyte/integrations/destination/kinesis/KinesisDataFactory.java b/airbyte-integrations/connectors/destination-kinesis/src/test-integration/java/io/airbyte/integrations/destination/kinesis/KinesisDataFactory.java deleted file mode 100644 index c481cc4d7d4f..000000000000 --- a/airbyte-integrations/connectors/destination-kinesis/src/test-integration/java/io/airbyte/integrations/destination/kinesis/KinesisDataFactory.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.kinesis; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; - -public class KinesisDataFactory { - - private KinesisDataFactory() { - - } - - static JsonNode jsonConfig(String endpoint, String region, String accessKey, String privateKey) { - return Jsons.jsonNode(ImmutableMap.builder() - .put("endpoint", endpoint) - .put("region", region) - .put("shardCount", 5) - .put("accessKey", accessKey) - .put("privateKey", privateKey) - .put("bufferSize", 100) - .build()); - } - -} diff --git a/airbyte-integrations/connectors/destination-kinesis/src/test-integration/java/io/airbyte/integrations/destination/kinesis/KinesisDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-kinesis/src/test-integration/java/io/airbyte/integrations/destination/kinesis/KinesisDestinationAcceptanceTest.java deleted file mode 100644 index 0a30d3555420..000000000000 --- a/airbyte-integrations/connectors/destination-kinesis/src/test-integration/java/io/airbyte/integrations/destination/kinesis/KinesisDestinationAcceptanceTest.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */
-
-package io.airbyte.integrations.destination.kinesis;
-
-import com.fasterxml.jackson.databind.JsonNode;
-import io.airbyte.cdk.integrations.standardtest.destination.DestinationAcceptanceTest;
-import io.airbyte.cdk.integrations.standardtest.destination.comparator.AdvancedTestDataComparator;
-import io.airbyte.cdk.integrations.standardtest.destination.comparator.TestDataComparator;
-import io.airbyte.commons.json.Jsons;
-import java.util.Comparator;
-import java.util.HashSet;
-import java.util.List;
-import java.util.stream.Collectors;
-import org.junit.jupiter.api.BeforeAll;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class KinesisDestinationAcceptanceTest extends DestinationAcceptanceTest {
-
-  private static final Logger LOGGER = LoggerFactory.getLogger(KinesisDestinationAcceptanceTest.class);
-
-  private JsonNode configJson;
-
-  private KinesisStream kinesisStream;
-
-  private KinesisNameTransformer kinesisNameTransformer;
-
-  private static KinesisContainerInitializr.KinesisContainer kinesisContainer;
-
-  @BeforeAll
-  static void initContainer() {
-    kinesisContainer = KinesisContainerInitializr.initContainer();
-  }
-
-  @Override
-  protected void setup(final TestDestinationEnv testEnv, final HashSet<String> TEST_SCHEMAS) {
-    configJson = KinesisDataFactory.jsonConfig(
-        kinesisContainer.getEndpointOverride().toString(),
-        kinesisContainer.getRegion(),
-        kinesisContainer.getAccessKey(),
-        kinesisContainer.getSecretKey());
-    kinesisStream = new KinesisStream(new KinesisConfig(configJson));
-    kinesisNameTransformer = new KinesisNameTransformer();
-  }
-
-  @Override
-  protected TestDataComparator getTestDataComparator() {
-    return new AdvancedTestDataComparator();
-  }
-
-  @Override
-  protected boolean supportBasicDataTypeTest() {
-    return true;
-  }
-
-  @Override
-  protected boolean supportArrayDataTypeTest() {
-    return true;
-  }
-
-  @Override
-  protected boolean supportObjectDataTypeTest() {
-    return true;
-  }
-
-  @Override
-  protected void tearDown(final TestDestinationEnv testEnv) {
-    kinesisStream.deleteAllStreams();
-  }
-
-  @Override
-  protected String getImageName() {
-    return "airbyte/destination-kinesis:dev";
-  }
-
-  @Override
-  protected boolean implementsNamespaces() {
-    return true;
-  }
-
-  @Override
-  protected JsonNode getConfig() {
-    return configJson;
-  }
-
-  @Override
-  protected JsonNode getFailCheckConfig() {
-    return KinesisDataFactory.jsonConfig(
-        "127.0.0.9",
-        "eu-west-1",
-        "random_access_key",
-        "random_secret_key");
-  }
-
-  @Override
-  protected List<JsonNode> retrieveRecords(final TestDestinationEnv testEnv,
-                                           final String streamName,
-                                           final String namespace,
-                                           final JsonNode streamSchema) {
-    final var stream = kinesisNameTransformer.streamName(namespace, streamName);
-    return kinesisStream.getRecords(stream).stream()
-        .sorted(Comparator.comparing(KinesisRecord::getTimestamp))
-        .map(KinesisRecord::getData)
-        .map(Jsons::deserialize)
-        .collect(Collectors.toList());
-  }
-
-}
diff --git a/airbyte-integrations/connectors/destination-kinesis/src/test-integration/java/io/airbyte/integrations/destination/kinesis/KinesisDestinationTest.java b/airbyte-integrations/connectors/destination-kinesis/src/test-integration/java/io/airbyte/integrations/destination/kinesis/KinesisDestinationTest.java
deleted file mode 100644
index 18cdba0a59c2..000000000000
--- a/airbyte-integrations/connectors/destination-kinesis/src/test-integration/java/io/airbyte/integrations/destination/kinesis/KinesisDestinationTest.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright (c)
2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.kinesis; - -import static org.assertj.core.api.Assertions.assertThat; - -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class KinesisDestinationTest { - - private static KinesisContainerInitializr.KinesisContainer kinesisContainer; - - private KinesisDestination kinesisDestination; - - @BeforeAll - static void setup() { - kinesisContainer = KinesisContainerInitializr.initContainer(); - } - - @BeforeEach - void init() { - this.kinesisDestination = new KinesisDestination(); - } - - @Test - void testCheckConnectionWithSuccess() { - - var jsonConfig = KinesisDataFactory.jsonConfig( - kinesisContainer.getEndpointOverride().toString(), - kinesisContainer.getRegion(), - kinesisContainer.getAccessKey(), - kinesisContainer.getSecretKey()); - - var connectionStatus = kinesisDestination.check(jsonConfig); - - assertThat(connectionStatus.getStatus()).isEqualTo(AirbyteConnectionStatus.Status.SUCCEEDED); - } - - @Test - void testCheckTestConnectionWithFailure() { - - var jsonConfig = KinesisDataFactory.jsonConfig( - "127.0.0.9", - "eu-west-1", - "random_access_key", - "random_secret_key"); - - var connectionStatus = kinesisDestination.check(jsonConfig); - - assertThat(connectionStatus.getStatus()).isEqualTo(AirbyteConnectionStatus.Status.FAILED); - - } - -} diff --git a/airbyte-integrations/connectors/destination-kinesis/src/test-integration/java/io/airbyte/integrations/destination/kinesis/KinesisStreamTest.java b/airbyte-integrations/connectors/destination-kinesis/src/test-integration/java/io/airbyte/integrations/destination/kinesis/KinesisStreamTest.java deleted file mode 100644 index b2a07d9d2b4d..000000000000 --- a/airbyte-integrations/connectors/destination-kinesis/src/test-integration/java/io/airbyte/integrations/destination/kinesis/KinesisStreamTest.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.kinesis; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import io.airbyte.commons.json.Jsons; -import java.time.Instant; -import java.util.Map; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import software.amazon.awssdk.services.kinesis.model.ResourceNotFoundException; - -class KinesisStreamTest { - - private static KinesisContainerInitializr.KinesisContainer kinesisContainer; - - private KinesisStream kinesisStream; - - @BeforeAll - static void setup() { - kinesisContainer = KinesisContainerInitializr.initContainer(); - } - - @BeforeEach - void init() { - var jsonConfig = KinesisDataFactory.jsonConfig( - kinesisContainer.getEndpointOverride().toString(), - kinesisContainer.getRegion(), - kinesisContainer.getAccessKey(), - kinesisContainer.getSecretKey()); - this.kinesisStream = new KinesisStream(new KinesisConfig(jsonConfig)); - } - - @AfterEach - void cleanup() { - kinesisStream.deleteAllStreams(); - } - - @Test - void testCreateStream() { - String streamName = "test_create_stream"; - // given - kinesisStream.createStream(streamName); - kinesisStream.flush(e -> {}); - // when - var records = kinesisStream.getRecords(streamName); - - // then - assertThat(records) - .isNotNull() - .hasSize(0); - - } - - @Test - void testDeleteStream() { - String streamName = "test_delete_stream"; - // given - kinesisStream.createStream(streamName); - - // when - kinesisStream.deleteStream(streamName); - - // then - assertThrows(ResourceNotFoundException.class, () -> kinesisStream.getRecords(streamName)); - } - - @Test - void testDeleteAllStreams() { - var streamName1 = "test_delete_all_stream1"; - var streamName2 = "test_delete_all_stream2"; - // given - kinesisStream.createStream(streamName1); - kinesisStream.createStream(streamName2); - - // when - kinesisStream.deleteAllStreams(); - - // then - assertThrows(ResourceNotFoundException.class, () -> kinesisStream.getRecords(streamName1)); - assertThrows(ResourceNotFoundException.class, () -> kinesisStream.getRecords(streamName2)); - - } - - @Test - void testPutRecordAndFlush() { - // given - String streamName = "test_put_record_stream"; - kinesisStream.createStream(streamName); - - var partitionKey1 = KinesisUtils.buildPartitionKey(); - kinesisStream.putRecord(streamName, partitionKey1, createData(partitionKey1, "{\"property\":\"data1\"}"), - e -> {}); - - var partitionKey2 = KinesisUtils.buildPartitionKey(); - kinesisStream.putRecord(streamName, partitionKey2, createData(partitionKey2, "{\"property\":\"data2\"}"), - e -> {}); - - kinesisStream.flush(e -> {}); - - // when - var records = kinesisStream.getRecords(streamName); - - // then - assertThat(records) - .isNotNull() - .hasSize(2) - .anyMatch(r -> r.getData().equals("{\"property\":\"data1\"}")) - .anyMatch(r -> r.getData().equals("{\"property\":\"data2\"}")); - } - - private String createData(String partitionKey, String data) { - var kinesisRecord = Jsons.jsonNode(Map.of( - KinesisRecord.COLUMN_NAME_AB_ID, partitionKey, - KinesisRecord.COLUMN_NAME_DATA, data, - KinesisRecord.COLUMN_NAME_EMITTED_AT, Instant.now())); - return Jsons.serialize(kinesisRecord); - } - -} diff --git a/airbyte-integrations/connectors/destination-kinesis/src/test/java/io/airbyte/integrations/destination/kinesis/KinesisConfigTest.java 
b/airbyte-integrations/connectors/destination-kinesis/src/test/java/io/airbyte/integrations/destination/kinesis/KinesisConfigTest.java deleted file mode 100644 index 4286645ea7bb..000000000000 --- a/airbyte-integrations/connectors/destination-kinesis/src/test/java/io/airbyte/integrations/destination/kinesis/KinesisConfigTest.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.kinesis; - -import static org.assertj.core.api.Assertions.assertThat; - -import java.net.URI; -import java.net.URISyntaxException; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class KinesisConfigTest { - - private KinesisConfig kinesisConfig; - - @BeforeEach - void setup() { - var jsonConfig = KinesisDataFactory.jsonConfig( - "http://aws.kinesis.com", - "eu-west-1", - "random_access_key", - "random_secret_key"); - this.kinesisConfig = new KinesisConfig(jsonConfig); - } - - @Test - void testConfig() throws URISyntaxException { - - assertThat(kinesisConfig) - .hasFieldOrPropertyWithValue("endpoint", new URI("http://aws.kinesis.com")) - .hasFieldOrPropertyWithValue("region", "eu-west-1") - .hasFieldOrPropertyWithValue("shardCount", 5) - .hasFieldOrPropertyWithValue("accessKey", "random_access_key") - .hasFieldOrPropertyWithValue("privateKey", "random_secret_key") - .hasFieldOrPropertyWithValue("bufferSize", 100); - - } - -} diff --git a/airbyte-integrations/connectors/destination-kinesis/src/test/java/io/airbyte/integrations/destination/kinesis/KinesisDataFactory.java b/airbyte-integrations/connectors/destination-kinesis/src/test/java/io/airbyte/integrations/destination/kinesis/KinesisDataFactory.java deleted file mode 100644 index c481cc4d7d4f..000000000000 --- a/airbyte-integrations/connectors/destination-kinesis/src/test/java/io/airbyte/integrations/destination/kinesis/KinesisDataFactory.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.kinesis; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; - -public class KinesisDataFactory { - - private KinesisDataFactory() { - - } - - static JsonNode jsonConfig(String endpoint, String region, String accessKey, String privateKey) { - return Jsons.jsonNode(ImmutableMap.builder() - .put("endpoint", endpoint) - .put("region", region) - .put("shardCount", 5) - .put("accessKey", accessKey) - .put("privateKey", privateKey) - .put("bufferSize", 100) - .build()); - } - -} diff --git a/airbyte-integrations/connectors/destination-kinesis/src/test/java/io/airbyte/integrations/destination/kinesis/KinesisNameTransformerTest.java b/airbyte-integrations/connectors/destination-kinesis/src/test/java/io/airbyte/integrations/destination/kinesis/KinesisNameTransformerTest.java deleted file mode 100644 index 451caadecdc1..000000000000 --- a/airbyte-integrations/connectors/destination-kinesis/src/test/java/io/airbyte/integrations/destination/kinesis/KinesisNameTransformerTest.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */
-
-package io.airbyte.integrations.destination.kinesis;
-
-import static org.assertj.core.api.Assertions.assertThat;
-
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-class KinesisNameTransformerTest {
-
-  private KinesisNameTransformer kinesisNameTransformer;
-
-  @BeforeEach
-  void setup() {
-    this.kinesisNameTransformer = new KinesisNameTransformer();
-  }
-
-  @Test
-  void outputStream() {
-
-    var column = kinesisNameTransformer.streamName("namespace", "stream");
-
-    assertThat(column).matches("namespace_stream");
-
-  }
-
-  @Test
-  void outputStreamConvert() {
-
-    var keyspace = kinesisNameTransformer.streamName("**namespace^h", "##stream");
-
-    assertThat(keyspace).matches("__namespace_h___stream");
-
-  }
-
-}
diff --git a/airbyte-integrations/connectors/destination-kinesis/src/test/java/io/airbyte/integrations/destination/kinesis/KinesisRecordConsumerTest.java b/airbyte-integrations/connectors/destination-kinesis/src/test/java/io/airbyte/integrations/destination/kinesis/KinesisRecordConsumerTest.java
deleted file mode 100644
index f2ac46a15cc3..000000000000
--- a/airbyte-integrations/connectors/destination-kinesis/src/test/java/io/airbyte/integrations/destination/kinesis/KinesisRecordConsumerTest.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.integrations.destination.kinesis;
-
-import io.airbyte.cdk.integrations.base.FailureTrackingAirbyteMessageConsumer;
-import io.airbyte.cdk.integrations.standardtest.destination.PerStreamStateMessageTest;
-import io.airbyte.protocol.models.v0.AirbyteMessage;
-import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog;
-import java.util.function.Consumer;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.DisplayName;
-import org.junit.jupiter.api.extension.ExtendWith;
-import org.mockito.Mock;
-import org.mockito.junit.jupiter.MockitoExtension;
-
-@DisplayName("KinesisRecordConsumer")
-@ExtendWith(MockitoExtension.class)
-public class KinesisRecordConsumerTest extends PerStreamStateMessageTest {
-
-  @Mock
-  private Consumer<AirbyteMessage> outputRecordCollector;
-
-  @Mock
-  private ConfiguredAirbyteCatalog catalog;
-  @Mock
-  private KinesisStream kinesisStream;
-
-  private KinesisMessageConsumer consumer;
-
-  @BeforeEach
-  public void init() {
-    consumer = new KinesisMessageConsumer(catalog, kinesisStream, outputRecordCollector);
-  }
-
-  @Override
-  protected Consumer<AirbyteMessage> getMockedConsumer() {
-    return outputRecordCollector;
-  }
-
-  @Override
-  protected FailureTrackingAirbyteMessageConsumer getMessageConsumer() {
-    return consumer;
-  }
-
-}
diff --git a/airbyte-integrations/connectors/destination-kvdb/README.md b/airbyte-integrations/connectors/destination-kvdb/README.md
deleted file mode 100644
index b834894111b6..000000000000
--- a/airbyte-integrations/connectors/destination-kvdb/README.md
+++ /dev/null
@@ -1,118 +0,0 @@
-# Kvdb Destination
-
-This is the repository for the [Kvdb](https://kvdb.io) destination connector, written in Python. It is intended to be an example of how to write a Python destination. KvDB is a very simple key-value store, which makes it great for illustrating how to write a Python destination connector.
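As a quick orientation, here is a minimal, self-contained sketch (not part of the connector) of the kind of HTTP call `destination_kvdb/client.py` makes against the KvDB API; the bucket ID, secret key, and record key below are made-up placeholders:

```python
import requests

BUCKET_ID = "my_bucket"        # hypothetical bucket ID
SECRET_KEY = "my_secret_key"   # hypothetical secret key

# KvDB batches writes as a single "transaction" POST,
# which mirrors what KvDbClient.batch_write does.
response = requests.post(
    f"https://kvdb.io/{BUCKET_ID}",
    headers={"Accept": "application/json", "Authorization": f"Bearer {SECRET_KEY}"},
    json={"txn": [{"set": "users__ab__1708992000000", "value": {"id": 1, "name": "jane"}}]},
)
response.raise_for_status()
```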
-
-## Local development
-
-### Prerequisites
-**To iterate on this connector, make sure to complete this prerequisites section.**
-
-#### Minimum Python version required `= 3.7.0`
-
-#### Build & Activate Virtual Environment and install dependencies
-From this connector directory, create a virtual environment:
-```
-python -m venv .venv
-```
-
-This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your
-development environment of choice. To activate it from the terminal, run:
-```
-source .venv/bin/activate
-pip install -r requirements.txt
-```
-If you are in an IDE, follow your IDE's instructions to activate the virtualenv.
-
-Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is
-used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`.
-If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything
-should work as you expect.
-
-#### Building via Gradle
-From the Airbyte repository root, run:
-```
-./gradlew :airbyte-integrations:connectors:destination-kvdb:build
-```
-
-#### Create credentials
-**If you are a community contributor**, generate the necessary credentials from [Kvdb](https://kvdb.io/docs/api/), and then create a file `secrets/config.json` conforming to the `destination_kvdb/spec.json` file.
-Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information.
-See `integration_tests/sample_config.json` for a sample config file.
-
-**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `destination kvdb test creds`
-and place them into `secrets/config.json`.
-
-### Locally running the connector
-```
-python main.py spec
-python main.py check --config secrets/config.json
-# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages
-cat messages.jsonl | python main.py write --config secrets/config.json --catalog integration_tests/configured_catalog.json
-```
-
-### Locally running the connector docker image
-
-
-
-#### Build
-**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):**
-```bash
-airbyte-ci connectors --name=destination-kvdb build
-```
-
-An image will be built with the tag `airbyte/destination-kvdb:dev`.
-
-**Via `docker build`:**
-```bash
-docker build -t airbyte/destination-kvdb:dev .
-```
-#### Run
-Then run any of the connector commands as follows:
-```
-docker run --rm airbyte/destination-kvdb:dev spec
-docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-kvdb:dev check --config /secrets/config.json
-# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages
-cat messages.jsonl | docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-kvdb:dev write --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
-```
-## Testing
-Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named.
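For orientation, pytest's default discovery only collects files named `test_*.py` (or `*_test.py`) containing `test_`-prefixed functions or `Test`-prefixed classes. A minimal sketch of a conforming unit test (the file name is hypothetical; the `flush_interval` default comes from `destination_kvdb/writer.py`):

```python
# unit_tests/test_writer.py -- the test_*.py name is what discovery matches
from destination_kvdb.writer import KvDbWriter


def test_default_flush_interval():
    # collected because the function name starts with "test_"
    assert KvDbWriter.flush_interval == 1000
```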
-First install test dependencies into your virtual environment:
-```
-pip install .[tests]
-```
-### Unit Tests
-To run unit tests locally, from the connector directory run:
-```
-python -m pytest unit_tests
-```
-
-### Integration Tests
-There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all destination connectors) and custom integration tests (which are specific to this connector).
-#### Custom Integration tests
-Place custom tests inside the `integration_tests/` folder, then, from the connector root, run
-```
-python -m pytest integration_tests
-```
-#### Acceptance Tests
-You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md):
-```bash
-airbyte-ci connectors --name=destination-kvdb test
-```
-
-
-## Dependency Management
-All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
-We split dependencies into two groups:
-* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list.
-* dependencies required for testing go in the `TEST_REQUIREMENTS` list.
-
-### Publishing a new version of the connector
-You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what?
-1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-kvdb test`
-2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors).
-3. Make sure the `metadata.yaml` content is up to date.
-4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/destinations/kvdb.md`).
-5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention).
-6. Pat yourself on the back for being an awesome contributor.
-7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
-
diff --git a/airbyte-integrations/connectors/destination-kvdb/destination_kvdb/__init__.py b/airbyte-integrations/connectors/destination-kvdb/destination_kvdb/__init__.py
deleted file mode 100644
index 5f3b041035bf..000000000000
--- a/airbyte-integrations/connectors/destination-kvdb/destination_kvdb/__init__.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# MIT License
-#
-# Copyright (c) 2020 Airbyte
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to deal
-# in the Software without restriction, including without limitation the rights
-# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in all
-# copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - - -from .destination import DestinationKvdb - -__all__ = ["DestinationKvdb"] diff --git a/airbyte-integrations/connectors/destination-kvdb/destination_kvdb/client.py b/airbyte-integrations/connectors/destination-kvdb/destination_kvdb/client.py deleted file mode 100644 index 74d9f41176f5..000000000000 --- a/airbyte-integrations/connectors/destination-kvdb/destination_kvdb/client.py +++ /dev/null @@ -1,78 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from typing import Any, Iterable, List, Mapping, Tuple, Union - -import requests - - -class KvDbClient: - base_url = "https://kvdb.io" - PAGE_SIZE = 1000 - - def __init__(self, bucket_id: str, secret_key: str = None): - self.secret_key = secret_key - self.bucket_id = bucket_id - - def write(self, key: str, value: Mapping[str, Any]): - return self.batch_write([(key, value)]) - - def batch_write(self, keys_and_values: List[Tuple[str, Mapping[str, Any]]]): - """ - https://kvdb.io/docs/api/#execute-transaction - """ - request_body = {"txn": [{"set": key, "value": value} for key, value in keys_and_values]} - return self._request("POST", json=request_body) - - def list_keys(self, list_values: bool = False, prefix: str = None) -> Iterable[Union[str, List]]: - """ - https://kvdb.io/docs/api/#list-keys - """ - # TODO handle rate limiting - pagination_complete = False - offset = 0 - - while not pagination_complete: - response = self._request( - "GET", - params={ - "limit": self.PAGE_SIZE, - "skip": offset, - "format": "json", - "prefix": prefix or "", - "values": "true" if list_values else "false", - }, - endpoint="/", # the "list" endpoint doesn't work without adding a trailing slash to the URL - ) - - response_json = response.json() - yield from response_json - - pagination_complete = len(response_json) < self.PAGE_SIZE - offset += self.PAGE_SIZE - - def delete(self, key: Union[str, List[str]]): - """ - https://kvdb.io/docs/api/#execute-transaction - """ - key_list = key if isinstance(key, List) else [key] - request_body = {"txn": [{"delete": k} for k in key_list]} - return self._request("POST", json=request_body) - - def _get_base_url(self) -> str: - return f"{self.base_url}/{self.bucket_id}" - - def _get_auth_headers(self) -> Mapping[str, Any]: - return {"Authorization": f"Bearer {self.secret_key}"} if self.secret_key else {} - - def _request( - self, http_method: str, endpoint: str = None, params: Mapping[str, Any] = None, json: Mapping[str, Any] = None - ) -> requests.Response: - url = self._get_base_url() + (endpoint or "") - headers = {"Accept": "application/json", **self._get_auth_headers()} - - response = requests.request(method=http_method, params=params, url=url, headers=headers, json=json) - - response.raise_for_status() - return response diff --git a/airbyte-integrations/connectors/destination-kvdb/destination_kvdb/destination.py b/airbyte-integrations/connectors/destination-kvdb/destination_kvdb/destination.py deleted file mode 100644 index 33ab8565fae4..000000000000 --- a/airbyte-integrations/connectors/destination-kvdb/destination_kvdb/destination.py +++ /dev/null @@ -1,72 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - - -import time -import traceback -import uuid -from typing import Any, Iterable, Mapping - -from airbyte_cdk import AirbyteLogger -from airbyte_cdk.destinations import Destination -from airbyte_cdk.models import AirbyteConnectionStatus, AirbyteMessage, ConfiguredAirbyteCatalog, DestinationSyncMode, Status, Type -from destination_kvdb.client import KvDbClient -from destination_kvdb.writer import KvDbWriter - - -class DestinationKvdb(Destination): - def write( - self, config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog, input_messages: Iterable[AirbyteMessage] - ) -> Iterable[AirbyteMessage]: - - """ - Reads the input stream of messages, config, and catalog to write data to the destination. - - This method returns an iterable (typically a generator of AirbyteMessages via yield) containing state messages received - in the input message stream. Outputting a state message means that every AirbyteRecordMessage which came before it has been - successfully persisted to the destination. This is used to ensure fault tolerance in the case that a sync fails before fully completing, - then the source is given the last state message output from this method as the starting point of the next sync. - """ - writer = KvDbWriter(KvDbClient(**config)) - - for configured_stream in configured_catalog.streams: - if configured_stream.destination_sync_mode == DestinationSyncMode.overwrite: - writer.delete_stream_entries(configured_stream.stream.name) - - for message in input_messages: - if message.type == Type.STATE: - # Emitting a state message indicates that all records which came before it have been written to the destination. So we flush - # the queue to ensure writes happen, then output the state message to indicate it's safe to checkpoint state - writer.flush() - yield message - elif message.type == Type.RECORD: - record = message.record - writer.queue_write_operation( - record.stream, record.data, time.time_ns() / 1_000_000 - ) # convert from nanoseconds to milliseconds - else: - # ignore other message types for now - continue - - # Make sure to flush any records still in the queue - writer.flush() - - def check(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> AirbyteConnectionStatus: - """ - Tests if the input configuration can be used to successfully connect to the destination with the needed permissions - e.g: if a provided API token or password can be used to connect and write to the destination. - """ - try: - # Verify write access by attempting to write and then delete to a random key - client = KvDbClient(**config) - random_key = str(uuid.uuid4()) - client.write(random_key, {"value": "_airbyte_connection_check"}) - client.delete(random_key) - except Exception as e: - traceback.print_exc() - return AirbyteConnectionStatus( - status=Status.FAILED, message=f"An exception occurred: {e}. 
\nStacktrace: \n{traceback.format_exc()}"
-            )
-        else:
-            return AirbyteConnectionStatus(status=Status.SUCCEEDED)
diff --git a/airbyte-integrations/connectors/destination-kvdb/destination_kvdb/spec.json b/airbyte-integrations/connectors/destination-kvdb/destination_kvdb/spec.json
deleted file mode 100644
index 0ced52c17a22..000000000000
--- a/airbyte-integrations/connectors/destination-kvdb/destination_kvdb/spec.json
+++ /dev/null
@@ -1,26 +0,0 @@
-{
-  "documentationUrl": "https://kvdb.io/docs/api/",
-  "supported_destination_sync_modes": ["overwrite", "append"],
-  "supportsIncremental": true,
-  "connectionSpecification": {
-    "$schema": "http://json-schema.org/draft-07/schema#",
-    "title": "Destination KVdb",
-    "type": "object",
-    "required": ["bucket_id", "secret_key"],
-    "additionalProperties": false,
-    "properties": {
-      "bucket_id": {
-        "title": "Bucket ID",
-        "type": "string",
-        "description": "The ID of your KVdb bucket.",
-        "order": 1
-      },
-      "secret_key": {
-        "title": "Secret Key",
-        "type": "string",
-        "description": "Your bucket Secret Key.",
-        "order": 2
-      }
-    }
-  }
-}
diff --git a/airbyte-integrations/connectors/destination-kvdb/destination_kvdb/writer.py b/airbyte-integrations/connectors/destination-kvdb/destination_kvdb/writer.py
deleted file mode 100644
index 33acbf8a22fb..000000000000
--- a/airbyte-integrations/connectors/destination-kvdb/destination_kvdb/writer.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
-
-from collections.abc import Mapping
-
-from destination_kvdb.client import KvDbClient
-
-
-class KvDbWriter:
-    """
-    Data is written to KvDB in the following format:
-        key: stream_name__ab__<timestamp>
-        value: a JSON object representing the record's data
-
-    This is because unless a data source explicitly designates a primary key, we don't know what to key the record on.
-    Since KvDB allows reading records with certain prefixes, we treat it more like a message queue, expecting the reader to
-    read messages with a particular prefix e.g: name__ab__123, where 123 is the timestamp they last read data from.
-    """
-
-    write_buffer = []
-    flush_interval = 1000
-
-    def __init__(self, client: KvDbClient):
-        self.client = client
-
-    def delete_stream_entries(self, stream_name: str):
-        """Deletes all the records belonging to the input stream"""
-        keys_to_delete = []
-        for key in self.client.list_keys(prefix=f"{stream_name}__ab__"):
-            keys_to_delete.append(key)
-            if len(keys_to_delete) == self.flush_interval:
-                self.client.delete(keys_to_delete)
-                keys_to_delete.clear()
-        if len(keys_to_delete) > 0:
-            self.client.delete(keys_to_delete)
-
-    def queue_write_operation(self, stream_name: str, record: Mapping, written_at: int):
-        kv_pair = (f"{stream_name}__ab__{written_at}", record)
-        self.write_buffer.append(kv_pair)
-        if len(self.write_buffer) == self.flush_interval:
-            self.flush()
-
-    def flush(self):
-        self.client.batch_write(self.write_buffer)
-        self.write_buffer.clear()
diff --git a/airbyte-integrations/connectors/destination-kvdb/main.py b/airbyte-integrations/connectors/destination-kvdb/main.py
deleted file mode 100644
index 178789589e5a..000000000000
--- a/airbyte-integrations/connectors/destination-kvdb/main.py
+++ /dev/null
@@ -1,11 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-# - - -import sys - -from destination_kvdb import DestinationKvdb - -if __name__ == "__main__": - DestinationKvdb().run(sys.argv[1:]) diff --git a/airbyte-integrations/connectors/destination-kvdb/poetry.lock b/airbyte-integrations/connectors/destination-kvdb/poetry.lock deleted file mode 100644 index 7835868ea21e..000000000000 --- a/airbyte-integrations/connectors/destination-kvdb/poetry.lock +++ /dev/null @@ -1,1108 +0,0 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. - -[[package]] -name = "airbyte-cdk" -version = "0.62.2" -description = "A framework for writing Airbyte Connectors." -optional = false -python-versions = ">=3.8" -files = [ - {file = "airbyte-cdk-0.62.2.tar.gz", hash = "sha256:bf45cb847e2d2ab7063d0e1989f6c9cf022771c6ae4fb1e854438c3b8377da85"}, - {file = "airbyte_cdk-0.62.2-py3-none-any.whl", hash = "sha256:6d04d2e8a9a32aa707ddf27a1916ac76969fb50ac39d60582ad2daa08ef832ef"}, -] - -[package.dependencies] -airbyte-protocol-models = "0.5.1" -backoff = "*" -cachetools = "*" -Deprecated = ">=1.2,<2.0" -dpath = ">=2.0.1,<2.1.0" -genson = "1.2.2" -isodate = ">=0.6.1,<0.7.0" -Jinja2 = ">=3.1.2,<3.2.0" -jsonref = ">=0.2,<1.0" -jsonschema = ">=3.2.0,<3.3.0" -pendulum = "<3.0.0" -pydantic = ">=1.10.8,<2.0.0" -pyrate-limiter = ">=3.1.0,<3.2.0" -python-dateutil = "*" -PyYAML = ">=6.0.1" -requests = "*" -requests-cache = "*" -wcmatch = "8.4" - -[package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] -vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] - -[[package]] -name = "airbyte-protocol-models" -version = "0.5.1" -description = "Declares the Airbyte Protocol." -optional = false -python-versions = ">=3.8" -files = [ - {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, - {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, -] - -[package.dependencies] -pydantic = ">=1.9.2,<2.0.0" - -[[package]] -name = "atomicwrites" -version = "1.4.1" -description = "Atomic file writes." 
-optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, -] - -[[package]] -name = "attrs" -version = "23.2.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.7" -files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, -] - -[package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] -tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] - -[[package]] -name = "backoff" -version = "2.2.1" -description = "Function decoration for backoff and retry" -optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, - {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, -] - -[[package]] -name = "bracex" -version = "2.4" -description = "Bash style brace expander." -optional = false -python-versions = ">=3.8" -files = [ - {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, - {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, -] - -[[package]] -name = "cachetools" -version = "5.3.2" -description = "Extensible memoizing collections and decorators" -optional = false -python-versions = ">=3.7" -files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, -] - -[[package]] -name = "cattrs" -version = "23.2.3" -description = "Composable complex class support for attrs and dataclasses." -optional = false -python-versions = ">=3.8" -files = [ - {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, - {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, -] - -[package.dependencies] -attrs = ">=23.1.0" -exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} - -[package.extras] -bson = ["pymongo (>=4.4.0)"] -cbor2 = ["cbor2 (>=5.4.6)"] -msgpack = ["msgpack (>=1.0.5)"] -orjson = ["orjson (>=3.9.2)"] -pyyaml = ["pyyaml (>=6.0)"] -tomlkit = ["tomlkit (>=0.11.8)"] -ujson = ["ujson (>=5.7.0)"] - -[[package]] -name = "certifi" -version = "2024.2.2" -description = "Python package for providing Mozilla's CA Bundle." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.3.2" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash 
= "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = 
"charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, -] - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "deprecated" -version = "1.2.14" -description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
-optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, - {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, -] - -[package.dependencies] -wrapt = ">=1.10,<2" - -[package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] - -[[package]] -name = "dpath" -version = "2.0.8" -description = "Filesystem-like pathing and searching for dictionaries" -optional = false -python-versions = ">=3.7" -files = [ - {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, - {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, -] - -[[package]] -name = "exceptiongroup" -version = "1.2.0" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, -] - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "freezegun" -version = "1.4.0" -description = "Let your Python tests travel through time" -optional = false -python-versions = ">=3.7" -files = [ - {file = "freezegun-1.4.0-py3-none-any.whl", hash = "sha256:55e0fc3c84ebf0a96a5aa23ff8b53d70246479e9a68863f1fcac5a3e52f19dd6"}, - {file = "freezegun-1.4.0.tar.gz", hash = "sha256:10939b0ba0ff5adaecf3b06a5c2f73071d9678e507c5eaedb23c761d56ac774b"}, -] - -[package.dependencies] -python-dateutil = ">=2.7" - -[[package]] -name = "genson" -version = "1.2.2" -description = "GenSON is a powerful, user-friendly JSON Schema generator." -optional = false -python-versions = "*" -files = [ - {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, -] - -[[package]] -name = "graphql-core" -version = "3.2.3" -description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL." 
-optional = false -python-versions = ">=3.6,<4" -files = [ - {file = "graphql-core-3.2.3.tar.gz", hash = "sha256:06d2aad0ac723e35b1cb47885d3e5c45e956a53bc1b209a9fc5369007fe46676"}, - {file = "graphql_core-3.2.3-py3-none-any.whl", hash = "sha256:5766780452bd5ec8ba133f8bf287dc92713e3868ddd83aee4faab9fc3e303dc3"}, -] - -[[package]] -name = "idna" -version = "3.6" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, -] - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "isodate" -version = "0.6.1" -description = "An ISO 8601 date/time/duration parser and formatter" -optional = false -python-versions = "*" -files = [ - {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, - {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, -] - -[package.dependencies] -six = "*" - -[[package]] -name = "jinja2" -version = "3.1.3" -description = "A very fast and expressive template engine." -optional = false -python-versions = ">=3.7" -files = [ - {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, - {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "jsonref" -version = "0.3.0" -description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." -optional = false -python-versions = ">=3.3,<4.0" -files = [ - {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, - {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, -] - -[[package]] -name = "jsonschema" -version = "3.2.0" -description = "An implementation of JSON Schema validation for Python" -optional = false -python-versions = "*" -files = [ - {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, - {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, -] - -[package.dependencies] -attrs = ">=17.4.0" -pyrsistent = ">=0.14.0" -setuptools = "*" -six = ">=1.11.0" - -[package.extras] -format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] -format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] - -[[package]] -name = "markupsafe" -version = "2.1.5" -description = "Safely add untrusted strings to HTML/XML markup." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = 
"MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, -] - -[[package]] -name = "packaging" -version = "23.2" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.7" -files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, -] - -[[package]] -name = "pendulum" -version = "2.1.2" -description = "Python datetimes made easy" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, - {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, - {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, - {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = 
"sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, - {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, - {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, - {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, - {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, - {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, - {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, - {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, - {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, - {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, - {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, - {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, - {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, - {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, - {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, - {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, - {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, - {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, -] - -[package.dependencies] -python-dateutil = ">=2.6,<3.0" -pytzdata = ">=2020.1" - -[[package]] -name = "platformdirs" -version = "4.2.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, - {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, -] - -[package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] - -[[package]] -name = "pluggy" -version = "1.4.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] - -[[package]] -name = "pydantic" -version = "1.10.14" -description = "Data validation and settings management using python type hints" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, - {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, - {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, - {file = 
"pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, - {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, - {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, - {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, - {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, - {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = 
"sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, - {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, - {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, -] - -[package.dependencies] -typing-extensions = ">=4.2.0" - -[package.extras] -dotenv = ["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] - -[[package]] -name = "pyrate-limiter" -version = "3.1.1" -description = "Python Rate-Limiter using Leaky-Bucket Algorithm" -optional = false -python-versions = ">=3.8,<4.0" -files = [ - {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, - {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, -] - -[package.extras] -all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] -docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] - -[[package]] -name = "pyrsistent" -version = "0.20.0" -description = "Persistent/Functional/Immutable data structures" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, - {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, - {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, - {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, - {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, - {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, - {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, - {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, - {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, - {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, - {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, - {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, - {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, - {file = 
"pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, - {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, - {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, - {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, - {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, - {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, - {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, - {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, - {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, - {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, - {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, - {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, - {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, - {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, - {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, - {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, - {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, - {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, - {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, -] - -[[package]] -name = "pytest" -version = "6.2.5" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, - {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, -] - -[package.dependencies] -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} -attrs = ">=19.2.0" -colorama 
= {version = "*", markers = "sys_platform == \"win32\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -py = ">=1.8.2" -toml = "*" - -[package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] - -[[package]] -name = "pytest-mock" -version = "3.12.0" -description = "Thin-wrapper around the mock package for easier use with pytest" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, -] - -[package.dependencies] -pytest = ">=5.0" - -[package.extras] -dev = ["pre-commit", "pytest-asyncio", "tox"] - -[[package]] -name = "python-dateutil" -version = "2.8.2" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "pytzdata" -version = "2020.1" -description = "The Olson timezone database for Python." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, - {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, -] - -[[package]] -name = "pyyaml" -version = "6.0.1" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = 
"PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, -] - -[[package]] -name = "requests" -version = "2.31.0" -description = "Python HTTP for Humans." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "requests-cache" -version = "1.2.0" -description = "A persistent cache for python requests" -optional = false -python-versions = ">=3.8" -files = [ - {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, - {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, -] - -[package.dependencies] -attrs = ">=21.2" -cattrs = ">=22.2" -platformdirs = ">=2.5" -requests = ">=2.22" -url-normalize = ">=1.4" -urllib3 = ">=1.25.5" - -[package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] -bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] -dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] -json = ["ujson (>=5.4)"] -mongodb = ["pymongo (>=3)"] -redis = ["redis (>=3)"] -security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=6.0.1)"] - -[[package]] -name = "requests-mock" -version = "1.11.0" -description = "Mock out responses from the requests package" -optional = false -python-versions = "*" -files = [ - {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, - {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, -] - -[package.dependencies] -requests = ">=2.3,<3" -six = "*" - -[package.extras] -fixture = ["fixtures"] -test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] - -[[package]] -name = "responses" -version = "0.23.3" -description = "A utility library for mocking out the `requests` Python library." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "responses-0.23.3-py3-none-any.whl", hash = "sha256:e6fbcf5d82172fecc0aa1860fd91e58cbfd96cee5e96da5b63fa6eb3caa10dd3"}, - {file = "responses-0.23.3.tar.gz", hash = "sha256:205029e1cb334c21cb4ec64fc7599be48b859a0fd381a42443cdd600bfe8b16a"}, -] - -[package.dependencies] -pyyaml = "*" -requests = ">=2.30.0,<3.0" -types-PyYAML = "*" -urllib3 = ">=1.25.10,<3.0" - -[package.extras] -tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-requests"] - -[[package]] -name = "setuptools" -version = "69.1.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "sgqlc" -version = "16.3" -description = "Simple GraphQL Client" -optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "sgqlc-16.3-py3-none-any.whl", hash = "sha256:89d468386a4ba4b5ade991623228b6fb0a25bea1f25643ccac130fb3ef565b72"}, - {file = "sgqlc-16.3.tar.gz", hash = "sha256:be08857775aa3e65ef7b2c1f0cdcc65dd5794907b162b393c189187fee664558"}, -] - -[package.dependencies] -graphql-core = ">=3.1.7,<4.0.0" - -[package.extras] -requests = ["requests"] -websocket = ["websocket-client"] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] - -[[package]] -name = "types-pyyaml" -version = "6.0.12.12" -description = 
"Typing stubs for PyYAML" -optional = false -python-versions = "*" -files = [ - {file = "types-PyYAML-6.0.12.12.tar.gz", hash = "sha256:334373d392fde0fdf95af5c3f1661885fa10c52167b14593eb856289e1855062"}, - {file = "types_PyYAML-6.0.12.12-py3-none-any.whl", hash = "sha256:c05bc6c158facb0676674b7f11fe3960db4f389718e19e62bd2b84d6205cfd24"}, -] - -[[package]] -name = "typing-extensions" -version = "4.9.0" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, -] - -[[package]] -name = "url-normalize" -version = "1.4.3" -description = "URL normalization for Python" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -files = [ - {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, - {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, -] - -[package.dependencies] -six = "*" - -[[package]] -name = "urllib3" -version = "2.2.1" -description = "HTTP library with thread-safe connection pooling, file post, and more." -optional = false -python-versions = ">=3.8" -files = [ - {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, - {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -h2 = ["h2 (>=4,<5)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "wcmatch" -version = "8.4" -description = "Wildcard/glob file name matcher." -optional = false -python-versions = ">=3.7" -files = [ - {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, - {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, -] - -[package.dependencies] -bracex = ">=2.1.1" - -[[package]] -name = "wrapt" -version = "1.16.0" -description = "Module for decorators, wrappers and monkey patching." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, - {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, - {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, - {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, - {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, - {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, - {file = 
"wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, - {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, - {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, - {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, - {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, - {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, - {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, - {file = 
"wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, - {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, - {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, - {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, - {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", 
hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, - {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, - {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, -] - -[metadata] -lock-version = "2.0" -python-versions = "^3.9,<3.12" -content-hash = "40cc246c45e6c2d626e016673f3aa60794f3464d82c8ccd0b62a6b66df2b30da" diff --git a/airbyte-integrations/connectors/destination-kvdb/pyproject.toml b/airbyte-integrations/connectors/destination-kvdb/pyproject.toml deleted file mode 100644 index 2f61ed29c449..000000000000 --- a/airbyte-integrations/connectors/destination-kvdb/pyproject.toml +++ /dev/null @@ -1,31 +0,0 @@ -[build-system] -requires = [ "poetry-core>=1.0.0",] -build-backend = "poetry.core.masonry.api" - -[tool.poetry] -version = "0.1.3" -name = "destination-kvdb" -description = "Destination implementation for kvdb." -authors = [ "Airbyte ",] -license = "MIT" -readme = "README.md" -documentation = "https://docs.airbyte.com/integrations/destinations/kvdb" -homepage = "https://airbyte.com" -repository = "https://github.com/airbytehq/airbyte" -[[tool.poetry.packages]] -include = "destination_kvdb" - -[tool.poetry.dependencies] -python = "^3.9,<3.12" -airbyte-cdk = "^0.62.1" -sgqlc = "==16.3" - -[tool.poetry.scripts] -destination-kvdb = "destination_kvdb.run:run" - -[tool.poetry.group.dev.dependencies] -requests-mock = "^1.9.3" -freezegun = "^1.2" -pytest-mock = "^3.6.1" -pytest = "^6.2" -responses = "^0.23.1" diff --git a/airbyte-integrations/connectors/destination-kvdb/requirements.txt b/airbyte-integrations/connectors/destination-kvdb/requirements.txt deleted file mode 100644 index d6e1198b1ab1..000000000000 --- a/airbyte-integrations/connectors/destination-kvdb/requirements.txt +++ /dev/null @@ -1 +0,0 @@ --e . diff --git a/airbyte-integrations/connectors/destination-kvdb/unit_tests/unit_test.py b/airbyte-integrations/connectors/destination-kvdb/unit_tests/unit_test.py deleted file mode 100644 index 219ae0142c72..000000000000 --- a/airbyte-integrations/connectors/destination-kvdb/unit_tests/unit_test.py +++ /dev/null @@ -1,7 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -def test_example_method(): - assert True diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/README.md b/airbyte-integrations/connectors/destination-mariadb-columnstore/README.md deleted file mode 100644 index 1c1793ec10ee..000000000000 --- a/airbyte-integrations/connectors/destination-mariadb-columnstore/README.md +++ /dev/null @@ -1,72 +0,0 @@ -# Destination Mariadb ColumnStore - -This is the repository for the Mariadb ColumnStore destination connector in Java. -For information about how to use this connector within Airbyte, see [the User Documentation](https://docs.airbyte.io/integrations/destinations/mariadb-columnstore). - -## Local development - -#### Building via Gradle -From the Airbyte repository root, run: -``` -./gradlew :airbyte-integrations:connectors:destination-mariadb-columnstore:build -``` - -#### Create credentials -**If you are a community contributor**, generate the necessary credentials and place them in `secrets/config.json` conforming to the spec file in `src/main/resources/spec.json`. -Note that the `secrets` directory is git-ignored by default, so there is no danger of accidentally checking in sensitive information. 
- -**If you are an Airbyte core member**, follow the [instructions](https://docs.airbyte.io/connector-development#using-credentials-in-ci) to set up the credentials. - -### Locally running the connector docker image - -#### Build -Build the connector image via Gradle: - -``` -./gradlew :airbyte-integrations:connectors:destination-mariadb-columnstore:buildConnectorImage -``` -Once built, the docker image name and tag on your host will be `airbyte/destination-mariadb-columnstore:dev`. - -#### Run -Then run any of the connector commands as follows: -``` -docker run --rm airbyte/destination-mariadb-columnstore:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-mariadb-columnstore:dev check --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-mariadb-columnstore:dev discover --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-mariadb-columnstore:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` - -## Testing -We use `JUnit` for Java tests. - -### Unit and Integration Tests -Place unit tests under `src/test/io/airbyte/integrations/destinations/mariadb_columnstore`. - -#### Acceptance Tests -Airbyte has a standard test suite that all destination connectors must pass. Implement the `TODO`s in -`src/test-integration/java/io/airbyte/integrations/destinations/MariadbColumnstoreDestinationAcceptanceTest.java`. - -### Using Gradle to run tests -All commands should be run from the Airbyte project root. -To run unit tests: -``` -./gradlew :airbyte-integrations:connectors:destination-mariadb-columnstore:unitTest -``` -To run acceptance and custom integration tests: -``` -./gradlew :airbyte-integrations:connectors:destination-mariadb-columnstore:integrationTest -``` - -## Dependency Management - -### Publishing a new version of the connector -You've checked out the repo, implemented a million-dollar feature, and you're ready to share your changes with the world. Now what? -1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-mariadb-columnstore test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). -3. Make sure the `metadata.yaml` content is up to date. -4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/destinations/mariadb-columnstore.md`). -5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). -6. Pat yourself on the back for being an awesome contributor. -7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. - diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/bootstrap.md b/airbyte-integrations/connectors/destination-mariadb-columnstore/bootstrap.md deleted file mode 100644 index 12fe901e8682..000000000000 --- a/airbyte-integrations/connectors/destination-mariadb-columnstore/bootstrap.md +++ /dev/null @@ -1,24 +0,0 @@ -# MariaDB ColumnStore - -## Overview - -MariaDB ColumnStore is a columnar storage engine that utilizes a massively parallel distributed data architecture.
-From MariaDB 10.5.4, it is available as a storage engine for MariaDB Server. - -## Endpoints - - This destination connector uses the official MariaDB JDBC driver, [MariaDB Connector/J](https://mariadb.com/docs/clients/mariadb-connectors/connector-j/). - - ## Quick Notes - - - SSH Tunnel is supported. - - TLS connections are not supported yet. - - When creating a ColumnStore table, we have to specify the storage engine: `CREATE TABLE ... (...) ENGINE=ColumnStore;` - - Normalization is not supported yet, for the following reasons: - - The [dbt-mysql](https://github.com/dbeatty10/dbt-mysql#dbt-mysql) adapter doesn't support MariaDB officially. - - When using [dbt-mysql](https://github.com/dbeatty10/dbt-mysql#dbt-mysql), we cannot specify the storage engine. For that reason, tables are created with the system's default storage engine (it may be InnoDB). - -## Reference - -- MariaDB ColumnStore documentation: [https://mariadb.com/kb/en/mariadb-columnstore/](https://mariadb.com/kb/en/mariadb-columnstore/) -- MariaDB JDBC driver (Connector/J) reference: [https://mariadb.com/docs/clients/mariadb-connectors/connector-j/](https://mariadb.com/docs/clients/mariadb-connectors/connector-j/) diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/build.gradle b/airbyte-integrations/connectors/destination-mariadb-columnstore/build.gradle deleted file mode 100644 index 8d545e9b1813..000000000000 --- a/airbyte-integrations/connectors/destination-mariadb-columnstore/build.gradle +++ /dev/null @@ -1,32 +0,0 @@ -plugins { - id 'application' - id 'airbyte-java-connector' -} - -airbyteJavaConnector { - cdkVersionRequired = '0.2.0' - features = ['db-destinations'] - useLocalCdk = false -} - -//remove once the CDK version is upgraded to 0.4.x or later -java { - compileJava { - options.compilerArgs.remove("-Werror") - } -} - -airbyteJavaConnector.addCdkDependencies() - -application { - mainClass = 'io.airbyte.integrations.destination.mariadb_columnstore.MariadbColumnstoreDestination' - applicationDefaultJvmArgs = ['-XX:+ExitOnOutOfMemoryError', '-XX:MaxRAMPercentage=75.0'] -} - -dependencies { - - implementation 'org.mariadb.jdbc:mariadb-java-client:2.7.4' - implementation 'com.vdurmont:semver4j:3.1.0' - - integrationTestJavaImplementation libs.testcontainers.mariadb -} diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestination.java b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestination.java deleted file mode 100644 index af3dee3e9bd2..000000000000 --- a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestination.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */ - -package io.airbyte.integrations.destination.mariadb_columnstore; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.cdk.db.factory.DataSourceFactory; -import io.airbyte.cdk.db.factory.DatabaseDriver; -import io.airbyte.cdk.db.jdbc.JdbcDatabase; -import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.cdk.integrations.base.Destination; -import io.airbyte.cdk.integrations.base.IntegrationRunner; -import io.airbyte.cdk.integrations.base.ssh.SshWrappedDestination; -import io.airbyte.cdk.integrations.destination.jdbc.AbstractJdbcDestination; -import io.airbyte.commons.json.Jsons; -import io.airbyte.integrations.destination.mariadb_columnstore.MariadbColumnstoreSqlOperations.VersionCompatibility; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus.Status; -import java.util.Map; -import javax.sql.DataSource; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class MariadbColumnstoreDestination extends AbstractJdbcDestination implements Destination { - - private static final Logger LOGGER = LoggerFactory.getLogger(MariadbColumnstoreDestination.class); - public static final String DRIVER_CLASS = DatabaseDriver.MARIADB.getDriverClassName(); - static final Map<String, String> DEFAULT_JDBC_PARAMETERS = ImmutableMap.of( - "allowLoadLocalInfile", "true"); - - public static Destination sshWrappedDestination() { - return new SshWrappedDestination(new MariadbColumnstoreDestination(), JdbcUtils.HOST_LIST_KEY, JdbcUtils.PORT_LIST_KEY); - } - - public MariadbColumnstoreDestination() { - super(DRIVER_CLASS, new MariadbColumnstoreNameTransformer(), new MariadbColumnstoreSqlOperations()); - } - - @Override - public AirbyteConnectionStatus check(final JsonNode config) { - final DataSource dataSource = getDataSource(config); - try { - final JdbcDatabase database = getDatabase(dataSource); - final MariadbColumnstoreSqlOperations mariadbColumnstoreSqlOperations = (MariadbColumnstoreSqlOperations) getSqlOperations(); - final String outputSchema = getNamingResolver().getIdentifier(config.get(JdbcUtils.DATABASE_KEY).asText()); - - final VersionCompatibility compatibility = mariadbColumnstoreSqlOperations.isCompatibleVersion(database); - if (!compatibility.isCompatible()) { - throw new RuntimeException(String - .format("Your MariaDB Columnstore version %s is not compatible with Airbyte", - compatibility.getVersion())); - } - - mariadbColumnstoreSqlOperations.verifyLocalFileEnabled(database); - - attemptSQLCreateAndDropTableOperations( - outputSchema, - database, - getNamingResolver(), - mariadbColumnstoreSqlOperations); - } catch (final Exception e) { - LOGGER.error("Exception while checking connection: ", e); - return new AirbyteConnectionStatus() - .withStatus(Status.FAILED) - .withMessage("Could not connect with provided configuration. 
\n" + e.getMessage()); - } finally { - try { - DataSourceFactory.close(dataSource); - } catch (final Exception e) { - LOGGER.warn("Unable to close data source.", e); - } - } - - return new AirbyteConnectionStatus().withStatus(Status.SUCCEEDED); - } - - @Override - protected Map getDefaultConnectionProperties(final JsonNode config) { - return DEFAULT_JDBC_PARAMETERS; - } - - @Override - public JsonNode toJdbcConfig(final JsonNode config) { - final String jdbcUrl = String.format(DatabaseDriver.MARIADB.getUrlFormatString(), - config.get(JdbcUtils.HOST_KEY).asText(), - config.get(JdbcUtils.PORT_KEY).asInt(), - config.get(JdbcUtils.DATABASE_KEY).asText()); - - final ImmutableMap.Builder configBuilder = ImmutableMap.builder() - .put(JdbcUtils.USERNAME_KEY, config.get(JdbcUtils.USERNAME_KEY).asText()) - .put(JdbcUtils.JDBC_URL_KEY, jdbcUrl); - - if (config.has(JdbcUtils.PASSWORD_KEY)) { - configBuilder.put(JdbcUtils.PASSWORD_KEY, config.get(JdbcUtils.PASSWORD_KEY).asText()); - } - - return Jsons.jsonNode(configBuilder.build()); - } - - public static void main(final String[] args) throws Exception { - final Destination destination = MariadbColumnstoreDestination.sshWrappedDestination(); - LOGGER.info("starting destination: {}", MariadbColumnstoreDestination.class); - new IntegrationRunner(destination).run(args); - LOGGER.info("completed destination: {}", MariadbColumnstoreDestination.class); - } - -} diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreNameTransformer.java b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreNameTransformer.java deleted file mode 100644 index c2ac2540e171..000000000000 --- a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreNameTransformer.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.mariadb_columnstore; - -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; - -public class MariadbColumnstoreNameTransformer extends StandardNameTransformer { - - @Override - public String getIdentifier(final String name) { - return applyDefaultCase(super.getIdentifier(name)); - } - - @Override - public String applyDefaultCase(final String input) { - return input.toLowerCase(); - } - -} diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreSqlOperations.java b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreSqlOperations.java deleted file mode 100644 index f1289ce6e480..000000000000 --- a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreSqlOperations.java +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.mariadb_columnstore; - -import com.vdurmont.semver4j.Semver; -import io.airbyte.cdk.db.jdbc.JdbcDatabase; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.cdk.integrations.destination.jdbc.JdbcSqlOperations; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import java.io.File; -import java.io.IOException; -import java.nio.file.Files; -import java.sql.SQLException; -import java.sql.Statement; -import java.util.List; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -public class MariadbColumnstoreSqlOperations extends JdbcSqlOperations { - - private final String MINIMUM_VERSION = "5.5.3"; - Pattern VERSION_PATTERN = Pattern.compile("^(\\d+\\.\\d+\\.\\d+)-MariaDB"); - private boolean isLocalFileEnabled = false; - - @Override - public void insertRecordsInternal(final JdbcDatabase database, - final List<AirbyteRecordMessage> records, - final String schemaName, - final String tmpTableName) - throws SQLException { - if (records.isEmpty()) { - return; - } - - verifyLocalFileEnabled(database); - - File tmpFile = null; - Exception primaryException = null; - try { - tmpFile = Files.createTempFile(tmpTableName + "-", ".tmp").toFile(); - writeBatchToFile(tmpFile, records); - - final String query = String.format( - "LOAD DATA LOCAL INFILE %s INTO TABLE %s.%s FIELDS TERMINATED BY ',' ENCLOSED BY '\"' ESCAPED BY '\\\"' LINES TERMINATED BY '\\r\\n'", - String.format("'%s'", tmpFile.getAbsolutePath()), schemaName, tmpTableName); - - database.execute(query); - } catch (final Exception e) { - primaryException = e; - throw new RuntimeException(primaryException); - } finally { - try { - if (tmpFile != null) { - Files.delete(tmpFile.toPath()); - } - } catch (final IOException e) { - if (primaryException != null) - e.addSuppressed(primaryException); - throw new RuntimeException(e); - } - } - } - - @Override - public void executeTransaction(final JdbcDatabase database, final List<String> queries) throws Exception { - database.execute(connection -> { - try (final Statement stmt = connection.createStatement()) { - stmt.addBatch("BEGIN;"); - for (final String query : queries) { - stmt.addBatch(query); - } - stmt.addBatch("COMMIT;"); - stmt.executeBatch(); - } - }); - } - - @Override - public boolean isSchemaRequired() { - return false; - } - - @Override - public String createTableQuery(final JdbcDatabase database, final String schemaName, final String tableName) { - return String.format( - "CREATE TABLE IF NOT EXISTS %s.%s ( \n" - + "%s VARCHAR(256),\n" - + "%s LONGTEXT,\n" - + "%s TIMESTAMP\n" - + ") engine=columnstore;\n", - schemaName, tableName, JavaBaseConstants.COLUMN_NAME_AB_ID, JavaBaseConstants.COLUMN_NAME_DATA, JavaBaseConstants.COLUMN_NAME_EMITTED_AT); - } - - VersionCompatibility isCompatibleVersion(final JdbcDatabase database) throws SQLException { - final Semver version = getVersion(database); - return new VersionCompatibility(version, version.isGreaterThanOrEqualTo(MINIMUM_VERSION)); - } - - private Semver getVersion(final JdbcDatabase database) throws SQLException { - final List<String> versions = database.queryStrings( - connection -> connection.createStatement().executeQuery("SELECT version()"), - resultSet -> resultSet.getString("version()")); - - final Matcher matcher = VERSION_PATTERN.matcher(versions.get(0)); - if (matcher.find()) { - return new Semver(matcher.group(1)); - } else { - throw new RuntimeException(String.format("Unexpected version string: %s\nExpected version format is X.X.X-MariaDB", versions.get(0))); - } - 
} - - void verifyLocalFileEnabled(final JdbcDatabase database) throws SQLException { - final boolean localFileEnabled = isLocalFileEnabled || checkIfLocalFileIsEnabled(database); - if (!localFileEnabled) { - tryEnableLocalFile(database); - } - isLocalFileEnabled = true; - } - - private boolean checkIfLocalFileIsEnabled(final JdbcDatabase database) throws SQLException { - final List<String> localFiles = database.queryStrings( - connection -> connection.createStatement().executeQuery("SHOW GLOBAL VARIABLES LIKE 'local_infile'"), - resultSet -> resultSet.getString("Value")); - return localFiles.get(0).equalsIgnoreCase("on"); - } - - private void tryEnableLocalFile(final JdbcDatabase database) throws SQLException { - database.execute(connection -> { - try (final Statement statement = connection.createStatement()) { - statement.execute("SET GLOBAL local_infile=true"); - } catch (final Exception e) { - throw new RuntimeException( - "The DB user provided to airbyte was unable to switch on the local_infile attribute on the MariaDB server. As an admin user, you will need to run \"SET GLOBAL local_infile = true\" before syncing data with Airbyte.", - e); - } - }); - } - - public static class VersionCompatibility { - - private final Semver version; - private final boolean isCompatible; - - public VersionCompatibility(final Semver version, final boolean isCompatible) { - this.version = version; - this.isCompatible = isCompatible; - } - - public Semver getVersion() { - return version; - } - - public boolean isCompatible() { - return isCompatible; - } - - } - -} diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/resources/spec.json deleted file mode 100644 index 163d9f42afea..000000000000 --- a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/resources/spec.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/destinations/mariadb-columnstore", - "supportsIncremental": true, - "supportsNormalization": false, - "supportsDBT": false, - "supported_destination_sync_modes": ["overwrite", "append"], - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "MariaDB Columnstore Destination Spec", - "type": "object", - "required": ["host", "port", "username", "database"], - "additionalProperties": true, - "properties": { - "host": { - "title": "Host", - "description": "The Hostname of the database.", - "type": "string", - "order": 0 - }, - "port": { - "title": "Port", - "description": "The Port of the database.", - "type": "integer", - "minimum": 0, - "maximum": 65536, - "default": 3306, - "examples": ["3306"], - "order": 1 - }, - "database": { - "title": "Database", - "description": "Name of the database.", - "type": "string", - "order": 2 - }, - "username": { - "title": "Username", - "description": "The Username which is used to access the database.", - "type": "string", - "order": 3 - }, - "password": { - "title": "Password", - "description": "The Password associated with the username.", - "type": "string", - "airbyte_secret": true, - "order": 4 - }, - "jdbc_url_params": { - "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. 
(example: key1=value1&key2=value2&key3=value3).", - "title": "JDBC URL Params", - "type": "string", - "order": 5 - } - } - } -} diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariaDbTestDataComparator.java b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariaDbTestDataComparator.java deleted file mode 100644 index 73da2d532ab5..000000000000 --- a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariaDbTestDataComparator.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.mariadb_columnstore; - -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; -import io.airbyte.cdk.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; -import java.util.ArrayList; -import java.util.List; - -public class MariaDbTestDataComparator extends AdvancedTestDataComparator { - - private final StandardNameTransformer namingResolver = new MariadbColumnstoreNameTransformer(); - - @Override - protected List<String> resolveIdentifier(final String identifier) { - final List<String> result = new ArrayList<>(); - final String resolved = namingResolver.getIdentifier(identifier); - result.add(identifier); - result.add(resolved); - - return result; - } - -} diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationAcceptanceTest.java deleted file mode 100644 index 1e07845b0ecb..000000000000 --- a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationAcceptanceTest.java +++ /dev/null @@ -1,137 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */ - -package io.airbyte.integrations.destination.mariadb_columnstore; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.cdk.db.factory.DataSourceFactory; -import io.airbyte.cdk.db.factory.DatabaseDriver; -import io.airbyte.cdk.db.jdbc.DefaultJdbcDatabase; -import io.airbyte.cdk.db.jdbc.JdbcDatabase; -import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; -import io.airbyte.cdk.integrations.standardtest.destination.DestinationAcceptanceTest; -import io.airbyte.cdk.integrations.standardtest.destination.comparator.TestDataComparator; -import io.airbyte.commons.json.Jsons; -import java.sql.SQLException; -import java.util.HashSet; -import java.util.List; -import java.util.stream.Collectors; -import org.testcontainers.containers.MariaDBContainer; -import org.testcontainers.utility.DockerImageName; - -public class MariadbColumnstoreDestinationAcceptanceTest extends DestinationAcceptanceTest { - - private final StandardNameTransformer namingResolver = new MariadbColumnstoreNameTransformer(); - - private MariaDBContainer db; - - @Override - protected boolean implementsNamespaces() { - return true; - } - - @Override - protected String getImageName() { - return "airbyte/destination-mariadb-columnstore:dev"; - } - - @Override - protected JsonNode getConfig() { - return Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, db.getHost()) - .put(JdbcUtils.PORT_KEY, db.getFirstMappedPort()) - .put(JdbcUtils.DATABASE_KEY, db.getDatabaseName()) - .put(JdbcUtils.USERNAME_KEY, db.getUsername()) - .put(JdbcUtils.PASSWORD_KEY, db.getPassword()) - .build()); - } - - @Override - protected JsonNode getFailCheckConfig() { - final JsonNode clone = Jsons.clone(getConfig()); - ((ObjectNode) clone).put(JdbcUtils.PASSWORD_KEY, "wrong password"); - return clone; - } - - @Override - protected String getDefaultSchema(final JsonNode config) { - if (config.get(JdbcUtils.DATABASE_KEY) == null) { - return null; - } - return config.get(JdbcUtils.DATABASE_KEY).asText(); - } - - @Override - protected TestDataComparator getTestDataComparator() { - return new MariaDbTestDataComparator(); - } - - @Override - protected boolean supportBasicDataTypeTest() { - return true; - } - - @Override - protected boolean supportArrayDataTypeTest() { - return true; - } - - @Override - protected boolean supportObjectDataTypeTest() { - return true; - } - - @Override - protected List<JsonNode> retrieveRecords(final TestDestinationEnv testEnv, - final String streamName, - final String namespace, - final JsonNode streamSchema) - throws Exception { - return retrieveRecordsFromTable(namingResolver.getRawTableName(streamName), namespace) - .stream() - .map(r -> Jsons.deserialize(r.get(JavaBaseConstants.COLUMN_NAME_DATA).asText())) - .collect(Collectors.toList()); - } - - private List<JsonNode> retrieveRecordsFromTable(final String tableName, final String schemaName) throws SQLException { - final JdbcDatabase database = getDatabase(getConfig()); - final String query = String.format("SELECT * FROM %s.%s ORDER BY %s ASC;", schemaName, tableName, JavaBaseConstants.COLUMN_NAME_EMITTED_AT); - return database.queryJsons(query); - } - - private static JdbcDatabase getDatabase(final JsonNode config) { - return new DefaultJdbcDatabase( - DataSourceFactory.create( - config.get(JdbcUtils.USERNAME_KEY).asText(), - 
config.has(JdbcUtils.PASSWORD_KEY) ? config.get(JdbcUtils.PASSWORD_KEY).asText() : null, - MariadbColumnstoreDestination.DRIVER_CLASS, - String.format(DatabaseDriver.MARIADB.getUrlFormatString(), - config.get(JdbcUtils.HOST_KEY).asText(), - config.get(JdbcUtils.PORT_KEY).asInt(), - config.get(JdbcUtils.DATABASE_KEY).asText()))); - } - - @Override - protected void setup(final TestDestinationEnv testEnv, HashSet<String> TEST_SCHEMAS) throws Exception { - final DockerImageName mcsImage = DockerImageName.parse("fengdi/columnstore:1.5.2").asCompatibleSubstituteFor("mariadb"); - db = new MariaDBContainer(mcsImage); - db.start(); - - final String createUser = String.format("CREATE USER '%s'@'%%' IDENTIFIED BY '%s';", db.getUsername(), db.getPassword()); - final String grantAll = String.format("GRANT ALL PRIVILEGES ON *.* TO '%s'@'%%' IDENTIFIED BY '%s';", db.getUsername(), db.getPassword()); - final String createDb = String.format("CREATE DATABASE %s DEFAULT CHARSET = utf8;", db.getDatabaseName()); - db.execInContainer("mariadb", "-e", createUser + grantAll + createDb); - } - - @Override - protected void tearDown(final TestDestinationEnv testEnv) { - db.stop(); - db.close(); - } - -} diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/SshKeyMariadbColumnstoreDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/SshKeyMariadbColumnstoreDestinationAcceptanceTest.java deleted file mode 100644 index 7d7b6232b8cc..000000000000 --- a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/SshKeyMariadbColumnstoreDestinationAcceptanceTest.java +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.mariadb_columnstore; - -import io.airbyte.cdk.integrations.base.ssh.SshTunnel; - -public class SshKeyMariadbColumnstoreDestinationAcceptanceTest extends SshMariadbColumnstoreDestinationAcceptanceTest { - - @Override - public SshTunnel.TunnelMethod getTunnelMethod() { - return SshTunnel.TunnelMethod.SSH_KEY_AUTH; - } - -} diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/SshMariadbColumnstoreDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/SshMariadbColumnstoreDestinationAcceptanceTest.java deleted file mode 100644 index 898aa505d932..000000000000 --- a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/SshMariadbColumnstoreDestinationAcceptanceTest.java +++ /dev/null @@ -1,155 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */ - -package io.airbyte.integrations.destination.mariadb_columnstore; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import io.airbyte.cdk.db.Database; -import io.airbyte.cdk.db.factory.DSLContextFactory; -import io.airbyte.cdk.db.factory.DatabaseDriver; -import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.cdk.integrations.base.ssh.SshBastionContainer; -import io.airbyte.cdk.integrations.base.ssh.SshTunnel; -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; -import io.airbyte.cdk.integrations.standardtest.destination.DestinationAcceptanceTest; -import io.airbyte.commons.functional.CheckedFunction; -import io.airbyte.commons.json.Jsons; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.stream.Collectors; -import org.jooq.DSLContext; -import org.jooq.SQLDialect; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.testcontainers.containers.MariaDBContainer; -import org.testcontainers.containers.Network; -import org.testcontainers.utility.DockerImageName; - -/** - * Abstract class that allows us to avoid duplicating testing logic for testing SSH with a key file - * or with a password. - */ -public abstract class SshMariadbColumnstoreDestinationAcceptanceTest extends DestinationAcceptanceTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(MariadbColumnstoreDestinationAcceptanceTest.class); - private static final Network network = Network.newNetwork(); - - private final StandardNameTransformer namingResolver = new MariadbColumnstoreNameTransformer(); - - private JsonNode configJson; - - private MariaDBContainer db; - - private final SshBastionContainer bastion = new SshBastionContainer(); - - public abstract SshTunnel.TunnelMethod getTunnelMethod(); - - @Override - protected boolean implementsNamespaces() { - return true; - } - - @Override - protected String getImageName() { - return "airbyte/destination-mariadb-columnstore:dev"; - } - - @Override - protected JsonNode getConfig() throws Exception { - return bastion.getTunnelConfig(getTunnelMethod(), bastion.getBasicDbConfigBuider(db), false); - } - - @Override - protected JsonNode getFailCheckConfig() throws Exception { - final JsonNode clone = Jsons.clone(getConfig()); - ((ObjectNode) clone).put("password", "wrong password"); - return clone; - } - - @Override - protected String getDefaultSchema(final JsonNode config) { - if (config.get(JdbcUtils.DATABASE_KEY) == null) { - return null; - } - return config.get(JdbcUtils.DATABASE_KEY).asText(); - } - - @Override - protected List<JsonNode> retrieveRecords(final TestDestinationEnv testEnv, - final String streamName, - final String namespace, - final JsonNode streamSchema) - throws Exception { - return retrieveRecordsFromTable(namingResolver.getRawTableName(streamName), namespace) - .stream() - .map(r -> Jsons.deserialize(r.get(JavaBaseConstants.COLUMN_NAME_DATA).asText())) - .collect(Collectors.toList()); - } - - private List<JsonNode> retrieveRecordsFromTable(final String tableName, final String schemaName) throws Exception { - final JsonNode config = getConfig(); - return SshTunnel.sshWrap( - config, - JdbcUtils.HOST_LIST_KEY, - JdbcUtils.PORT_LIST_KEY, - (CheckedFunction<JsonNode, List<JsonNode>, Exception>) mangledConfig -> getDatabaseFromConfig(mangledConfig) - .query( - ctx -> ctx - .fetch(String.format("SELECT * FROM %s.%s ORDER BY %s ASC;", schemaName, tableName, 
JavaBaseConstants.COLUMN_NAME_EMITTED_AT)) - .stream() - .map(r -> r.formatJSON(JdbcUtils.getDefaultJSONFormat())) - .map(Jsons::deserialize) - .collect(Collectors.toList()))); - } - - private static Database getDatabaseFromConfig(final JsonNode config) { - final DSLContext dslContext = DSLContextFactory.create( - config.get(JdbcUtils.USERNAME_KEY).asText(), - config.get(JdbcUtils.PASSWORD_KEY).asText(), - DatabaseDriver.MARIADB.getDriverClassName(), - String.format(DatabaseDriver.MARIADB.getUrlFormatString(), - config.get(JdbcUtils.HOST_KEY).asText(), - config.get(JdbcUtils.PORT_KEY).asInt(), - config.get(JdbcUtils.DATABASE_KEY).asText()), - SQLDialect.MARIADB); - return new Database(dslContext); - } - - @Override - protected List<String> resolveIdentifier(final String identifier) { - final List<String> result = new ArrayList<>(); - final String resolved = namingResolver.getIdentifier(identifier); - result.add(identifier); - result.add(resolved); - - return result; - } - - @Override - protected void setup(final TestDestinationEnv testEnv, final HashSet<String> TEST_SCHEMAS) throws Exception { - bastion.initAndStartBastion(network); - startAndInitJdbcContainer(); - } - - private void startAndInitJdbcContainer() throws Exception { - final DockerImageName mcsImage = DockerImageName.parse("fengdi/columnstore:1.5.2").asCompatibleSubstituteFor("mariadb"); - db = new MariaDBContainer<>(mcsImage) - .withNetwork(network); - db.start(); - - final String createUser = String.format("CREATE USER '%s'@'%%' IDENTIFIED BY '%s';", db.getUsername(), db.getPassword()); - final String grantAll = String.format("GRANT ALL PRIVILEGES ON *.* TO '%s'@'%%' IDENTIFIED BY '%s';", db.getUsername(), db.getPassword()); - final String createDb = String.format("CREATE DATABASE %s DEFAULT CHARSET = utf8;", db.getDatabaseName()); - db.execInContainer("mariadb", "-e", createUser + grantAll + createDb); - } - - @Override - protected void tearDown(final TestDestinationEnv testEnv) { - bastion.stopAndCloseContainers(db); - } - -} diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/SshPasswordMariadbColumnstoreDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/SshPasswordMariadbColumnstoreDestinationAcceptanceTest.java deleted file mode 100644 index 89c7ca6d8910..000000000000 --- a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/SshPasswordMariadbColumnstoreDestinationAcceptanceTest.java +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */ - -package io.airbyte.integrations.destination.mariadb_columnstore; - -import io.airbyte.cdk.integrations.base.ssh.SshTunnel; - -public class SshPasswordMariadbColumnstoreDestinationAcceptanceTest extends SshMariadbColumnstoreDestinationAcceptanceTest { - - @Override - public SshTunnel.TunnelMethod getTunnelMethod() { - return SshTunnel.TunnelMethod.SSH_PASSWORD_AUTH; - } - -} diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationTest.java b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationTest.java deleted file mode 100644 index 11505ed4024e..000000000000 --- a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationTest.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.mariadb_columnstore; - -import static org.junit.jupiter.api.Assertions.*; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import org.junit.jupiter.api.Test; - -public class MariadbColumnstoreDestinationTest { - - @Test - public void testToJdbcConfig() throws Exception { - final MariadbColumnstoreDestination dest = new MariadbColumnstoreDestination(); - String configJson = "{\"host\": \"localhost\", \"port\": 3306, \"database\": \"test\", \"username\": \"root\", \"password\": \"secret\"}"; - String expectedJson = "{\"username\": \"root\", \"password\": \"secret\", \"jdbc_url\": \"jdbc:mariadb://localhost:3306/test\"}"; - ObjectMapper mapper = new ObjectMapper(); - JsonNode config = mapper.readTree(configJson); - - JsonNode actual = dest.toJdbcConfig(config); - JsonNode expected = mapper.readTree(expectedJson); - - assertEquals(expected, actual); - } - -} diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbSpecTest.java b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbSpecTest.java deleted file mode 100644 index 5cfea9ffc4d5..000000000000 --- a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbSpecTest.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.mariadb_columnstore; - -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.protocol.models.v0.ConnectorSpecification; -import io.airbyte.validation.json.JsonSchemaValidator; -import java.io.File; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -public class MariadbSpecTest { - - private static JsonNode schema; - private static JsonNode config; - private static String configText; - private static JsonSchemaValidator validator; - - @BeforeAll - static void init() throws IOException { - configText = """ - { - "host": "localhost", - "port": 1521, - "username": "mariadb", - "password": "password", - "database": "db", - "jdbc_url_params": "property1=pValue1&property2=pValue2" - } - """; - final String spec = MoreResources.readResource("spec.json"); - final File schemaFile = IOs.writeFile(Files.createTempDirectory(Path.of("/tmp"), "spec-test"), "schema.json", spec).toFile(); - schema = JsonSchemaValidator.getSchema(schemaFile).get("connectionSpecification"); - validator = new JsonSchemaValidator(); - } - - @BeforeEach - void beforeEach() { - config = Jsons.deserialize(configText); - } - - @Test - void testHostMissing() { - ((ObjectNode) config).remove("host"); - assertFalse(validator.test(schema, config)); - } - - @Test - void testPortMissing() { - ((ObjectNode) config).remove("port"); - assertFalse(validator.test(schema, config)); - } - - @Test - void testDatabaseMissing() { - ((ObjectNode) config).remove("database"); - assertFalse(validator.test(schema, config)); - } - - @Test - void testUsernameMissing() { - ((ObjectNode) config).remove("username"); - assertFalse(validator.test(schema, config)); - } - - @Test - void testAdditionalJdbcParamMissing() { - ((ObjectNode) config).remove("jdbc_url_params"); - assertTrue(validator.test(schema, config)); - } - - @Test - void testWithJdbcAdditionalProperty() { - assertTrue(validator.test(schema, config)); - } - - @Test - void testJdbcAdditionalProperty() throws Exception { - final ConnectorSpecification spec = new MariadbColumnstoreDestination().spec(); - assertNotNull(spec.getConnectionSpecification().get("properties").get("jdbc_url_params")); - } - -} diff --git a/airbyte-integrations/connectors/destination-meilisearch/.dockerignore b/airbyte-integrations/connectors/destination-meilisearch/.dockerignore deleted file mode 100644 index 6d35a84f68b8..000000000000 --- a/airbyte-integrations/connectors/destination-meilisearch/.dockerignore +++ /dev/null @@ -1,5 +0,0 @@ -* -!Dockerfile -!main.py -!destination_meilisearch -!setup.py diff --git a/airbyte-integrations/connectors/destination-meilisearch/Dockerfile b/airbyte-integrations/connectors/destination-meilisearch/Dockerfile deleted file mode 100644 index 00bbfb9c4846..000000000000 --- a/airbyte-integrations/connectors/destination-meilisearch/Dockerfile +++ /dev/null @@ -1,38 +0,0 @@ -FROM python:3.9.11-alpine3.15 as base - -# build and load all requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -# 
upgrade pip to the latest version -RUN apk --no-cache upgrade \ - && pip install --upgrade pip \ - && apk --no-cache add tzdata build-base - - -COPY setup.py ./ -# install necessary packages to a temporary folder -RUN pip install --prefix=/install . - -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# bash is installed for more convenient debugging. -RUN apk --no-cache add bash - -# copy payload code only -COPY main.py ./ -COPY destination_meilisearch ./destination_meilisearch - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=1.0.1 -LABEL io.airbyte.name=airbyte/destination-meilisearch diff --git a/airbyte-integrations/connectors/destination-meilisearch/README.md b/airbyte-integrations/connectors/destination-meilisearch/README.md deleted file mode 100644 index 207e2898208e..000000000000 --- a/airbyte-integrations/connectors/destination-meilisearch/README.md +++ /dev/null @@ -1,99 +0,0 @@ -# Meilisearch Destination - -This is the repository for the Meilisearch destination connector, written in Python. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/destinations/meilisearch). - -## Local development - -### Prerequisites -**To iterate on this connector, make sure to complete the prerequisites in this section.** - -#### Minimum Python version required `= 3.7.0` - -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` - -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt -``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. - -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it; just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. - -#### Create credentials -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/destinations/meilisearch) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `destination_meilisearch/spec.json` file. -Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. - -**If you are an Airbyte core member**, copy the credentials from Lastpass under the secret name `destination meilisearch test creds` -and place them into `secrets/config.json`. 
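-A minimal sketch of that `secrets/config.json` (per `get_client` in `destination_meilisearch/destination.py`, the connector reads only a `host` and an `api_key`; both values below are placeholders):
-```
-{
-  "host": "http://localhost:7700",
-  "api_key": "<your-meilisearch-api-key>"
-}
-```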
- -### Locally running the connector -``` -python main.py spec -python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json -``` - -### Locally running the connector docker image - - -#### Build -**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** -```bash -airbyte-ci connectors --name=destination-meilisearch build -``` - -An image will be built with the tag `airbyte/destination-meilisearch:dev`. - -**Via `docker build`:** -```bash -docker build -t airbyte/destination-meilisearch:dev . -``` - -#### Run -Then run any of the connector commands as follows: -``` -docker run --rm airbyte/destination-meilisearch:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-meilisearch:dev check --config /secrets/config.json -# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages (a sample record line is shown in the appendix at the end of this README) -cat messages.jsonl | docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-meilisearch:dev write --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` - -## Testing -You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): -```bash -airbyte-ci connectors --name=destination-meilisearch test -``` - -### Customizing acceptance tests -Customize the `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. -If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`. - -## Dependency Management -All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. -We split dependencies into two groups: -* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list. -* dependencies required for testing go in the `TEST_REQUIREMENTS` list. - -### Publishing a new version of the connector -You've checked out the repo, implemented a million-dollar feature, and you're ready to share your changes with the world. Now what? -1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-meilisearch test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). -3. Make sure the `metadata.yaml` content is up to date. -4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/destinations/meilisearch.md`). -5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). -6. Pat yourself on the back for being an awesome contributor. -7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. 
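-
-### Appendix: sample `messages.jsonl` record
-Each line of the `messages.jsonl` file referenced in the Run section above is one serialized AirbyteMessage. A hypothetical `RECORD` line might look like the following (the `users` stream and its fields are invented for illustration):
-```
-{"type": "RECORD", "record": {"stream": "users", "data": {"id": 1, "name": "Alice"}, "emitted_at": 1640029476000}}
-```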
-
diff --git a/airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/__init__.py b/airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/__init__.py
deleted file mode 100644
index f83a392b80f7..000000000000
--- a/airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/__init__.py
+++ /dev/null
@@ -1,8 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
-
-
-from .destination import DestinationMeilisearch
-
-__all__ = ["DestinationMeilisearch"]
diff --git a/airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/destination.py b/airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/destination.py
deleted file mode 100644
index 32d08b787bf1..000000000000
--- a/airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/destination.py
+++ /dev/null
@@ -1,84 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
-
-
-from logging import Logger, getLogger
-from typing import Any, Dict, Iterable, Mapping
-
-from airbyte_cdk.destinations import Destination
-from airbyte_cdk.models import AirbyteConnectionStatus, AirbyteMessage, ConfiguredAirbyteCatalog, DestinationSyncMode, Status, Type
-from destination_meilisearch.writer import MeiliWriter
-from meilisearch import Client
-
-logger = getLogger("airbyte")
-
-
-def get_client(config: Mapping[str, Any]) -> Client:
-    host = config.get("host")
-    api_key = config.get("api_key")
-    return Client(host, api_key)
-
-
-class DestinationMeilisearch(Destination):
-    primary_key = "_ab_pk"
-
-    def _flush_streams(self, streams: Dict[str, MeiliWriter]) -> None:
-        for stream in streams:
-            streams[stream].flush()
-
-    def write(
-        self, config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog, input_messages: Iterable[AirbyteMessage]
-    ) -> Iterable[AirbyteMessage]:
-        client = get_client(config=config)
-        # Creating Meilisearch writers
-        writers = {s.stream.name: MeiliWriter(client, s.stream.name, self.primary_key) for s in configured_catalog.streams}
-
-        for configured_stream in configured_catalog.streams:
-            stream_name = configured_stream.stream.name
-            # Deleting index in Meilisearch if sync mode is overwrite
-            if configured_stream.destination_sync_mode == DestinationSyncMode.overwrite:
-                logger.debug(f"Deleting index: {stream_name}.")
-                client.delete_index(stream_name)
-            # Creating index in Meilisearch
-            client.create_index(stream_name, {"primaryKey": self.primary_key})
-            logger.debug(f"Creating index: {stream_name}.")
-
-        for message in input_messages:
-            if message.type == Type.STATE:
-                yield message
-            elif message.type == Type.RECORD:
-                data = message.record.data
-                stream = message.record.stream
-                # Skip unselected streams
-                if stream not in writers:
-                    logger.debug(f"Stream {stream} was not present in configured streams, skipping")
-                    continue
-                writers[stream].queue_write_operation(data)
-            else:
-                logger.info(f"Unhandled message type {message.type}: {message}")
-
-        # Flush any leftover messages
-        self._flush_streams(writers)
-
-    def check(self, logger: Logger, config: Mapping[str, Any]) -> AirbyteConnectionStatus:
-        try:
-            client = get_client(config=config)
-
-            client.create_index("_airbyte", {"primaryKey": "id"})
-
-            client.index("_airbyte").add_documents(
-                [
-                    {
-                        "id": 287947,
-                        "title": "Shazam",
-                        "overview": "A boy is given the ability",
-                    }
-                ]
-            )
-
-            client.delete_index("_airbyte")
-            return AirbyteConnectionStatus(status=Status.SUCCEEDED)
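-        # If the create/add/delete round trip above raised, the failure is
-        # surfaced below as a failed connection status instead of an unhandled exception.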
- except Exception as e: - logger.error(f"Check connection failed. Error: {e}") - return AirbyteConnectionStatus(status=Status.FAILED, message=f"An exception occurred: {repr(e)}") diff --git a/airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/spec.json b/airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/spec.json deleted file mode 100644 index f3fe7aaeda47..000000000000 --- a/airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/spec.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/destinations/meilisearch", - "supported_destination_sync_modes": ["overwrite", "append"], - "supportsIncremental": true, - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Destination Meilisearch", - "type": "object", - "required": ["host"], - "additionalProperties": false, - "properties": { - "host": { - "title": "Host", - "description": "Hostname of the MeiliSearch instance.", - "type": "string", - "order": 0 - }, - "api_key": { - "title": "API Key", - "airbyte_secret": true, - "description": "MeiliSearch API Key. See the docs for more information on how to obtain this key.", - "type": "string", - "order": 1 - } - } - } -} diff --git a/airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/writer.py b/airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/writer.py deleted file mode 100644 index e2450f825106..000000000000 --- a/airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/writer.py +++ /dev/null @@ -1,39 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from collections.abc import Mapping -from logging import getLogger -from uuid import uuid4 - -from meilisearch import Client - -logger = getLogger("airbyte") - - -class MeiliWriter: - flush_interval = 50000 - - def __init__(self, client: Client, stream_name: str, primary_key: str): - self.client = client - self.primary_key = primary_key - self.stream_name: str = stream_name - self._write_buffer = [] - - logger.info(f"Creating MeiliWriter for {self.stream_name}") - - def queue_write_operation(self, data: Mapping): - random_key = str(uuid4()) - self._write_buffer.append({**data, self.primary_key: random_key}) - if len(self._write_buffer) == self.flush_interval: - logger.debug(f"Reached limit size: flushing records for {self.stream_name}") - self.flush() - - def flush(self): - buffer_size = len(self._write_buffer) - if buffer_size == 0: - return - logger.info(f"Flushing {buffer_size} records") - response = self.client.index(self.stream_name).add_documents(self._write_buffer) - self.client.wait_for_task(response.task_uid, 1800000, 1000) - self._write_buffer.clear() diff --git a/airbyte-integrations/connectors/destination-meilisearch/integration_tests/integration_test.py b/airbyte-integrations/connectors/destination-meilisearch/integration_tests/integration_test.py deleted file mode 100644 index 1d9687e97c7d..000000000000 --- a/airbyte-integrations/connectors/destination-meilisearch/integration_tests/integration_test.py +++ /dev/null @@ -1,103 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - -import json -import logging -from typing import Any, Dict, Mapping - -import pytest -from airbyte_cdk.models import ( - AirbyteMessage, - AirbyteRecordMessage, - AirbyteStateMessage, - AirbyteStream, - ConfiguredAirbyteCatalog, - ConfiguredAirbyteStream, - DestinationSyncMode, - Status, - SyncMode, - Type, -) -from destination_meilisearch.destination import DestinationMeilisearch, get_client -from meilisearch import Client - - -@pytest.fixture(name="config") -def config_fixture() -> Mapping[str, Any]: - with open("secrets/config.json", "r") as f: - return json.loads(f.read()) - - -@pytest.fixture(name="configured_catalog") -def configured_catalog_fixture() -> ConfiguredAirbyteCatalog: - stream_schema = {"type": "object", "properties": {"string_col": {"type": "str"}, "int_col": {"type": "integer"}}} - - overwrite_stream = ConfiguredAirbyteStream( - stream=AirbyteStream( - name="_airbyte", json_schema=stream_schema, supported_sync_modes=[SyncMode.incremental, SyncMode.full_refresh] - ), - sync_mode=SyncMode.incremental, - destination_sync_mode=DestinationSyncMode.overwrite, - ) - - return ConfiguredAirbyteCatalog(streams=[overwrite_stream]) - - -@pytest.fixture(autouse=True) -def teardown(config: Mapping): - yield - client = get_client(config=config) - client.delete_index("_airbyte") - - -@pytest.fixture(name="client") -def client_fixture(config) -> Client: - client = get_client(config=config) - resp = client.create_index("_airbyte", {"primaryKey": "_ab_pk"}) - client.wait_for_task(_handle_breaking_wait_for_task(resp)) - return client - - -def test_check_valid_config(config: Mapping): - outcome = DestinationMeilisearch().check(logging.getLogger("airbyte"), config) - assert outcome.status == Status.SUCCEEDED - - -def test_check_invalid_config(): - outcome = DestinationMeilisearch().check( - logging.getLogger("airbyte"), {"api_key": "not_a_real_key", "host": "https://www.meilisearch.com"} - ) - assert outcome.status == Status.FAILED - - -def _state(data: Dict[str, Any]) -> AirbyteMessage: - return AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(data=data)) - - -def _record(stream: str, str_value: str, int_value: int) -> AirbyteMessage: - return AirbyteMessage( - type=Type.RECORD, record=AirbyteRecordMessage(stream=stream, data={"str_col": str_value, "int_col": int_value}, emitted_at=0) - ) - - -def _handle_breaking_wait_for_task(task: Any) -> int: - if type(task) is dict: - return task["taskUid"] - else: - return task.task_uid - - -def records_count(client: Client) -> int: - documents_results = client.index("_airbyte").get_documents() - return documents_results.total - - -def test_write(config: Mapping, configured_catalog: ConfiguredAirbyteCatalog, client: Client): - overwrite_stream = configured_catalog.streams[0].stream.name - first_state_message = _state({"state": "1"}) - first_record_chunk = [_record(overwrite_stream, str(i), i) for i in range(2)] - - destination = DestinationMeilisearch() - list(destination.write(config, configured_catalog, [*first_record_chunk, first_state_message])) - assert records_count(client) == 2 diff --git a/airbyte-integrations/connectors/destination-meilisearch/integration_tests/messages.jsonl b/airbyte-integrations/connectors/destination-meilisearch/integration_tests/messages.jsonl deleted file mode 100644 index e1d0682f9dad..000000000000 --- a/airbyte-integrations/connectors/destination-meilisearch/integration_tests/messages.jsonl +++ /dev/null @@ -1,2 +0,0 @@ -{"type": "RECORD", "record": {"stream": "ab-airbyte-testing", "data": {"_ab_pk": 
"my_value", "column2": 221, "column3": "2021-01-01T20:10:22", "column4": 1.214, "column5": [1,2,3]}, "emitted_at": 1626172757000}} -{"type": "RECORD", "record": {"stream": "ab-airbyte-testing", "data": {"_ab_pk": "my_value2", "column2": 222, "column3": "2021-01-02T22:10:22", "column5": [1,2,null]}, "emitted_at": 1626172757000}} diff --git a/airbyte-integrations/connectors/destination-meilisearch/main.py b/airbyte-integrations/connectors/destination-meilisearch/main.py deleted file mode 100644 index a5eba931a2fb..000000000000 --- a/airbyte-integrations/connectors/destination-meilisearch/main.py +++ /dev/null @@ -1,11 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import sys - -from destination_meilisearch import DestinationMeilisearch - -if __name__ == "__main__": - DestinationMeilisearch().run(sys.argv[1:]) diff --git a/airbyte-integrations/connectors/destination-meilisearch/requirements.txt b/airbyte-integrations/connectors/destination-meilisearch/requirements.txt deleted file mode 100644 index d6e1198b1ab1..000000000000 --- a/airbyte-integrations/connectors/destination-meilisearch/requirements.txt +++ /dev/null @@ -1 +0,0 @@ --e . diff --git a/airbyte-integrations/connectors/destination-meilisearch/sample_files/configured_catalog.json b/airbyte-integrations/connectors/destination-meilisearch/sample_files/configured_catalog.json deleted file mode 100644 index 9ac002e358d3..000000000000 --- a/airbyte-integrations/connectors/destination-meilisearch/sample_files/configured_catalog.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "streams": [ - { - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite", - "stream": { - "name": "ab-airbyte-testing", - "supported_sync_modes": ["full_refresh"], - "source_defined_cursor": false, - "json_schema": { - "type": "object", - "properties": { - "id": { - "type": "string" - }, - "body": { - "type": "string" - }, - "attributes": { - "type": ["null", "object"] - } - } - } - } - } - ] -} diff --git a/airbyte-integrations/connectors/destination-meilisearch/setup.py b/airbyte-integrations/connectors/destination-meilisearch/setup.py deleted file mode 100644 index 9d9bfe3e6e16..000000000000 --- a/airbyte-integrations/connectors/destination-meilisearch/setup.py +++ /dev/null @@ -1,23 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = ["airbyte-cdk", "meilisearch>=0.22.0"] - -TEST_REQUIREMENTS = ["pytest~=6.1"] - -setup( - name="destination_meilisearch", - description="Destination implementation for Meilisearch.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json"]}, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/destination-meilisearch/unit_tests/unit_test.py b/airbyte-integrations/connectors/destination-meilisearch/unit_tests/unit_test.py deleted file mode 100644 index c09a3f7d8744..000000000000 --- a/airbyte-integrations/connectors/destination-meilisearch/unit_tests/unit_test.py +++ /dev/null @@ -1,29 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-#
-
-from unittest.mock import patch
-
-from destination_meilisearch.writer import MeiliWriter
-
-
-@patch("meilisearch.Client")
-def test_queue_write_operation(client):
-    writer = MeiliWriter(client, "stream_name", "primary_key")
-    writer.queue_write_operation({"a": "a"})
-    assert len(writer._write_buffer) == 1
-    writer.queue_write_operation({"b": "b"})
-    assert len(writer._write_buffer) == 2
-    writer2 = MeiliWriter(client, "stream_name2", "primary_key")
-    writer2.queue_write_operation({"a": "a"})
-    assert len(writer2._write_buffer) == 1
-    assert len(writer._write_buffer) == 2
-
-
-@patch("meilisearch.Client")
-def test_flush(client):
-    writer = MeiliWriter(client, "stream_name", "primary_key")
-    writer.queue_write_operation({"a": "a"})
-    writer.flush()
-    client.index.assert_called_once_with("stream_name")
-    client.wait_for_task.assert_called_once()
diff --git a/airbyte-integrations/connectors/destination-mqtt/README.md b/airbyte-integrations/connectors/destination-mqtt/README.md
deleted file mode 100644
index f9d73b568550..000000000000
--- a/airbyte-integrations/connectors/destination-mqtt/README.md
+++ /dev/null
@@ -1,72 +0,0 @@
-# Destination MQTT
-
-This is the repository for the MQTT destination connector in Java.
-For information about how to use this connector within Airbyte, see [the User Documentation](https://docs.airbyte.io/integrations/destinations/mqtt).
-
-## Local development
-
-#### Building via Gradle
-From the Airbyte repository root, run:
-```
-./gradlew :airbyte-integrations:connectors:destination-mqtt:build
-```
-
-#### Create credentials
-**If you are a community contributor**, generate the necessary credentials and place them in `secrets/config.json` conforming to the spec file in `src/main/resources/spec.json`.
-Note that the `secrets` directory is git-ignored by default, so there is no danger of accidentally checking in sensitive information.
-
-**If you are an Airbyte core member**, follow the [instructions](https://docs.airbyte.io/connector-development#using-credentials-in-ci) to set up the credentials.
-
-### Locally running the connector docker image
-
-#### Build
-Build the connector image via Gradle:
-
-```
-./gradlew :airbyte-integrations:connectors:destination-mqtt:buildConnectorImage
-```
-Once built, the docker image name and tag on your host will be `airbyte/destination-mqtt:dev`.
-
-#### Run
-Then run any of the connector commands as follows:
-```
-docker run --rm airbyte/destination-mqtt:dev spec
-docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-mqtt:dev check --config /secrets/config.json
-docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-mqtt:dev discover --config /secrets/config.json
-docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-mqtt:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
-```
-
-## Testing
-We use `JUnit` for Java tests.
-
-### Unit and Integration Tests
-Place unit tests under `src/test/io/airbyte/integrations/destinations/mqtt`.
-
-#### Acceptance Tests
-Airbyte has a standard test suite that all destination connectors must pass. Implement the `TODO`s in
-`src/test-integration/java/io/airbyte/integrations/destinations/MqttDestinationAcceptanceTest.java`.
-
-### Using gradle to run tests
-All commands should be run from the Airbyte project root.
-To run unit tests:
-```
-./gradlew :airbyte-integrations:connectors:destination-mqtt:unitTest
-```
-To run acceptance and custom integration tests:
-```
-./gradlew :airbyte-integrations:connectors:destination-mqtt:integrationTest
-```
-
-## Dependency Management
-
-### Publishing a new version of the connector
-You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what?
-1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-mqtt test`
-2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors).
-3. Make sure the `metadata.yaml` content is up to date.
-4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/destinations/mqtt.md`).
-5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention).
-6. Pat yourself on the back for being an awesome contributor.
-7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
-
diff --git a/airbyte-integrations/connectors/destination-mqtt/build.gradle b/airbyte-integrations/connectors/destination-mqtt/build.gradle
deleted file mode 100644
index 599b538f4ac2..000000000000
--- a/airbyte-integrations/connectors/destination-mqtt/build.gradle
+++ /dev/null
@@ -1,31 +0,0 @@
-plugins {
-    id 'application'
-    id 'airbyte-java-connector'
-}
-
-airbyteJavaConnector {
-    cdkVersionRequired = '0.2.0'
-    features = ['db-destinations']
-    useLocalCdk = false
-}
-
-//remove once upgrading the CDK version to 0.4.x or later
-java {
-    compileJava {
-        options.compilerArgs.remove("-Werror")
-    }
-}
-
-airbyteJavaConnector.addCdkDependencies()
-
-application {
-    mainClass = 'io.airbyte.integrations.destination.mqtt.MqttDestination'
-    applicationDefaultJvmArgs = ['-XX:+ExitOnOutOfMemoryError', '-XX:MaxRAMPercentage=75.0']
-}
-
-dependencies {
-
-    implementation 'org.eclipse.paho:org.eclipse.paho.client.mqttv3:1.2.5'
-
-    testImplementation 'com.hivemq:hivemq-testcontainer-junit5:2.0.0'
-}
diff --git a/airbyte-integrations/connectors/destination-mqtt/src/main/java/io/airbyte/integrations/destination/mqtt/MqttDestination.java b/airbyte-integrations/connectors/destination-mqtt/src/main/java/io/airbyte/integrations/destination/mqtt/MqttDestination.java
deleted file mode 100644
index 179ddfb3a96f..000000000000
--- a/airbyte-integrations/connectors/destination-mqtt/src/main/java/io/airbyte/integrations/destination/mqtt/MqttDestination.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */ - -package io.airbyte.integrations.destination.mqtt; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.base.Charsets; -import com.google.common.collect.ImmutableMap; -import io.airbyte.cdk.integrations.BaseConnector; -import io.airbyte.cdk.integrations.base.AirbyteMessageConsumer; -import io.airbyte.cdk.integrations.base.Destination; -import io.airbyte.cdk.integrations.base.IntegrationRunner; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus.Status; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.util.UUID; -import java.util.function.Consumer; -import org.eclipse.paho.client.mqttv3.IMqttAsyncClient; -import org.eclipse.paho.client.mqttv3.MqttAsyncClient; -import org.eclipse.paho.client.mqttv3.MqttMessage; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class MqttDestination extends BaseConnector implements Destination { - - private static final Logger LOGGER = LoggerFactory.getLogger(MqttDestination.class); - - public static final String COLUMN_NAME_AB_ID = JavaBaseConstants.COLUMN_NAME_AB_ID; - public static final String COLUMN_NAME_EMITTED_AT = JavaBaseConstants.COLUMN_NAME_EMITTED_AT; - public static final String COLUMN_NAME_DATA = JavaBaseConstants.COLUMN_NAME_DATA; - public static final String COLUMN_NAME_STREAM = "_airbyte_stream"; - - @Override - public AirbyteConnectionStatus check(final JsonNode config) { - try { - final MqttDestinationConfig mqttConfig = MqttDestinationConfig.getMqttDestinationConfig(config); - final String testTopic = mqttConfig.getTestTopic(); - if (!testTopic.isBlank()) { - try (final IMqttAsyncClient client = new MqttAsyncClient(mqttConfig.getServerUri(), mqttConfig.getClientId())) { - client.connect(mqttConfig.getMqttConnectOptions()).waitForCompletion(); - - final String key = UUID.randomUUID().toString(); - final JsonNode payload = Jsons.jsonNode(ImmutableMap.of( - COLUMN_NAME_AB_ID, key, - COLUMN_NAME_STREAM, "test-topic-stream", - COLUMN_NAME_EMITTED_AT, System.currentTimeMillis(), - COLUMN_NAME_DATA, Jsons.jsonNode(ImmutableMap.of("test-key", "test-value")))); - - final MqttMessage message = new MqttMessage(payload.toString().getBytes(Charsets.UTF_8)); - message.setQos(mqttConfig.getQos()); - message.setRetained(mqttConfig.isRetainedMessage()); - - client.publish(testTopic, message).getMessage(); - client.disconnectForcibly(); - - LOGGER.info("Successfully sent message with key '{}' to MQTT broker for topic '{}'.", key, testTopic); - } - } - return new AirbyteConnectionStatus().withStatus(Status.SUCCEEDED); - } catch (final Exception e) { - LOGGER.error("Exception attempting to connect to the MQTT broker: ", e); - return new AirbyteConnectionStatus() - .withStatus(Status.FAILED) - .withMessage("Could not connect to the MQTT broker with provided configuration. 
\n" + e.getMessage()); - } - } - - @Override - public AirbyteMessageConsumer getConsumer(final JsonNode config, - final ConfiguredAirbyteCatalog catalog, - final Consumer outputRecordCollector) { - return new MqttRecordConsumer(MqttDestinationConfig.getMqttDestinationConfig(config), - catalog, - outputRecordCollector); - } - - public static void main(final String[] args) throws Exception { - final Destination destination = new MqttDestination(); - LOGGER.info("Starting destination: {}", MqttDestination.class); - new IntegrationRunner(destination).run(args); - LOGGER.info("Completed destination: {}", MqttDestination.class); - } - -} diff --git a/airbyte-integrations/connectors/destination-mqtt/src/main/java/io/airbyte/integrations/destination/mqtt/MqttDestinationConfig.java b/airbyte-integrations/connectors/destination-mqtt/src/main/java/io/airbyte/integrations/destination/mqtt/MqttDestinationConfig.java deleted file mode 100644 index d3efb68e4602..000000000000 --- a/airbyte-integrations/connectors/destination-mqtt/src/main/java/io/airbyte/integrations/destination/mqtt/MqttDestinationConfig.java +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.mqtt; - -import com.fasterxml.jackson.databind.JsonNode; -import java.util.UUID; -import org.eclipse.paho.client.mqttv3.MqttConnectOptions; - -public class MqttDestinationConfig { - - private final String clientId; - private final String serverUri; - private final String topicPattern; - private final String testTopic; - private final MqttConnectOptions options; - private final boolean retained; - private final boolean sync; - private final int qOs; - - private MqttDestinationConfig(final JsonNode config) { - this.clientId = buildClientId(config); - this.serverUri = buildServerUri(config); - this.testTopic = buildTestTopic(config); - this.topicPattern = buildTopicPattern(config); - this.options = buildMqttConnectOptions(config); - this.retained = isRetained(config); - this.sync = isSyncProducer(config); - this.qOs = buildQos(config); - } - - public static MqttDestinationConfig getMqttDestinationConfig(final JsonNode config) { - return new MqttDestinationConfig(config); - } - - public String getClientId() { - return clientId; - } - - public int getQos() { - return qOs; - } - - public MqttConnectOptions getMqttConnectOptions() { - return options; - } - - public String getServerUri() { - return serverUri; - } - - public String getTestTopic() { - return testTopic; - } - - public String getTopicPattern() { - return topicPattern; - } - - public boolean isSync() { - return sync; - } - - public boolean isRetainedMessage() { - return retained; - } - - private String buildClientId(final JsonNode config) { - if (config.has("client_id")) { - return config.get("client_id").asText(); - } - return "airbyte-" + UUID.randomUUID(); - } - - private MqttConnectOptions buildMqttConnectOptions(final JsonNode config) { - final MqttConnectOptions options = new MqttConnectOptions(); - options.setConnectionTimeout(config.get("connect_timeout").intValue()); - options.setAutomaticReconnect(config.get("automatic_reconnect").booleanValue()); - options.setCleanSession(config.get("clean_session").booleanValue()); - if (config.has("username") && !config.get("username").asText().isBlank()) { - options.setUserName(config.get("username").asText()); - } - if (config.has("password") && !config.get("password").asText().isBlank()) { - 
options.setPassword(config.get("password").asText().toCharArray()); - } - - if (config.has("max_in_flight") && !config.get("max_in_flight").asText().isBlank()) { - options.setMaxInflight(config.get("max_in_flight").asInt()); - } - - return options; - } - - private String buildServerUri(final JsonNode config) { - return String.format("%s://%s:%s", - config.get("use_tls").asBoolean() ? "ssl" : "tcp", - config.get("broker_host").asText(), - config.get("broker_port").intValue()); - } - - private String buildTestTopic(final JsonNode config) { - return config.has("test_topic") ? config.get("test_topic").asText() : ""; - } - - private String buildTopicPattern(final JsonNode config) { - return config.get("topic_pattern").asText(); - } - - private boolean isRetained(final JsonNode config) { - return config.get("message_retained").asBoolean(); - } - - private boolean isSyncProducer(final JsonNode config) { - return config.get("publisher_sync").asBoolean(); - } - - private int buildQos(final JsonNode config) { - return MessageQoS.valueOf(config.get("message_qos").asText()).getQos(); - } - - private enum MessageQoS { - - AT_MOST_ONCE(0), - AT_LEAST_ONCE(1), - EXACTLY_ONCE(2); - - private final int qos; - - MessageQoS(final int qos) { - this.qos = qos; - } - - public int getQos() { - return qos; - } - - } - -} diff --git a/airbyte-integrations/connectors/destination-mqtt/src/main/java/io/airbyte/integrations/destination/mqtt/MqttRecordConsumer.java b/airbyte-integrations/connectors/destination-mqtt/src/main/java/io/airbyte/integrations/destination/mqtt/MqttRecordConsumer.java deleted file mode 100644 index 5c4c3b0f12d3..000000000000 --- a/airbyte-integrations/connectors/destination-mqtt/src/main/java/io/airbyte/integrations/destination/mqtt/MqttRecordConsumer.java +++ /dev/null @@ -1,155 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.mqtt; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.base.Charsets; -import com.google.common.collect.ImmutableMap; -import io.airbyte.cdk.integrations.base.FailureTrackingAirbyteMessageConsumer; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.lang.Exceptions; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.util.HashMap; -import java.util.Map; -import java.util.Optional; -import java.util.UUID; -import java.util.function.Consumer; -import java.util.function.Function; -import java.util.stream.Collectors; -import org.eclipse.paho.client.mqttv3.IMqttActionListener; -import org.eclipse.paho.client.mqttv3.IMqttAsyncClient; -import org.eclipse.paho.client.mqttv3.IMqttDeliveryToken; -import org.eclipse.paho.client.mqttv3.IMqttToken; -import org.eclipse.paho.client.mqttv3.MqttAsyncClient; -import org.eclipse.paho.client.mqttv3.MqttException; -import org.eclipse.paho.client.mqttv3.MqttMessage; -import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class MqttRecordConsumer extends FailureTrackingAirbyteMessageConsumer { - - private static final Logger LOGGER = LoggerFactory.getLogger(MqttRecordConsumer.class); - - private final MqttDestinationConfig config; - private final Map topicMap; - private final ConfiguredAirbyteCatalog catalog; - private final Consumer outputRecordCollector; - private final IMqttAsyncClient client; - - private AirbyteMessage lastStateMessage = null; - - public MqttRecordConsumer(final MqttDestinationConfig mqttDestinationConfig, - final ConfiguredAirbyteCatalog catalog, - final Consumer outputRecordCollector) { - this.config = mqttDestinationConfig; - this.topicMap = new HashMap<>(); - this.catalog = catalog; - this.outputRecordCollector = outputRecordCollector; - this.client = buildMqttClient(); - } - - private IMqttAsyncClient buildMqttClient() { - try { - return new MqttAsyncClient(config.getServerUri(), config.getClientId(), new MemoryPersistence()); - } catch (MqttException e) { - throw new RuntimeException("Error creating MQTT client", e); - } - } - - @Override - protected void startTracked() { - try { - client.connect(config.getMqttConnectOptions()).waitForCompletion(); - } catch (MqttException e) { - throw new RuntimeException("Error connecting to MQTT broker", e); - } - topicMap.putAll(buildTopicMap()); - } - - @Override - protected void acceptTracked(final AirbyteMessage airbyteMessage) { - if (airbyteMessage.getType() == AirbyteMessage.Type.STATE) { - lastStateMessage = airbyteMessage; - } else if (airbyteMessage.getType() == AirbyteMessage.Type.RECORD) { - final AirbyteRecordMessage recordMessage = airbyteMessage.getRecord(); - final String topic = topicMap.get(AirbyteStreamNameNamespacePair.fromRecordMessage(recordMessage)); - - final String key = UUID.randomUUID().toString(); - final JsonNode payload = Jsons.jsonNode(ImmutableMap.of( - MqttDestination.COLUMN_NAME_AB_ID, key, - MqttDestination.COLUMN_NAME_STREAM, recordMessage.getStream(), - MqttDestination.COLUMN_NAME_EMITTED_AT, recordMessage.getEmittedAt(), - MqttDestination.COLUMN_NAME_DATA, recordMessage.getData())); - - final MqttMessage message = new MqttMessage(payload.toString().getBytes(Charsets.UTF_8)); - 
message.setRetained(config.isRetainedMessage()); - message.setQos(config.getQos()); - - sendRecord(topic, message); - } else { - LOGGER.warn("Unexpected message: " + airbyteMessage.getType()); - } - } - - Map buildTopicMap() { - return catalog.getStreams().stream() - .map(stream -> AirbyteStreamNameNamespacePair.fromAirbyteStream(stream.getStream())) - .collect(Collectors.toMap(Function.identity(), pair -> config.getTopicPattern() - .replaceAll("\\{namespace}", Optional.ofNullable(pair.getNamespace()).orElse("")) - .replaceAll("\\{stream}", Optional.ofNullable(pair.getName()).orElse("")), - (existing, newValue) -> existing)); - } - - private void sendRecord(final String topic, final MqttMessage message) { - try { - final IMqttDeliveryToken token = client.publish(topic, message, null, new MessageActionListener(outputRecordCollector, lastStateMessage)); - if (config.isSync()) { - token.waitForCompletion(); - } - } catch (MqttException e) { - LOGGER.error("Error sending message to topic '{}'.", topic, e); - throw new RuntimeException("Cannot send message to MQTT. Error: " + e.getMessage(), e); - } - } - - @Override - protected void close(final boolean hasFailed) { - Exceptions.swallow(client::disconnectForcibly); - Exceptions.swallow(client::close); - - if (lastStateMessage != null) { - outputRecordCollector.accept(lastStateMessage); - } - } - - private static class MessageActionListener implements IMqttActionListener { - - private final AirbyteMessage lastStateMessage; - private final Consumer outputRecordCollector; - - MessageActionListener(Consumer outputRecordCollector, AirbyteMessage lastStateMessage) { - this.outputRecordCollector = outputRecordCollector; - this.lastStateMessage = lastStateMessage; - } - - @Override - public void onSuccess(IMqttToken asyncActionToken) { - if (lastStateMessage != null) { - outputRecordCollector.accept(lastStateMessage); - } - } - - @Override - public void onFailure(IMqttToken asyncActionToken, Throwable exception) { - throw new RuntimeException("Cannot deliver message with ID '" + asyncActionToken.getMessageId() + "'", exception); - } - - } - -} diff --git a/airbyte-integrations/connectors/destination-mqtt/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-mqtt/src/main/resources/spec.json deleted file mode 100644 index 64933cba71f4..000000000000 --- a/airbyte-integrations/connectors/destination-mqtt/src/main/resources/spec.json +++ /dev/null @@ -1,108 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/destinations/mqtt", - "supportsIncremental": true, - "supportsNormalization": false, - "supportsDBT": false, - "supported_destination_sync_modes": ["append"], - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "MQTT Destination Spec", - "type": "object", - "required": [ - "broker_host", - "broker_port", - "use_tls", - "topic_pattern", - "publisher_sync", - "connect_timeout", - "automatic_reconnect", - "clean_session", - "message_retained", - "message_qos" - ], - "additionalProperties": true, - "properties": { - "broker_host": { - "title": "MQTT broker host", - "description": "Host of the broker to connect to.", - "type": "string" - }, - "broker_port": { - "title": "MQTT broker port", - "description": "Port of the broker.", - "type": "integer" - }, - "use_tls": { - "title": "Use TLS", - "description": "Whether to use TLS encryption on the connection.", - "type": "boolean", - "default": false - }, - "username": { - "title": "Username", - "description": "User name to 
use for the connection.", - "type": "string" - }, - "password": { - "title": "Password", - "description": "Password to use for the connection.", - "type": "string", - "airbyte_secret": true - }, - "topic_pattern": { - "title": "Topic pattern", - "description": "Topic pattern in which the records will be sent. You can use patterns like '{namespace}' and/or '{stream}' to send the message to a specific topic based on these values. Notice that the topic name will be transformed to a standard naming convention.", - "type": "string", - "examples": ["sample.topic", "{namespace}/{stream}/sample"] - }, - "topic_test": { - "title": "Test topic", - "description": "Topic to test if Airbyte can produce messages.", - "type": "string", - "examples": ["test/topic"] - }, - "client": { - "title": "Client ID", - "description": "A client identifier that is unique on the server being connected to.", - "type": "string", - "examples": ["airbyte-client1"] - }, - "publisher_sync": { - "title": "Sync publisher", - "description": "Wait synchronously until the record has been sent to the broker.", - "type": "boolean", - "default": false - }, - "connect_timeout": { - "title": "Connect timeout", - "description": " Maximum time interval (in seconds) the client will wait for the network connection to the MQTT server to be established.", - "type": "integer", - "default": 30 - }, - "automatic_reconnect": { - "title": "Automatic reconnect", - "description": "Whether the client will automatically attempt to reconnect to the server if the connection is lost.", - "type": "boolean", - "default": true - }, - "clean_session": { - "title": "Clean session", - "description": "Whether the client and server should remember state across restarts and reconnects.", - "type": "boolean", - "default": true - }, - "message_retained": { - "title": "Message retained", - "description": "Whether or not the publish message should be retained by the messaging engine.", - "type": "boolean", - "default": false - }, - "message_qos": { - "title": "Message QoS", - "description": "Quality of service used for each message to be delivered.", - "default": "AT_LEAST_ONCE", - "enum": ["AT_MOST_ONCE", "AT_LEAST_ONCE", "EXACTLY_ONCE"] - } - } - } -} diff --git a/airbyte-integrations/connectors/destination-mqtt/src/test-integration/java/io/airbyte/integrations/destination/mqtt/MqttDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mqtt/src/test-integration/java/io/airbyte/integrations/destination/mqtt/MqttDestinationAcceptanceTest.java deleted file mode 100644 index 6cb4ab0658a7..000000000000 --- a/airbyte-integrations/connectors/destination-mqtt/src/test-integration/java/io/airbyte/integrations/destination/mqtt/MqttDestinationAcceptanceTest.java +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */
-
-package io.airbyte.integrations.destination.mqtt;
-
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.ObjectReader;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Streams;
-import com.google.common.net.InetAddresses;
-import com.hivemq.testcontainer.junit5.HiveMQTestContainerExtension;
-import io.airbyte.cdk.integrations.standardtest.destination.DestinationAcceptanceTest;
-import io.airbyte.cdk.integrations.standardtest.destination.comparator.AdvancedTestDataComparator;
-import io.airbyte.cdk.integrations.standardtest.destination.comparator.TestDataComparator;
-import io.airbyte.commons.json.Jsons;
-import java.net.InetAddress;
-import java.net.NetworkInterface;
-import java.net.SocketException;
-import java.net.UnknownHostException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.UUID;
-import org.eclipse.paho.client.mqttv3.MqttClient;
-import org.eclipse.paho.client.mqttv3.MqttConnectOptions;
-import org.eclipse.paho.client.mqttv3.MqttException;
-import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence;
-import org.junit.jupiter.api.extension.RegisterExtension;
-import org.testcontainers.utility.DockerImageName;
-
-public class MqttDestinationAcceptanceTest extends DestinationAcceptanceTest {
-
-  private static final String TOPIC_PREFIX = "test/integration/";
-  private static final String TOPIC_NAME = "test.topic";
-  private static final ObjectReader READER = new ObjectMapper().reader();
-
-  private final Map<String, List<JsonNode>> recordsPerTopic = new HashMap<>();
-  private MqttClient client;
-
-  @RegisterExtension
-  public final HiveMQTestContainerExtension extension = new HiveMQTestContainerExtension(DockerImageName.parse("hivemq/hivemq-ce:2021.2"));
-
-  @Override
-  protected String getImageName() {
-    return "airbyte/destination-mqtt:dev";
-  }
-
-  @Override
-  protected JsonNode getConfig() throws UnknownHostException {
-    return Jsons.jsonNode(ImmutableMap.builder()
-        .put("broker_host", getIpAddress())
-        .put("broker_port", extension.getMqttPort())
-        .put("use_tls", false)
-        .put("topic_pattern", TOPIC_PREFIX + "{namespace}/{stream}/" + TOPIC_NAME)
-        .put("client_id", UUID.randomUUID())
-        .put("publisher_sync", true)
-        .put("connect_timeout", 10)
-        .put("automatic_reconnect", true)
-        .put("clean_session", true)
-        .put("message_retained", false)
-        .put("message_qos", "EXACTLY_ONCE")
-        .put("max_in_flight", 1000)
-        .build());
-  }
-
-  @Override
-  protected JsonNode getFailCheckConfig() {
-    return Jsons.jsonNode(ImmutableMap.builder()
-        .put("broker_host", extension.getHost())
-        .put("broker_port", extension.getMqttPort())
-        .put("topic_pattern", TOPIC_PREFIX + "{namespace}/{stream}/" + TOPIC_NAME)
-        .put("client_id", UUID.randomUUID())
-        .put("publisher_sync", true)
-        .put("connect_timeout", 10)
-        .put("automatic_reconnect", true)
-        .build());
-  }
-
-  @Override
-  protected TestDataComparator getTestDataComparator() {
-    return new AdvancedTestDataComparator();
-  }
-
-  @Override
-  protected boolean supportBasicDataTypeTest() {
-    return true;
-  }
-
-  @Override
-  protected boolean supportArrayDataTypeTest() {
-    return true;
-  }
-
-  @Override
-  protected boolean supportObjectDataTypeTest() {
-    return true;
-  }
-
-  @Override
-  protected boolean implementsNamespaces() {
-    return true;
-  }
-
-  @Override
-  protected String getDefaultSchema(final
JsonNode config) { - return ""; - } - - @Override - protected List retrieveNormalizedRecords(final TestDestinationEnv testEnv, final String streamName, final String namespace) { - return retrieveRecords(testEnv, streamName, namespace, null); - } - - @Override - protected List retrieveRecords(final TestDestinationEnv testEnv, - final String streamName, - final String namespace, - final JsonNode streamSchema) { - final String topic = TOPIC_PREFIX + namespace + "/" + streamName + "/" + TOPIC_NAME; - return recordsPerTopic.getOrDefault(topic, Collections.emptyList()); - } - - @SuppressWarnings("UnstableApiUsage") - private String getIpAddress() throws UnknownHostException { - try { - return Streams.stream(NetworkInterface.getNetworkInterfaces().asIterator()) - .flatMap(ni -> Streams.stream(ni.getInetAddresses().asIterator())) - .filter(add -> !add.isLoopbackAddress()) - .map(InetAddress::getHostAddress) - .filter(InetAddresses::isUriInetAddress) - .findFirst().orElse(InetAddress.getLocalHost().getHostAddress()); - } catch (final SocketException e) { - return InetAddress.getLocalHost().getHostAddress(); - } - } - - @Override - protected void setup(final TestDestinationEnv testEnv, final HashSet TEST_SCHEMAS) throws MqttException { - recordsPerTopic.clear(); - client = new MqttClient("tcp://" + extension.getHost() + ":" + extension.getMqttPort(), UUID.randomUUID().toString(), new MemoryPersistence()); - - final MqttConnectOptions options = new MqttConnectOptions(); - options.setAutomaticReconnect(true); - - client.connect(options); - - client.subscribe(TOPIC_PREFIX + "#", (topic, msg) -> { - final List records = recordsPerTopic.getOrDefault(topic, new ArrayList<>()); - records.add(READER.readTree(msg.getPayload()).get(MqttDestination.COLUMN_NAME_DATA)); - recordsPerTopic.put(topic, records); - }); - } - - @Override - protected void tearDown(final TestDestinationEnv testEnv) throws MqttException { - client.disconnectForcibly(); - client.close(); - } - -} diff --git a/airbyte-integrations/connectors/destination-mqtt/src/test/java/io/airbyte/integrations/destination/mqtt/MqttRecordConsumerTest.java b/airbyte-integrations/connectors/destination-mqtt/src/test/java/io/airbyte/integrations/destination/mqtt/MqttRecordConsumerTest.java deleted file mode 100644 index c3ad236499be..000000000000 --- a/airbyte-integrations/connectors/destination-mqtt/src/test/java/io/airbyte/integrations/destination/mqtt/MqttRecordConsumerTest.java +++ /dev/null @@ -1,175 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.mqtt; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.mockito.Mockito.mock; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Sets; -import com.hivemq.testcontainer.junit5.HiveMQTestContainerExtension; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.CatalogHelpers; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import java.time.Instant; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.Map; -import java.util.function.Consumer; -import java.util.stream.Collectors; -import java.util.stream.IntStream; -import java.util.stream.Stream; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtensionContext; -import org.junit.jupiter.api.extension.RegisterExtension; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.ArgumentsProvider; -import org.junit.jupiter.params.provider.ArgumentsSource; -import org.testcontainers.utility.DockerImageName; - -@DisplayName("MqttRecordConsumer") -public class MqttRecordConsumerTest { - - @RegisterExtension - public final HiveMQTestContainerExtension extension = new HiveMQTestContainerExtension(DockerImageName.parse("hivemq/hivemq-ce:2021.2")); - - @ParameterizedTest - @ArgumentsSource(TopicMapArgumentsProvider.class) - @SuppressWarnings("unchecked") - public void testBuildTopicMap(final ConfiguredAirbyteCatalog catalog, - final String streamName, - final String namespace, - final String topicPattern, - final String expectedTopic) { - final MqttDestinationConfig config = MqttDestinationConfig - .getMqttDestinationConfig(getConfig(extension.getHost(), extension.getMqttPort(), topicPattern)); - - final MqttRecordConsumer recordConsumer = new MqttRecordConsumer(config, catalog, mock(Consumer.class)); - final Map topicMap = recordConsumer.buildTopicMap(); - assertEquals(Sets.newHashSet(catalog.getStreams()).size(), topicMap.size()); - - final AirbyteStreamNameNamespacePair streamNameNamespacePair = new AirbyteStreamNameNamespacePair(streamName, namespace); - assertEquals(expectedTopic, topicMap.get(streamNameNamespacePair)); - } - - @Test - @SuppressWarnings("unchecked") - void testCannotConnectToBrokers() throws Exception { - final MqttDestinationConfig config = MqttDestinationConfig - .getMqttDestinationConfig(getConfig(extension.getHost(), extension.getMqttPort() + 10, "test-topic")); - - final String streamName = "test-stream"; - final String namespace = "test-schema"; - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(List.of( - CatalogHelpers.createConfiguredAirbyteStream( - streamName, - namespace, - Field.of("id", JsonSchemaType.NUMBER), - 
Field.of("name", JsonSchemaType.STRING)))); - final MqttRecordConsumer consumer = new MqttRecordConsumer(config, catalog, mock(Consumer.class)); - final List expectedRecords = getNRecords(10, streamName, namespace); - - assertThrows(RuntimeException.class, consumer::start); - - expectedRecords.forEach(m -> assertThrows(RuntimeException.class, () -> consumer.accept(m))); - - consumer.accept(new AirbyteMessage() - .withType(AirbyteMessage.Type.STATE) - .withState(new AirbyteStateMessage().withData(Jsons.jsonNode(ImmutableMap.of(namespace + "." + streamName, 0))))); - consumer.close(); - } - - private JsonNode getConfig(final String broker, final int port, final String topic) { - return Jsons.jsonNode(ImmutableMap.builder() - .put("broker_host", broker) - .put("broker_port", port) - .put("use_tls", false) - .put("topic_pattern", topic) - .put("publisher_sync", true) - .put("connect_timeout", 10) - .put("automatic_reconnect", false) - .put("clean_session", true) - .put("message_retained", true) - .put("message_qos", "EXACTLY_ONCE") - .build()); - } - - private List getNRecords(final int n, final String streamName, final String namespace) { - return IntStream.range(0, n) - .boxed() - .map(i -> new AirbyteMessage() - .withType(AirbyteMessage.Type.RECORD) - .withRecord(new AirbyteRecordMessage() - .withStream(streamName) - .withNamespace(namespace) - .withEmittedAt(Instant.now().toEpochMilli()) - .withData(Jsons.jsonNode(ImmutableMap.of("id", i, "name", "human " + i))))) - .collect(Collectors.toList()); - - } - - public static class TopicMapArgumentsProvider implements ArgumentsProvider { - - private static final String TOPIC_NAME = "test.topic"; - private static final String SCHEMA_NAME1 = "public"; - private static final String STREAM_NAME1 = "id_and_name"; - private static final String SCHEMA_NAME2 = SCHEMA_NAME1 + 2; - private static final String STREAM_NAME2 = STREAM_NAME1 + 2; - - private final ConfiguredAirbyteStream stream1 = CatalogHelpers.createConfiguredAirbyteStream( - SCHEMA_NAME1, - STREAM_NAME1, - Field.of("id", JsonSchemaType.NUMBER), - Field.of("name", JsonSchemaType.STRING)); - private final ConfiguredAirbyteStream stream2 = CatalogHelpers.createConfiguredAirbyteStream( - SCHEMA_NAME2, - STREAM_NAME2, - Field.of("id", JsonSchemaType.NUMBER), - Field.of("name", JsonSchemaType.STRING)); - - @Override - public Stream provideArguments(final ExtensionContext context) { - final List catalogs = new ArrayList<>(); - catalogs.add(new ConfiguredAirbyteCatalog().withStreams(List.of(stream1))); - catalogs.add(new ConfiguredAirbyteCatalog().withStreams(List.of(stream1, stream1))); - catalogs.add(new ConfiguredAirbyteCatalog().withStreams(List.of(stream1, stream2))); - - return catalogs.stream() - .flatMap(catalog -> catalog.getStreams().stream() - .map(stream -> buildArgs(catalog, stream.getStream())) - .flatMap(Collection::stream)); - } - - private List buildArgs(final ConfiguredAirbyteCatalog catalog, final AirbyteStream stream) { - return ImmutableList.of( - Arguments.of(catalog, stream.getName(), stream.getNamespace(), TOPIC_NAME, TOPIC_NAME), - Arguments.of(catalog, stream.getName(), stream.getNamespace(), "test-topic", "test-topic"), - Arguments.of(catalog, stream.getName(), stream.getNamespace(), "{namespace}", stream.getNamespace()), - Arguments.of(catalog, stream.getName(), stream.getNamespace(), "{stream}", stream.getName()), - Arguments.of(catalog, stream.getName(), stream.getNamespace(), "{namespace}.{stream}." + TOPIC_NAME, - stream.getNamespace() + "." + stream.getName() + "." 
+ TOPIC_NAME),
-          Arguments.of(catalog, stream.getName(), stream.getNamespace(), "{namespace}-{stream}-" + TOPIC_NAME,
-              stream.getNamespace() + "-" + stream.getName() + "-" + TOPIC_NAME),
-          Arguments.of(catalog, stream.getName(), stream.getNamespace(), "topic with spaces", "topic with spaces"),
-          Arguments.of(catalog, stream.getName(), stream.getNamespace(), "UppercaseTopic/test", "UppercaseTopic/test"));
-    }
-
-  }
-
-}
diff --git a/airbyte-integrations/connectors/destination-pulsar/README.md b/airbyte-integrations/connectors/destination-pulsar/README.md
deleted file mode 100644
index f2c554ba1f67..000000000000
--- a/airbyte-integrations/connectors/destination-pulsar/README.md
+++ /dev/null
@@ -1,72 +0,0 @@
-# Destination Pulsar
-
-This is the repository for the Pulsar destination connector in Java.
-For information about how to use this connector within Airbyte, see [the User Documentation](https://docs.airbyte.io/integrations/destinations/pulsar).
-
-## Local development
-
-#### Building via Gradle
-From the Airbyte repository root, run:
-```
-./gradlew :airbyte-integrations:connectors:destination-pulsar:build
-```
-
-#### Create credentials
-**If you are a community contributor**, generate the necessary credentials and place them in `secrets/config.json` conforming to the spec file in `src/main/resources/spec.json`.
-Note that the `secrets` directory is git-ignored by default, so there is no danger of accidentally checking in sensitive information.
-
-**If you are an Airbyte core member**, follow the [instructions](https://docs.airbyte.io/connector-development#using-credentials-in-ci) to set up the credentials.
-
-### Locally running the connector docker image
-
-#### Build
-Build the connector image via Gradle:
-
-```
-./gradlew :airbyte-integrations:connectors:destination-pulsar:buildConnectorImage
-```
-Once built, the docker image name and tag on your host will be `airbyte/destination-pulsar:dev`.
-
-#### Run
-Then run any of the connector commands as follows:
-```
-docker run --rm airbyte/destination-pulsar:dev spec
-docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-pulsar:dev check --config /secrets/config.json
-docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-pulsar:dev discover --config /secrets/config.json
-docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-pulsar:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
-```
-
-## Testing
-We use `JUnit` for Java tests.
-
-### Unit and Integration Tests
-Place unit tests under `src/test/io/airbyte/integrations/destinations/pulsar`.
-
-#### Acceptance Tests
-Airbyte has a standard test suite that all destination connectors must pass. Implement the `TODO`s in
-`src/test-integration/java/io/airbyte/integrations/destinations/PulsarDestinationAcceptanceTest.java`.
-
-### Using gradle to run tests
-All commands should be run from the Airbyte project root.
-To run unit tests:
-```
-./gradlew :airbyte-integrations:connectors:destination-pulsar:unitTest
-```
-To run acceptance and custom integration tests:
-```
-./gradlew :airbyte-integrations:connectors:destination-pulsar:integrationTest
-```
-
-## Dependency Management
-
-### Publishing a new version of the connector
-You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what?
-1.
Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-pulsar test`
-2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors).
-3. Make sure the `metadata.yaml` content is up to date.
-4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/destinations/pulsar.md`).
-5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention).
-6. Pat yourself on the back for being an awesome contributor.
-7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
-
diff --git a/airbyte-integrations/connectors/destination-pulsar/build.gradle b/airbyte-integrations/connectors/destination-pulsar/build.gradle
deleted file mode 100644
index fad585d1ed6b..000000000000
--- a/airbyte-integrations/connectors/destination-pulsar/build.gradle
+++ /dev/null
@@ -1,31 +0,0 @@
-plugins {
-    id 'application'
-    id 'airbyte-java-connector'
-}
-
-airbyteJavaConnector {
-    cdkVersionRequired = '0.2.0'
-    features = ['db-destinations']
-    useLocalCdk = false
-}
-
-//remove once upgrading the CDK version to 0.4.x or later
-java {
-    compileJava {
-        options.compilerArgs.remove("-Werror")
-    }
-}
-
-airbyteJavaConnector.addCdkDependencies()
-
-application {
-    mainClass = 'io.airbyte.integrations.destination.pulsar.PulsarDestination'
-    applicationDefaultJvmArgs = ['-XX:+ExitOnOutOfMemoryError', '-XX:MaxRAMPercentage=75.0']
-}
-
-dependencies {
-
-    implementation 'org.apache.pulsar:pulsar-client:2.8.1'
-
-    testImplementation libs.testcontainers.pulsar
-}
diff --git a/airbyte-integrations/connectors/destination-pulsar/src/main/java/io/airbyte/integrations/destination/pulsar/PulsarDestination.java b/airbyte-integrations/connectors/destination-pulsar/src/main/java/io/airbyte/integrations/destination/pulsar/PulsarDestination.java
deleted file mode 100644
index 79e5e7239bdc..000000000000
--- a/airbyte-integrations/connectors/destination-pulsar/src/main/java/io/airbyte/integrations/destination/pulsar/PulsarDestination.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */ - -package io.airbyte.integrations.destination.pulsar; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.cdk.integrations.BaseConnector; -import io.airbyte.cdk.integrations.base.AirbyteMessageConsumer; -import io.airbyte.cdk.integrations.base.Destination; -import io.airbyte.cdk.integrations.base.IntegrationRunner; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus.Status; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.util.UUID; -import java.util.function.Consumer; -import org.apache.pulsar.client.api.MessageId; -import org.apache.pulsar.client.api.Producer; -import org.apache.pulsar.client.api.PulsarClient; -import org.apache.pulsar.client.api.Schema; -import org.apache.pulsar.client.api.schema.GenericRecord; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class PulsarDestination extends BaseConnector implements Destination { - - private static final Logger LOGGER = LoggerFactory.getLogger(PulsarDestination.class); - - public static final String COLUMN_NAME_AB_ID = JavaBaseConstants.COLUMN_NAME_AB_ID; - public static final String COLUMN_NAME_EMITTED_AT = JavaBaseConstants.COLUMN_NAME_EMITTED_AT; - public static final String COLUMN_NAME_DATA = JavaBaseConstants.COLUMN_NAME_DATA; - public static final String COLUMN_NAME_STREAM = "_airbyte_stream"; - - private final StandardNameTransformer namingResolver; - - public PulsarDestination() { - this.namingResolver = new StandardNameTransformer(); - } - - @Override - public AirbyteConnectionStatus check(final JsonNode config) { - try { - final PulsarDestinationConfig pulsarConfig = PulsarDestinationConfig.getPulsarDestinationConfig(config); - final String testTopic = pulsarConfig.getTestTopic(); - if (!testTopic.isBlank()) { - final String key = UUID.randomUUID().toString(); - final GenericRecord value = Schema.generic(PulsarDestinationConfig.getSchemaInfo()) - .newRecordBuilder() - .set(PulsarDestination.COLUMN_NAME_AB_ID, key) - .set(PulsarDestination.COLUMN_NAME_STREAM, "test-topic-stream") - .set(PulsarDestination.COLUMN_NAME_EMITTED_AT, System.currentTimeMillis()) - .set(PulsarDestination.COLUMN_NAME_DATA, Jsons.jsonNode(ImmutableMap.of("test-key", "test-value"))) - .build(); - - try (final PulsarClient client = PulsarUtils.buildClient(pulsarConfig.getServiceUrl()); - final Producer producer = PulsarUtils.buildProducer(client, Schema.generic(PulsarDestinationConfig.getSchemaInfo()), - pulsarConfig.getProducerConfig(), pulsarConfig.uriForTopic(testTopic))) { - final MessageId messageId = producer.send(value); - - producer.flush(); - - LOGGER.info("Successfully sent message id '{}' to Pulsar brokers for topic '{}'.", messageId, testTopic); - } - } - return new AirbyteConnectionStatus().withStatus(Status.SUCCEEDED); - } catch (final Exception e) { - LOGGER.error("Exception attempting to connect to the Pulsar brokers: ", e); - return new AirbyteConnectionStatus() - .withStatus(Status.FAILED) - .withMessage("Could not connect to the Pulsar brokers with provided configuration. 
\n" + e.getMessage()); - } - } - - @Override - public AirbyteMessageConsumer getConsumer(final JsonNode config, - final ConfiguredAirbyteCatalog catalog, - final Consumer outputRecordCollector) { - final PulsarDestinationConfig pulsarConfig = PulsarDestinationConfig.getPulsarDestinationConfig(config); - return new PulsarRecordConsumer(pulsarConfig, - catalog, - PulsarUtils.buildClient(pulsarConfig.getServiceUrl()), - outputRecordCollector, - namingResolver); - } - - public static void main(final String[] args) throws Exception { - final Destination destination = new PulsarDestination(); - LOGGER.info("Starting destination: {}", PulsarDestination.class); - new IntegrationRunner(destination).run(args); - LOGGER.info("Completed destination: {}", PulsarDestination.class); - } - -} diff --git a/airbyte-integrations/connectors/destination-pulsar/src/main/java/io/airbyte/integrations/destination/pulsar/PulsarDestinationConfig.java b/airbyte-integrations/connectors/destination-pulsar/src/main/java/io/airbyte/integrations/destination/pulsar/PulsarDestinationConfig.java deleted file mode 100644 index 39dd6b067e69..000000000000 --- a/airbyte-integrations/connectors/destination-pulsar/src/main/java/io/airbyte/integrations/destination/pulsar/PulsarDestinationConfig.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.pulsar; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import java.util.Map; -import org.apache.pulsar.client.api.CompressionType; -import org.apache.pulsar.client.api.schema.RecordSchemaBuilder; -import org.apache.pulsar.client.api.schema.SchemaBuilder; -import org.apache.pulsar.common.schema.SchemaInfo; -import org.apache.pulsar.common.schema.SchemaType; - -public class PulsarDestinationConfig { - - private final String serviceUrl; - private final String topicPattern; - private final String topicPrefix; - private final String testTopic; - private final Map producerConfig; - private final boolean sync; - - private PulsarDestinationConfig(final JsonNode config) { - this.serviceUrl = buildServiceUrl(config); - this.topicPattern = buildTopicPattern(config); - this.topicPrefix = buildTopicPrefix(config); - this.testTopic = buildTestTopic(config); - this.producerConfig = buildProducerConfig(config); - this.sync = isSyncProducer(config); - } - - public static PulsarDestinationConfig getPulsarDestinationConfig(final JsonNode config) { - return new PulsarDestinationConfig(config); - } - - public Map getProducerConfig() { - return producerConfig; - } - - public String getServiceUrl() { - return serviceUrl; - } - - public static SchemaInfo getSchemaInfo() { - RecordSchemaBuilder recordSchemaBuilder = SchemaBuilder.record("airbyte"); - recordSchemaBuilder.field(PulsarDestination.COLUMN_NAME_AB_ID).type(SchemaType.STRING).required(); - recordSchemaBuilder.field(PulsarDestination.COLUMN_NAME_STREAM).type(SchemaType.STRING).required(); - recordSchemaBuilder.field(PulsarDestination.COLUMN_NAME_EMITTED_AT).type(SchemaType.TIMESTAMP).required(); - recordSchemaBuilder.field(PulsarDestination.COLUMN_NAME_DATA).type(SchemaType.BYTES).required(); - - return recordSchemaBuilder.build(SchemaType.JSON); - } - - public String uriForTopic(final String topic) { - return topicPrefix + topic; - } - - public String getTestTopic() { - return testTopic; - } - - public String getTopicPattern() { - return topicPattern; - } - - public boolean isSync() { - return sync; - } - 
- private String buildServiceUrl(final JsonNode config) { - return String.format("pulsar%s://%s", - config.get("use_tls").asBoolean() ? "+ssl" : "", - config.get("brokers").asText()); - } - - private String buildTestTopic(final JsonNode config) { - return config.has("test_topic") ? config.get("test_topic").asText() : ""; - } - - private String buildTopicPattern(final JsonNode config) { - return config.get("topic_pattern").asText(); - } - - private String buildTopicPrefix(final JsonNode config) { - return String.format("%s://%s/%s/", - config.get("topic_type").asText(), - config.get("topic_tenant").asText(), - config.get("topic_namespace").asText()); - } - - private Map buildProducerConfig(final JsonNode config) { - final ImmutableMap.Builder conf = ImmutableMap.builder(); - if (config.has("producer_name")) { - conf.put("producerName", config.get("producer_name").asText()); - } - conf.put("compressionType", CompressionType.valueOf(config.get("compression_type").asText())); - conf.put("sendTimeoutMs", config.get("send_timeout_ms").asInt()); - conf.put("maxPendingMessages", config.get("max_pending_messages").asInt()); - conf.put("maxPendingMessagesAcrossPartitions", config.get("max_pending_messages_across_partitions").asInt()); - conf.put("batchingEnabled", config.get("batching_enabled").asBoolean()); - conf.put("batchingMaxMessages", config.get("batching_max_messages").asInt()); - conf.put("batchingMaxPublishDelayMicros", config.get("batching_max_publish_delay").asInt() * 1000); - conf.put("blockIfQueueFull", config.get("block_if_queue_full").asBoolean()); - - return conf.build(); - } - - private boolean isSyncProducer(final JsonNode config) { - return config.has("producer_sync") && config.get("producer_sync").asBoolean(); - } - -} diff --git a/airbyte-integrations/connectors/destination-pulsar/src/main/java/io/airbyte/integrations/destination/pulsar/PulsarRecordConsumer.java b/airbyte-integrations/connectors/destination-pulsar/src/main/java/io/airbyte/integrations/destination/pulsar/PulsarRecordConsumer.java deleted file mode 100644 index 1ccd22c1437a..000000000000 --- a/airbyte-integrations/connectors/destination-pulsar/src/main/java/io/airbyte/integrations/destination/pulsar/PulsarRecordConsumer.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.pulsar; - -import io.airbyte.cdk.integrations.base.FailureTrackingAirbyteMessageConsumer; -import io.airbyte.cdk.integrations.destination.NamingConventionTransformer; -import io.airbyte.commons.lang.Exceptions; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.nio.charset.StandardCharsets; -import java.util.HashMap; -import java.util.Map; -import java.util.Optional; -import java.util.UUID; -import java.util.function.Consumer; -import java.util.function.Function; -import java.util.stream.Collectors; -import org.apache.pulsar.client.api.Producer; -import org.apache.pulsar.client.api.PulsarClient; -import org.apache.pulsar.client.api.PulsarClientException; -import org.apache.pulsar.client.api.Schema; -import org.apache.pulsar.client.api.schema.GenericRecord; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class PulsarRecordConsumer extends FailureTrackingAirbyteMessageConsumer { - - private static final Logger LOGGER = LoggerFactory.getLogger(PulsarRecordConsumer.class); - - private final PulsarDestinationConfig config; - private final Map> producerMap; - private final ConfiguredAirbyteCatalog catalog; - private final Consumer outputRecordCollector; - private final NamingConventionTransformer nameTransformer; - private final PulsarClient client; - - public PulsarRecordConsumer(final PulsarDestinationConfig pulsarDestinationConfig, - final ConfiguredAirbyteCatalog catalog, - final PulsarClient pulsarClient, - final Consumer outputRecordCollector, - final NamingConventionTransformer nameTransformer) { - this.config = pulsarDestinationConfig; - this.producerMap = new HashMap<>(); - this.catalog = catalog; - this.outputRecordCollector = outputRecordCollector; - this.nameTransformer = nameTransformer; - this.client = pulsarClient; - } - - @Override - protected void startTracked() { - producerMap.putAll(buildProducerMap()); - } - - @Override - protected void acceptTracked(final AirbyteMessage airbyteMessage) { - if (airbyteMessage.getType() == AirbyteMessage.Type.STATE) { - outputRecordCollector.accept(airbyteMessage); - } else if (airbyteMessage.getType() == AirbyteMessage.Type.RECORD) { - final AirbyteRecordMessage recordMessage = airbyteMessage.getRecord(); - final Producer producer = producerMap.get(AirbyteStreamNameNamespacePair.fromRecordMessage(recordMessage)); - final String key = UUID.randomUUID().toString(); - final GenericRecord value = Schema.generic(PulsarDestinationConfig.getSchemaInfo()) - .newRecordBuilder() - .set(PulsarDestination.COLUMN_NAME_AB_ID, key) - .set(PulsarDestination.COLUMN_NAME_STREAM, recordMessage.getStream()) - .set(PulsarDestination.COLUMN_NAME_EMITTED_AT, recordMessage.getEmittedAt()) - .set(PulsarDestination.COLUMN_NAME_DATA, recordMessage.getData().toString().getBytes(StandardCharsets.UTF_8)) - .build(); - - sendRecord(producer, value); - } else { - LOGGER.warn("Unexpected message: " + airbyteMessage.getType()); - } - } - - Map> buildProducerMap() { - return catalog.getStreams().stream() - .map(stream -> AirbyteStreamNameNamespacePair.fromAirbyteStream(stream.getStream())) - .collect(Collectors.toMap(Function.identity(), pair -> { - String topic = nameTransformer.getIdentifier(config.getTopicPattern() - .replaceAll("\\{namespace}", 
Optional.ofNullable(pair.getNamespace()).orElse("")) - .replaceAll("\\{stream}", Optional.ofNullable(pair.getName()).orElse(""))); - return PulsarUtils.buildProducer(client, Schema.generic(PulsarDestinationConfig.getSchemaInfo()), config.getProducerConfig(), - config.uriForTopic(topic)); - }, (existing, newValue) -> existing)); - } - - private void sendRecord(final Producer producer, final GenericRecord record) { - producer.sendAsync(record); - if (config.isSync()) { - try { - producer.flush(); - } catch (PulsarClientException e) { - LOGGER.error("Error sending message to topic.", e); - throw new RuntimeException("Cannot send message to Pulsar. Error: " + e.getMessage(), e); - } - } - } - - @Override - protected void close(final boolean hasFailed) { - producerMap.values().forEach(producer -> { - Exceptions.swallow(producer::flush); - Exceptions.swallow(producer::close); - }); - Exceptions.swallow(client::close); - } - -} diff --git a/airbyte-integrations/connectors/destination-pulsar/src/main/java/io/airbyte/integrations/destination/pulsar/PulsarUtils.java b/airbyte-integrations/connectors/destination-pulsar/src/main/java/io/airbyte/integrations/destination/pulsar/PulsarUtils.java deleted file mode 100644 index d851cc6ffb8b..000000000000 --- a/airbyte-integrations/connectors/destination-pulsar/src/main/java/io/airbyte/integrations/destination/pulsar/PulsarUtils.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.pulsar; - -import java.util.Map; -import org.apache.pulsar.client.api.Producer; -import org.apache.pulsar.client.api.PulsarClient; -import org.apache.pulsar.client.api.PulsarClientException; -import org.apache.pulsar.client.api.Schema; -import org.apache.pulsar.client.api.schema.GenericRecord; - -class PulsarUtils { - - static PulsarClient buildClient(final String serviceUrl) { - try { - return PulsarClient.builder() - .serviceUrl(serviceUrl) - .build(); - } catch (PulsarClientException e) { - throw new RuntimeException("Error creating the Pulsar client", e); - } - } - - static Producer buildProducer(final PulsarClient client, - final Schema schema, - final Map config, - final String topic) { - try { - return client.newProducer(schema) - .loadConf(config) - .topic(topic) - .create(); - } catch (PulsarClientException e) { - throw new RuntimeException("Error creating the Pulsar producer", e); - } - } - -} diff --git a/airbyte-integrations/connectors/destination-pulsar/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-pulsar/src/main/resources/spec.json deleted file mode 100644 index e31691e78069..000000000000 --- a/airbyte-integrations/connectors/destination-pulsar/src/main/resources/spec.json +++ /dev/null @@ -1,137 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/destinations/pulsar", - "supportsIncremental": true, - "supportsNormalization": false, - "supportsDBT": false, - "supported_destination_sync_modes": ["append"], - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Pulsar Destination Spec", - "type": "object", - "required": [ - "brokers", - "use_tls", - "topic_type", - "topic_tenant", - "topic_namespace", - "topic_pattern", - "compression_type", - "send_timeout_ms", - "max_pending_messages", - "max_pending_messages_across_partitions", - "batching_enabled", - "batching_max_messages", - "batching_max_publish_delay", - "block_if_queue_full" - ], - "additionalProperties": true, - 
"properties": { - "brokers": { - "title": "Pulsar brokers", - "description": "A list of host/port pairs to use for establishing the initial connection to the Pulsar cluster.", - "type": "string", - "examples": ["broker1:6650,broker2:6650"] - }, - "use_tls": { - "title": "Use TLS", - "description": "Whether to use TLS encryption on the connection.", - "type": "boolean", - "default": false - }, - "topic_type": { - "title": "Topic type", - "description": "It identifies type of topic. Pulsar supports two kind of topics: persistent and non-persistent. In persistent topic, all messages are durably persisted on disk (that means on multiple disks unless the broker is standalone), whereas non-persistent topic does not persist message into storage disk.", - "type": "string", - "default": "persistent", - "enum": ["persistent", "non-persistent"] - }, - "topic_tenant": { - "title": "Topic tenant", - "description": "The topic tenant within the instance. Tenants are essential to multi-tenancy in Pulsar, and spread across clusters.", - "type": "string", - "default": "public", - "examples": ["public"] - }, - "topic_namespace": { - "title": "Topic namespace", - "description": "The administrative unit of the topic, which acts as a grouping mechanism for related topics. Most topic configuration is performed at the namespace level. Each tenant has one or multiple namespaces.", - "type": "string", - "default": "default", - "examples": ["default"] - }, - "topic_pattern": { - "title": "Topic pattern", - "description": "Topic pattern in which the records will be sent. You can use patterns like '{namespace}' and/or '{stream}' to send the message to a specific topic based on these values. Notice that the topic name will be transformed to a standard naming convention.", - "type": "string", - "examples": ["sample.topic", "{namespace}.{stream}.sample"] - }, - "topic_test": { - "title": "Test topic", - "description": "Topic to test if Airbyte can produce messages.", - "type": "string", - "examples": ["test.topic"] - }, - "producer_name": { - "title": "Producer name", - "description": "Name for the producer. 
If not filled, the system will generate a globally unique name.", - "type": "string", - "examples": ["airbyte-producer"] - }, - "producer_sync": { - "title": "Sync producer", - "description": "Wait synchronously until the record has been sent to Pulsar.", - "type": "boolean", - "default": false - }, - "compression_type": { - "title": "Compression type", - "description": "Compression type for the producer.", - "type": "string", - "default": "NONE", - "enum": ["NONE", "LZ4", "ZLIB", "ZSTD", "SNAPPY"] - }, - "send_timeout_ms": { - "title": "Message send timeout", - "description": "If a message is not acknowledged by the server before the send timeout expires, an error occurs (in ms).", - "type": "integer", - "default": 30000 - }, - "max_pending_messages": { - "title": "Max pending messages", - "description": "The maximum size of a queue holding pending messages.", - "type": "integer", - "default": 1000 - }, - "max_pending_messages_across_partitions": { - "title": "Max pending messages across partitions", - "description": "The maximum number of pending messages across partitions.", - "type": "integer", - "default": 50000 - }, - "batching_enabled": { - "title": "Enable batching", - "description": "Control whether automatic batching of messages is enabled for the producer.", - "type": "boolean", - "default": true - }, - "batching_max_messages": { - "title": "Batching max messages", - "description": "Maximum number of messages permitted in a batch.", - "type": "integer", - "default": 1000 - }, - "batching_max_publish_delay": { - "title": "Batching max publish delay", - "description": "Time period in milliseconds within which the messages sent will be batched.", - "type": "integer", - "default": 1 - }, - "block_if_queue_full": { - "title": "Block if queue is full", - "description": "Whether the send operation should block when the outgoing message queue is full.", - "type": "boolean", - "default": false - } - } - } -} diff --git a/airbyte-integrations/connectors/destination-pulsar/src/test-integration/java/io/airbyte/integrations/destination/pulsar/PulsarDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-pulsar/src/test-integration/java/io/airbyte/integrations/destination/pulsar/PulsarDestinationAcceptanceTest.java deleted file mode 100644 index 8b2dcd685acc..000000000000 --- a/airbyte-integrations/connectors/destination-pulsar/src/test-integration/java/io/airbyte/integrations/destination/pulsar/PulsarDestinationAcceptanceTest.java +++ /dev/null @@ -1,193 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */ - -package io.airbyte.integrations.destination.pulsar; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.ObjectReader; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Streams; -import com.google.common.net.InetAddresses; -import io.airbyte.cdk.integrations.destination.NamingConventionTransformer; -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; -import io.airbyte.cdk.integrations.standardtest.destination.DestinationAcceptanceTest; -import io.airbyte.cdk.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; -import io.airbyte.cdk.integrations.standardtest.destination.comparator.TestDataComparator; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.lang.Exceptions; -import java.io.IOException; -import java.net.InetAddress; -import java.net.NetworkInterface; -import java.net.SocketException; -import java.net.UnknownHostException; -import java.util.ArrayList; -import java.util.Base64; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.UUID; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import org.apache.pulsar.client.api.Consumer; -import org.apache.pulsar.client.api.Message; -import org.apache.pulsar.client.api.PulsarClient; -import org.apache.pulsar.client.api.Schema; -import org.apache.pulsar.client.api.SubscriptionInitialPosition; -import org.apache.pulsar.client.api.SubscriptionType; -import org.apache.pulsar.client.api.schema.GenericRecord; -import org.testcontainers.containers.PulsarContainer; -import org.testcontainers.utility.DockerImageName; - -public class PulsarDestinationAcceptanceTest extends DestinationAcceptanceTest { - - private static final String TOPIC_NAME = "test.topic"; - private static final ObjectReader READER = new ObjectMapper().reader(); - - private static PulsarContainer PULSAR; - - private final NamingConventionTransformer namingResolver = new StandardNameTransformer(); - - @Override - protected String getImageName() { - return "airbyte/destination-pulsar:dev"; - } - - @Override - protected JsonNode getConfig() throws UnknownHostException { - String brokers = Stream.concat(getIpAddresses().stream(), Stream.of("localhost")) - .map(ip -> ip + ":" + PULSAR.getMappedPort(PulsarContainer.BROKER_PORT)) - .collect(Collectors.joining(",")); - return Jsons.jsonNode(ImmutableMap.builder() - .put("brokers", brokers) - .put("use_tls", false) - .put("topic_type", "persistent") - .put("topic_tenant", "public") - .put("topic_namespace", "default") - .put("topic_pattern", "{namespace}.{stream}." + TOPIC_NAME) - .put("producer_name", "test-producer-" + UUID.randomUUID()) - .put("producer_sync", true) - .put("compression_type", "NONE") - .put("send_timeout_ms", 30000) - .put("max_pending_messages", 1000) - .put("max_pending_messages_across_partitions", 50000) - .put("batching_enabled", false) - .put("batching_max_messages", 1000) - .put("batching_max_publish_delay", 1) - .put("block_if_queue_full", true) - .build()); - } - - @Override - protected JsonNode getFailCheckConfig() { - return Jsons.jsonNode(ImmutableMap.builder() - .put("brokers", PULSAR.getHost() + ":" + PULSAR.getMappedPort(PulsarContainer.BROKER_PORT)) - .put("use_tls", false) - .put("topic_pattern", "{namespace}.{stream}." 
+ TOPIC_NAME) - .put("producer_sync", true) - .put("producer_name", "test-producer") - .put("compression_type", "NONE") - .put("send_timeout_ms", 30000) - .put("max_pending_messages", 1000) - .put("max_pending_messages_across_partitions", 50000) - .put("block_if_queue_full", true) - .build()); - } - - @Override - protected boolean implementsNamespaces() { - return true; - } - - @Override - protected String getDefaultSchema(final JsonNode config) { - return ""; - } - - @Override - protected TestDataComparator getTestDataComparator() { - return new AdvancedTestDataComparator(); - } - - @Override - protected boolean supportBasicDataTypeTest() { - return true; - } - - @Override - protected boolean supportArrayDataTypeTest() { - return true; - } - - @Override - protected boolean supportObjectDataTypeTest() { - return true; - } - - @Override - protected List retrieveNormalizedRecords(final TestDestinationEnv testEnv, final String streamName, final String namespace) - throws IOException { - return retrieveRecords(testEnv, streamName, namespace, null); - } - - @Override - protected List retrieveRecords(final TestDestinationEnv testEnv, - final String streamName, - final String namespace, - final JsonNode streamSchema) - throws IOException { - final PulsarClient client = PulsarClient.builder() - .serviceUrl(PULSAR.getPulsarBrokerUrl()) - .build(); - final String topic = namingResolver.getIdentifier(namespace + "." + streamName + "." + TOPIC_NAME); - final Consumer consumer = client.newConsumer(Schema.AUTO_CONSUME()) - .topic(topic) - .subscriptionName("test-subscription-" + UUID.randomUUID()) - .enableRetry(true) - .subscriptionType(SubscriptionType.Exclusive) - .subscriptionInitialPosition(SubscriptionInitialPosition.Earliest) - .subscribe(); - - final List records = new ArrayList<>(); - while (!consumer.hasReachedEndOfTopic()) { - Message message = consumer.receive(5, TimeUnit.SECONDS); - if (message == null) { - break; - } - records.add(READER.readTree(Base64.getDecoder().decode(message.getValue().getField(PulsarDestination.COLUMN_NAME_DATA).toString()))); - Exceptions.swallow(() -> consumer.acknowledge(message)); - } - consumer.unsubscribe(); - consumer.close(); - client.close(); - - return records; - } - - @SuppressWarnings("UnstableApiUsage") - private List getIpAddresses() throws UnknownHostException { - try { - return Streams.stream(NetworkInterface.getNetworkInterfaces().asIterator()) - .flatMap(ni -> Streams.stream(ni.getInetAddresses().asIterator())) - .map(InetAddress::getHostAddress) - .filter(InetAddresses::isUriInetAddress) - .collect(Collectors.toList()); - } catch (SocketException e) { - return Collections.singletonList(InetAddress.getLocalHost().getHostAddress()); - } - } - - @Override - protected void setup(final TestDestinationEnv testEnv, HashSet TEST_SCHEMAS) { - PULSAR = new PulsarContainer(DockerImageName.parse("apachepulsar/pulsar:2.8.1")); - PULSAR.start(); - } - - @Override - protected void tearDown(final TestDestinationEnv testEnv) { - PULSAR.close(); - } - -} diff --git a/airbyte-integrations/connectors/destination-pulsar/src/test/java/io/airbyte/integrations/destination/pulsar/PulsarRecordConsumerTest.java b/airbyte-integrations/connectors/destination-pulsar/src/test/java/io/airbyte/integrations/destination/pulsar/PulsarRecordConsumerTest.java deleted file mode 100644 index 07750e293d99..000000000000 --- a/airbyte-integrations/connectors/destination-pulsar/src/test/java/io/airbyte/integrations/destination/pulsar/PulsarRecordConsumerTest.java +++ /dev/null @@ -1,258 
+0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.pulsar; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Sets; -import com.google.common.collect.Streams; -import com.google.common.net.InetAddresses; -import io.airbyte.cdk.integrations.base.FailureTrackingAirbyteMessageConsumer; -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; -import io.airbyte.cdk.integrations.standardtest.destination.PerStreamStateMessageTest; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.AirbyteStream; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.CatalogHelpers; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import java.net.InetAddress; -import java.net.NetworkInterface; -import java.net.SocketException; -import java.net.UnknownHostException; -import java.time.Instant; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.function.Consumer; -import java.util.stream.Collectors; -import java.util.stream.IntStream; -import java.util.stream.Stream; -import org.apache.pulsar.client.api.Producer; -import org.apache.pulsar.client.api.PulsarClient; -import org.apache.pulsar.client.api.schema.GenericRecord; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.junit.jupiter.api.extension.ExtensionContext; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.ArgumentsProvider; -import org.junit.jupiter.params.provider.ArgumentsSource; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; -import org.testcontainers.containers.PulsarContainer; -import org.testcontainers.utility.DockerImageName; - -@DisplayName("PulsarRecordConsumer") -@ExtendWith(MockitoExtension.class) -public class PulsarRecordConsumerTest extends PerStreamStateMessageTest { - - @Mock - private Consumer outputRecordCollector; - - private PulsarRecordConsumer consumer; - - @Mock - private PulsarDestinationConfig config; - - @Mock - private ConfiguredAirbyteCatalog catalog; - - @Mock - private PulsarClient pulsarClient; - - private static final StandardNameTransformer NAMING_RESOLVER = new StandardNameTransformer(); - - private static PulsarContainer PULSAR; - - @ParameterizedTest - @ArgumentsSource(TopicMapArgumentsProvider.class) - @SuppressWarnings("unchecked") - public void testBuildProducerMap(final ConfiguredAirbyteCatalog catalog, - final String streamName, - final String namespace, - final String topicPattern, - final String expectedTopic) - throws UnknownHostException { - String brokers = 
Stream.concat(getIpAddresses().stream(), Stream.of("localhost")) - .map(ip -> ip + ":" + PULSAR.getMappedPort(PulsarContainer.BROKER_PORT)) - .collect(Collectors.joining(",")); - final PulsarDestinationConfig config = PulsarDestinationConfig - .getPulsarDestinationConfig(getConfig(brokers, topicPattern)); - final PulsarClient pulsarClient = PulsarUtils.buildClient(config.getServiceUrl()); - final PulsarRecordConsumer recordConsumer = new PulsarRecordConsumer(config, catalog, pulsarClient, outputRecordCollector, NAMING_RESOLVER); - final Map> producerMap = recordConsumer.buildProducerMap(); - assertEquals(Sets.newHashSet(catalog.getStreams()).size(), producerMap.size()); - - final AirbyteStreamNameNamespacePair streamNameNamespacePair = new AirbyteStreamNameNamespacePair(streamName, namespace); - assertEquals(expectedTopic, producerMap.get(streamNameNamespacePair).getTopic()); - } - - @Test - @SuppressWarnings("unchecked") - void testCannotConnectToBrokers() throws Exception { - final PulsarDestinationConfig config = PulsarDestinationConfig - .getPulsarDestinationConfig(getConfig(PULSAR.getHost() + ":" + (PULSAR.getMappedPort(PulsarContainer.BROKER_PORT) + 10), "test-topic")); - - final String streamName = "test-stream"; - final String namespace = "test-schema"; - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(List.of( - CatalogHelpers.createConfiguredAirbyteStream( - streamName, - namespace, - Field.of("id", JsonSchemaType.NUMBER), - Field.of("name", JsonSchemaType.STRING)))); - final PulsarClient pulsarClient = PulsarUtils.buildClient(config.getServiceUrl()); - final PulsarRecordConsumer consumer = new PulsarRecordConsumer(config, catalog, pulsarClient, outputRecordCollector, NAMING_RESOLVER); - final List expectedRecords = getNRecords(10, streamName, namespace); - - assertThrows(RuntimeException.class, consumer::start); - - expectedRecords.forEach(m -> assertThrows(RuntimeException.class, () -> consumer.accept(m))); - - consumer.accept(new AirbyteMessage() - .withType(AirbyteMessage.Type.STATE) - .withState(new AirbyteStateMessage().withData(Jsons.jsonNode(ImmutableMap.of(namespace + "." 
+ streamName, 0))))); - consumer.close(); - } - - private JsonNode getConfig(final String brokers, final String topic) { - return Jsons.jsonNode(ImmutableMap.builder() - .put("brokers", brokers) - .put("use_tls", false) - .put("topic_type", "non-persistent") - .put("topic_tenant", "public") - .put("topic_namespace", "default") - .put("topic_pattern", topic) - .put("producer_sync", true) - .put("compression_type", "NONE") - .put("send_timeout_ms", 30000) - .put("max_pending_messages", 1000) - .put("max_pending_messages_across_partitions", 50000) - .put("batching_enabled", true) - .put("batching_max_messages", 1000) - .put("batching_max_publish_delay", 1) - .put("block_if_queue_full", true) - .build()); - } - - private List getNRecords(final int n, final String streamName, final String namespace) { - return IntStream.range(0, n) - .boxed() - .map(i -> new AirbyteMessage() - .withType(AirbyteMessage.Type.RECORD) - .withRecord(new AirbyteRecordMessage() - .withStream(streamName) - .withNamespace(namespace) - .withEmittedAt(Instant.now().toEpochMilli()) - .withData(Jsons.jsonNode(ImmutableMap.of("id", i, "name", "human " + i))))) - .collect(Collectors.toList()); - - } - - @SuppressWarnings("UnstableApiUsage") - private List getIpAddresses() throws UnknownHostException { - try { - return Streams.stream(NetworkInterface.getNetworkInterfaces().asIterator()) - .flatMap(ni -> Streams.stream(ni.getInetAddresses().asIterator())) - .map(InetAddress::getHostAddress) - .filter(InetAddresses::isUriInetAddress) - .collect(Collectors.toList()); - } catch (SocketException e) { - return Collections.singletonList(InetAddress.getLocalHost().getHostAddress()); - } - } - - public static class TopicMapArgumentsProvider implements ArgumentsProvider { - - private static final String TOPIC_NAME = "test.topic"; - private static final String SCHEMA_NAME1 = "public"; - private static final String STREAM_NAME1 = "id_and_name"; - private static final String SCHEMA_NAME2 = SCHEMA_NAME1 + 2; - private static final String STREAM_NAME2 = STREAM_NAME1 + 2; - - private final ConfiguredAirbyteStream stream1 = CatalogHelpers.createConfiguredAirbyteStream( - SCHEMA_NAME1, - STREAM_NAME1, - Field.of("id", JsonSchemaType.NUMBER), - Field.of("name", JsonSchemaType.STRING)); - private final ConfiguredAirbyteStream stream2 = CatalogHelpers.createConfiguredAirbyteStream( - SCHEMA_NAME2, - STREAM_NAME2, - Field.of("id", JsonSchemaType.NUMBER), - Field.of("name", JsonSchemaType.STRING)); - - @Override - public Stream provideArguments(final ExtensionContext context) { - final String prefix = "non-persistent://public/default/"; - - final List catalogs = new ArrayList<>(); - catalogs.add(new ConfiguredAirbyteCatalog().withStreams(List.of(stream1))); - catalogs.add(new ConfiguredAirbyteCatalog().withStreams(List.of(stream1, stream1))); - catalogs.add(new ConfiguredAirbyteCatalog().withStreams(List.of(stream1, stream2))); - - return catalogs.stream() - .flatMap(catalog -> catalog.getStreams().stream() - .map(stream -> buildArgs(catalog, stream.getStream(), prefix)) - .flatMap(Collection::stream)); - } - - private List buildArgs(final ConfiguredAirbyteCatalog catalog, final AirbyteStream stream, final String prefix) { - final String transformedTopic = NAMING_RESOLVER.getIdentifier(TOPIC_NAME); - final String transformedName = NAMING_RESOLVER.getIdentifier(stream.getName()); - final String transformedNamespace = NAMING_RESOLVER.getIdentifier(stream.getNamespace()); - - return ImmutableList.of( - Arguments.of(catalog, stream.getName(), 
stream.getNamespace(), TOPIC_NAME, prefix + "test_topic"), - Arguments.of(catalog, stream.getName(), stream.getNamespace(), "test-topic", prefix + "test_topic"), - Arguments.of(catalog, stream.getName(), stream.getNamespace(), "{namespace}", prefix + transformedNamespace), - Arguments.of(catalog, stream.getName(), stream.getNamespace(), "{stream}", prefix + transformedName), - Arguments.of(catalog, stream.getName(), stream.getNamespace(), "{namespace}.{stream}." + TOPIC_NAME, - prefix + transformedNamespace + "_" + transformedName + "_" + transformedTopic), - Arguments.of(catalog, stream.getName(), stream.getNamespace(), "{namespace}-{stream}-" + TOPIC_NAME, - prefix + transformedNamespace + "_" + transformedName + "_" + transformedTopic), - Arguments.of(catalog, stream.getName(), stream.getNamespace(), "topic with spaces", prefix + "topic_with_spaces")); - } - - } - - @Override - protected Consumer getMockedConsumer() { - return outputRecordCollector; - } - - @Override - protected FailureTrackingAirbyteMessageConsumer getMessageConsumer() { - return consumer; - } - - @BeforeEach - void setup() { - // TODO: Unit tests should not use Testcontainers - PULSAR = new PulsarContainer(DockerImageName.parse("apachepulsar/pulsar:2.8.1")); - PULSAR.start(); - consumer = new PulsarRecordConsumer(config, catalog, pulsarClient, outputRecordCollector, NAMING_RESOLVER); - } - - @AfterEach - void tearDown() { - PULSAR.close(); - } - -} diff --git a/airbyte-integrations/connectors/destination-r2/README.md b/airbyte-integrations/connectors/destination-r2/README.md deleted file mode 100644 index 229c1d0e2d21..000000000000 --- a/airbyte-integrations/connectors/destination-r2/README.md +++ /dev/null @@ -1,74 +0,0 @@ -# Destination R2 - -This is the repository for the R2 destination connector in Java. -For information about how to use this connector within Airbyte, see [the User Documentation](https://docs.airbyte.io/integrations/destinations/r2). - -**Currently, no integration test has been set up for this connector, which requires either a local R2 container, or a remote R2 account.** - -## Local development - -#### Building via Gradle -From the Airbyte repository root, run: -``` -./gradlew :airbyte-integrations:connectors:destination-r2:build -``` - -#### Create credentials -**If you are a community contributor**, generate the necessary credentials and place them in `secrets/config.json` conforming to the spec file in `src/main/resources/spec.json`. -Note that the `secrets` directory is git-ignored by default, so there is no danger of accidentally checking in sensitive information. - -**If you are an Airbyte core member**, follow the [instructions](https://docs.airbyte.io/connector-development#using-credentials-in-ci) to set up the credentials. - -### Locally running the connector docker image - -#### Build -Build the connector image via Gradle: - -``` -./gradlew :airbyte-integrations:connectors:destination-r2:buildConnectorImage -``` -Once built, the docker image name and tag on your host will be `airbyte/destination-r2:dev`.
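The `check`, `discover`, and `read` commands below all expect a config file at `secrets/config.json`. As a minimal sketch only (placeholder values; the authoritative field list is the connector's `src/main/resources/spec.json`, and the `format` object here assumes the JSONL output type that spec defines):

```json
{
  "account_id": "paste-account-id-here",
  "access_key_id": "paste-access-key-id-here",
  "secret_access_key": "paste-secret-access-key-here",
  "s3_bucket_name": "r2_sync",
  "s3_bucket_path": "data_sync/test",
  "format": { "format_type": "JSONL" }
}
```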
- -#### Run -Then run any of the connector commands as follows: -``` -docker run --rm airbyte/destination-r2:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-r2:dev check --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-r2:dev discover --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-r2:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` - -## Testing -We use `JUnit` for Java tests. - -### Unit and Integration Tests -Place unit tests under `src/test/io/airbyte/integrations/destinations/r2`. - -#### Acceptance Tests -Airbyte has a standard test suite that all destination connectors must pass. Implement the `TODO`s in -`src/test-integration/java/io/airbyte/integrations/destinations/r2DestinationAcceptanceTest.java`. - -### Using gradle to run tests -All commands should be run from the Airbyte project root. -To run unit tests: -``` -./gradlew :airbyte-integrations:connectors:destination-r2:unitTest -``` -To run acceptance and custom integration tests: -``` -./gradlew :airbyte-integrations:connectors:destination-r2:integrationTest -``` - -## Dependency Management - -### Publishing a new version of the connector -You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? -1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-r2 test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value (see the sketch after this list). Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). -3. Make sure the `metadata.yaml` content is up to date. -4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/destinations/r2.md`). -5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). -6. Pat yourself on the back for being an awesome contributor. -7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
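As a sketch of step 2 above, assuming the standard connector `metadata.yaml` layout in which the image tag lives under `data.dockerImageTag` (the version numbers are illustrative, not this connector's actual ones):

```yaml
data:
  # ...other metadata fields unchanged...
  dockerImageTag: 0.1.1 # bumped from 0.1.0 per semantic versioning
```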
- diff --git a/airbyte-integrations/connectors/destination-r2/build.gradle b/airbyte-integrations/connectors/destination-r2/build.gradle deleted file mode 100644 index 94626b963c02..000000000000 --- a/airbyte-integrations/connectors/destination-r2/build.gradle +++ /dev/null @@ -1,49 +0,0 @@ -plugins { - id 'application' - id 'airbyte-java-connector' -} - -airbyteJavaConnector { - cdkVersionRequired = '0.2.0' - features = ['db-destinations'] - useLocalCdk = false -} - -//remove once upgrading the CDK version to 0.4.x or later -java { - compileJava { - options.compilerArgs.remove("-Werror") - } -} - -airbyteJavaConnector.addCdkDependencies() - -application { - mainClass = 'io.airbyte.integrations.destination.r2.R2Destination' - applicationDefaultJvmArgs = ['-XX:+ExitOnOutOfMemoryError', '-XX:MaxRAMPercentage=75.0'] -} - -dependencies { - - // csv - implementation 'com.amazonaws:aws-java-sdk-s3:1.11.978' - implementation 'org.apache.commons:commons-csv:1.4' - implementation 'com.github.alexmojaki:s3-stream-upload:2.2.2' - - // parquet - implementation ('org.apache.hadoop:hadoop-common:3.3.3') { - exclude group: 'org.slf4j', module: 'slf4j-log4j12' - exclude group: 'org.slf4j', module: 'slf4j-reload4j' - } - implementation ('org.apache.hadoop:hadoop-aws:3.3.3') { exclude group: 'org.slf4j', module: 'slf4j-log4j12'} - implementation ('org.apache.hadoop:hadoop-mapreduce-client-core:3.3.3') { - exclude group: 'org.slf4j', module: 'slf4j-log4j12' - exclude group: 'org.slf4j', module: 'slf4j-reload4j' - } - implementation ('org.apache.parquet:parquet-avro:1.12.3') { exclude group: 'org.slf4j', module: 'slf4j-log4j12'} - implementation ('com.github.airbytehq:json-avro-converter:1.1.0') { exclude group: 'ch.qos.logback', module: 'logback-classic'} - - testImplementation 'org.apache.commons:commons-lang3:3.11' - testImplementation 'org.xerial.snappy:snappy-java:1.1.8.4' - testImplementation "org.mockito:mockito-inline:4.1.0" -} diff --git a/airbyte-integrations/connectors/destination-r2/sample_secrets/config.json b/airbyte-integrations/connectors/destination-r2/sample_secrets/config.json deleted file mode 100644 index db86c13f3a82..000000000000 --- a/airbyte-integrations/connectors/destination-r2/sample_secrets/config.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "s3_bucket_name": "paste-bucket-name-here", - "s3_bucket_path": "integration-test", - "account_id": "paste-account-id-here", - "access_key_id": "paste-access-key-id-here", - "secret_access_key": "paste-secret-access-key-here" -} diff --git a/airbyte-integrations/connectors/destination-r2/src/main/java/io/airbyte/integrations/destination/r2/R2Destination.java b/airbyte-integrations/connectors/destination-r2/src/main/java/io/airbyte/integrations/destination/r2/R2Destination.java deleted file mode 100644 index 1c663ed2f54e..000000000000 --- a/airbyte-integrations/connectors/destination-r2/src/main/java/io/airbyte/integrations/destination/r2/R2Destination.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */ - -package io.airbyte.integrations.destination.r2; - -import io.airbyte.cdk.integrations.base.IntegrationRunner; -import io.airbyte.cdk.integrations.destination.s3.BaseS3Destination; -import io.airbyte.cdk.integrations.destination.s3.StorageProvider; - -public class R2Destination extends BaseS3Destination { - - public static void main(String[] args) throws Exception { - System.setProperty("com.amazonaws.services.s3.disableGetObjectMD5Validation", "true"); - System.setProperty("com.amazonaws.services.s3.disablePutObjectMD5Validation", "true"); - new IntegrationRunner(new R2Destination()).run(args); - } - - @Override - public StorageProvider storageProvider() { - return StorageProvider.CF_R2; - } - -} diff --git a/airbyte-integrations/connectors/destination-r2/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-r2/src/main/resources/spec.json deleted file mode 100644 index 5274dcf4e5d0..000000000000 --- a/airbyte-integrations/connectors/destination-r2/src/main/resources/spec.json +++ /dev/null @@ -1,296 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/destinations/r2", - "supportsIncremental": true, - "supportsNormalization": false, - "supportsDBT": false, - "supported_destination_sync_modes": ["overwrite", "append"], - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "R2 Destination Spec", - "type": "object", - "required": [ - "account_id", - "access_key_id", - "secret_access_key", - "s3_bucket_name", - "s3_bucket_path", - "format" - ], - - "properties": { - "account_id": { - "type": "string", - "description": "Cloudflare account ID", - "title": "Cloudflare account ID", - "examples": ["12345678aa1a1a11111aaa1234567abc"], - "order": 0 - }, - "access_key_id": { - "type": "string", - "description": "The access key ID to access the R2 bucket. Airbyte requires Read and Write permissions to the given bucket. Read more here.", - "title": "R2 Key ID", - "airbyte_secret": true, - "examples": ["A012345678910EXAMPLE"], - "order": 1 - }, - "secret_access_key": { - "type": "string", - "description": "The corresponding secret to the access key ID. Read more here", - "title": "R2 Access Key", - "airbyte_secret": true, - "examples": ["a012345678910ABCDEFGHAbCdEfGhEXAMPLEKEY"], - "order": 2 - }, - "s3_bucket_name": { - "title": "R2 Bucket Name", - "type": "string", - "description": "The name of the R2 bucket. Read more here.", - "examples": ["r2_sync"], - "order": 3 - }, - "s3_bucket_path": { - "title": "R2 Bucket Path", - "description": "Directory under the R2 bucket where data will be written.", - "type": "string", - "examples": ["data_sync/test"], - "order": 4 - }, - "format": { - "title": "Output Format", - "type": "object", - "description": "Format of the data output. See here for more details", - "oneOf": [ - { - "title": "Avro: Apache Avro", - "required": ["format_type", "compression_codec"], - "properties": { - "format_type": { - "title": "Format Type", - "type": "string", - "enum": ["Avro"], - "default": "Avro", - "order": 0 - }, - "compression_codec": { - "title": "Compression Codec", - "description": "The compression algorithm used to compress data. 
Default to no compression.", - "type": "object", - "oneOf": [ - { - "title": "No Compression", - "required": ["codec"], - "properties": { - "codec": { - "type": "string", - "enum": ["no compression"], - "default": "no compression" - } - } - }, - { - "title": "Deflate", - "required": ["codec", "compression_level"], - "properties": { - "codec": { - "type": "string", - "enum": ["Deflate"], - "default": "Deflate" - }, - "compression_level": { - "title": "Deflate Level", - "description": "0: no compression & fastest, 9: best compression & slowest.", - "type": "integer", - "default": 0, - "minimum": 0, - "maximum": 9 - } - } - }, - { - "title": "bzip2", - "required": ["codec"], - "properties": { - "codec": { - "type": "string", - "enum": ["bzip2"], - "default": "bzip2" - } - } - }, - { - "title": "xz", - "required": ["codec", "compression_level"], - "properties": { - "codec": { - "type": "string", - "enum": ["xz"], - "default": "xz" - }, - "compression_level": { - "title": "Compression Level", - "description": "See here for details.", - "type": "integer", - "default": 6, - "minimum": 0, - "maximum": 9 - } - } - }, - { - "title": "zstandard", - "required": ["codec", "compression_level"], - "properties": { - "codec": { - "type": "string", - "enum": ["zstandard"], - "default": "zstandard" - }, - "compression_level": { - "title": "Compression Level", - "description": "Negative levels are 'fast' modes akin to lz4 or snappy, levels above 9 are generally for archival purposes, and levels above 18 use a lot of memory.", - "type": "integer", - "default": 3, - "minimum": -5, - "maximum": 22 - }, - "include_checksum": { - "title": "Include Checksum", - "description": "If true, include a checksum with each data block.", - "type": "boolean", - "default": false - } - } - }, - { - "title": "snappy", - "required": ["codec"], - "properties": { - "codec": { - "type": "string", - "enum": ["snappy"], - "default": "snappy" - } - } - } - ], - "order": 1 - } - } - }, - { - "title": "CSV: Comma-Separated Values", - "required": ["format_type", "flattening"], - "properties": { - "format_type": { - "title": "Format Type", - "type": "string", - "enum": ["CSV"], - "default": "CSV" - }, - "flattening": { - "type": "string", - "title": "Normalization (Flattening)", - "description": "Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.", - "default": "No flattening", - "enum": ["No flattening", "Root level flattening"] - }, - "compression": { - "title": "Compression", - "type": "object", - "description": "Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: \".csv.gz\").", - "oneOf": [ - { - "title": "No Compression", - "requires": ["compression_type"], - "properties": { - "compression_type": { - "type": "string", - "enum": ["No Compression"], - "default": "No Compression" - } - } - }, - { - "title": "GZIP", - "requires": ["compression_type"], - "properties": { - "compression_type": { - "type": "string", - "enum": ["GZIP"], - "default": "GZIP" - } - } - } - ] - } - } - }, - { - "title": "JSON Lines: Newline-delimited JSON", - "required": ["format_type"], - "properties": { - "format_type": { - "title": "Format Type", - "type": "string", - "enum": ["JSONL"], - "default": "JSONL" - }, - "compression": { - "title": "Compression", - "type": "object", - "description": "Whether the output files should be compressed. 
If compression is selected, the output filename will have an extra extension (GZIP: \".jsonl.gz\").", - "oneOf": [ - { - "title": "No Compression", - "requires": "compression_type", - "properties": { - "compression_type": { - "type": "string", - "enum": ["No Compression"], - "default": "No Compression" - } - } - }, - { - "title": "GZIP", - "requires": "compression_type", - "properties": { - "compression_type": { - "type": "string", - "enum": ["GZIP"], - "default": "GZIP" - } - } - } - ] - } - } - } - ], - "order": 5 - }, - "s3_path_format": { - "title": "R2 Path Format", - "description": "Format string on how data will be organized inside the R2 bucket directory. Read more here", - "type": "string", - "examples": [ - "${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_" - ], - "order": 6 - }, - "file_name_pattern": { - "type": "string", - "description": "The pattern allows you to set the file-name format for the R2 staging file(s)", - "title": "R2 Filename pattern", - "examples": [ - "{date}", - "{date:yyyy_MM}", - "{timestamp}", - "{part_number}", - "{sync_id}" - ], - "order": 7 - } - } - } -} diff --git a/airbyte-integrations/connectors/destination-r2/src/test-integration/java/io/airbyte/integrations/destination/r2/R2AvroDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-r2/src/test-integration/java/io/airbyte/integrations/destination/r2/R2AvroDestinationAcceptanceTest.java deleted file mode 100644 index ac4b5267098a..000000000000 --- a/airbyte-integrations/connectors/destination-r2/src/test-integration/java/io/airbyte/integrations/destination/r2/R2AvroDestinationAcceptanceTest.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.r2; - -import io.airbyte.cdk.integrations.destination.s3.S3BaseAvroDestinationAcceptanceTest; -import io.airbyte.cdk.integrations.destination.s3.StorageProvider; - -public class R2AvroDestinationAcceptanceTest extends S3BaseAvroDestinationAcceptanceTest { - - @Override - protected String getImageName() { - return "airbyte/destination-r2:dev"; - } - - @Override - public StorageProvider storageProvider() { - return StorageProvider.CF_R2; - } - -} diff --git a/airbyte-integrations/connectors/destination-r2/src/test-integration/java/io/airbyte/integrations/destination/r2/R2CsvDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-r2/src/test-integration/java/io/airbyte/integrations/destination/r2/R2CsvDestinationAcceptanceTest.java deleted file mode 100644 index a9f2b72a5fd8..000000000000 --- a/airbyte-integrations/connectors/destination-r2/src/test-integration/java/io/airbyte/integrations/destination/r2/R2CsvDestinationAcceptanceTest.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.r2; - -import io.airbyte.cdk.integrations.destination.s3.S3BaseCsvDestinationAcceptanceTest; -import io.airbyte.cdk.integrations.destination.s3.StorageProvider; - -public class R2CsvDestinationAcceptanceTest extends S3BaseCsvDestinationAcceptanceTest { - - @Override - protected String getImageName() { - return "airbyte/destination-r2:dev"; - } - - @Override - public StorageProvider storageProvider() { - return StorageProvider.CF_R2; - } - -} diff --git a/airbyte-integrations/connectors/destination-r2/src/test-integration/java/io/airbyte/integrations/destination/r2/R2CsvGzipDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-r2/src/test-integration/java/io/airbyte/integrations/destination/r2/R2CsvGzipDestinationAcceptanceTest.java deleted file mode 100644 index 3b6df013b10e..000000000000 --- a/airbyte-integrations/connectors/destination-r2/src/test-integration/java/io/airbyte/integrations/destination/r2/R2CsvGzipDestinationAcceptanceTest.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.r2; - -import io.airbyte.cdk.integrations.destination.s3.S3BaseCsvGzipDestinationAcceptanceTest; -import io.airbyte.cdk.integrations.destination.s3.StorageProvider; - -public class R2CsvGzipDestinationAcceptanceTest extends S3BaseCsvGzipDestinationAcceptanceTest { - - @Override - protected String getImageName() { - return "airbyte/destination-r2:dev"; - } - - @Override - public StorageProvider storageProvider() { - return StorageProvider.CF_R2; - } - -} diff --git a/airbyte-integrations/connectors/destination-r2/src/test-integration/java/io/airbyte/integrations/destination/r2/R2JsonlDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-r2/src/test-integration/java/io/airbyte/integrations/destination/r2/R2JsonlDestinationAcceptanceTest.java deleted file mode 100644 index 0385e1c7d008..000000000000 --- a/airbyte-integrations/connectors/destination-r2/src/test-integration/java/io/airbyte/integrations/destination/r2/R2JsonlDestinationAcceptanceTest.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.r2; - -import io.airbyte.cdk.integrations.destination.s3.S3BaseJsonlDestinationAcceptanceTest; -import io.airbyte.cdk.integrations.destination.s3.StorageProvider; - -public class R2JsonlDestinationAcceptanceTest extends S3BaseJsonlDestinationAcceptanceTest { - - @Override - protected String getImageName() { - return "airbyte/destination-r2:dev"; - } - - @Override - public StorageProvider storageProvider() { - return StorageProvider.CF_R2; - } - -} diff --git a/airbyte-integrations/connectors/destination-r2/src/test-integration/java/io/airbyte/integrations/destination/r2/R2JsonlGzipDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-r2/src/test-integration/java/io/airbyte/integrations/destination/r2/R2JsonlGzipDestinationAcceptanceTest.java deleted file mode 100644 index 13506ab48dee..000000000000 --- a/airbyte-integrations/connectors/destination-r2/src/test-integration/java/io/airbyte/integrations/destination/r2/R2JsonlGzipDestinationAcceptanceTest.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.r2; - -import io.airbyte.cdk.integrations.destination.s3.S3BaseJsonlGzipDestinationAcceptanceTest; -import io.airbyte.cdk.integrations.destination.s3.StorageProvider; - -public class R2JsonlGzipDestinationAcceptanceTest extends S3BaseJsonlGzipDestinationAcceptanceTest { - - @Override - protected String getImageName() { - return "airbyte/destination-r2:dev"; - } - - @Override - public StorageProvider storageProvider() { - return StorageProvider.CF_R2; - } - -} diff --git a/airbyte-integrations/connectors/destination-r2/src/test-integration/java/io/airbyte/integrations/destination/r2/R2ParquetDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-r2/src/test-integration/java/io/airbyte/integrations/destination/r2/R2ParquetDestinationAcceptanceTest.java deleted file mode 100644 index 6393a0e27502..000000000000 --- a/airbyte-integrations/connectors/destination-r2/src/test-integration/java/io/airbyte/integrations/destination/r2/R2ParquetDestinationAcceptanceTest.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.r2; - -import io.airbyte.cdk.integrations.destination.s3.S3BaseParquetDestinationAcceptanceTest; -import io.airbyte.cdk.integrations.destination.s3.StorageProvider; -import org.junit.jupiter.api.Disabled; - -/** - * s3a client not supported by cloudflare R2 - */ -@Disabled -public class R2ParquetDestinationAcceptanceTest extends S3BaseParquetDestinationAcceptanceTest { - - @Override - protected String getImageName() { - return "airbyte/destination-r2:dev"; - } - - @Override - public StorageProvider storageProvider() { - return StorageProvider.CF_R2; - } - -} diff --git a/airbyte-integrations/connectors/destination-rabbitmq/.dockerignore b/airbyte-integrations/connectors/destination-rabbitmq/.dockerignore deleted file mode 100644 index f3757e1aa586..000000000000 --- a/airbyte-integrations/connectors/destination-rabbitmq/.dockerignore +++ /dev/null @@ -1,5 +0,0 @@ -* -!Dockerfile -!main.py -!destination_rabbitmq -!setup.py diff --git a/airbyte-integrations/connectors/destination-rabbitmq/Dockerfile b/airbyte-integrations/connectors/destination-rabbitmq/Dockerfile deleted file mode 100644 index ebf1791aa25d..000000000000 --- a/airbyte-integrations/connectors/destination-rabbitmq/Dockerfile +++ /dev/null @@ -1,38 +0,0 @@ -FROM python:3.9.11-alpine3.15 as base - -# build and load all requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -# upgrade pip to the latest version -RUN apk --no-cache upgrade \ - && pip install --upgrade pip \ - && apk --no-cache add tzdata build-base - - -COPY setup.py ./ -# install necessary packages to a temporary folder -RUN pip install --prefix=/install . - -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# bash is installed for more convenient debugging. 
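-# (not strictly required at runtime: the ENTRYPOINT below invokes python directly)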
-RUN apk --no-cache add bash - -# copy payload code only -COPY main.py ./ -COPY destination_rabbitmq ./destination_rabbitmq - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=0.1.1 -LABEL io.airbyte.name=airbyte/destination-rabbitmq diff --git a/airbyte-integrations/connectors/destination-rabbitmq/README.md b/airbyte-integrations/connectors/destination-rabbitmq/README.md deleted file mode 100644 index f6952028a518..000000000000 --- a/airbyte-integrations/connectors/destination-rabbitmq/README.md +++ /dev/null @@ -1,99 +0,0 @@ -# Rabbitmq Destination - -This is the repository for the Rabbitmq destination connector, written in Python. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/destinations/rabbitmq). - -## Local development - -### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** - -#### Minimum Python version required `= 3.7.0` - -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` - -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt -``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. - -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. - -#### Create credentials -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/destinations/rabbitmq) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `destination_rabbitmq/spec.json` file. -Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. - -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `destination rabbitmq test creds` -and place them into `secrets/config.json`. - -### Locally running the connector -``` -python main.py spec -python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json -``` - -### Locally running the connector docker image - - -#### Build -**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** -```bash -airbyte-ci connectors --name=destination-rabbitmq build -``` - -An image will be built with the tag `airbyte/destination-rabbitmq:dev`. - -**Via `docker build`:** -```bash -docker build -t airbyte/destination-rabbitmq:dev . 
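-# this produces the same airbyte/destination-rabbitmq:dev tag as the airbyte-ci build above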
-``` - -#### Run -Then run any of the connector commands as follows: -``` -docker run --rm airbyte/destination-rabbitmq:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-rabbitmq:dev check --config /secrets/config.json -# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages -cat messages.jsonl | docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-rabbitmq:dev write --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` - -## Testing -You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): -```bash -airbyte-ci connectors --name=destination-rabbitmq test -``` - -### Customizing acceptance Tests -Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. -If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. - -## Dependency Management -All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. -We split dependencies between two groups, dependencies that are: -* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. -* required for the testing need to go to `TEST_REQUIREMENTS` list - -### Publishing a new version of the connector -You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? -1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-rabbitmq test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). -3. Make sure the `metadata.yaml` content is up to date. -4. Make the connector documentation and its changelog is up to date (`docs/integrations/destinations/rabbitmq.md`). -5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). -6. Pat yourself on the back for being an awesome contributor. -7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. - diff --git a/airbyte-integrations/connectors/destination-rabbitmq/destination_rabbitmq/__init__.py b/airbyte-integrations/connectors/destination-rabbitmq/destination_rabbitmq/__init__.py deleted file mode 100644 index db4e71f357d9..000000000000 --- a/airbyte-integrations/connectors/destination-rabbitmq/destination_rabbitmq/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
-# - - -from .destination import DestinationRabbitmq - -__all__ = ["DestinationRabbitmq"] diff --git a/airbyte-integrations/connectors/destination-rabbitmq/destination_rabbitmq/__init__.pyc b/airbyte-integrations/connectors/destination-rabbitmq/destination_rabbitmq/__init__.pyc deleted file mode 100644 index 7f48a3bafd1aadfe7ceb85fa60d5e98e7083500c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 300 zcmYk2F;4?A427M$3M#5fNS)blX|pf_Vqj)y#lm9cE>UI3C2i~u{oVWl98}<<XBS45M7LjT^0VpMYr~iLtz_F#P(3~M;L^yqtc%x{MI!k w3H$Ps%XY{qsVnQPN!Yqv9yAs4Pei0dg19-HWv;OgWA>*Qp02d#`YE@~H#L<^E&u=k diff --git a/airbyte-integrations/connectors/destination-rabbitmq/destination_rabbitmq/destination.py b/airbyte-integrations/connectors/destination-rabbitmq/destination_rabbitmq/destination.py deleted file mode 100644 index 162a7a048e00..000000000000 --- a/airbyte-integrations/connectors/destination-rabbitmq/destination_rabbitmq/destination.py +++ /dev/null @@ -1,84 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import json -from typing import Any, Iterable, Mapping - -import pika -from airbyte_cdk import AirbyteLogger -from airbyte_cdk.destinations import Destination -from airbyte_cdk.models import AirbyteConnectionStatus, AirbyteMessage, ConfiguredAirbyteCatalog, Status, Type -from pika.adapters.blocking_connection import BlockingConnection -from pika.spec import BasicProperties - -_DEFAULT_PORT = 5672 - - -def create_connection(config: Mapping[str, Any]) -> BlockingConnection: - host = config.get("host") - port = config.get("port") or _DEFAULT_PORT - username = config.get("username") - password = config.get("password") - virtual_host = config.get("virtual_host", "") - ssl_enabled = config.get("ssl", False) - amqp_protocol = "amqp" - host_url = host - if ssl_enabled: - amqp_protocol = "amqps" - if port: - host_url = host + ":" + str(port) - credentials = f"{username}:{password}@" if username and password else "" - params = pika.URLParameters(f"{amqp_protocol}://{credentials}{host_url}/{virtual_host}") - return BlockingConnection(params) - - -class DestinationRabbitmq(Destination): - def write( - self, config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog, input_messages: Iterable[AirbyteMessage] - ) -> Iterable[AirbyteMessage]: - exchange = config.get("exchange") - routing_key = config["routing_key"] - connection = create_connection(config=config) - channel = connection.channel() - - streams = {s.stream.name for s in configured_catalog.streams} - try: - for message in input_messages: - if message.type == Type.STATE: - # Emitting a state message means all records that came before it - # have already been published. - yield message - elif message.type == Type.RECORD: - record = message.record - if record.stream not in streams: - # Message contains record from a stream that is not in the catalog. Skip it! 
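- # Unconfigured streams are skipped silently rather than raising an error.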
- continue - headers = {"stream": record.stream, "emitted_at": record.emitted_at, "namespace": record.namespace} - properties = BasicProperties(content_type="application/json", headers=headers) - channel.basic_publish( - exchange=exchange or "", routing_key=routing_key, properties=properties, body=json.dumps(record.data) - ) - else: - # Let's ignore other message types for now - continue - finally: - connection.close() - - def check(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> AirbyteConnectionStatus: - try: - connection = create_connection(config=config) - except Exception as e: - logger.error(f"Failed to create connection. Error: {e}") - return AirbyteConnectionStatus(status=Status.FAILED, message=f"Could not create connection: {repr(e)}") - try: - channel = connection.channel() - if channel.is_open: - return AirbyteConnectionStatus(status=Status.SUCCEEDED) - return AirbyteConnectionStatus(status=Status.FAILED, message="Could not open channel") - except Exception as e: - logger.error(f"Failed to open RabbitMQ channel. Error: {e}") - return AirbyteConnectionStatus(status=Status.FAILED, message=f"An exception occurred: {repr(e)}") - finally: - connection.close() diff --git a/airbyte-integrations/connectors/destination-rabbitmq/destination_rabbitmq/spec.json b/airbyte-integrations/connectors/destination-rabbitmq/destination_rabbitmq/spec.json deleted file mode 100644 index cbeb330e5a96..000000000000 --- a/airbyte-integrations/connectors/destination-rabbitmq/destination_rabbitmq/spec.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/destinations/rabbitmq", - "supported_destination_sync_modes": ["append"], - "supportsIncremental": true, - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Destination Rabbitmq", - "type": "object", - "required": ["host", "routing_key"], - "additionalProperties": false, - "properties": { - "ssl": { - "type": "boolean", - "description": "SSL enabled.", - "default": true - }, - "host": { - "type": "string", - "description": "The RabbitMQ host name." - }, - "port": { - "type": "integer", - "description": "The RabbitMQ port." - }, - "virtual_host": { - "type": "string", - "description": "The RabbitMQ virtual host name." - }, - "username": { - "type": "string", - "description": "The username to connect." - }, - "password": { - "type": "string", - "title": "Password", - "description": "The password to connect.", - "airbyte_secret": true - }, - "exchange": { - "type": "string", - "description": "The exchange name." - }, - "routing_key": { - "type": "string", - "description": "The routing key." - } - } - } -} diff --git a/airbyte-integrations/connectors/destination-rabbitmq/integration_tests/integration_test.py b/airbyte-integrations/connectors/destination-rabbitmq/integration_tests/integration_test.py deleted file mode 100644 index f99c64178d4f..000000000000 --- a/airbyte-integrations/connectors/destination-rabbitmq/integration_tests/integration_test.py +++ /dev/null @@ -1,90 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - -import json -from unittest.mock import Mock - -from airbyte_cdk.models import AirbyteMessage, Status, Type -from airbyte_cdk.models.airbyte_protocol import ( - AirbyteRecordMessage, - AirbyteStateMessage, - AirbyteStream, - ConfiguredAirbyteCatalog, - ConfiguredAirbyteStream, - DestinationSyncMode, - SyncMode, -) -from destination_rabbitmq.destination import DestinationRabbitmq, create_connection - -TEST_STREAM = "animals" -TEST_NAMESPACE = "test_namespace" -TEST_MESSAGE = {"name": "cat"} - - -def _configured_catalog() -> ConfiguredAirbyteCatalog: - stream_schema = {"type": "object", "properties": {"name": {"type": "string"}}} - append_stream = ConfiguredAirbyteStream( - stream=AirbyteStream(name=TEST_STREAM, json_schema=stream_schema, supported_sync_modes=[SyncMode.incremental]), - sync_mode=SyncMode.incremental, - destination_sync_mode=DestinationSyncMode.append, - ) - return ConfiguredAirbyteCatalog(streams=[append_stream]) - - -def consume(config): - connection = create_connection(config=config) - channel = connection.channel() - - def assert_message(ch, method, properties, body): - assert json.loads(body) == TEST_MESSAGE - assert properties.content_type == "application/json" - assert properties.headers["stream"] == TEST_STREAM - assert properties.headers["namespace"] == TEST_NAMESPACE - assert "emitted_at" in properties.headers - channel.stop_consuming() - - channel.basic_consume(queue=config["routing_key"], on_message_callback=assert_message, auto_ack=True) - channel.start_consuming() - - -def _state() -> AirbyteMessage: - return AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(data={})) - - -def _record() -> AirbyteMessage: - return AirbyteMessage( - type=Type.RECORD, record=AirbyteRecordMessage(stream=TEST_STREAM, data=TEST_MESSAGE, emitted_at=0, namespace=TEST_NAMESPACE) - ) - - -def test_check_fails(): - f = open( - "integration_tests/invalid_config.json", - ) - config = json.load(f) - destination = DestinationRabbitmq() - status = destination.check(logger=Mock(), config=config) - assert status.status == Status.FAILED - - -def test_check_succeeds(): - f = open( - "secrets/config.json", - ) - config = json.load(f) - destination = DestinationRabbitmq() - status = destination.check(logger=Mock(), config=config) - assert status.status == Status.SUCCEEDED - - -def test_write(): - f = open( - "secrets/config.json", - ) - config = json.load(f) - messages = [_record(), _state()] - destination = DestinationRabbitmq() - for m in destination.write(config=config, configured_catalog=_configured_catalog(), input_messages=messages): - assert m.type == Type.STATE - consume(config) diff --git a/airbyte-integrations/connectors/destination-rabbitmq/integration_tests/invalid_config.json b/airbyte-integrations/connectors/destination-rabbitmq/integration_tests/invalid_config.json deleted file mode 100644 index a482e4705f21..000000000000 --- a/airbyte-integrations/connectors/destination-rabbitmq/integration_tests/invalid_config.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "host": "invalid.host.io", - "port": 5672, - "virtual_host": "invalid_vh", - "username": "invalid_username", - "password": "invalid_password", - "routing_key": "test_queue", - "exchange": "test_exchange" -} diff --git a/airbyte-integrations/connectors/destination-rabbitmq/main.py b/airbyte-integrations/connectors/destination-rabbitmq/main.py deleted file mode 100644 index fc09374015c7..000000000000 --- a/airbyte-integrations/connectors/destination-rabbitmq/main.py +++ /dev/null @@ -1,11 +0,0 @@ -# -# Copyright (c) 2023 
Airbyte, Inc., all rights reserved. -# - - -import sys - -from destination_rabbitmq import DestinationRabbitmq - -if __name__ == "__main__": - DestinationRabbitmq().run(sys.argv[1:]) diff --git a/airbyte-integrations/connectors/destination-rabbitmq/requirements.txt b/airbyte-integrations/connectors/destination-rabbitmq/requirements.txt deleted file mode 100644 index d6e1198b1ab1..000000000000 --- a/airbyte-integrations/connectors/destination-rabbitmq/requirements.txt +++ /dev/null @@ -1 +0,0 @@ --e . diff --git a/airbyte-integrations/connectors/destination-rabbitmq/setup.py b/airbyte-integrations/connectors/destination-rabbitmq/setup.py deleted file mode 100644 index 352ded5f8b4e..000000000000 --- a/airbyte-integrations/connectors/destination-rabbitmq/setup.py +++ /dev/null @@ -1,23 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = ["airbyte-cdk", "pika>=1.1.0"] - -TEST_REQUIREMENTS = ["pytest~=6.1"] - -setup( - name="destination_rabbitmq", - description="Destination implementation for Rabbitmq.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json"]}, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/destination-rabbitmq/unit_tests/unit_test.py b/airbyte-integrations/connectors/destination-rabbitmq/unit_tests/unit_test.py deleted file mode 100644 index 57c34b6f9f58..000000000000 --- a/airbyte-integrations/connectors/destination-rabbitmq/unit_tests/unit_test.py +++ /dev/null @@ -1,130 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import json -from typing import Any, Dict -from unittest import mock -from unittest.mock import Mock - -from airbyte_cdk.models import AirbyteMessage, Status, Type -from airbyte_cdk.models.airbyte_protocol import ( - AirbyteRecordMessage, - AirbyteStateMessage, - AirbyteStream, - ConfiguredAirbyteCatalog, - ConfiguredAirbyteStream, - DestinationSyncMode, - SyncMode, -) -from destination_rabbitmq.destination import DestinationRabbitmq -from pika.spec import Queue - -config = { - "host": "test.rabbitmq", - "port": 5672, - "virtual_host": "test_vh", - "username": "john.doe", - "password": "secret", - "exchange": "test_exchange", - "routing_key": "test_routing_key", -} - - -def _init_mocks(connection_init): - connection, channel = Mock(), Mock() - connection_init.return_value = connection - connection.channel.return_value = channel - return channel - - -@mock.patch("destination_rabbitmq.destination.BlockingConnection") -def test_check_succeeds(connection_init): - result = Mock() - result.method = Queue.DeclareOk() - channel = _init_mocks(connection_init=connection_init) - channel.queue_declare.return_value = result - destination = DestinationRabbitmq() - status = destination.check(logger=Mock(), config=config) - assert status.status == Status.SUCCEEDED - - -@mock.patch("destination_rabbitmq.destination.BlockingConnection") -def test_check_fails_on_getting_channel(connection_init): - connection = Mock() - connection_init.return_value = connection - connection.channel.side_effect = Exception("Failed to get channel") - destination = DestinationRabbitmq() - status = destination.check(logger=Mock(), config=config) - assert status.status == Status.FAILED - - -@mock.patch("destination_rabbitmq.destination.BlockingConnection") -def test_check_fails_on_creating_connection(connection_init): - 
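# Here BlockingConnection construction itself raises, so check() must return a FAILED status. -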
connection_init.side_effect = Exception("Could not open connection") - destination = DestinationRabbitmq() - status = destination.check(logger=Mock(), config=config) - assert status.status == Status.FAILED - - -def _state() -> AirbyteMessage: - return AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(data={})) - - -def _record(stream: str, data: Dict[str, Any]) -> AirbyteMessage: - return AirbyteMessage(type=Type.RECORD, record=AirbyteRecordMessage(stream=stream, data=data, emitted_at=0)) - - -def _configured_catalog() -> ConfiguredAirbyteCatalog: - stream_schema = {"type": "object", "properties": {"name": {"type": "string"}, "email": {"type": "string"}}} - append_stream = ConfiguredAirbyteStream( - stream=AirbyteStream(name="people", json_schema=stream_schema, supported_sync_modes=[SyncMode.incremental]), - sync_mode=SyncMode.incremental, - destination_sync_mode=DestinationSyncMode.append, - ) - return ConfiguredAirbyteCatalog(streams=[append_stream]) - - -@mock.patch("destination_rabbitmq.destination.BlockingConnection") -def test_write_succeeds(connection_init): - stream = "people" - data = {"name": "John Doe", "email": "john.doe@example.com"} - channel = _init_mocks(connection_init=connection_init) - input_messages = [_record(stream=stream, data=data), _state()] - destination = DestinationRabbitmq() - for m in destination.write(config=config, configured_catalog=_configured_catalog(), input_messages=input_messages): - assert m.type == Type.STATE - _, _, args = channel.basic_publish.mock_calls[0] - assert args["exchange"] == "test_exchange" - assert args["routing_key"] == "test_routing_key" - assert args["properties"].content_type == "application/json" - assert args["properties"].headers["stream"] == stream - assert json.loads(args["body"]) == data - - -@mock.patch("destination_rabbitmq.destination.BlockingConnection") -def test_write_succeeds_with_direct_exchange(connection_init): - stream = "people" - data = {"name": "John Doe", "email": "john.doe@example.com"} - channel = _init_mocks(connection_init=connection_init) - input_messages = [_record(stream=stream, data=data), _state()] - custom_config = dict(config) - del custom_config["exchange"] - destination = DestinationRabbitmq() - for m in destination.write(config=custom_config, configured_catalog=_configured_catalog(), input_messages=input_messages): - assert m.type == Type.STATE - _, _, args = channel.basic_publish.mock_calls[0] - assert args["exchange"] == "" - assert json.loads(args["body"]) == data - - -@mock.patch("destination_rabbitmq.destination.BlockingConnection") -def test_write_skips_message_from_unknown_stream(connection_init): - stream = "shapes" - data = {"name": "Rectangle", "color": "blue"} - channel = _init_mocks(connection_init=connection_init) - input_messages = [_record(stream=stream, data=data), _state()] - destination = DestinationRabbitmq() - for m in destination.write(config=config, configured_catalog=_configured_catalog(), input_messages=input_messages): - assert m.type == Type.STATE - channel.basic_publish.assert_not_called() diff --git a/airbyte-integrations/connectors/destination-redpanda/README.md b/airbyte-integrations/connectors/destination-redpanda/README.md deleted file mode 100644 index 6f9f022a7f73..000000000000 --- a/airbyte-integrations/connectors/destination-redpanda/README.md +++ /dev/null @@ -1,72 +0,0 @@ -# Destination Redpanda - -This is the repository for the Redpanda destination connector in Java. 
-For information about how to use this connector within Airbyte, see [the User Documentation](https://docs.airbyte.io/integrations/destinations/redpanda). - -## Local development - -#### Building via Gradle -From the Airbyte repository root, run: -``` -./gradlew :airbyte-integrations:connectors:destination-redpanda:build -``` - -#### Create credentials -**If you are a community contributor**, generate the necessary credentials and place them in `secrets/config.json` conforming to the spec file in `src/main/resources/spec.json`. -Note that the `secrets` directory is git-ignored by default, so there is no danger of accidentally checking in sensitive information. - -**If you are an Airbyte core member**, follow the [instructions](https://docs.airbyte.io/connector-development#using-credentials-in-ci) to set up the credentials. - -### Locally running the connector docker image - -#### Build -Build the connector image via Gradle: - -``` -./gradlew :airbyte-integrations:connectors:destination-redpanda:buildConnectorImage -``` -Once built, the docker image name and tag on your host will be `airbyte/destination-redpanda:dev`. -the Dockerfile. - -#### Run -Then run any of the connector commands as follows: -``` -docker run --rm airbyte/destination-redpanda:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-redpanda:dev check --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-redpanda:dev discover --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-redpanda:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` - -## Testing -We use `JUnit` for Java tests. - -### Unit and Integration Tests -Place unit tests under `src/test/io/airbyte/integrations/destinations/redpanda`. - -#### Acceptance Tests -Airbyte has a standard test suite that all destination connectors must pass. Implement the `TODO`s in -`src/test-integration/java/io/airbyte/integrations/destinations/redpandaDestinationAcceptanceTest.java`. - -### Using gradle to run tests -All commands should be run from airbyte project root. -To run unit tests: -``` -./gradlew :airbyte-integrations:connectors:destination-redpanda:unitTest -``` -To run acceptance and custom integration tests: -``` -./gradlew :airbyte-integrations:connectors:destination-redpanda:integrationTest -``` - -## Dependency Management - -### Publishing a new version of the connector -You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? -1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-redpanda test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). -3. Make sure the `metadata.yaml` content is up to date. -4. Make the connector documentation and its changelog is up to date (`docs/integrations/destinations/redpanda.md`). -5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). -6. Pat yourself on the back for being an awesome contributor. -7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. 
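-As a closing tip, a single test class can usually be selected with Gradle's standard `--tests` filter (a hypothetical invocation; it assumes `integrationTest` is a regular Gradle `Test` task):
-```
-./gradlew :airbyte-integrations:connectors:destination-redpanda:integrationTest --tests RedpandaDestinationTest
-```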
- diff --git a/airbyte-integrations/connectors/destination-redpanda/build.gradle b/airbyte-integrations/connectors/destination-redpanda/build.gradle deleted file mode 100644 index a79982fe7c56..000000000000 --- a/airbyte-integrations/connectors/destination-redpanda/build.gradle +++ /dev/null @@ -1,32 +0,0 @@ -plugins { - id 'application' - id 'airbyte-java-connector' -} - -airbyteJavaConnector { - cdkVersionRequired = '0.2.0' - features = ['db-destinations'] - useLocalCdk = false -} - -//remove once upgrading the CDK version to 0.4.x or later -java { - compileJava { - options.compilerArgs.remove("-Werror") - } -} - -airbyteJavaConnector.addCdkDependencies() - -application { - mainClass = 'io.airbyte.integrations.destination.redpanda.RedpandaDestination' -} - -dependencies { - - // https://mvnrepository.com/artifact/org.apache.kafka/kafka-clients - implementation 'org.apache.kafka:kafka-clients:3.3.1' - implementation 'org.apache.kafka:connect-json:3.3.1' - - testImplementation "org.testcontainers:redpanda:1.17.5" -} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaConfig.java b/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaConfig.java deleted file mode 100644 index b8ecbc000f4a..000000000000 --- a/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaConfig.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.redpanda; - -import com.fasterxml.jackson.databind.JsonNode; -import java.util.Map; -import java.util.Optional; -import org.apache.kafka.clients.admin.Admin; -import org.apache.kafka.clients.admin.AdminClient; -import org.apache.kafka.clients.admin.AdminClientConfig; -import org.apache.kafka.clients.producer.KafkaProducer; -import org.apache.kafka.clients.producer.ProducerConfig; - -public class RedpandaConfig { - - // host1:port1,host2:port2,... 
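- // e.g. "redpanda-broker1:9092,redpanda-broker2:9092" (the example given in spec.json)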
- private final String bootstrapServers; - - private final long bufferMemory; - - private final String compressionType; - - private final int retries; - - private final int batchSize; - - private final Optional topicNumPartitions; - - private final Optional topicReplicationFactor; - - private final int socketConnectionSetupTimeoutMs; - - private final int socketConnectionSetupTimeoutMaxMs; - - private RedpandaConfig(String bootstrapServers, - long bufferMemory, - String compressionType, - int retries, - int batchSize, - Optional topicNumPartitions, - Optional topicReplicationFactor, - int socketConnectionSetupTimeoutMs, - int socketConnectionSetupTimeoutMaxMs) { - this.bootstrapServers = bootstrapServers; - this.bufferMemory = bufferMemory; - this.compressionType = compressionType; - this.retries = retries; - this.batchSize = batchSize; - this.topicNumPartitions = topicNumPartitions; - this.topicReplicationFactor = topicReplicationFactor; - this.socketConnectionSetupTimeoutMs = socketConnectionSetupTimeoutMs; - this.socketConnectionSetupTimeoutMaxMs = socketConnectionSetupTimeoutMaxMs; - } - - public static RedpandaConfig createConfig(JsonNode jsonConfig) { - return new RedpandaConfig( - jsonConfig.get("bootstrap_servers").asText(), - jsonConfig.get("buffer_memory").asLong(33554432L), - jsonConfig.get("compression_type").asText("none"), - jsonConfig.get("retries").asInt(5), - jsonConfig.get("batch_size").asInt(16384), - Optional.of(jsonConfig.get("topic_num_partitions").asInt(1)), - Optional.of(((Integer) jsonConfig.get("topic_replication_factor").asInt(1)).shortValue()), - jsonConfig.get("socket_connection_setup_timeout_ms").asInt(10000), - jsonConfig.get("socket_connection_setup_timeout_max_ms").asInt(30000)); - } - - public KafkaProducer createKafkaProducer() { - return new KafkaProducer<>(Map.of( - ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers, - ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer", - ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.connect.json.JsonSerializer", - ProducerConfig.BUFFER_MEMORY_CONFIG, bufferMemory, - ProducerConfig.COMPRESSION_TYPE_CONFIG, compressionType, - ProducerConfig.RETRIES_CONFIG, retries, - ProducerConfig.BATCH_SIZE_CONFIG, batchSize, - ProducerConfig.SOCKET_CONNECTION_SETUP_TIMEOUT_MS_CONFIG, socketConnectionSetupTimeoutMs, - ProducerConfig.SOCKET_CONNECTION_SETUP_TIMEOUT_MAX_MS_CONFIG, socketConnectionSetupTimeoutMaxMs)); - - } - - public Admin createAdminClient() { - return AdminClient.create(Map.of( - AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers, - AdminClientConfig.RETRIES_CONFIG, retries, - AdminClientConfig.SOCKET_CONNECTION_SETUP_TIMEOUT_MS_CONFIG, socketConnectionSetupTimeoutMs, - AdminClientConfig.SOCKET_CONNECTION_SETUP_TIMEOUT_MAX_MS_CONFIG, socketConnectionSetupTimeoutMaxMs)); - } - - public Optional topicNumPartitions() { - return topicNumPartitions; - } - - public Optional topicReplicationFactor() { - return topicReplicationFactor; - } - -} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaDestination.java b/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaDestination.java deleted file mode 100644 index 93b6e3d13f92..000000000000 --- a/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaDestination.java +++ /dev/null @@ 
-1,67 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.redpanda; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.BaseConnector; -import io.airbyte.cdk.integrations.base.AirbyteMessageConsumer; -import io.airbyte.cdk.integrations.base.Destination; -import io.airbyte.cdk.integrations.base.IntegrationRunner; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.util.List; -import java.util.Optional; -import java.util.UUID; -import java.util.function.Consumer; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class RedpandaDestination extends BaseConnector implements Destination { - - private static final Logger LOGGER = LoggerFactory.getLogger(RedpandaDestination.class); - - public static void main(String[] args) throws Exception { - new IntegrationRunner(new RedpandaDestination()).run(args); - } - - @Override - public AirbyteConnectionStatus check(JsonNode config) { - String topicName = "namespace.stream"; - RedpandaOperations redpandaOperations = null; - try { - RedpandaConfig redpandaConfig = RedpandaConfig.createConfig(config); - redpandaOperations = new RedpandaOperations(redpandaConfig); - redpandaOperations.createTopic( - List.of(new RedpandaOperations.TopicInfo(topicName, Optional.empty(), Optional.empty()))); - redpandaOperations.putRecordBlocking(topicName, UUID.randomUUID().toString(), Jsons.emptyObject()); - redpandaOperations.flush(); - return new AirbyteConnectionStatus().withStatus(AirbyteConnectionStatus.Status.SUCCEEDED); - } catch (Exception e) { - LOGGER.error("Error while trying to connect to Redpanda: ", e); - return new AirbyteConnectionStatus().withStatus(AirbyteConnectionStatus.Status.FAILED); - } finally { - if (redpandaOperations != null) { - try { - redpandaOperations.deleteTopic(List.of(topicName)); - } catch (Exception e) { - LOGGER.error("Error while deleting Redpanda topic: ", e); - } - redpandaOperations.close(); - } - } - } - - @Override - public AirbyteMessageConsumer getConsumer(JsonNode config, - ConfiguredAirbyteCatalog configuredCatalog, - Consumer outputRecordCollector) { - RedpandaConfig redpandaConfig = RedpandaConfig.createConfig(config); - return new RedpandaMessageConsumer(configuredCatalog, new RedpandaOperations(redpandaConfig), redpandaConfig, - outputRecordCollector); - } - -} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaMessageConsumer.java b/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaMessageConsumer.java deleted file mode 100644 index 4be72a2c3931..000000000000 --- a/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaMessageConsumer.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.redpanda; - -import static io.airbyte.cdk.integrations.base.JavaBaseConstants.COLUMN_NAME_AB_ID; -import static io.airbyte.cdk.integrations.base.JavaBaseConstants.COLUMN_NAME_DATA; -import static io.airbyte.cdk.integrations.base.JavaBaseConstants.COLUMN_NAME_EMITTED_AT; - -import io.airbyte.cdk.integrations.base.FailureTrackingAirbyteMessageConsumer; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.time.Instant; -import java.util.Map; -import java.util.UUID; -import java.util.function.Consumer; -import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class RedpandaMessageConsumer extends FailureTrackingAirbyteMessageConsumer { - - private static final Logger LOGGER = LoggerFactory.getLogger(RedpandaMessageConsumer.class); - - private final Consumer outputRecordCollector; - - private final RedpandaOperations redpandaOperations; - - private final RedpandaConfig redpandaConfig; - - private final Map redpandaWriteConfigs; - - public RedpandaMessageConsumer(ConfiguredAirbyteCatalog configuredCatalog, - RedpandaOperations redpandaOperations, - RedpandaConfig redpandaConfig, - Consumer outputRecordCollector) { - this.outputRecordCollector = outputRecordCollector; - this.redpandaOperations = redpandaOperations; - this.redpandaConfig = redpandaConfig; - this.redpandaWriteConfigs = configuredCatalog.getStreams().stream() - .collect( - Collectors.toUnmodifiableMap(AirbyteStreamNameNamespacePair::fromConfiguredAirbyteSteam, - str -> new RedpandaWriteConfig( - new RedpandaNameTransformer().topicName(str.getStream().getNamespace(), - str.getStream().getName()), - str.getDestinationSyncMode()))); - } - - @Override - protected void startTracked() { - redpandaOperations.createTopic(redpandaWriteConfigs.values().stream() - .map(wc -> new RedpandaOperations.TopicInfo(wc.topicName(), redpandaConfig.topicNumPartitions(), - redpandaConfig.topicReplicationFactor())) - .collect(Collectors.toSet())); - } - - @Override - protected void acceptTracked(AirbyteMessage message) { - if (message.getType() == AirbyteMessage.Type.RECORD) { - var messageRecord = message.getRecord(); - - var streamConfig = - redpandaWriteConfigs.get(AirbyteStreamNameNamespacePair.fromRecordMessage(messageRecord)); - - if (streamConfig == null) { - throw new IllegalArgumentException("Unrecognized destination stream"); - } - - String key = UUID.randomUUID().toString(); - - var data = Jsons.jsonNode(Map.of( - COLUMN_NAME_AB_ID, key, - COLUMN_NAME_DATA, messageRecord.getData(), - COLUMN_NAME_EMITTED_AT, Instant.now())); - - var topic = streamConfig.topicName(); - - redpandaOperations.putRecord(topic, key, data, e -> { - LOGGER.error("Error while sending record to Redpanda with reason ", e); - try { - throw e; - } catch (Exception ex) { - throw new RuntimeException(ex); - } - }); - } else if (message.getType() == AirbyteMessage.Type.STATE) { - outputRecordCollector.accept(message); - } else { - LOGGER.warn("Unsupported airbyte message type: {}", message.getType()); - } - } - - @Override - protected void close(boolean hasFailed) { - redpandaOperations.close(); - } - -} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaNameTransformer.java 
b/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaNameTransformer.java deleted file mode 100644 index b368a4a137a8..000000000000 --- a/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaNameTransformer.java +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.redpanda; - -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; - -public class RedpandaNameTransformer extends StandardNameTransformer { - - String topicName(String namespace, String stream) { - namespace = namespace != null ? namespace : ""; - var streamName = namespace + "_" + stream; - streamName = super.convertStreamName(streamName); - // max char length for redpanda topic name is 255 - return streamName.length() > 255 ? streamName.substring(0, 255) : streamName; - } - -} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaOperations.java b/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaOperations.java deleted file mode 100644 index c772e1eace4c..000000000000 --- a/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaOperations.java +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.redpanda; - -import com.fasterxml.jackson.databind.JsonNode; -import java.io.Closeable; -import java.util.Collection; -import java.util.Optional; -import java.util.Set; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.Future; -import java.util.function.Consumer; -import java.util.function.Function; -import java.util.function.Supplier; -import java.util.stream.Collectors; -import org.apache.kafka.clients.admin.Admin; -import org.apache.kafka.clients.admin.NewTopic; -import org.apache.kafka.clients.producer.KafkaProducer; -import org.apache.kafka.clients.producer.ProducerRecord; -import org.apache.kafka.common.errors.TopicExistsException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class RedpandaOperations implements Closeable { - - private static final Logger LOGGER = LoggerFactory.getLogger(RedpandaOperations.class); - - private final Admin adminClient; - - private final KafkaProducer kafkaProducer; - - public RedpandaOperations(RedpandaConfig redpandaConfig) { - this.adminClient = redpandaConfig.createAdminClient(); - this.kafkaProducer = redpandaConfig.createKafkaProducer(); - } - - public void createTopic(Collection topics) { - var newTopics = topics.stream() - .map(tf -> new NewTopic(tf.name(), tf.numPartitions(), tf.replicationFactor())) - .collect(Collectors.toSet()); - - var createTopicsResult = adminClient.createTopics(newTopics); - - // we need to wait for results since data replication is directly dependent on topic creation - - createTopicsResult.values().values().forEach(f -> { - try { - syncWrapper(() -> f); - } catch (ExecutionException e) { - // errors related to already existing topics should be ignored - if (!(e.getCause() instanceof TopicExistsException)) { - throw new RuntimeException(e); - } - } - }); - } - - public void deleteTopic(Collection topics) { - - var deleteTopicsResult = adminClient.deleteTopics(topics); - 
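- // Deletion is awaited synchronously; the connection check depends on this to clean up its probe topic.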
- try { - syncWrapper(deleteTopicsResult::all); - } catch (ExecutionException e) { - throw new RuntimeException(e); - } - } - - public Set listTopics() { - - var listTopics = adminClient.listTopics(); - - try { - return syncWrapper(listTopics::names); - } catch (ExecutionException e) { - throw new RuntimeException(e); - } - - } - - public void putRecord(String topic, String key, JsonNode data, Consumer consumer) { - var producerRecord = new ProducerRecord<>(topic, key, data); - - kafkaProducer.send(producerRecord, ((metadata, exception) -> { - if (exception != null) { - consumer.accept(exception); - } - })); - - } - - // used when testing write permissions on check - public void putRecordBlocking(String topic, String key, JsonNode data) { - - var producerRecord = new ProducerRecord<>(topic, key, data); - - try { - syncWrapper(kafkaProducer::send, producerRecord); - } catch (ExecutionException e) { - throw new RuntimeException(e); - } - } - - public void flush() { - kafkaProducer.flush(); - } - - private T syncWrapper(Supplier> asyncFunction) throws ExecutionException { - try { - return asyncFunction.get().get(); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - throw new RuntimeException(e); - } - } - - private T syncWrapper(Function, Future> asyncFunction, - ProducerRecord producerRecord) - throws ExecutionException { - return syncWrapper(() -> asyncFunction.apply(producerRecord)); - } - - public record TopicInfo( - - String name, - - Optional numPartitions, - - Optional replicationFactor - - ) { - - } - - @Override - public void close() { - kafkaProducer.flush(); - kafkaProducer.close(); - adminClient.close(); - } - -} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaWriteConfig.java b/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaWriteConfig.java deleted file mode 100644 index 9af6557ee4f1..000000000000 --- a/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaWriteConfig.java +++ /dev/null @@ -1,15 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.redpanda; - -import io.airbyte.protocol.models.v0.DestinationSyncMode; - -public record RedpandaWriteConfig( - - String topicName, - - DestinationSyncMode destinationSyncMode - -) {} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-redpanda/src/main/resources/spec.json deleted file mode 100644 index 89e41c686a26..000000000000 --- a/airbyte-integrations/connectors/destination-redpanda/src/main/resources/spec.json +++ /dev/null @@ -1,76 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/destinations/redpanda", - "supportsIncremental": true, - "supportsNormalization": false, - "supportsDBT": false, - "supported_destination_sync_modes": ["append"], - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Redpanda destination connector", - "type": "object", - "required": [ - "bootstrap_servers", - "buffer_memory", - "compression_type", - "retries", - "batch_size" - ], - "properties": { - "bootstrap_servers": { - "title": "Bootstrap Servers", - "description": "A list of host/port pairs to use for establishing the initial connection to the Redpanda cluster. 
The client will make use of all servers irrespective of which servers are specified here for bootstrapping—this list only impacts the initial hosts used to discover the full set of servers. This list should be in the form host1:port1,host2:port2,.... Since these servers are just used for the initial connection to discover the full cluster membership (which may change dynamically), this list need not contain the full set of servers (you may want more than one, though, in case a server is down).", - "type": "string", - "examples": ["redpanda-broker1:9092,redpanda-broker2:9092"] - }, - "buffer_memory": { - "title": "Buffer Memory", - "description": "The total bytes of memory the producer can use to buffer records waiting to be sent to the server.", - "type": "string", - "examples": 33554432 - }, - "compression_type": { - "title": "Compression Type", - "description": "The compression type for all data generated by the producer.", - "type": "string", - "default": "none", - "enum": ["none", "gzip", "snappy", "lz4", "zstd"] - }, - "batch_size": { - "title": "Batch Size", - "description": "The producer will attempt to batch records together into fewer requests whenever multiple records are being sent to the same partition.", - "type": "integer", - "examples": [16384] - }, - "retries": { - "title": "Retries", - "description": "Setting a value greater than zero will cause the client to resend any record whose send fails with a potentially transient error.", - "type": "integer", - "examples": [2147483647] - }, - "topic_num_partitions": { - "title": "Number of topic partitions", - "description": "The number of topic partitions which will be created on topic creation", - "type": "integer", - "examples": [10] - }, - "topic_replication_factor": { - "title": "Topic replication factor", - "description": "The number of topics to which messages will be replicated", - "type": "integer", - "examples": [10] - }, - "socket_connection_setup_timeout_ms": { - "title": "Socket Connection Setup Timeout", - "description": "The amount of time the client will wait for the socket connection to be established.", - "type": "integer", - "examples": [10000] - }, - "socket_connection_setup_timeout_max_ms": { - "title": "Socket Connection Setup Max Timeout", - "description": "The maximum amount of time the client will wait for the socket connection to be established. The connection setup timeout will increase exponentially for each consecutive connection failure up to this maximum.", - "type": "integer", - "examples": [30000] - } - } - } -} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaConsumer.java b/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaConsumer.java deleted file mode 100644 index fc5b22b71c7d..000000000000 --- a/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaConsumer.java +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.redpanda; - -import java.util.Map; -import org.apache.kafka.clients.consumer.KafkaConsumer; - -public class RedpandaConsumer extends KafkaConsumer { - - public RedpandaConsumer(Map configs) { - super(configs); - } - -} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaConsumerFactory.java b/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaConsumerFactory.java deleted file mode 100644 index d5b0cad995f2..000000000000 --- a/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaConsumerFactory.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.redpanda; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import java.util.Map; -import org.apache.kafka.clients.consumer.ConsumerConfig; - -public class RedpandaConsumerFactory { - - private RedpandaConsumerFactory() { - - } - - public static RedpandaConsumer getInstance(String bootstrapServers, String groupId) { - Map props = ImmutableMap.builder() - .put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers) - .put(ConsumerConfig.GROUP_ID_CONFIG, groupId) - .put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest") - .put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer") - .put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.connect.json.JsonDeserializer") - .build(); - - return new RedpandaConsumer<>(props); - } - -} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaContainerFactory.java b/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaContainerFactory.java deleted file mode 100644 index 66be43eff8f6..000000000000 --- a/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaContainerFactory.java +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.redpanda; - -import org.testcontainers.redpanda.RedpandaContainer; - -class RedpandaContainerFactory { - - private RedpandaContainerFactory() { - - } - - public static RedpandaContainer createRedpandaContainer() { - return new RedpandaContainer("docker.redpanda.com/vectorized/redpanda:v22.2.7"); - } - -} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaDestinationAcceptanceTest.java deleted file mode 100644 index 9d276c456a33..000000000000 --- a/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaDestinationAcceptanceTest.java +++ /dev/null @@ -1,151 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.redpanda; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.cdk.integrations.standardtest.destination.DestinationAcceptanceTest; -import io.airbyte.cdk.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; -import io.airbyte.cdk.integrations.standardtest.destination.comparator.TestDataComparator; -import io.airbyte.commons.json.Jsons; -import java.time.Duration; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ExecutionException; -import java.util.stream.Collectors; -import org.apache.kafka.clients.admin.Admin; -import org.apache.kafka.clients.admin.AdminClient; -import org.apache.kafka.clients.admin.AdminClientConfig; -import org.apache.kafka.clients.admin.TopicListing; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.testcontainers.redpanda.RedpandaContainer; - -public class RedpandaDestinationAcceptanceTest extends DestinationAcceptanceTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(RedpandaDestinationAcceptanceTest.class); - - private static RedpandaContainer redpandaContainer; - - private RedpandaNameTransformer redpandaNameTransformer; - - private Admin adminClient; - - @BeforeAll - static void initContainer() { - redpandaContainer = RedpandaContainerFactory.createRedpandaContainer(); - redpandaContainer.start(); - } - - @AfterAll - static void stopContainer() { - redpandaContainer.stop(); - redpandaContainer.close(); - } - - @Override - protected void setup(final TestDestinationEnv testEnv, final HashSet TEST_SCHEMAS) { - this.redpandaNameTransformer = new RedpandaNameTransformer(); - this.adminClient = AdminClient.create(Map.of( - AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, redpandaContainer.getBootstrapServers(), - AdminClientConfig.RETRIES_CONFIG, 5, - AdminClientConfig.SOCKET_CONNECTION_SETUP_TIMEOUT_MS_CONFIG, 3000, - AdminClientConfig.SOCKET_CONNECTION_SETUP_TIMEOUT_MAX_MS_CONFIG, 30000)); - } - - @Override - protected void tearDown(final TestDestinationEnv testEnv) throws ExecutionException, InterruptedException { - final var topics = adminClient.listTopics().listings().get().stream() - .filter(tl -> !tl.isInternal()) - .map(TopicListing::name) - .collect(Collectors.toSet()); - - adminClient.deleteTopics(topics); - } - - @Override - protected String getImageName() { - return "airbyte/destination-redpanda:dev"; - } - - @Override - protected JsonNode getConfig() { - return Jsons.jsonNode(ImmutableMap.builder() - .put("bootstrap_servers", redpandaContainer.getBootstrapServers()) - .put("compression_type", "none") - .put("batch_size", 16384) - .put("buffer_memory", "33554432") - .put("retries", 1) - .put("topic_num_partitions", 1) - .put("topic_replication_factor", 1) - .put("socket_connection_setup_timeout_ms", 3000) - .put("socket_connection_setup_timeout_max_ms", 3000) - .build()); - } - - @Override - protected JsonNode getFailCheckConfig() { - return Jsons.jsonNode(ImmutableMap.builder() - .put("bootstrap_servers", "127.0.0.9") - .put("compression_type", "none") - .put("batch_size", 16384) - .put("buffer_memory", "33554432") - .put("retries", 1) - .put("topic_num_partitions", 1) - .put("topic_replication_factor", 1) - 
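// Short socket timeouts keep this intentionally failing check from hanging. -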
.put("socket_connection_setup_timeout_ms", 3000) - .put("socket_connection_setup_timeout_max_ms", 3000) - .build()); - } - - @Override - protected TestDataComparator getTestDataComparator() { - return new AdvancedTestDataComparator(); - } - - @Override - protected boolean supportBasicDataTypeTest() { - return true; - } - - @Override - protected boolean supportArrayDataTypeTest() { - return true; - } - - @Override - protected boolean supportObjectDataTypeTest() { - return true; - } - - @Override - protected boolean implementsNamespaces() { - return true; - } - - @Override - protected List retrieveRecords(final TestDestinationEnv testEnv, - final String streamName, - final String namespace, - final JsonNode streamSchema) { - final List records = new ArrayList<>(); - final String bootstrapServers = redpandaContainer.getBootstrapServers(); - final String groupId = redpandaNameTransformer.getIdentifier(namespace + "-" + streamName); - try (final RedpandaConsumer redpandaConsumer = RedpandaConsumerFactory.getInstance(bootstrapServers, groupId)) { - final String topicName = redpandaNameTransformer.topicName(namespace, streamName); - redpandaConsumer.subscribe(Collections.singletonList(topicName)); - redpandaConsumer.poll(Duration.ofSeconds(5)).iterator() - .forEachRemaining(r -> records.add(r.value().get(JavaBaseConstants.COLUMN_NAME_DATA))); - } - return records; - } - -} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaDestinationTest.java b/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaDestinationTest.java deleted file mode 100644 index 925a332c504f..000000000000 --- a/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaDestinationTest.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.redpanda; - -import static org.assertj.core.api.Assertions.assertThat; - -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.testcontainers.redpanda.RedpandaContainer; - -class RedpandaDestinationTest { - - private RedpandaContainer redpandaContainer; - - private RedpandaDestination redpandaDestination; - - @BeforeEach - void setup() { - this.redpandaDestination = new RedpandaDestination(); - this.redpandaContainer = RedpandaContainerFactory.createRedpandaContainer(); - this.redpandaContainer.start(); - } - - @AfterEach - void shutdown() { - this.redpandaContainer.stop(); - this.redpandaContainer.close(); - } - - @Test - void testCheckWithSuccess() { - - var jsonConfig = Jsons.jsonNode(ImmutableMap.builder() - .put("bootstrap_servers", redpandaContainer.getBootstrapServers()) - .put("compression_type", "none") - .put("batch_size", 16384) - .put("buffer_memory", "33554432") - .put("retries", 1) - .put("topic_num_partitions", 1) - .put("topic_replication_factor", 1) - .put("socket_connection_setup_timeout_ms", 3000) - .put("socket_connection_setup_timeout_max_ms", 3000) - .build()); - - var status = redpandaDestination.check(jsonConfig); - - assertThat(status.getStatus()).isEqualTo(AirbyteConnectionStatus.Status.SUCCEEDED); - - } - - @Test - void testCheckWithFailure() { - - var jsonConfig = Jsons.jsonNode(ImmutableMap.builder() - .put("bootstrap_servers", "127.0.0.9") - .put("compression_type", "none") - .put("batch_size", 16384) - .put("buffer_memory", "33554432") - .put("retries", 1) - .put("topic_num_partitions", 1) - .put("topic_replication_factor", 1) - .put("socket_connection_setup_timeout_ms", 3000) - .put("socket_connection_setup_timeout_max_ms", 3000) - .build()); - - var status = redpandaDestination.check(jsonConfig); - - assertThat(status.getStatus()).isEqualTo(AirbyteConnectionStatus.Status.FAILED); - - } - -} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaOperationsTest.java b/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaOperationsTest.java deleted file mode 100644 index efa071bdfdb5..000000000000 --- a/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaOperationsTest.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.redpanda; - -import static org.assertj.core.api.Assertions.assertThat; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import java.time.Duration; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.UUID; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.testcontainers.redpanda.RedpandaContainer; - -class RedpandaOperationsTest { - - private static final String TEST_TOPIC = "test_topic"; - - private RedpandaOperations redpandaOperations; - - private RedpandaConsumer redpandaConsumer; - - private RedpandaContainer redpandaContainer; - - @BeforeEach - void setup() { - this.redpandaContainer = RedpandaContainerFactory.createRedpandaContainer(); - this.redpandaContainer.start(); - var jsonConfig = Jsons.jsonNode(ImmutableMap.builder() - .put("bootstrap_servers", redpandaContainer.getBootstrapServers()) - .put("compression_type", "none") - .put("batch_size", 16384) - .put("buffer_memory", "33554432") - .put("retries", 1) - .put("topic_num_partitions", 1) - .put("topic_replication_factor", 1) - .put("socket_connection_setup_timeout_ms", 3000) - .put("socket_connection_setup_timeout_max_ms", 3000) - .put("", false) - .build()); - this.redpandaOperations = new RedpandaOperations(RedpandaConfig.createConfig(jsonConfig)); - this.redpandaConsumer = RedpandaConsumerFactory.getInstance(redpandaContainer.getBootstrapServers(), TEST_TOPIC); - } - - @AfterEach - void shutdown() { - this.redpandaOperations.close(); - this.redpandaConsumer.close(); - this.redpandaContainer.stop(); - this.redpandaContainer.close(); - } - - @Test - void testPutRecord() { - - redpandaOperations.putRecord(TEST_TOPIC, UUID.randomUUID().toString(), Jsons.jsonNode(Map.of("attr_1", "data1")), e -> {}); - redpandaOperations.putRecord(TEST_TOPIC, UUID.randomUUID().toString(), Jsons.jsonNode(Map.of("attr_1", "data2")), e -> {}); - redpandaOperations.flush(); - - List records = new ArrayList<>(); - redpandaConsumer.subscribe(Collections.singletonList(TEST_TOPIC)); - redpandaConsumer.poll(Duration.ofSeconds(5)).iterator().forEachRemaining(r -> records.add(r.value())); - - assertThat(records) - .hasSize(2); - } - - @Test - void testCreateTopic() { - - var topicInfo = new RedpandaOperations.TopicInfo(TEST_TOPIC, Optional.of(1), Optional.of((short) 1)); - redpandaOperations.createTopic(Set.of(topicInfo)); - - Set topics = redpandaOperations.listTopics(); - - assertThat(topics).anyMatch(topic -> topic.equals(TEST_TOPIC)); - } - - @Test - void testDeleteTopic() { - - // given - var topicInfo = new RedpandaOperations.TopicInfo(TEST_TOPIC, Optional.of(1), Optional.of((short) 1)); - redpandaOperations.createTopic(Set.of(topicInfo)); - - // when - redpandaOperations.deleteTopic(Set.of(TEST_TOPIC)); - - // then - Set topics = redpandaOperations.listTopics(); - - assertThat(topics).isEmpty(); - - } - - @Test - void testPutRecordBlocking() { - - redpandaOperations.putRecordBlocking(TEST_TOPIC, UUID.randomUUID().toString(), Jsons.jsonNode(Map.of("attr_1", "data1"))); - redpandaOperations.putRecordBlocking(TEST_TOPIC, UUID.randomUUID().toString(), Jsons.jsonNode(Map.of("attr_1", "data2"))); - redpandaOperations.flush(); - - List records = new ArrayList<>(); - 
redpandaConsumer.subscribe(Collections.singletonList(TEST_TOPIC)); - redpandaConsumer.poll(Duration.ofSeconds(5)).iterator().forEachRemaining(r -> records.add(r.value())); - - assertThat(records) - .hasSize(2); - - } - -} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/test/java/io/airbyte/integrations/destination/redpanda/RedpandaConfigTest.java b/airbyte-integrations/connectors/destination-redpanda/src/test/java/io/airbyte/integrations/destination/redpanda/RedpandaConfigTest.java deleted file mode 100644 index 7fbef953d64a..000000000000 --- a/airbyte-integrations/connectors/destination-redpanda/src/test/java/io/airbyte/integrations/destination/redpanda/RedpandaConfigTest.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.redpanda; - -import static org.assertj.core.api.Assertions.assertThat; - -import io.airbyte.commons.json.Jsons; -import java.util.Comparator; -import java.util.Map; -import java.util.Optional; -import org.junit.jupiter.api.Test; - -class RedpandaConfigTest { - - @Test - void testRedpandaConfig() { - - var jsonConfig = Jsons.jsonNode(Map.of( - "bootstrap_servers", "host1:port1,host2:port2", - "buffer_memory", 33554432L, - "compression_type", "none", - "retries", 5, - "batch_size", 16384, - "topic_num_partitions", 1, - "topic_replication_factor", 1, - "socket_connection_setup_timeout_ms", 10000, - "socket_connection_setup_timeout_max_ms", 30000)); - - var redpandaConfig = RedpandaConfig.createConfig(jsonConfig); - - assertThat(redpandaConfig) - .usingComparatorForFields(new OptionalComparator(), "topicNumPartitions", "topicReplicationFactor") - .hasFieldOrPropertyWithValue("bootstrapServers", "host1:port1,host2:port2") - .hasFieldOrPropertyWithValue("bufferMemory", 33554432L) - .hasFieldOrPropertyWithValue("compressionType", "none") - .hasFieldOrPropertyWithValue("retries", 5) - .hasFieldOrPropertyWithValue("batchSize", 16384) - .hasFieldOrPropertyWithValue("topicNumPartitions", Optional.of(1)) - .hasFieldOrPropertyWithValue("topicReplicationFactor", Optional.of((short) 1)) - .hasFieldOrPropertyWithValue("socketConnectionSetupTimeoutMs", 10000) - .hasFieldOrPropertyWithValue("socketConnectionSetupTimeoutMaxMs", 30000); - - } - - private static class OptionalComparator implements Comparator> { - - @Override - public int compare(Optional o1, Optional o2) { - return Integer.compare(o1.get(), o2.get()); - } - - } - -} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/test/java/io/airbyte/integrations/destination/redpanda/RedpandaNameTransformerTest.java b/airbyte-integrations/connectors/destination-redpanda/src/test/java/io/airbyte/integrations/destination/redpanda/RedpandaNameTransformerTest.java deleted file mode 100644 index cefb90661cb9..000000000000 --- a/airbyte-integrations/connectors/destination-redpanda/src/test/java/io/airbyte/integrations/destination/redpanda/RedpandaNameTransformerTest.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
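The `RedpandaOperationsTest` above exercises topic creation, record production, and topic deletion through the connector's wrapper classes. Stripped of the wrapper, the same operations are plain Kafka Admin and Producer calls; a sketch under the assumption of a broker at `localhost:9092` (topic and field names are illustrative):

```
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.kafka.clients.admin.Admin;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

public class TopicLifecycleSketch {

    public static void main(String[] args) throws Exception {
        String bootstrap = "localhost:9092"; // assumption: broker address
        Map<String, Object> conf = Map.of(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrap);

        try (Admin admin = Admin.create(conf)) {
            // Same shape as TopicInfo(TEST_TOPIC, Optional.of(1), Optional.of((short) 1)).
            admin.createTopics(Set.of(new NewTopic("test_topic", Optional.of(1), Optional.of((short) 1))))
                 .all().get();

            produceOne(bootstrap, "test_topic");

            System.out.println(admin.listTopics().names().get()); // should contain test_topic
            admin.deleteTopics(Set.of("test_topic")).all().get();
        }
    }

    static void produceOne(String bootstrap, String topic) {
        Map<String, Object> props = Map.of(
            ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrap,
            ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringSerializer",
            ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.connect.json.JsonSerializer");
        JsonNode value = new ObjectMapper().createObjectNode().put("attr_1", "data1");
        try (KafkaProducer<String, JsonNode> producer = new KafkaProducer<>(props)) {
            // UUID key + JSON value is the record shape RedpandaOperations.putRecord sends.
            producer.send(new ProducerRecord<>(topic, UUID.randomUUID().toString(), value));
            producer.flush(); // mirror the explicit flush() in the tests above
        }
    }
}
```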
- */ - -package io.airbyte.integrations.destination.redpanda; - -import static org.assertj.core.api.Assertions.assertThat; - -import org.junit.jupiter.api.Test; - -class RedpandaNameTransformerTest { - - @Test - void testTransformTopicName() { - - var redpandaNameTransformer = new RedpandaNameTransformer(); - - String topicName = redpandaNameTransformer.topicName("namespace", "stream"); - - assertThat(topicName).isEqualTo("namespace_stream"); - - } - -} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/test/java/io/airbyte/integrations/destination/redpanda/RedpandaWriteConfigTest.java b/airbyte-integrations/connectors/destination-redpanda/src/test/java/io/airbyte/integrations/destination/redpanda/RedpandaWriteConfigTest.java deleted file mode 100644 index f0a248741a3b..000000000000 --- a/airbyte-integrations/connectors/destination-redpanda/src/test/java/io/airbyte/integrations/destination/redpanda/RedpandaWriteConfigTest.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.redpanda; - -import static org.assertj.core.api.Assertions.assertThat; - -import io.airbyte.protocol.models.v0.DestinationSyncMode; -import org.junit.jupiter.api.Test; - -class RedpandaWriteConfigTest { - - @Test - void testRedpandaWriteConfig() { - - var writeConfig = new RedpandaWriteConfig("namespace_stream", DestinationSyncMode.OVERWRITE); - - assertThat(writeConfig) - .hasFieldOrPropertyWithValue("topicName", "namespace_stream") - .hasFieldOrPropertyWithValue("destinationSyncMode", DestinationSyncMode.OVERWRITE); - - } - -} diff --git a/airbyte-integrations/connectors/destination-rockset/BOOTSTRAP.md b/airbyte-integrations/connectors/destination-rockset/BOOTSTRAP.md deleted file mode 100644 index 8e426bd08878..000000000000 --- a/airbyte-integrations/connectors/destination-rockset/BOOTSTRAP.md +++ /dev/null @@ -1,6 +0,0 @@ -# Rockset Destination Connector Bootstrap - -[Rockset](https://rockset.com/) is a real-time analytics database for fast queries on fresh data from databases, lakes and streams. -An [API key](https://rockset.com/docs/rest-api/#authentication) is required. -Configured collections will be created as required and data will be written via the [write API](https://rockset.com/docs/rest-api/#adddocuments). - diff --git a/airbyte-integrations/connectors/destination-rockset/README.md b/airbyte-integrations/connectors/destination-rockset/README.md deleted file mode 100644 index 7eb71ac3ee7c..000000000000 --- a/airbyte-integrations/connectors/destination-rockset/README.md +++ /dev/null @@ -1,72 +0,0 @@ -# Destination Rockset - -This is the repository for the Rockset destination connector in Java. -For information about how to use this connector within Airbyte, see [the User Documentation](https://docs.airbyte.io/integrations/destinations/rockset). - -## Local development - -#### Building via Gradle -From the Airbyte repository root, run: -``` -./gradlew :airbyte-integrations:connectors:destination-rockset:build -``` - -#### Create credentials -**If you are a community contributor**, generate the necessary credentials and place them in `secrets/config.json` conforming to the spec file in `src/main/resources/spec.json`. -Note that the `secrets` directory is git-ignored by default, so there is no danger of accidentally checking in sensitive information. 
- -**If you are an Airbyte core member**, follow the [instructions](https://docs.airbyte.io/contributing-to-airbyte/building-new-connector#using-credentials-in-ci) to set up the credentials. - -### Locally running the connector docker image - -#### Build -Build the connector image via Gradle: - -``` -./gradlew :airbyte-integrations:connectors:destination-rockset:buildConnectorImage -``` -Once built, the docker image name and tag on your host will be `airbyte/destination-rockset:dev`. - -#### Run -Then run any of the connector commands as follows: -``` -docker run --rm airbyte/destination-rockset:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-rockset:dev check --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-rockset:dev discover --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-rockset:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` - -## Testing -We use `JUnit` for Java tests. - -### Unit and Integration Tests -Place unit tests under `src/test/java/io/airbyte/integrations/destination/rockset`. - -#### Acceptance Tests -Airbyte has a standard test suite that all destination connectors must pass. Implement the `TODO`s in -`src/test-integration/java/io/airbyte/integrations/destination/rockset/RocksetDestinationAcceptanceTest.java`. - -### Using gradle to run tests -All commands should be run from the Airbyte project root. -To run unit tests: -``` -./gradlew :airbyte-integrations:connectors:destination-rockset:unitTest -``` -To run acceptance and custom integration tests: -``` -./gradlew :airbyte-integrations:connectors:destination-rockset:integrationTest -``` - -## Dependency Management - -### Publishing a new version of the connector -You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? -1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-rockset test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). -3. Make sure the `metadata.yaml` content is up to date. -4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/destinations/rockset.md`). -5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). -6. Pat yourself on the back for being an awesome contributor. -7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
- diff --git a/airbyte-integrations/connectors/destination-rockset/build.gradle b/airbyte-integrations/connectors/destination-rockset/build.gradle deleted file mode 100644 index a5e64c058290..000000000000 --- a/airbyte-integrations/connectors/destination-rockset/build.gradle +++ /dev/null @@ -1,30 +0,0 @@ -plugins { - id 'application' - id 'airbyte-java-connector' -} - -airbyteJavaConnector { - cdkVersionRequired = '0.2.0' - features = ['db-destinations'] - useLocalCdk = false -} - -//remove once upgrading the CDK version to 0.4.x or later -java { - compileJava { - options.compilerArgs.remove("-Werror") - } -} - -airbyteJavaConnector.addCdkDependencies() - -application { - mainClass = 'io.airbyte.integrations.destination.rockset.RocksetDestination' - applicationDefaultJvmArgs = ['-XX:+ExitOnOutOfMemoryError', '-XX:MaxRAMPercentage=75.0'] -} - -dependencies { - - implementation group: 'com.rockset', name: 'rockset-java', version: '0.9.0' - implementation group: 'org.awaitility', name: 'awaitility', version: '4.1.1' -} diff --git a/airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetDestination.java b/airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetDestination.java deleted file mode 100644 index 2c4b25c613dd..000000000000 --- a/airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetDestination.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.rockset; - -import static io.airbyte.integrations.destination.rockset.RocksetUtils.ROCKSET_WORKSPACE_ID; - -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.ImmutableMap; -import com.rockset.client.ApiClient; -import com.rockset.client.api.DocumentsApi; -import com.rockset.client.model.AddDocumentsRequest; -import io.airbyte.cdk.integrations.BaseConnector; -import io.airbyte.cdk.integrations.base.AirbyteMessageConsumer; -import io.airbyte.cdk.integrations.base.Destination; -import io.airbyte.cdk.integrations.base.IntegrationRunner; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus.Status; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.util.Map; -import java.util.UUID; -import java.util.function.Consumer; -import org.apache.commons.lang3.RandomStringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class RocksetDestination extends BaseConnector implements Destination { - - private static final Logger LOGGER = LoggerFactory.getLogger(RocksetDestination.class); - private static final ObjectMapper mapper = new ObjectMapper(); - - public static void main(String[] args) throws Exception { - new IntegrationRunner(new RocksetDestination()).run(args); - } - - @Override - public AirbyteConnectionStatus check(final JsonNode config) { - ApiClient client = null; - String workspace = null; - // Create a temporary table - final String cname = "tmp_test_airbyte_collection_" + RandomStringUtils.randomAlphabetic(7).toLowerCase(); - - try { - client = RocksetUtils.apiClientFromConfig(config); - - workspace = config.get(ROCKSET_WORKSPACE_ID).asText(); - 
RocksetUtils.createWorkspaceIfNotExists(client, workspace); - - RocksetUtils.createCollectionIfNotExists(client, workspace, cname); - RocksetUtils.waitUntilCollectionReady(client, workspace, cname); - - // Write a single document - final String unique = UUID.randomUUID().toString(); - final Map dummyRecord = ImmutableMap.of("_id", unique); - final AddDocumentsRequest req = new AddDocumentsRequest(); - req.addDataItem(mapper.convertValue(dummyRecord, new TypeReference<>() {})); - new DocumentsApi(client).add(workspace, cname, req); - - // Verify that the doc shows up - final String sql = String.format("SELECT * FROM %s.%s WHERE _id = '%s';", workspace, cname, unique); - RocksetUtils.waitUntilDocCount(client, sql, 1); - - LOGGER.info("Check succeeded"); - return new AirbyteConnectionStatus().withStatus(Status.SUCCEEDED); - } catch (Exception e) { - LOGGER.info("Check failed.", e); - return new AirbyteConnectionStatus().withStatus(Status.FAILED).withMessage(e.getMessage() != null ? e.getMessage() : e.toString()); - } finally { - // Delete the collection - if (client != null && workspace != null) { - RocksetUtils.deleteCollectionIfExists(client, workspace, cname); - } - - } - } - - @Override - public AirbyteMessageConsumer getConsumer( - JsonNode config, - ConfiguredAirbyteCatalog catalog, - Consumer outputRecordCollector) - throws Exception { - return new RocksetWriteApiConsumer(config, catalog, outputRecordCollector); - } - -} diff --git a/airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetSQLNameTransformer.java b/airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetSQLNameTransformer.java deleted file mode 100644 index 8faaef972fef..000000000000 --- a/airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetSQLNameTransformer.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.rockset; - -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; - -public class RocksetSQLNameTransformer extends StandardNameTransformer { - - @Override - public String convertStreamName(String input) { - return super.convertStreamName(input).toLowerCase(); - } - - @Override - public String applyDefaultCase(String input) { - return input.toLowerCase(); - } - -} diff --git a/airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetUtils.java b/airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetUtils.java deleted file mode 100644 index 775564c84f81..000000000000 --- a/airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetUtils.java +++ /dev/null @@ -1,264 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
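Condensed, the deleted `check()` above is a create-write-query-clean-up smoke test. The sketch below restates that flow against the `RocksetUtils` helper signatures visible in this patch and the pinned `rockset-java` 0.9.0 client; the API key, workspace, and collection name are placeholders:

```
import java.util.Map;
import java.util.UUID;

import com.rockset.client.ApiClient;
import com.rockset.client.api.DocumentsApi;
import com.rockset.client.model.AddDocumentsRequest;

public class CheckFlowSketch {

    public static void main(String[] args) throws Exception {
        ApiClient client = RocksetUtils.apiClient("YOUR_API_KEY", "https://api.rs2.usw2.rockset.com");
        String workspace = "commons";                          // placeholder workspace
        String cname = "tmp_test_airbyte_collection_abcdefg";  // random suffix in the real check()

        try {
            RocksetUtils.createWorkspaceIfNotExists(client, workspace);
            RocksetUtils.createCollectionIfNotExists(client, workspace, cname);
            RocksetUtils.waitUntilCollectionReady(client, workspace, cname);

            // Write one document, then poll until it is queryable: Rockset indexes
            // asynchronously, so a successful add() alone proves little.
            String unique = UUID.randomUUID().toString();
            AddDocumentsRequest req = new AddDocumentsRequest();
            req.addDataItem(Map.of("_id", unique));
            new DocumentsApi(client).add(workspace, cname, req);

            String sql = String.format("SELECT * FROM %s.%s WHERE _id = '%s';", workspace, cname, unique);
            RocksetUtils.waitUntilDocCount(client, sql, 1);
        } finally {
            RocksetUtils.deleteCollectionIfExists(client, workspace, cname); // always clean up
        }
    }
}
```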
- */ - -package io.airbyte.integrations.destination.rockset; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.hash.Hasher; -import com.google.common.hash.Hashing; -import com.google.gson.internal.LinkedTreeMap; -import com.rockset.client.ApiClient; -import com.rockset.client.ApiException; -import com.rockset.client.api.CollectionsApi; -import com.rockset.client.api.DocumentsApi; -import com.rockset.client.api.QueriesApi; -import com.rockset.client.api.WorkspacesApi; -import com.rockset.client.model.Collection; -import com.rockset.client.model.CreateCollectionRequest; -import com.rockset.client.model.CreateWorkspaceRequest; -import com.rockset.client.model.DeleteDocumentsRequest; -import com.rockset.client.model.DeleteDocumentsRequestData; -import com.rockset.client.model.ErrorModel; -import com.rockset.client.model.GetCollectionResponse; -import com.rockset.client.model.ListCollectionsResponse; -import com.rockset.client.model.QueryRequest; -import com.rockset.client.model.QueryRequestSql; -import com.rockset.client.model.QueryResponse; -import io.airbyte.commons.lang.Exceptions; -import java.nio.charset.Charset; -import java.util.List; -import java.util.Optional; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; -import org.awaitility.Awaitility; -import org.awaitility.Duration; -import org.awaitility.core.ConditionFactory; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class RocksetUtils { - - private static final Logger LOGGER = LoggerFactory.getLogger(RocksetUtils.class); - - public static final String ROCKSET_WORKSPACE_ID = "workspace"; - public static final String API_KEY_ID = "api_key"; - public static final String API_SERVER_ID = "api_server"; - public static final Duration DEFAULT_TIMEOUT = new Duration(20, TimeUnit.MINUTES); - public static final Duration DEFAULT_POLL_INTERVAL = Duration.FIVE_SECONDS; - private static final java.time.Duration DEFAULT_HTTP_CLIENT_TIMEOUT = java.time.Duration.ofMinutes(1L); - private static final String DEFAULT_ROCKSET_CLIENT_VERSION = "0.9.0"; - - public static ApiClient apiClientFromConfig(JsonNode config) { - final String apiKey = config.get(API_KEY_ID).asText(); - final String apiServer = config.get(API_SERVER_ID).asText(); - return apiClient(apiKey, apiServer); - } - - public static ApiClient apiClient(String apiKey, String apiServer) { - final ApiClient client = new ApiClient(); - - client.setReadTimeout((int) DEFAULT_HTTP_CLIENT_TIMEOUT.toMillis()) - .setConnectTimeout((int) DEFAULT_HTTP_CLIENT_TIMEOUT.toMillis()) - .setWriteTimeout((int) DEFAULT_HTTP_CLIENT_TIMEOUT.toMillis()); - - client.setApiKey(apiKey); - client.setApiServer(apiServer); - client.setVersion(DEFAULT_ROCKSET_CLIENT_VERSION); - return client; - } - - public static void createWorkspaceIfNotExists(ApiClient client, String workspace) { - final CreateWorkspaceRequest request = new CreateWorkspaceRequest().name(workspace); - - try { - new WorkspacesApi(client).create(request); - LOGGER.info(String.format("Created workspace %s", workspace)); - } catch (ApiException e) { - if (e.getCode() == 400 && e.getErrorModel().getType() == ErrorModel.TypeEnum.ALREADYEXISTS) { - LOGGER.info(String.format("Workspace %s already exists", workspace)); - return; - } - - throw new RuntimeException(e); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - // Assumes the workspace exists - public static void createCollectionIfNotExists(ApiClient client, String workspace, String cname) { - final 
CreateCollectionRequest request = new CreateCollectionRequest().name(cname); - try { - new CollectionsApi(client).create(workspace, request); - LOGGER.info(String.format("Created collection %s.%s", workspace, cname)); - } catch (ApiException e) { - if (e.getCode() == 400 && e.getErrorModel().getType() == ErrorModel.TypeEnum.ALREADYEXISTS) { - LOGGER.info(String.format("Collection %s.%s already exists", workspace, cname)); - return; - } - throw new RuntimeException(e); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - // Assumes the collection exists - public static void deleteCollectionIfExists(ApiClient client, String workspace, String cname) { - try { - new CollectionsApi(client).delete(workspace, cname); - LOGGER.info(String.format("Deleted collection %s.%s", workspace, cname)); - } catch (ApiException e) { - if (e.getCode() == 404 && e.getErrorModel().getType() == ErrorModel.TypeEnum.NOTFOUND) { - LOGGER.info(String.format("Collection %s.%s does not exist", workspace, cname)); - return; - } - - throw new RuntimeException(e); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - // Assumes the collection exists - public static void waitUntilCollectionReady(ApiClient client, String workspace, String cname) { - pollingConfig(workspace, cname) - .until(() -> isCollectionReady(client, workspace, cname)); - - } - - private static boolean isCollectionReady(ApiClient client, String workspace, String cname) throws Exception { - final GetCollectionResponse resp = new CollectionsApi(client).get(workspace, cname); - final Collection.StatusEnum status = resp.getData().getStatus(); - if (status == Collection.StatusEnum.READY) { - LOGGER.info(String.format("Collection %s.%s is READY", workspace, cname)); - return true; - } else { - LOGGER.info( - String.format( - "Waiting until %s.%s is READY, it is %s", workspace, cname, status.toString())); - return false; - } - } - - // Assumes the collection exists - public static void waitUntilCollectionDeleted(ApiClient client, String workspace, String cname) { - pollingConfig(workspace, cname) - .until(() -> isCollectionDeleted(client, workspace, cname)); - - } - - private static boolean isCollectionDeleted(ApiClient client, String workspace, String cname) throws Exception { - try { - new CollectionsApi(client).get(workspace, cname); - LOGGER.info( - String.format( - "Collection %s.%s still exists, waiting for deletion to complete", - workspace, cname)); - } catch (ApiException e) { - if (e.getCode() == 404 && e.getErrorModel().getType() == ErrorModel.TypeEnum.NOTFOUND) { - LOGGER.info(String.format("Collection %s.%s does not exist", workspace, cname)); - return true; - } - - throw e; - } - return false; - } - - // Assumes the collection exists - public static void waitUntilDocCount(ApiClient client, String sql, int desiredCount) { - pollingConfig(sql) - .until(() -> queryMatchesCount(client, sql, desiredCount)); - } - - private static boolean queryMatchesCount(ApiClient client, String sql, int desiredCount) throws Exception { - LOGGER.info(String.format("Running query %s", sql)); - final QueryRequestSql qrs = new QueryRequestSql(); - qrs.setQuery(sql); - - final QueryRequest qr = new QueryRequest(); - qr.setSql(qrs); - - final QueryResponse response = new QueriesApi(client).query(qr); - final int resultCount = response.getResults().size(); - - if (resultCount == desiredCount) { - LOGGER.info(String.format("Desired result count %s found", desiredCount)); - return true; - } else { - LOGGER.info( - String.format( - "Waiting 
for desired result count %s, current is %s", desiredCount, resultCount)); - return false; - } - } - - private static boolean doesCollectionExist(ApiClient client, String workspace, String cname) throws Exception { - final ListCollectionsResponse collectionsResponse = new CollectionsApi(client).workspace(workspace); - return collectionsResponse - .getData() - .stream() - .anyMatch(coll -> coll.getName().equals(cname)); - } - - public static void clearCollectionIfCollectionExists(ApiClient client, String workspace, String cname) { - Exceptions.toRuntime(() -> { - - if (!doesCollectionExist(client, workspace, cname)) { - return; - } - - final QueryRequest qr = new QueryRequest().sql(new QueryRequestSql().query(String.format("SELECT _id from %s.%s", workspace, cname))); - try { - final QueryResponse resp = new QueriesApi(client).query(qr); - final List ids = - resp.getResults().stream().map(f -> (LinkedTreeMap) f).map(f -> (String) f.get("_id")).collect(Collectors.toList()); - final DeleteDocumentsRequest ddr = new DeleteDocumentsRequest(); - for (String id : ids) { - ddr.addDataItem(new DeleteDocumentsRequestData().id(id)); - } - LOGGER.info("Deleting documents from " + cname); - new DocumentsApi(client).delete(workspace, cname, ddr); - } catch (Exception e) { - LOGGER.error("Error while trying to clear a collection ", e); - } - - pollingConfig(workspace, cname) - .until(() -> isCollectionEmpty(client, workspace, cname)); - - }); - } - - private static boolean isCollectionEmpty(ApiClient client, String workspace, String cname) { - return Exceptions.toRuntime(() -> { - final String elementCount = String.format("SELECT count(*) as numel from %s.%s", workspace, cname); - - final QueryRequest qr = new QueryRequest().sql(new QueryRequestSql().query(elementCount)); - final QueryResponse resp = new QueriesApi(client).query(qr); - Optional count = - resp.getResults().stream().map(f -> (LinkedTreeMap) f).map(f -> f.get("numel")).map(f -> (Number) f).findFirst(); - return count.filter(number -> number.intValue() == 0).isPresent(); - - }); - - } - - private static Duration jitter(String... args) { - final Hasher hsh = Hashing.murmur3_32().newHasher(); - for (String s : args) { - hsh.putString(s, Charset.defaultCharset()); - } - - return new Duration(Math.abs(hsh.hash().asInt()) % DEFAULT_POLL_INTERVAL.getValueInMS(), TimeUnit.MILLISECONDS); - - } - - private static ConditionFactory pollingConfig(final String... args) { - return Awaitility.await() - .timeout(DEFAULT_TIMEOUT) - .pollDelay(jitter(args)) - .pollInterval(DEFAULT_POLL_INTERVAL); - } - -} diff --git a/airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetWriteApiConsumer.java b/airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetWriteApiConsumer.java deleted file mode 100644 index 1dd6687c0ea7..000000000000 --- a/airbyte-integrations/connectors/destination-rockset/src/main/java/io/airbyte/integrations/destination/rockset/RocksetWriteApiConsumer.java +++ /dev/null @@ -1,194 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
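The polling helpers above combine three knobs: a 20-minute timeout, a fixed five-second interval, and a deterministic per-target jitter on the first poll so concurrent pollers for different collections don't fire in lockstep. A standalone sketch of the same idea, using Awaitility's `java.time` API and `String.hashCode` as a stand-in for the murmur3 hash:

```
import java.time.Duration;
import java.util.concurrent.atomic.AtomicInteger;

import org.awaitility.Awaitility;

public class PollingSketch {

    public static void main(String[] args) {
        // Deterministic jitter: hash the poll target so each target gets a stable,
        // distinct initial delay within one poll interval.
        String target = "commons.my_collection"; // illustrative workspace.collection
        Duration pollInterval = Duration.ofSeconds(5);
        Duration jitter = Duration.ofMillis(Math.abs(target.hashCode()) % pollInterval.toMillis());

        AtomicInteger attempts = new AtomicInteger();
        Awaitility.await()
            .timeout(Duration.ofMinutes(20))
            .pollDelay(jitter)
            .pollInterval(pollInterval)
            .until(() -> attempts.incrementAndGet() >= 3); // stand-in for isCollectionReady(...)
    }
}
```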
- */ - -package io.airbyte.integrations.destination.rockset; - -import static io.airbyte.integrations.destination.rockset.RocksetUtils.API_KEY_ID; -import static io.airbyte.integrations.destination.rockset.RocksetUtils.API_SERVER_ID; -import static io.airbyte.integrations.destination.rockset.RocksetUtils.ROCKSET_WORKSPACE_ID; - -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.rockset.client.ApiClient; -import com.rockset.client.api.DocumentsApi; -import com.rockset.client.model.AddDocumentsRequest; -import com.rockset.client.model.AddDocumentsResponse; -import com.rockset.client.model.DocumentStatus; -import io.airbyte.cdk.integrations.base.FailureTrackingAirbyteMessageConsumer; -import io.airbyte.commons.lang.Exceptions; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.DestinationSyncMode; -import java.time.Instant; -import java.time.temporal.ChronoUnit; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; -import java.util.function.Consumer; -import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class RocksetWriteApiConsumer extends FailureTrackingAirbyteMessageConsumer { - - private static final Logger LOGGER = LoggerFactory.getLogger(RocksetWriteApiConsumer.class); - private static final ObjectMapper mapper = new ObjectMapper(); - // IO bound tasks, use cached thread pool - private final ExecutorService exec = Executors.newFixedThreadPool(5); - - private final ScheduledExecutorService schedExec = Executors.newSingleThreadScheduledExecutor(); - - private final String apiKey; - private final String apiServer; - private final String workspace; - - private final ConfiguredAirbyteCatalog catalog; - private final Consumer outputRecordCollector; - - // records to be sent per collection - private final Map> records; - private final RocksetSQLNameTransformer nameTransformer = new RocksetSQLNameTransformer(); - private long lastSentDocumentMicroSeconds = 0L; - private ApiClient client; - - public RocksetWriteApiConsumer( - JsonNode config, - ConfiguredAirbyteCatalog catalog, - Consumer outputRecordCollector) { - this.apiKey = config.get(API_KEY_ID).asText(); - this.apiServer = config.get(API_SERVER_ID).asText(); - this.workspace = config.get(ROCKSET_WORKSPACE_ID).asText(); - this.records = new HashMap<>(); - - this.catalog = catalog; - this.outputRecordCollector = outputRecordCollector; - } - - @Override - protected void startTracked() throws Exception { - this.client = RocksetUtils.apiClient(apiKey, apiServer); - LOGGER.info("Creating workspace"); - RocksetUtils.createWorkspaceIfNotExists(client, workspace); - - CompletableFuture[] overwrittenStreams = catalog.getStreams() - .stream() - .filter(s -> s.getDestinationSyncMode() == DestinationSyncMode.OVERWRITE) - .map(s -> s.getStream().getName()) - .map(nameTransformer::convertStreamName) - .map(this::emptyCollection) - .collect(Collectors.toList()) - .toArray(CompletableFuture[]::new); - - CompletableFuture[] appendStreams = catalog.getStreams().stream() - .filter(s -> s.getDestinationSyncMode() == 
DestinationSyncMode.APPEND) - .map(s -> s.getStream().getName()) - .map(nameTransformer::convertStreamName) - .map(this::createCollectionIntoReadyState) - .collect(Collectors.toList()) - .toArray(CompletableFuture[]::new); - - CompletableFuture initStreams = CompletableFuture.allOf( - CompletableFuture.allOf(overwrittenStreams), - CompletableFuture.allOf(appendStreams)); - - // Creating and readying many collections at once can be slow - initStreams.get(30, TimeUnit.MINUTES); - - // Schedule sending of records at a fixed rate - schedExec.scheduleAtFixedRate(this::sendBatches, 0L, 5L, TimeUnit.SECONDS); - } - - @Override - protected void acceptTracked(AirbyteMessage message) throws Exception { - if (message.getType() == AirbyteMessage.Type.RECORD) { - String cname = nameTransformer.convertStreamName(message.getRecord().getStream()); - - Map obj = mapper.convertValue(message.getRecord().getData(), new TypeReference<>() {}); - long current = ChronoUnit.MICROS.between(Instant.EPOCH, Instant.now()); - - // ensure a monotonic timestamp on records at microsecond precision. - while (current <= lastSentDocumentMicroSeconds) { - current = ChronoUnit.MICROS.between(Instant.EPOCH, Instant.now()); - } - lastSentDocumentMicroSeconds = current; - - // microsecond precision - // See https://rockset.com/docs/special-fields/#the-_event_time-field - obj.put("_event_time", current); - addRequestToBatch(obj, cname); - } else if (message.getType() == AirbyteMessage.Type.STATE) { - this.outputRecordCollector.accept(message); - } - } - - @Override - protected void close(boolean hasFailed) throws Exception { - // Nothing to do - LOGGER.info("Shutting down!"); - LOGGER.info("Sending final batch of records if any remain!"); - sendBatches(); - LOGGER.info("Final batch of records sent!"); - LOGGER.info("Shutting down executors"); - this.schedExec.shutdown(); - exec.shutdown(); - LOGGER.info("Executors shut down"); - } - - private void addRequestToBatch(Object document, String cname) { - synchronized (this.records) { - List collectionRecords = this.records.getOrDefault(cname, new ArrayList<>()); - collectionRecords.add(document); - this.records.put(cname, collectionRecords); - } - } - - private void sendBatches() { - List> requests; - synchronized (this.records) { - requests = this.records.entrySet().stream().filter(e -> e.getValue().size() > 0) - .map((e) -> { - AddDocumentsRequest adr = new AddDocumentsRequest(); - e.getValue().forEach(adr::addDataItem); - return Map.entry(e.getKey(), adr); - } - - ).collect(Collectors.toList()); - this.records.clear(); - } - List responses; - responses = requests.stream().map((e) -> Exceptions.toRuntime(() -> new DocumentsApi(client).add(workspace, e.getKey(), e.getValue()))) - .collect(Collectors.toList()); - - responses - .stream() - .flatMap(d -> d.getData().stream()) - .collect(Collectors.groupingBy(DocumentStatus::getStatus)) - .entrySet() - .stream() - .forEach((e) -> LOGGER.info("{} documents added with a status of {}", e.getValue().size(), e.getKey())); - } - - private CompletableFuture emptyCollection(String cname) { - return CompletableFuture.runAsync(() -> { - RocksetUtils.clearCollectionIfCollectionExists(client, workspace, cname); - RocksetUtils.createCollectionIfNotExists(client, workspace, cname); - RocksetUtils.waitUntilCollectionReady(client, workspace, cname); - }, exec); - } - - private CompletableFuture createCollectionIntoReadyState(String cname) { - return CompletableFuture.runAsync(() -> { - RocksetUtils.createCollectionIfNotExists(client, workspace, cname); - 
RocksetUtils.waitUntilCollectionReady(client, workspace, cname); - }, exec); - } - -} diff --git a/airbyte-integrations/connectors/destination-rockset/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-rockset/src/main/resources/spec.json deleted file mode 100644 index 1bf5a3af68af..000000000000 --- a/airbyte-integrations/connectors/destination-rockset/src/main/resources/spec.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/destinations/rockset", - "supportsIncremental": true, - "supported_destination_sync_modes": ["append", "overwrite"], - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Rockset Destination Spec", - "type": "object", - "required": ["api_key", "workspace"], - "additionalProperties": false, - "properties": { - "api_key": { - "title": "Api Key", - "description": "Rockset api key", - "type": "string", - "order": 0, - "airbyte_secret": true - }, - "workspace": { - "title": "Workspace", - "description": "The Rockset workspace in which collections will be created + written to.", - "type": "string", - "examples": ["commons", "my_workspace"], - "default": "commons", - "airbyte_secret": false, - "order": 1 - }, - "api_server": { - "title": "Api Server", - "description": "Rockset api URL", - "type": "string", - "airbyte_secret": false, - "default": "https://api.rs2.usw2.rockset.com", - "pattern": "^https:\\/\\/.*.rockset.com$", - "order": 2 - } - } - } -} diff --git a/airbyte-integrations/connectors/destination-rockset/src/test-integration/java/io/airbyte/integrations/destination/rockset/RocksetDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-rockset/src/test-integration/java/io/airbyte/integrations/destination/rockset/RocksetDestinationAcceptanceTest.java deleted file mode 100644 index 81c294eb7213..000000000000 --- a/airbyte-integrations/connectors/destination-rockset/src/test-integration/java/io/airbyte/integrations/destination/rockset/RocksetDestinationAcceptanceTest.java +++ /dev/null @@ -1,196 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
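Two patterns in the write consumer above are worth isolating: the busy-wait that guarantees strictly increasing microsecond `_event_time` values, and the lock-swap-flush batching loop driven by a single-threaded scheduler. A self-contained sketch of both (class and method names are illustrative):

```
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class ConsumerPatternsSketch {

    private final Map<String, List<Object>> records = new HashMap<>();
    private final ScheduledExecutorService schedExec = Executors.newSingleThreadScheduledExecutor();
    private long lastMicros = 0L;

    // Busy-wait until the clock advances, so every record gets a strictly
    // increasing microsecond timestamp, as acceptTracked() does for _event_time.
    synchronized long nextEventTimeMicros() {
        long current = ChronoUnit.MICROS.between(Instant.EPOCH, Instant.now());
        while (current <= lastMicros) {
            current = ChronoUnit.MICROS.between(Instant.EPOCH, Instant.now());
        }
        lastMicros = current;
        return current;
    }

    void start() {
        // Drain accumulated records every five seconds, like the deleted consumer.
        schedExec.scheduleAtFixedRate(this::sendBatches, 0L, 5L, TimeUnit.SECONDS);
    }

    void accept(String collection, Object document) {
        synchronized (records) { // producers and the drain thread share this map
            records.computeIfAbsent(collection, k -> new ArrayList<>()).add(document);
        }
    }

    private void sendBatches() {
        Map<String, List<Object>> toSend;
        synchronized (records) {
            toSend = new HashMap<>(records); // swap the buffer out under the lock...
            records.clear();
        }
        // ...then do the slow network I/O outside it.
        toSend.forEach((collection, docs) ->
            System.out.printf("would add %d documents to %s%n", docs.size(), collection));
    }

    void close() {
        sendBatches(); // flush whatever remains, as the deleted close() does before shutdown
        schedExec.shutdown();
    }
}
```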
- */ - -package io.airbyte.integrations.destination.rockset; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import com.rockset.client.ApiClient; -import com.rockset.client.api.QueriesApi; -import com.rockset.client.model.QueryRequest; -import com.rockset.client.model.QueryRequestSql; -import com.squareup.okhttp.Response; -import io.airbyte.cdk.integrations.standardtest.destination.DestinationAcceptanceTest; -import io.airbyte.cdk.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; -import io.airbyte.cdk.integrations.standardtest.destination.comparator.TestDataComparator; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.lang.Exceptions; -import java.io.IOException; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.stream.Collectors; -import org.junit.jupiter.api.AfterAll; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.testng.collections.Sets; - -public class RocksetDestinationAcceptanceTest extends DestinationAcceptanceTest { - - private static final ObjectMapper mapper = new ObjectMapper(); - private static final Set collectionsToClear = Sets.newHashSet(); - private static final Set collectionsToDelete = Sets.newHashSet(); - private static final ExecutorService tearDownExec = Executors.newCachedThreadPool(); - private static final RocksetSQLNameTransformer nameTransformer = new RocksetSQLNameTransformer(); - - private static final Logger LOGGER = - LoggerFactory.getLogger(RocksetDestinationAcceptanceTest.class); - - @Override - protected String getImageName() { - return "airbyte/destination-rockset:dev"; - } - - @Override - protected JsonNode getConfig() throws IOException { - return Jsons.deserialize(IOs.readFile(Path.of("secrets/config.json"))); - } - - @Override - protected TestDataComparator getTestDataComparator() { - return new AdvancedTestDataComparator(); - } - - @Override - protected boolean supportBasicDataTypeTest() { - return true; - } - - @Override - protected boolean supportArrayDataTypeTest() { - return true; - } - - @Override - protected boolean supportObjectDataTypeTest() { - return true; - } - - @Override - protected JsonNode getFailCheckConfig() throws Exception { - return Jsons.jsonNode( - ImmutableMap.builder() - .put("workspace", "commons") - .put("api_key", "nope nope nope") - .build()); - } - - @Override - protected List retrieveRecords( - TestDestinationEnv testEnv, - String stream, - String namespace, - JsonNode streamSchema) - throws Exception { - - final String ws = getConfig().get("workspace").asText(); - final ApiClient client = RocksetUtils.apiClientFromConfig(getConfig()); - final String streamName = nameTransformer.convertStreamName(stream); - LOGGER.info("Retrieving records for " + streamName); - - RocksetUtils.createWorkspaceIfNotExists(client, ws); - RocksetUtils.createCollectionIfNotExists(client, ws, streamName); - RocksetUtils.waitUntilCollectionReady(client, ws, streamName); - collectionsToClear.add(streamName); - collectionsToDelete.add(streamName); - - // ORDER BY _event_time because the test 
suite expects to retrieve messages in the order they - // were - // originally written - final String sqlText = String.format("SELECT * FROM %s.%s ORDER BY _event_time;", ws, streamName); - - final QueryRequest query = new QueryRequest().sql(new QueryRequestSql().query(sqlText)); - - final QueriesApi queryClient = new QueriesApi(RocksetUtils.apiClientFromConfig(getConfig())); - - LOGGER.info("About to wait for indexing on " + streamName); - try { - // As Rockset is not a transactional database, we have to wait a few seconds to be extra sure - // that we've given documents enough time to be fully indexed when retrieving records - Thread.sleep(20_000); - } catch (InterruptedException e) { - e.printStackTrace(); - } - List<JsonNode> results = new ArrayList<>(); - int previousResultSize; - // By heuristic, once the document count stabilizes, the ingestion is probably done - do { - previousResultSize = results.size(); - Thread.sleep(10_000); - final Response response = queryClient.queryCall(query, null, null).execute(); - final JsonNode json = mapper.readTree(response.body().string()); - results = Lists.newArrayList(json.get("results").iterator()); - LOGGER.info("Waiting on stable doc counts, prev= " + previousResultSize + " current=" + results.size()); - } while (results.size() != previousResultSize); - - return results.stream() - .peek(RocksetDestinationAcceptanceTest::dropRocksetAddedFields) - .collect(Collectors.toList()); - } - - private static void dropRocksetAddedFields(JsonNode n) { - dropFields(n, "_id", "_event_time"); - } - - private static void dropFields(JsonNode node, String... fields) { - Arrays.stream(fields).forEach(((ObjectNode) node)::remove); - } - - @Override - protected void setup(TestDestinationEnv testEnv, HashSet<String> TEST_SCHEMAS) { - // Nothing to do - } - - @Override - protected void tearDown(TestDestinationEnv testEnv) { - try { - final ApiClient client = RocksetUtils.apiClientFromConfig(getConfig()); - String workspace = getConfig().get("workspace").asText(); - collectionsToClear.stream() - .map( - cn -> CompletableFuture.runAsync(() -> { - RocksetUtils.clearCollectionIfCollectionExists(client, workspace, cn); - }, tearDownExec)) - // collect to avoid laziness of stream - .collect(Collectors.toList()) - .forEach(CompletableFuture::join); - collectionsToClear.clear(); - } catch (IOException e) { - e.printStackTrace(); - } - } - - @AfterAll - public static void exitSuite() throws Exception { - LOGGER.info("Deleting all collections used during testing "); - final JsonNode config = Jsons.deserialize(IOs.readFile(Path.of("secrets/config.json"))); - final ApiClient client = RocksetUtils.apiClientFromConfig(config); - final String workspace = config.get("workspace").asText(); - collectionsToDelete.stream().map(cn -> deleteCollection(client, workspace, cn)).collect(Collectors.toList()).forEach(CompletableFuture::join); - tearDownExec.shutdown(); - - } - - private static CompletableFuture<Void> deleteCollection(ApiClient client, String workspace, String cn) { - return CompletableFuture.runAsync( - () -> Exceptions.toRuntime( - () -> { - RocksetUtils.deleteCollectionIfExists(client, workspace, cn); - RocksetUtils.waitUntilCollectionDeleted(client, workspace, cn); - Thread.sleep(2500); // Let services pick up deletion in case of re-creation - }), - tearDownExec); - } - -} diff --git a/airbyte-integrations/connectors/destination-rockset/src/test/java/io/airbyte/integrations/destination/rockset/RocksetWriteApiConsumerTest.java
b/airbyte-integrations/connectors/destination-rockset/src/test/java/io/airbyte/integrations/destination/rockset/RocksetWriteApiConsumerTest.java deleted file mode 100644 index 6146f73fc59a..000000000000 --- a/airbyte-integrations/connectors/destination-rockset/src/test/java/io/airbyte/integrations/destination/rockset/RocksetWriteApiConsumerTest.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.rockset; - -import static io.airbyte.integrations.destination.rockset.RocksetUtils.API_KEY_ID; -import static io.airbyte.integrations.destination.rockset.RocksetUtils.API_SERVER_ID; -import static io.airbyte.integrations.destination.rockset.RocksetUtils.ROCKSET_WORKSPACE_ID; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.cdk.integrations.base.FailureTrackingAirbyteMessageConsumer; -import io.airbyte.cdk.integrations.standardtest.destination.PerStreamStateMessageTest; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.util.function.Consumer; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; - -@ExtendWith(MockitoExtension.class) -public class RocksetWriteApiConsumerTest extends PerStreamStateMessageTest { - - @Mock - private Consumer outputRecordCollector; - - @Mock - private ConfiguredAirbyteCatalog catalog; - - private RocksetWriteApiConsumer consumer; - - @BeforeEach - public void init() { - consumer = new RocksetWriteApiConsumer(getTestConfig(), catalog, outputRecordCollector); - } - - @Override - protected Consumer getMockedConsumer() { - return outputRecordCollector; - } - - @Override - protected FailureTrackingAirbyteMessageConsumer getMessageConsumer() { - return consumer; - } - - private JsonNode getTestConfig() { - return Jsons.jsonNode( - ImmutableMap.builder() - .put(API_KEY_ID, "testApiKey") - .put(API_SERVER_ID, "testApiServerId") - .put(ROCKSET_WORKSPACE_ID, "testRocksetWorkspaceId") - .build()); - } - -} diff --git a/airbyte-integrations/connectors/destination-scaffold-destination-python/README.md b/airbyte-integrations/connectors/destination-scaffold-destination-python/README.md deleted file mode 100644 index f22d294346dd..000000000000 --- a/airbyte-integrations/connectors/destination-scaffold-destination-python/README.md +++ /dev/null @@ -1,159 +0,0 @@ -# Scaffold Destination Python Destination - -This is the repository for the Scaffold Destination Python destination connector, written in Python. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/destinations/scaffold-destination-python). - -## Local development - -### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** - -#### Minimum Python version required `= 3.9.0` - -#### Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` - -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. 
To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt -``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. - -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. - - -#### Create credentials -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/destinations/scaffold-destination-python) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `destination_scaffold_destination_python/spec.json` file. -Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. - -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `destination scaffold-destination-python test creds` -and place them into `secrets/config.json`. - -### Locally running the connector -``` -python main.py spec -python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json -``` - -### Locally running the connector docker image - -#### Use `airbyte-ci` to build your connector -The Airbyte way of building this connector is to use our `airbyte-ci` tool. -You can follow install instructions [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md#L1). -Then running the following command will build your connector: - -```bash -airbyte-ci connectors --name destination-scaffold-destination-python build -``` -Once the command is done, you will find your connector image in your local docker registry: `airbyte/destination-scaffold-destination-python:dev`. - -##### Customizing our build process -When contributing on our connector you might need to customize the build process to add a system dependency or set an env var. -You can customize our build process by adding a `build_customization.py` module to your connector. -This module should contain a `pre_connector_install` and `post_connector_install` async function that will mutate the base image and the connector container respectively. -It will be imported at runtime by our build process and the functions will be called if they exist. - -Here is an example of a `build_customization.py` module: -```python -from __future__ import annotations - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - # Feel free to check the dagger documentation for more information on the Container object and its methods. 
- # https://dagger-io.readthedocs.io/en/sdk-python-v0.6.4/ - from dagger import Container - - -async def pre_connector_install(base_image_container: Container) -> Container: - return await base_image_container.with_env_variable("MY_PRE_BUILD_ENV_VAR", "my_pre_build_env_var_value") - -async def post_connector_install(connector_container: Container) -> Container: - return await connector_container.with_env_variable("MY_POST_BUILD_ENV_VAR", "my_post_build_env_var_value") -``` - -#### Build your own connector image -This connector is built using our dynamic build process in `airbyte-ci`. -The base image used to build it is defined within the metadata.yaml file under the `connectorBuildOptions`. -The build logic is defined using [Dagger](https://dagger.io/) [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/pipelines/builds/python_connectors.py). -It does not rely on a Dockerfile. - -If you would like to patch our connector and build your own, a simple approach would be to: - -1. Create your own Dockerfile based on the latest version of the connector image. -```Dockerfile -FROM airbyte/destination-scaffold-destination-python:latest - -COPY . ./airbyte/integration_code -RUN pip install ./airbyte/integration_code - -# The entrypoint and default env vars are already set in the base image -# ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -# ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -``` -Please use this as an example. This is not optimized. - -2. Build your image: -```bash -docker build -t airbyte/destination-scaffold-destination-python:dev . -# Running the spec command against your patched connector -docker run airbyte/destination-scaffold-destination-python:dev spec -``` -#### Run -Then run any of the connector commands as follows: -``` -docker run --rm airbyte/destination-scaffold-destination-python:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-scaffold-destination-python:dev check --config /secrets/config.json -# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages -cat messages.jsonl | docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-scaffold-destination-python:dev write --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` -## Testing -Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. -First install test dependencies into your virtual environment: -``` -pip install .[tests] -``` -### Unit Tests -To run unit tests locally, from the connector directory run: -``` -python -m pytest unit_tests -``` - -### Integration Tests -There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all destination connectors) and custom integration tests (which are specific to this connector). -#### Custom Integration Tests -Place custom tests inside the `integration_tests/` folder; then, from the connector root, run -``` -python -m pytest integration_tests -``` -#### Acceptance Tests -Coming soon. - -### Using `airbyte-ci` to run tests -See [airbyte-ci documentation](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md#connectors-test-command) - -## Dependency Management -All of your dependencies should go in `setup.py`, NOT `requirements.txt`.
The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. -We split dependencies into two groups: -* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list. -* dependencies required for testing go in the `TEST_REQUIREMENTS` list. - -### Publishing a new version of the connector -You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? -1. Make sure your changes are passing unit and integration tests. -1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). -1. Create a Pull Request. -1. Pat yourself on the back for being an awesome contributor. -1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/destination-scaffold-destination-python/destination_scaffold_destination_python/__init__.py b/airbyte-integrations/connectors/destination-scaffold-destination-python/destination_scaffold_destination_python/__init__.py deleted file mode 100644 index c1075c912546..000000000000 --- a/airbyte-integrations/connectors/destination-scaffold-destination-python/destination_scaffold_destination_python/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from .destination import DestinationScaffoldDestinationPython - -__all__ = ["DestinationScaffoldDestinationPython"] diff --git a/airbyte-integrations/connectors/destination-scaffold-destination-python/destination_scaffold_destination_python/destination.py b/airbyte-integrations/connectors/destination-scaffold-destination-python/destination_scaffold_destination_python/destination.py deleted file mode 100644 index 55575d629cd3..000000000000 --- a/airbyte-integrations/connectors/destination-scaffold-destination-python/destination_scaffold_destination_python/destination.py +++ /dev/null @@ -1,53 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from typing import Any, Iterable, Mapping - -from airbyte_cdk import AirbyteLogger -from airbyte_cdk.destinations import Destination -from airbyte_cdk.models import AirbyteConnectionStatus, AirbyteMessage, ConfiguredAirbyteCatalog, Status - - -class DestinationScaffoldDestinationPython(Destination): - def write( - self, config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog, input_messages: Iterable[AirbyteMessage] - ) -> Iterable[AirbyteMessage]: - - """ - TODO - Reads the input stream of messages, config, and catalog to write data to the destination. - - This method returns an iterable (typically a generator of AirbyteMessages via yield) containing state messages received - in the input message stream. Outputting a state message means that every AirbyteRecordMessage which came before it has been - successfully persisted to the destination. This is used to ensure fault tolerance in the case that a sync fails before fully completing; - the source is then given the last state message output from this method as the starting point of the next sync.
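For orientation while filling in the TODO above, here is a minimal, illustrative sketch of how a `write` method satisfying this contract is often structured; the `_flush` helper is hypothetical and stands in for whatever persistence logic the destination needs:

```python
from typing import Any, Iterable, Mapping

from airbyte_cdk.models import AirbyteMessage, ConfiguredAirbyteCatalog, Type


def write(
    self, config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog, input_messages: Iterable[AirbyteMessage]
) -> Iterable[AirbyteMessage]:
    buffer = []
    for message in input_messages:
        if message.type == Type.RECORD:
            # Collect records; they must be persisted before any later state is emitted.
            buffer.append(message.record.data)
        elif message.type == Type.STATE:
            # Hypothetical helper that persists everything buffered so far.
            self._flush(config, buffer)
            buffer.clear()
            # Only after the data is safely written is the state echoed back.
            yield message
    # Persist any records that arrived after the last state message.
    self._flush(config, buffer)
```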
- - :param config: dict of JSON configuration matching the configuration declared in spec.json - :param configured_catalog: The Configured Catalog describing the schema of the data being received and how it should be persisted in the - destination - :param input_messages: The stream of input messages received from the source - :return: Iterable of AirbyteStateMessages wrapped in AirbyteMessage structs - """ - - pass - - def check(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> AirbyteConnectionStatus: - """ - Tests if the input configuration can be used to successfully connect to the destination with the needed permissions, - e.g. whether a provided API token or password can be used to connect and write to the destination. - - :param logger: Logging object to display debug/info/error to the logs - (logs will not be accessible via airbyte UI if they are not passed to this logger) - :param config: Json object containing the configuration of this destination; its content is as specified in - the properties of the spec.json file - - :return: AirbyteConnectionStatus indicating a Success or Failure - """ - try: - # TODO - - return AirbyteConnectionStatus(status=Status.SUCCEEDED) - except Exception as e: - return AirbyteConnectionStatus(status=Status.FAILED, message=f"An exception occurred: {repr(e)}") diff --git a/airbyte-integrations/connectors/destination-scaffold-destination-python/destination_scaffold_destination_python/spec.json b/airbyte-integrations/connectors/destination-scaffold-destination-python/destination_scaffold_destination_python/spec.json deleted file mode 100644 index 7b75cfca6de7..000000000000 --- a/airbyte-integrations/connectors/destination-scaffold-destination-python/destination_scaffold_destination_python/spec.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/destinations/scaffold-destination-python", - "supported_destination_sync_modes": [ - "TODO, available options are: 'overwrite', 'append', and 'append_dedup'" - ], - "supportsIncremental": true, - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Destination Scaffold Destination Python", - "type": "object", - "required": ["TODO -- fix me!"], - "additionalProperties": false, - "properties": { - "TODO": { - "type": "string", - "description": "FIX ME" - } - } - } -} diff --git a/airbyte-integrations/connectors/destination-scaffold-destination-python/integration_tests/integration_test.py b/airbyte-integrations/connectors/destination-scaffold-destination-python/integration_tests/integration_test.py deleted file mode 100644 index d945ab6b09af..000000000000 --- a/airbyte-integrations/connectors/destination-scaffold-destination-python/integration_tests/integration_test.py +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -def integration_test(): - # TODO write integration tests - pass diff --git a/airbyte-integrations/connectors/destination-scaffold-destination-python/main.py b/airbyte-integrations/connectors/destination-scaffold-destination-python/main.py deleted file mode 100644 index facd8fdc8309..000000000000 --- a/airbyte-integrations/connectors/destination-scaffold-destination-python/main.py +++ /dev/null @@ -1,11 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-# - - -import sys - -from destination_scaffold_destination_python import DestinationScaffoldDestinationPython - -if __name__ == "__main__": - DestinationScaffoldDestinationPython().run(sys.argv[1:]) diff --git a/airbyte-integrations/connectors/destination-scaffold-destination-python/requirements.txt b/airbyte-integrations/connectors/destination-scaffold-destination-python/requirements.txt deleted file mode 100644 index d6e1198b1ab1..000000000000 --- a/airbyte-integrations/connectors/destination-scaffold-destination-python/requirements.txt +++ /dev/null @@ -1 +0,0 @@ --e . diff --git a/airbyte-integrations/connectors/destination-scaffold-destination-python/setup.py b/airbyte-integrations/connectors/destination-scaffold-destination-python/setup.py deleted file mode 100644 index 18cf1034cf6f..000000000000 --- a/airbyte-integrations/connectors/destination-scaffold-destination-python/setup.py +++ /dev/null @@ -1,25 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk", -] - -TEST_REQUIREMENTS = ["pytest~=6.2"] - -setup( - name="destination_scaffold_destination_python", - description="Destination implementation for Scaffold Destination Python.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json"]}, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/destination-scaffold-destination-python/unit_tests/unit_test.py b/airbyte-integrations/connectors/destination-scaffold-destination-python/unit_tests/unit_test.py deleted file mode 100644 index 219ae0142c72..000000000000 --- a/airbyte-integrations/connectors/destination-scaffold-destination-python/unit_tests/unit_test.py +++ /dev/null @@ -1,7 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -def test_example_method(): - assert True diff --git a/airbyte-integrations/connectors/destination-scylla/README.md b/airbyte-integrations/connectors/destination-scylla/README.md deleted file mode 100644 index 6fc6d930e4c3..000000000000 --- a/airbyte-integrations/connectors/destination-scylla/README.md +++ /dev/null @@ -1,72 +0,0 @@ -# Destination Scylla - -This is the repository for the Scylla destination connector in Java. -For information about how to use this connector within Airbyte, see [the User Documentation](https://docs.airbyte.io/integrations/destinations/scylla). - -## Local development - -#### Building via Gradle -From the Airbyte repository root, run: -``` -./gradlew :airbyte-integrations:connectors:destination-scylla:build -``` - -#### Create credentials -**If you are a community contributor**, generate the necessary credentials and place them in `secrets/config.json` conforming to the spec file in `src/main/resources/spec.json`. -Note that the `secrets` directory is git-ignored by default, so there is no danger of accidentally checking in sensitive information. - -**If you are an Airbyte core member**, follow the [instructions](https://docs.airbyte.io/connector-development#using-credentials-in-ci) to set up the credentials. - -### Locally running the connector docker image - -#### Build -Build the connector image via Gradle: - -``` -./gradlew :airbyte-integrations:connectors:destination-scylla:buildConnectorImage -``` -Once built, the docker image name and tag on your host will be `airbyte/destination-scylla:dev`.
- -#### Run -Then run any of the connector commands as follows: -``` -docker run --rm airbyte/destination-scylla:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-scylla:dev check --config /secrets/config.json -# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages -cat messages.jsonl | docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-scylla:dev write --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` - -## Testing -We use `JUnit` for Java tests. - -### Unit and Integration Tests -Place unit tests under `src/test/io/airbyte/integrations/destinations/scylla`. - -#### Acceptance Tests -Airbyte has a standard test suite that all destination connectors must pass. Implement the `TODO`s in -`src/test-integration/java/io/airbyte/integrations/destinations/scylla/ScyllaDestinationAcceptanceTest.java`. - -### Using Gradle to run tests -All commands should be run from the Airbyte project root. -To run unit tests: -``` -./gradlew :airbyte-integrations:connectors:destination-scylla:unitTest -``` -To run acceptance and custom integration tests: -``` -./gradlew :airbyte-integrations:connectors:destination-scylla:integrationTest -``` - -## Dependency Management - -### Publishing a new version of the connector -You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? -1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-scylla test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). -3. Make sure the `metadata.yaml` content is up to date. -4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/destinations/scylla.md`). -5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). -6. Pat yourself on the back for being an awesome contributor. -7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. - diff --git a/airbyte-integrations/connectors/destination-scylla/bootstrap.md b/airbyte-integrations/connectors/destination-scylla/bootstrap.md deleted file mode 100644 index 3a2e33fc5521..000000000000 --- a/airbyte-integrations/connectors/destination-scylla/bootstrap.md +++ /dev/null @@ -1,32 +0,0 @@ -# Scylla Destination - -Scylla is an open-source distributed NoSQL wide-column data store designed to handle large amounts of data across many -commodity servers, providing high availability with no single point of failure. It is designed to be compatible with -Apache Cassandra while achieving significantly higher throughputs and lower latencies. It supports the same protocols as -Cassandra (CQL and Thrift) and the same file formats (SSTable). - -The data is structured in keyspaces and tables and is partitioned and replicated across different nodes in the -cluster. -[Read more about Scylla](https://www.scylladb.com/) - -This connector maps an incoming `stream` to a Scylla `table` and a `namespace` to a Scylla `keyspace`. -When using destination sync modes `append` and `append_dedup`, an `insert` operation is performed against an existing -Scylla table.
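To make the mapping concrete, here is a rough, illustrative sketch of the append path in Python (the connector itself is Java), using the DataStax `cassandra-driver`, which also speaks to Scylla; the keyspace, table, and record values are made up, while the `_airbyte_*` column names mirror the Java implementation below:

```python
import json
import uuid

from cassandra.cluster import Cluster  # pip install cassandra-driver

cluster = Cluster(["127.0.0.1"], port=9042)
session = cluster.connect()

def append_record(keyspace: str, table: str, record: dict) -> None:
    # One Airbyte record becomes one row: (uuid id, raw JSON blob, emission timestamp).
    session.execute(
        f'INSERT INTO {keyspace}.{table} '
        f'("_airbyte_ab_id", "_airbyte_data", "_airbyte_emitted_at") '
        'VALUES (%s, %s, toTimestamp(now()))',
        (uuid.uuid4(), json.dumps(record)),
    )

append_record("airbyte", "_airbyte_raw_users", {"id": 1, "name": "jane"})
```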
-When using `overwrite`, the records are first placed in a temp table. When all the messages have been received, the data -is copied to the final table, which is first truncated, and the temp table is then deleted. - -The implementation uses the [Scylla](https://github.com/scylladb/java-driver/) driver to access -Scylla. [ScyllaCqlProvider](./src/main/java/io/airbyte/integrations/destination/scylla/ScyllaCqlProvider.java) -handles the communication with the Scylla cluster; internally it uses -the [ScyllaSessionPool](./src/main/java/io/airbyte/integrations/destination/scylla/ScyllaSessionPool.java) to retrieve a -session to the cluster. - -The [ScyllaMessageConsumer](./src/main/java/io/airbyte/integrations/destination/scylla/ScyllaMessageConsumer.java) -class contains the logic for handling Airbyte messages and events, and for copying data between tables. - -## Development - -See the [ScyllaCqlProvider](./src/main/java/io/airbyte/integrations/destination/scylla/ScyllaCqlProvider.java) -class to see how to use the Scylla driver. - -[Scylla driver docs.](https://docs.scylladb.com/using-scylla/drivers/cql-drivers/scylla-java-driver/) \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-scylla/build.gradle b/airbyte-integrations/connectors/destination-scylla/build.gradle deleted file mode 100644 index 512279a1345e..000000000000 --- a/airbyte-integrations/connectors/destination-scylla/build.gradle +++ /dev/null @@ -1,37 +0,0 @@ -plugins { - id 'application' - id 'airbyte-java-connector' -} - -airbyteJavaConnector { - cdkVersionRequired = '0.2.0' - features = ['db-destinations'] - useLocalCdk = false -} - -//remove once upgrading the CDK version to 0.4.x or later -java { - compileJava { - options.compilerArgs.remove("-Werror") - } -} - -airbyteJavaConnector.addCdkDependencies() - -application { - mainClass = 'io.airbyte.integrations.destination.scylla.ScyllaDestination' - applicationDefaultJvmArgs = ['-XX:+ExitOnOutOfMemoryError', '-XX:MaxRAMPercentage=75.0'] } - -def scyllaDriver = '3.10.2-scylla-1' -def assertVersion = '3.21.0' - -dependencies { - - implementation "com.scylladb:scylla-driver-core:${scyllaDriver}" - - // https://mvnrepository.com/artifact/org.assertj/assertj-core - testImplementation "org.assertj:assertj-core:${assertVersion}" - // https://mvnrepository.com/artifact/org.testcontainers/testcontainers - testImplementation libs.testcontainers.scylla -} diff --git a/airbyte-integrations/connectors/destination-scylla/docker-compose.yml b/airbyte-integrations/connectors/destination-scylla/docker-compose.yml deleted file mode 100644 index af0ecac58303..000000000000 --- a/airbyte-integrations/connectors/destination-scylla/docker-compose.yml +++ /dev/null @@ -1,19 +0,0 @@ -version: "3" - -services: - scylla1: - image: scylladb/scylla - ports: - - "9042:9042" - container_name: scylla1 - command: --smp 1 -# uncomment if you want to run a cluster of scylladb nodes -# scylla2: -# image: scylladb/scylla -# container_name: scylla2 -# command: --seeds=scylla1 -# -# scylla3: -# image: scylladb/scylla -# container_name: scylla3 -# command: --seeds=scylla1 diff --git a/airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaConfig.java b/airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaConfig.java deleted file mode 100644 index 8bc995f4083e..000000000000 ---
a/airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaConfig.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.scylla; - -import com.fasterxml.jackson.databind.JsonNode; -import java.util.Objects; - -public class ScyllaConfig { - - private final String keyspace; - - private final String username; - - private final String password; - - private final String address; - - private final int port; - - private final int replication; - - public ScyllaConfig(String keyspace, String username, String password, String address, int port, int replication) { - this.keyspace = keyspace; - this.username = username; - this.password = password; - this.address = address; - this.port = port; - this.replication = replication; - } - - public ScyllaConfig(JsonNode jsonNode) { - this.keyspace = jsonNode.get("keyspace").asText(); - this.username = jsonNode.get("username").asText(); - this.password = jsonNode.get("password").asText(); - this.address = jsonNode.get("address").asText(); - this.port = jsonNode.get("port").asInt(); - this.replication = jsonNode.get("replication").asInt(1); - } - - public String getKeyspace() { - return keyspace; - } - - public String getUsername() { - return username; - } - - public String getPassword() { - return password; - } - - public String getAddress() { - return address; - } - - public int getPort() { - return port; - } - - public int getReplication() { - return replication; - } - - @Override - public String toString() { - return "ScyllaConfig{" + - "keyspace='" + keyspace + '\'' + - ", username='" + username + '\'' + - ", password='" + password + '\'' + - ", address='" + address + '\'' + - ", port=" + port + - ", replication=" + replication + - '}'; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - ScyllaConfig that = (ScyllaConfig) o; - return port == that.port && username.equals(that.username) && password.equals(that.password) && - address.equals(that.address); - } - - @Override - public int hashCode() { - return Objects.hash(username, password, address, port); - } - -} diff --git a/airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaCqlProvider.java b/airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaCqlProvider.java deleted file mode 100644 index d296bcc5faee..000000000000 --- a/airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaCqlProvider.java +++ /dev/null @@ -1,177 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.scylla; - -import com.datastax.driver.core.AbstractTableMetadata; -import com.datastax.driver.core.BatchStatement; -import com.datastax.driver.core.BoundStatement; -import com.datastax.driver.core.Cluster; -import com.datastax.driver.core.DataType; -import com.datastax.driver.core.PreparedStatement; -import com.datastax.driver.core.Session; -import com.datastax.driver.core.querybuilder.QueryBuilder; -import com.datastax.driver.core.schemabuilder.SchemaBuilder; -import com.datastax.driver.core.utils.UUIDs; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import java.io.Closeable; -import java.time.Instant; -import java.util.Date; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class ScyllaCqlProvider implements Closeable { - - private static final Logger LOGGER = LoggerFactory.getLogger(ScyllaCqlProvider.class); - - private static final int N_THREADS = Runtime.getRuntime().availableProcessors(); - - private final ScyllaConfig scyllaConfig; - - private final Cluster cluster; - - private final Session session; - - private final ExecutorService executorService; - - private final String columnId; - - private final String columnData; - - private final String columnTimestamp; - - public ScyllaCqlProvider(ScyllaConfig scyllaConfig) { - this.scyllaConfig = scyllaConfig; - var sessionTuple = ScyllaSessionPool.initSession(scyllaConfig); - this.cluster = sessionTuple.value1(); - this.session = sessionTuple.value2(); - this.executorService = Executors.newFixedThreadPool(N_THREADS); - var nameTransformer = new ScyllaNameTransformer(scyllaConfig); - this.columnId = nameTransformer.outputColumn(JavaBaseConstants.COLUMN_NAME_AB_ID); - this.columnData = nameTransformer.outputColumn(JavaBaseConstants.COLUMN_NAME_DATA); - this.columnTimestamp = nameTransformer.outputColumn(JavaBaseConstants.COLUMN_NAME_EMITTED_AT); - } - - public void createKeyspaceIfNotExists(String keyspace) { - var createKeyspace = SchemaBuilder.createKeyspace(keyspace) - .ifNotExists() - .with() - .replication(Map.of( - "class", "SimpleStrategy", - "replication_factor", scyllaConfig.getReplication())) - .durableWrites(true); - session.execute(createKeyspace); - } - - public void createTableIfNotExists(String keyspace, String table) { - var createTable = SchemaBuilder.createTable(keyspace, table) - .ifNotExists() - .addPartitionKey(columnId, DataType.uuid()) - .addColumn(columnData, DataType.text()) - .addColumn(columnTimestamp, DataType.timestamp()); - session.execute(createTable); - } - - public void dropTableIfExists(String keyspace, String table) { - var drop = SchemaBuilder.dropTable(keyspace, table).ifExists(); - session.execute(drop); - } - - public void truncate(String keyspace, String table) { - var truncate = QueryBuilder.truncate(keyspace, table); - session.execute(truncate); - } - - public void insert(String keyspace, String table, String data) { - var insert = QueryBuilder.insertInto(keyspace, table) - .value(columnId, UUIDs.random()) - .value(columnData, data) - .value(columnTimestamp, Instant.now().toEpochMilli()); - session.execute(insert); - } - - public List<Triplet<UUID, String, Instant>> select(String keyspace, String table) { - var select = QueryBuilder.select().all().from(keyspace, table);
- return session.execute(select).all().stream() - .map(r -> Triplet.of( - r.get(columnId, UUID.class), - r.get(columnData, String.class), - r.get(columnTimestamp, Date.class).toInstant())) - .collect(Collectors.toList()); - } - - public List<Tuple<String, List<String>>> metadata() { - return cluster.getMetadata().getKeyspaces().stream() - .map(keyspace -> Tuple.of(keyspace.getName(), keyspace.getTables().stream() - .map(AbstractTableMetadata::getName) - .collect(Collectors.toList()))) - .collect(Collectors.toList()); - } - - public void copy(String keyspace, String sourceTable, String destinationTable) { - - var select = String.format("SELECT * FROM %s.%s WHERE token(%s) > ? AND token(%s) <= ?", - keyspace, sourceTable, columnId, columnId); - - var selectStatement = session.prepare(select); - - var insert = String.format("INSERT INTO %s.%s (%s, %s, %s) VALUES (?, ?, ?)", - keyspace, destinationTable, columnId, columnData, columnTimestamp); - - var insertStatement = session.prepare(insert); - // insertStatement.setConsistencyLevel(ConsistencyLevel.ONE); - - // perform full table scan in parallel using token ranges - // optimal for copying large amounts of data - cluster.getMetadata().getTokenRanges().stream() - .flatMap(range -> range.unwrap().stream()) - .map(range -> selectStatement.bind(range.getStart(), range.getEnd())) - .map(selectBoundStatement -> executorService.submit(() -> batchInsert(selectBoundStatement, insertStatement))) - .forEach(this::awaitThread); - - } - - private void batchInsert(BoundStatement select, PreparedStatement insert) { - // unlogged removes the log record for increased insert speed - var batchStatement = new BatchStatement(BatchStatement.Type.UNLOGGED); - - session.execute(select).all().stream() - .map(r -> Triplet.of( - r.get(columnId, UUID.class), - r.get(columnData, String.class), - r.get(columnTimestamp, Date.class))) - .map(t -> insert.bind(t.value1(), t.value2(), t.value3())) - .forEach(batchStatement::add); - - session.execute(batchStatement); - } - - private void awaitThread(Future<?> future) { - try { - future.get(); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - LOGGER.error("Interrupted thread while copying data: ", e); - } catch (ExecutionException e) { - LOGGER.error("Error while copying data: ", e); - } - } - - @Override - public void close() { - // gracefully shutdown executor service - executorService.shutdown(); - // close scylla session - ScyllaSessionPool.closeSession(scyllaConfig); - } - -} diff --git a/airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaDestination.java b/airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaDestination.java deleted file mode 100644 index 1a60831b9f15..000000000000 --- a/airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaDestination.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */ - -package io.airbyte.integrations.destination.scylla; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.BaseConnector; -import io.airbyte.cdk.integrations.base.AirbyteMessageConsumer; -import io.airbyte.cdk.integrations.base.Destination; -import io.airbyte.cdk.integrations.base.IntegrationRunner; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.util.UUID; -import java.util.function.Consumer; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class ScyllaDestination extends BaseConnector implements Destination { - - private static final Logger LOGGER = LoggerFactory.getLogger(ScyllaDestination.class); - - public static void main(String[] args) throws Exception { - new IntegrationRunner(new ScyllaDestination()).run(args); - } - - @Override - public AirbyteConnectionStatus check(JsonNode config) { - var scyllaConfig = new ScyllaConfig(config); - // add random uuid to avoid conflicts with existing tables. - String tableName = "table_" + UUID.randomUUID().toString().replace("-", ""); - ScyllaCqlProvider scyllaCqlProvider = null; - try { - scyllaCqlProvider = new ScyllaCqlProvider(scyllaConfig); - // check connection and write permissions - scyllaCqlProvider.createKeyspaceIfNotExists(scyllaConfig.getKeyspace()); - scyllaCqlProvider.createTableIfNotExists(scyllaConfig.getKeyspace(), tableName); - scyllaCqlProvider.insert(scyllaConfig.getKeyspace(), tableName, "{}"); - return new AirbyteConnectionStatus().withStatus(AirbyteConnectionStatus.Status.SUCCEEDED); - } catch (Exception e) { - LOGGER.error("Can't establish Scylla connection with reason: ", e); - return new AirbyteConnectionStatus().withStatus(AirbyteConnectionStatus.Status.FAILED); - } finally { - if (scyllaCqlProvider != null) { - try { - scyllaCqlProvider.dropTableIfExists(scyllaConfig.getKeyspace(), tableName); - } catch (Exception e) { - LOGGER.error("Error while deleting temp table {} with reason: ", tableName, e); - } - scyllaCqlProvider.close(); - } - } - } - - @Override - public AirbyteMessageConsumer getConsumer(JsonNode config, - ConfiguredAirbyteCatalog configuredCatalog, - Consumer<AirbyteMessage> outputRecordCollector) { - return new ScyllaMessageConsumer(new ScyllaConfig(config), configuredCatalog, outputRecordCollector); - } - -} diff --git a/airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaMessageConsumer.java b/airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaMessageConsumer.java deleted file mode 100644 index 2c9edfb4493a..000000000000 --- a/airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaMessageConsumer.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */ - -package io.airbyte.integrations.destination.scylla; - -import io.airbyte.cdk.integrations.base.FailureTrackingAirbyteMessageConsumer; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.util.Map; -import java.util.function.Consumer; -import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class ScyllaMessageConsumer extends FailureTrackingAirbyteMessageConsumer { - - private static final Logger LOGGER = LoggerFactory.getLogger(ScyllaMessageConsumer.class); - - private final ScyllaConfig scyllaConfig; - - private final Consumer<AirbyteMessage> outputRecordCollector; - - private final Map<AirbyteStreamNameNamespacePair, ScyllaStreamConfig> scyllaStreams; - - private final ScyllaCqlProvider scyllaCqlProvider; - - public ScyllaMessageConsumer(ScyllaConfig scyllaConfig, - ConfiguredAirbyteCatalog configuredCatalog, - Consumer<AirbyteMessage> outputRecordCollector) { - this.scyllaConfig = scyllaConfig; - this.outputRecordCollector = outputRecordCollector; - this.scyllaCqlProvider = new ScyllaCqlProvider(scyllaConfig); - var nameTransformer = new ScyllaNameTransformer(scyllaConfig); - this.scyllaStreams = configuredCatalog.getStreams().stream() - .collect(Collectors.toUnmodifiableMap( - AirbyteStreamNameNamespacePair::fromConfiguredAirbyteSteam, - k -> new ScyllaStreamConfig( - nameTransformer.outputKeyspace(k.getStream().getNamespace()), - nameTransformer.outputTable(k.getStream().getName()), - nameTransformer.outputTmpTable(k.getStream().getName()), - k.getDestinationSyncMode()))); - } - - @Override - protected void startTracked() { - scyllaStreams.forEach((k, v) -> { - scyllaCqlProvider.createKeyspaceIfNotExists(v.getKeyspace()); - scyllaCqlProvider.createTableIfNotExists(v.getKeyspace(), v.getTempTableName()); - }); - } - - @Override - protected void acceptTracked(AirbyteMessage message) { - if (message.getType() == AirbyteMessage.Type.RECORD) { - var messageRecord = message.getRecord(); - var streamConfig = - scyllaStreams.get(AirbyteStreamNameNamespacePair.fromRecordMessage(messageRecord)); - if (streamConfig == null) { - throw new IllegalArgumentException("Unrecognized destination stream"); - } - var data = Jsons.serialize(messageRecord.getData()); - scyllaCqlProvider.insert(streamConfig.getKeyspace(), streamConfig.getTempTableName(), data); - } else if (message.getType() == AirbyteMessage.Type.STATE) { - outputRecordCollector.accept(message); - } else { - LOGGER.warn("Unsupported airbyte message type: {}", message.getType()); - } - } - - @Override - protected void close(boolean hasFailed) { - if (!hasFailed) { - scyllaStreams.forEach((k, v) -> { - try { - scyllaCqlProvider.createTableIfNotExists(v.getKeyspace(), v.getTableName()); - switch (v.getDestinationSyncMode()) { - case APPEND -> { - scyllaCqlProvider.copy(v.getKeyspace(), v.getTempTableName(), v.getTableName()); - } - case OVERWRITE -> { - scyllaCqlProvider.truncate(v.getKeyspace(), v.getTableName()); - scyllaCqlProvider.copy(v.getKeyspace(), v.getTempTableName(), v.getTableName()); - } - default -> throw new UnsupportedOperationException("Unsupported destination sync mode"); - } - } catch (Exception e) { - LOGGER.error("Error while copying data to table {}: ", v.getTableName(), e); - } - }); - } - - scyllaStreams.forEach((k, v) -> { - try { - scyllaCqlProvider.dropTableIfExists(v.getKeyspace(), v.getTempTableName()); - } catch (Exception e) { - LOGGER.error("Error while
deleting temp table {} with reason: ", v.getTempTableName(), e); - } - }); - scyllaCqlProvider.close(); - } - -} diff --git a/airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaNameTransformer.java b/airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaNameTransformer.java deleted file mode 100644 index c45a9db07bd7..000000000000 --- a/airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaNameTransformer.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.scylla; - -import com.google.common.base.CharMatcher; -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; -import io.airbyte.commons.text.Names; - -class ScyllaNameTransformer extends StandardNameTransformer { - - private final ScyllaConfig scyllaConfig; - - public ScyllaNameTransformer(ScyllaConfig scyllaConfig) { - this.scyllaConfig = scyllaConfig; - } - - String outputKeyspace(String namespace) { - if (namespace == null || namespace.isBlank()) { - return scyllaConfig.getKeyspace(); - } - return CharMatcher.is('_').trimLeadingFrom(Names.toAlphanumericAndUnderscore(namespace)); - } - - String outputTable(String streamName) { - var tableName = super.getRawTableName(streamName.toLowerCase()).substring(1); - // max allowed length for a scylla table is 48 characters - return tableName.length() > 48 ? tableName.substring(0, 48) : tableName; - } - - String outputTmpTable(String streamName) { - var tableName = super.getTmpTableName(streamName.toLowerCase()).substring(1); - // max allowed length for a scylla table is 48 characters - return tableName.length() > 48 ? tableName.substring(0, 48) : tableName; - } - - String outputColumn(String columnName) { - return Names.doubleQuote(columnName.toLowerCase()); - } - -} diff --git a/airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaSessionPool.java b/airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaSessionPool.java deleted file mode 100644 index e6806dca4052..000000000000 --- a/airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaSessionPool.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.scylla; - -import com.datastax.driver.core.Cluster; -import com.datastax.driver.core.Session; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.atomic.AtomicInteger; - -class ScyllaSessionPool { - - private static final ConcurrentHashMap<ScyllaConfig, Triplet<Cluster, Session, AtomicInteger>> sessions; - - static { - sessions = new ConcurrentHashMap<>(); - } - - private ScyllaSessionPool() { - - } - - static Tuple<Cluster, Session> initSession(ScyllaConfig scyllaConfig) { - var cachedSession = sessions.get(scyllaConfig); - if (cachedSession != null) { - cachedSession.value3().incrementAndGet(); - return Tuple.of(cachedSession.value1(), cachedSession.value2()); - } else { - var cluster = Cluster.builder() - .addContactPoint(scyllaConfig.getAddress()) - .withPort(scyllaConfig.getPort()) - .withCredentials(scyllaConfig.getUsername(), scyllaConfig.getPassword()) - .build(); - var session = cluster.connect(); - sessions.put(scyllaConfig, Triplet.of(cluster, session, new AtomicInteger(1))); - return Tuple.of(cluster, session); - } - } - - static void closeSession(ScyllaConfig scyllaConfig) { - var session = sessions.get(scyllaConfig); - if (session == null) { - throw new IllegalStateException("No session for the provided config"); - } - int usage = session.value3().decrementAndGet(); - if (usage < 1) { - session.value2().close(); - session.value1().close(); - sessions.remove(scyllaConfig); - } - } - -} diff --git a/airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaStreamConfig.java b/airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaStreamConfig.java deleted file mode 100644 index 6163fcaa45e7..000000000000 --- a/airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaStreamConfig.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.scylla; - -import io.airbyte.protocol.models.v0.DestinationSyncMode; - -/* - * Immutable configuration class for storing destination stream config.
- */ -class ScyllaStreamConfig { - - private final String keyspace; - - private final String tableName; - - private final String tempTableName; - - private final DestinationSyncMode destinationSyncMode; - - public ScyllaStreamConfig(String keyspace, - String tableName, - String tempTableName, - DestinationSyncMode destinationSyncMode) { - this.keyspace = keyspace; - this.tableName = tableName; - this.tempTableName = tempTableName; - this.destinationSyncMode = destinationSyncMode; - } - - public String getKeyspace() { - return keyspace; - } - - public String getTableName() { - return tableName; - } - - public String getTempTableName() { - return tempTableName; - } - - public DestinationSyncMode getDestinationSyncMode() { - return destinationSyncMode; - } - - @Override - public String toString() { - return "ScyllaStreamConfig{" + - "keyspace='" + keyspace + '\'' + - ", tableName='" + tableName + '\'' + - ", tempTableName='" + tempTableName + '\'' + - ", destinationSyncMode=" + destinationSyncMode + - '}'; - } - -} diff --git a/airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/Triplet.java b/airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/Triplet.java deleted file mode 100644 index 5be53f3626d5..000000000000 --- a/airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/Triplet.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.scylla; - -public class Triplet<V1, V2, V3> { - - private final V1 value1; - - private final V2 value2; - - private final V3 value3; - - public Triplet(V1 value1, V2 value2, V3 value3) { - this.value1 = value1; - this.value2 = value2; - this.value3 = value3; - } - - public static <V1, V2, V3> Triplet<V1, V2, V3> of(V1 value1, V2 value2, V3 value3) { - return new Triplet<>(value1, value2, value3); - } - - public V1 value1() { - return value1; - } - - public V2 value2() { - return value2; - } - - public V3 value3() { - return value3; - } - - @Override - public String toString() { - return "Triplet{" + - "value1=" + value1 + - ", value2=" + value2 + - ", value3=" + value3 + - '}'; - } - -} diff --git a/airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/Tuple.java b/airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/Tuple.java deleted file mode 100644 index 13f01ddecdb9..000000000000 --- a/airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/Tuple.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */ - -package io.airbyte.integrations.destination.scylla; - -public class Tuple<V1, V2> { - - private final V1 value1; - - private final V2 value2; - - public Tuple(V1 value1, V2 value2) { - this.value1 = value1; - this.value2 = value2; - } - - public static <V1, V2> Tuple<V1, V2> of(V1 value1, V2 value2) { - return new Tuple<>(value1, value2); - } - - public V1 value1() { - return value1; - } - - public V2 value2() { - return value2; - } - - @Override - public String toString() { - return "Tuple{" + - "value1=" + value1 + - ", value2=" + value2 + - '}'; - } - -} diff --git a/airbyte-integrations/connectors/destination-scylla/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-scylla/src/main/resources/spec.json deleted file mode 100644 index fb1ea4161761..000000000000 --- a/airbyte-integrations/connectors/destination-scylla/src/main/resources/spec.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/destinations/scylla", - "supportsIncremental": true, - "supportsNormalization": false, - "supportsDBT": false, - "supported_destination_sync_modes": ["overwrite", "append"], - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Scylla Destination Spec", - "type": "object", - "required": ["keyspace", "username", "password", "address", "port"], - "additionalProperties": true, - "properties": { - "keyspace": { - "title": "Keyspace", - "description": "Default Scylla keyspace to create data in.", - "type": "string", - "order": 0 - }, - "username": { - "title": "Username", - "description": "Username to use to access Scylla.", - "type": "string", - "order": 1 - }, - "password": { - "title": "Password", - "description": "Password associated with Scylla.", - "type": "string", - "airbyte_secret": true, - "order": 2 - }, - "address": { - "title": "Address", - "description": "Address to connect to.", - "type": "string", - "order": 3 - }, - "port": { - "title": "Port", - "description": "Port of Scylla.", - "type": "integer", - "minimum": 0, - "maximum": 65535, - "default": 9042, - "order": 4 - }, - "replication": { - "title": "Replication factor", - "type": "integer", - "description": "Indicates how many nodes the data should be replicated to.", - "default": 1, - "order": 5 - } - } - } -} diff --git a/airbyte-integrations/connectors/destination-scylla/src/test-integration/java/io/airbyte/integrations/destination/scylla/ScyllaContainerInitializr.java b/airbyte-integrations/connectors/destination-scylla/src/test-integration/java/io/airbyte/integrations/destination/scylla/ScyllaContainerInitializr.java deleted file mode 100644 index acfcb205abd7..000000000000 --- a/airbyte-integrations/connectors/destination-scylla/src/test-integration/java/io/airbyte/integrations/destination/scylla/ScyllaContainerInitializr.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */ - -package io.airbyte.integrations.destination.scylla; - -import org.testcontainers.containers.GenericContainer; - -class ScyllaContainerInitializr { - - private static ScyllaContainer scyllaContainer; - - private ScyllaContainerInitializr() { - - } - - public static ScyllaContainer initContainer() { - if (scyllaContainer == null) { - scyllaContainer = new ScyllaContainer() - .withExposedPorts(9042) - // single cpu core cluster - .withCommand("--smp 1"); - } - scyllaContainer.start(); - return scyllaContainer; - } - - static class ScyllaContainer extends GenericContainer<ScyllaContainer> { - - public ScyllaContainer() { - super("scylladb/scylla:4.5.0"); - } - - } - -} diff --git a/airbyte-integrations/connectors/destination-scylla/src/test-integration/java/io/airbyte/integrations/destination/scylla/ScyllaCqlProviderTest.java b/airbyte-integrations/connectors/destination-scylla/src/test-integration/java/io/airbyte/integrations/destination/scylla/ScyllaCqlProviderTest.java deleted file mode 100644 index dea28fb7eb88..000000000000 --- a/airbyte-integrations/connectors/destination-scylla/src/test-integration/java/io/airbyte/integrations/destination/scylla/ScyllaCqlProviderTest.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.scylla; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import com.datastax.driver.core.exceptions.InvalidQueryException; -import io.airbyte.cdk.integrations.util.HostPortResolver; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.TestInstance; - -@TestInstance(TestInstance.Lifecycle.PER_CLASS) -class ScyllaCqlProviderTest { - - private static final String SCYLLA_KEYSPACE = "scylla_keyspace"; - - private static final String SCYLLA_TABLE = "scylla_table"; - - private ScyllaCqlProvider scyllaCqlProvider; - - private ScyllaNameTransformer nameTransformer; - - @BeforeAll - void setup() { - var scyllaContainer = ScyllaContainerInitializr.initContainer(); - var scyllaConfig = TestDataFactory.scyllaConfig( - HostPortResolver.resolveHost(scyllaContainer), - HostPortResolver.resolvePort(scyllaContainer)); - this.scyllaCqlProvider = new ScyllaCqlProvider(scyllaConfig); - this.nameTransformer = new ScyllaNameTransformer(scyllaConfig); - this.scyllaCqlProvider.createKeyspaceIfNotExists(SCYLLA_KEYSPACE); - this.scyllaCqlProvider.createTableIfNotExists(SCYLLA_KEYSPACE, SCYLLA_TABLE); - } - - @AfterEach - void clean() { - scyllaCqlProvider.truncate(SCYLLA_KEYSPACE, SCYLLA_TABLE); - } - - @Test - void testCreateKeySpaceIfNotExists() { - String keyspace = nameTransformer.outputKeyspace("test_keyspace"); - assertDoesNotThrow(() -> scyllaCqlProvider.createKeyspaceIfNotExists(keyspace)); - } - - @Test - void testCreateTableIfNotExists() { - String table = nameTransformer.outputTable("test_stream"); - assertDoesNotThrow(() -> scyllaCqlProvider.createTableIfNotExists(SCYLLA_KEYSPACE, table)); - } - - @Test - void testInsert() { - // given - scyllaCqlProvider.insert(SCYLLA_KEYSPACE, SCYLLA_TABLE, "{\"property\":\"data1\"}"); - scyllaCqlProvider.insert(SCYLLA_KEYSPACE, SCYLLA_TABLE, "{\"property\":\"data2\"}"); - scyllaCqlProvider.insert(SCYLLA_KEYSPACE, SCYLLA_TABLE, "{\"property\":\"data3\"}"); - - // when - var resultSet = scyllaCqlProvider.select(SCYLLA_KEYSPACE,
SCYLLA_TABLE); - - // then - assertThat(resultSet) - .isNotNull() - .hasSize(3) - .anyMatch(r -> r.value2().equals("{\"property\":\"data1\"}")) - .anyMatch(r -> r.value2().equals("{\"property\":\"data2\"}")) - .anyMatch(r -> r.value2().equals("{\"property\":\"data3\"}")); - - } - - @Test - void testTruncate() { - // given - scyllaCqlProvider.insert(SCYLLA_KEYSPACE, SCYLLA_TABLE, "{\"property\":\"data1\"}"); - scyllaCqlProvider.insert(SCYLLA_KEYSPACE, SCYLLA_TABLE, "{\"property\":\"data2\"}"); - scyllaCqlProvider.insert(SCYLLA_KEYSPACE, SCYLLA_TABLE, "{\"property\":\"data3\"}"); - - // when - scyllaCqlProvider.truncate(SCYLLA_KEYSPACE, SCYLLA_TABLE); - var resultSet = scyllaCqlProvider.select(SCYLLA_KEYSPACE, SCYLLA_TABLE); - - // then - assertThat(resultSet) - .isNotNull() - .isEmpty(); - } - - @Test - void testDropTableIfExists() { - // given - String table = nameTransformer.outputTmpTable("test_stream"); - scyllaCqlProvider.createTableIfNotExists(SCYLLA_KEYSPACE, table); - - // when - scyllaCqlProvider.dropTableIfExists(SCYLLA_KEYSPACE, table); - - // then - assertThrows(InvalidQueryException.class, () -> scyllaCqlProvider.select(SCYLLA_KEYSPACE, table)); - } - - @Test - void testCopy() { - // given - String tmpTable = nameTransformer.outputTmpTable("test_stream_copy"); - scyllaCqlProvider.createTableIfNotExists(SCYLLA_KEYSPACE, tmpTable); - scyllaCqlProvider.insert(SCYLLA_KEYSPACE, tmpTable, "{\"property\":\"data1\"}"); - scyllaCqlProvider.insert(SCYLLA_KEYSPACE, tmpTable, "{\"property\":\"data2\"}"); - scyllaCqlProvider.insert(SCYLLA_KEYSPACE, tmpTable, "{\"property\":\"data3\"}"); - - String rawTable = nameTransformer.outputTable("test_stream_copy"); - scyllaCqlProvider.createTableIfNotExists(SCYLLA_KEYSPACE, rawTable); - - // when - scyllaCqlProvider.copy(SCYLLA_KEYSPACE, tmpTable, rawTable); - var resultSet = scyllaCqlProvider.select(SCYLLA_KEYSPACE, rawTable); - - // then - assertThat(resultSet) - .isNotNull() - .hasSize(3) - .anyMatch(r -> r.value2().equals("{\"property\":\"data1\"}")) - .anyMatch(r -> r.value2().equals("{\"property\":\"data2\"}")) - .anyMatch(r -> r.value2().equals("{\"property\":\"data3\"}")); - - } - -} diff --git a/airbyte-integrations/connectors/destination-scylla/src/test-integration/java/io/airbyte/integrations/destination/scylla/ScyllaDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-scylla/src/test-integration/java/io/airbyte/integrations/destination/scylla/ScyllaDestinationAcceptanceTest.java deleted file mode 100644 index d2f9c7d7d1f5..000000000000 --- a/airbyte-integrations/connectors/destination-scylla/src/test-integration/java/io/airbyte/integrations/destination/scylla/ScyllaDestinationAcceptanceTest.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.scylla; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.standardtest.destination.DestinationAcceptanceTest; -import io.airbyte.cdk.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; -import io.airbyte.cdk.integrations.standardtest.destination.comparator.TestDataComparator; -import io.airbyte.cdk.integrations.util.HostPortResolver; -import io.airbyte.commons.json.Jsons; -import io.airbyte.integrations.destination.scylla.ScyllaContainerInitializr.ScyllaContainer; -import java.util.Comparator; -import java.util.HashSet; -import java.util.List; -import java.util.stream.Collectors; -import org.junit.jupiter.api.BeforeAll; - -class ScyllaDestinationAcceptanceTest extends DestinationAcceptanceTest { - - private JsonNode configJson; - - private ScyllaCqlProvider scyllaCqlProvider; - - private ScyllaNameTransformer nameTransformer; - - private static ScyllaContainer scyllaContainer; - - @Override - protected String getImageName() { - return "airbyte/destination-scylla:dev"; - } - - @BeforeAll - static void initContainer() { - scyllaContainer = ScyllaContainerInitializr.initContainer(); - } - - @Override - protected void setup(final TestDestinationEnv testEnv, final HashSet<String> TEST_SCHEMAS) { - configJson = TestDataFactory.jsonConfig( - HostPortResolver.resolveHost(scyllaContainer), - HostPortResolver.resolvePort(scyllaContainer)); - final var scyllaConfig = new ScyllaConfig(configJson); - this.scyllaCqlProvider = new ScyllaCqlProvider(scyllaConfig); - this.nameTransformer = new ScyllaNameTransformer(scyllaConfig); - } - - @Override - protected void tearDown(final TestDestinationEnv testEnv) { - scyllaCqlProvider.metadata().stream() - .filter(m -> !m.value1().startsWith("system")) - .forEach(meta -> { - final var keyspace = meta.value1(); - meta.value2().forEach(table -> scyllaCqlProvider.truncate(keyspace, table)); - }); - } - - @Override - protected JsonNode getConfig() { - return configJson; - } - - @Override - protected JsonNode getFailCheckConfig() { - return TestDataFactory.jsonConfig("127.129.0.1", 8080); - } - - @Override - protected boolean implementsNamespaces() { - return true; - } - - @Override - protected TestDataComparator getTestDataComparator() { - return new AdvancedTestDataComparator(); - } - - @Override - protected boolean supportBasicDataTypeTest() { - return true; - } - - @Override - protected boolean supportArrayDataTypeTest() { - return true; - } - - @Override - protected boolean supportObjectDataTypeTest() { - return true; - } - - @Override - protected List<JsonNode> retrieveRecords(final TestDestinationEnv testEnv, - final String streamName, - final String namespace, - final JsonNode streamSchema) { - final var keyspace = nameTransformer.outputKeyspace(namespace); - final var table = nameTransformer.outputTable(streamName); - return scyllaCqlProvider.select(keyspace, table).stream() - .sorted(Comparator.comparing(Triplet::value3)) - .map(Triplet::value2) - .map(Jsons::deserialize) - .collect(Collectors.toList()); - } - -} diff --git a/airbyte-integrations/connectors/destination-scylla/src/test-integration/java/io/airbyte/integrations/destination/scylla/ScyllaDestinationTest.java b/airbyte-integrations/connectors/destination-scylla/src/test-integration/java/io/airbyte/integrations/destination/scylla/ScyllaDestinationTest.java deleted file mode 100644 index 53460b3f5714..000000000000 ---
a/airbyte-integrations/connectors/destination-scylla/src/test-integration/java/io/airbyte/integrations/destination/scylla/ScyllaDestinationTest.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.scylla; - -import static org.assertj.core.api.Assertions.assertThat; - -import io.airbyte.cdk.integrations.util.HostPortResolver; -import io.airbyte.integrations.destination.scylla.ScyllaContainerInitializr.ScyllaContainer; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.TestInstance; - -@TestInstance(TestInstance.Lifecycle.PER_CLASS) -class ScyllaDestinationTest { - - private ScyllaDestination scyllaDestination; - - private ScyllaContainer scyllaContainer; - - @BeforeAll - void setup() { - this.scyllaContainer = ScyllaContainerInitializr.initContainer(); - this.scyllaDestination = new ScyllaDestination(); - } - - @Test - void testCheckWithStatusSucceeded() { - - var jsonConfiguration = TestDataFactory.jsonConfig( - HostPortResolver.resolveHost(scyllaContainer), - HostPortResolver.resolvePort(scyllaContainer)); - - var connectionStatus = scyllaDestination.check(jsonConfiguration); - - assertThat(connectionStatus.getStatus()).isEqualTo(AirbyteConnectionStatus.Status.SUCCEEDED); - } - - @Test - void testCheckWithStatusFailed() { - - var jsonConfiguration = TestDataFactory.jsonConfig("192.0.2.1", 8080); - - var connectionStatus = scyllaDestination.check(jsonConfiguration); - - assertThat(connectionStatus.getStatus()).isEqualTo(AirbyteConnectionStatus.Status.FAILED); - - } - -} diff --git a/airbyte-integrations/connectors/destination-scylla/src/test-integration/java/io/airbyte/integrations/destination/scylla/TestDataFactory.java b/airbyte-integrations/connectors/destination-scylla/src/test-integration/java/io/airbyte/integrations/destination/scylla/TestDataFactory.java deleted file mode 100644 index 4773a8700afe..000000000000 --- a/airbyte-integrations/connectors/destination-scylla/src/test-integration/java/io/airbyte/integrations/destination/scylla/TestDataFactory.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.scylla; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; - -class TestDataFactory { - - private TestDataFactory() { - - } - - static ScyllaConfig scyllaConfig(String address, int port) { - return new ScyllaConfig( - "default_keyspace", - "usr", - "pw", - address, - port, - 2); - } - - static JsonNode jsonConfig(String address, int port) { - return Jsons.jsonNode(ImmutableMap.builder() - .put("keyspace", "default_keyspace") - .put("username", "usr") - .put("password", "pw") - .put("address", address) - .put("port", port) - .put("replication", 2) - .build()); - } - -} diff --git a/airbyte-integrations/connectors/destination-scylla/src/test/java/io/airbyte/integrations/destination/scylla/ScyllaConfigTest.java b/airbyte-integrations/connectors/destination-scylla/src/test/java/io/airbyte/integrations/destination/scylla/ScyllaConfigTest.java deleted file mode 100644 index cb1bbcec1ed8..000000000000 --- a/airbyte-integrations/connectors/destination-scylla/src/test/java/io/airbyte/integrations/destination/scylla/ScyllaConfigTest.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.scylla; - -import static org.assertj.core.api.Assertions.assertThat; - -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class ScyllaConfigTest { - - private ScyllaConfig scyllaConfig; - - @BeforeEach - void setup() { - var jsonNode = TestDataFactory.jsonConfig("127.0.0.1", 9042); - this.scyllaConfig = new ScyllaConfig(jsonNode); - } - - @Test - void testConfig() { - - assertThat(scyllaConfig) - .hasFieldOrPropertyWithValue("keyspace", "default_keyspace") - .hasFieldOrPropertyWithValue("username", "usr") - .hasFieldOrPropertyWithValue("password", "pw") - .hasFieldOrPropertyWithValue("address", "127.0.0.1") - .hasFieldOrPropertyWithValue("port", 9042) - .hasFieldOrPropertyWithValue("replication", 2); - - } - -} diff --git a/airbyte-integrations/connectors/destination-scylla/src/test/java/io/airbyte/integrations/destination/scylla/ScyllaNameTransformerTest.java b/airbyte-integrations/connectors/destination-scylla/src/test/java/io/airbyte/integrations/destination/scylla/ScyllaNameTransformerTest.java deleted file mode 100644 index f269168beefe..000000000000 --- a/airbyte-integrations/connectors/destination-scylla/src/test/java/io/airbyte/integrations/destination/scylla/ScyllaNameTransformerTest.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.scylla; - -import static org.assertj.core.api.Assertions.assertThat; - -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.TestInstance; - -@TestInstance(TestInstance.Lifecycle.PER_CLASS) -class ScyllaNameTransformerTest { - - private ScyllaNameTransformer scyllaNameTransformer; - - @BeforeAll - void setup() { - var scyllaConfig = TestDataFactory.scyllaConfig("127.0.0.1", 9042); - this.scyllaNameTransformer = new ScyllaNameTransformer(scyllaConfig); - } - - @Test - void testOutputTable() { - - var table = scyllaNameTransformer.outputTable("stream_name"); - - assertThat(table).matches("airbyte_raw_stream_name"); - - } - - @Test - void testOutputTmpTable() { - - var table = scyllaNameTransformer.outputTmpTable("stream_name"); - - assertThat(table).matches("airbyte_tmp_+[a-z]+_stream_name"); - - } - - @Test - void testOutputKeyspace() { - - var keyspace = scyllaNameTransformer.outputKeyspace("***keyspace^h"); - - assertThat(keyspace).matches("keyspace_h"); - - } - - @Test - void outputColumn() { - - var column = scyllaNameTransformer.outputColumn("_airbyte_data"); - - assertThat(column).matches("\"_airbyte_data\""); - - } - -} diff --git a/airbyte-integrations/connectors/destination-scylla/src/test/java/io/airbyte/integrations/destination/scylla/ScyllaRecordConsumerTest.java b/airbyte-integrations/connectors/destination-scylla/src/test/java/io/airbyte/integrations/destination/scylla/ScyllaRecordConsumerTest.java deleted file mode 100644 index 7dcf5275909d..000000000000 --- a/airbyte-integrations/connectors/destination-scylla/src/test/java/io/airbyte/integrations/destination/scylla/ScyllaRecordConsumerTest.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.scylla; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.base.FailureTrackingAirbyteMessageConsumer; -import io.airbyte.cdk.integrations.standardtest.destination.PerStreamStateMessageTest; -import io.airbyte.cdk.integrations.util.HostPortResolver; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.util.function.Consumer; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; -import org.testcontainers.containers.GenericContainer; - -@DisplayName("ScyllaRecordConsumer") -@ExtendWith(MockitoExtension.class) -public class ScyllaRecordConsumerTest extends PerStreamStateMessageTest { - - private static ScyllaContainer scyllaContainer; - - @Mock - private Consumer outputRecordCollector; - - private ScyllaMessageConsumer consumer; - - @Mock - ScyllaConfig scyllaConfig; - - @Mock - private ConfiguredAirbyteCatalog configuredCatalog; - - public static ScyllaContainer initContainer() { - if (scyllaContainer == null) { - scyllaContainer = new ScyllaContainer() - .withExposedPorts(9042) - // single cpu core cluster - .withCommand("--smp 1"); - } - scyllaContainer.start(); - return scyllaContainer; - } - - @BeforeEach - public void init() { - ScyllaContainer scyllaContainer = initContainer(); - JsonNode configJson = TestDataFactory.jsonConfig( - HostPortResolver.resolveHost(scyllaContainer), - HostPortResolver.resolvePort(scyllaContainer)); - var scyllaConfig = new ScyllaConfig(configJson); - consumer = new ScyllaMessageConsumer(scyllaConfig, configuredCatalog, outputRecordCollector); - } - - @Override - protected Consumer getMockedConsumer() { - return outputRecordCollector; - } - - @Override - protected FailureTrackingAirbyteMessageConsumer getMessageConsumer() { - return consumer; - } - - static class ScyllaContainer extends GenericContainer { - - public ScyllaContainer() { - super("scylladb/scylla:4.5.0"); - } - - } - -} diff --git a/airbyte-integrations/connectors/destination-scylla/src/test/java/io/airbyte/integrations/destination/scylla/TestDataFactory.java b/airbyte-integrations/connectors/destination-scylla/src/test/java/io/airbyte/integrations/destination/scylla/TestDataFactory.java deleted file mode 100644 index 78bd6785af89..000000000000 --- a/airbyte-integrations/connectors/destination-scylla/src/test/java/io/airbyte/integrations/destination/scylla/TestDataFactory.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.scylla; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; - -class TestDataFactory { - - static JsonNode jsonConfig(String address, int port) { - return Jsons.jsonNode(ImmutableMap.builder() - .put("keyspace", "default_keyspace") - .put("username", "usr") - .put("password", "pw") - .put("address", address) - .put("port", port) - .put("replication", 2) - .build()); - } - - static ScyllaConfig scyllaConfig(String address, int port) { - return new ScyllaConfig( - "default_keyspace", - "usr", - "pw", - address, - port, - 2); - } - -} diff --git a/airbyte-integrations/connectors/destination-selectdb/README.md b/airbyte-integrations/connectors/destination-selectdb/README.md deleted file mode 100644 index cca3da005386..000000000000 --- a/airbyte-integrations/connectors/destination-selectdb/README.md +++ /dev/null @@ -1,72 +0,0 @@ -# Destination Selectdb - -This is the repository for the Selectdb destination connector in Java. -For information about how to use this connector within Airbyte, see [the User Documentation](https://docs.airbyte.com/integrations/destinations/selectdb). - -## Local development - -#### Building via Gradle -From the Airbyte repository root, run: -``` -./gradlew :airbyte-integrations:connectors:destination-selectdb:build -``` - -#### Create credentials -**If you are a community contributor**, generate the necessary credentials and place them in `secrets/config.json` conforming to the spec file in `src/main/resources/spec.json`. -Note that the `secrets` directory is git-ignored by default, so there is no danger of accidentally checking in sensitive information. - -**If you are an Airbyte core member**, follow the [instructions](https://docs.airbyte.com/connector-development#using-credentials-in-ci) to set up the credentials. - -### Locally running the connector docker image - -#### Build -Build the connector image via Gradle: - -``` -./gradlew :airbyte-integrations:connectors:destination-selectdb:buildConnectorImage -``` -Once built, the docker image name and tag on your host will be `airbyte/destination-selectdb:dev`. - -#### Run -Then run any of the connector commands as follows: -``` -docker run --rm airbyte/destination-selectdb:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-selectdb:dev check --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-selectdb:dev discover --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-selectdb:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` - -## Testing -We use `JUnit` for Java tests. - -### Unit and Integration Tests -Place unit tests under `src/test/java/io/airbyte/integrations/destination/selectdb`. - -#### Acceptance Tests -Airbyte has a standard test suite that all destination connectors must pass. Implement the `TODO`s in -`src/test-integration/java/io/airbyte/integrations/destination/selectdb/SelectdbDestinationAcceptanceTest.java`. - -### Using gradle to run tests -All commands should be run from the airbyte project root.
-To run unit tests: -``` -./gradlew :airbyte-integrations:connectors:destination-selectdb:unitTest -``` -To run acceptance and custom integration tests: -``` -./gradlew :airbyte-integrations:connectors:destination-selectdb:integrationTest -``` - -## Dependency Management - -### Publishing a new version of the connector -You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? -1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-selectdb test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). -3. Make sure the `metadata.yaml` content is up to date. -4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/destinations/selectdb.md`). -5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). -6. Pat yourself on the back for being an awesome contributor. -7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. - diff --git a/airbyte-integrations/connectors/destination-selectdb/bootstrap.md b/airbyte-integrations/connectors/destination-selectdb/bootstrap.md deleted file mode 100644 index 7303dcbc3a0a..000000000000 --- a/airbyte-integrations/connectors/destination-selectdb/bootstrap.md +++ /dev/null @@ -1,41 +0,0 @@ -# SelectDB destination - - -The SelectDB destination uses the MySQL protocol (JDBC) and COPY INTO to exchange data. - -1. JDBC is used to manage the table structure and to execute the CREATE TABLE statement before data import. -2. COPY INTO is an import method based on object storage: the destination first uploads a CSV file to the SelectDB internal stage and then copies it into SelectDB in a transactional operation (a condensed code sketch appears at the end of this document). - -## Introduction to SelectDB - -SelectDB is a cloud-native real-time data warehouse built by the core developers of Apache Doris based on the Apache Doris open source project. -[SelectDB](https://en.selectdb.com/docs/selectdb) - -### Core Features - -- **Extremely fast** : In terms of storage, it adopts efficient columnar storage and data indexing; in terms of computing, it relies on the MPP distributed computing architecture and a vectorized execution engine optimized for X64 and ARM64; in the ClickBench public performance evaluation, it ranks at a world-leading level. -- **Single unified** : It can run multiple analytical workloads on a single system. It supports real-time/interactive/batch computing types, structured/semi-structured data types, and federated querying with external data lakes and databases. -- **Easy to use** : Compatible with the MySQL network protocol; offers powerful, easy-to-use WebUI-based database management tools and rich connectors for integration with Spark/Flink/dbt/Kafka. -- **Cost-effective** : Deeply adapted to cloud platforms, with an implementation architecture that separates storage and computing. Computing scales automatically on demand, and storage is tiered between hot and cold data. -- **Open** : It is developed on open source Apache Doris, so data can be freely migrated to and from Doris. It runs on multiple clouds and provides a consistent user experience.
- **Enterprise-grade features** : Provides user authentication and access control, data protection, and backup. In the future, it will also provide data masking, finer-grained access control, and data lineage to meet data-governance needs. - - -### Difference with Apache Doris - -SelectDB is developed on the basis of Apache Doris. SelectDB will continue to work with the Doris community to strengthen the open source kernel. At the same time, SelectDB provides the following enhanced features and services for enterprise customers. -- **Apache Doris LTS version** : Provides an Apache Doris LTS version with up to 18 months of support, meeting enterprises' need for stronger Doris stability. This version is free and its code is open source. -- **Cloud-native kernel** : In addition to enhancements to the open source Doris kernel, it provides a cloud-native kernel deeply adapted to public cloud platforms, giving enterprises the best price/performance along with enterprise-grade features. -- **Native management tools** : Provides powerful, easy-to-use web-based database management and development tools that can replace tools like Navicat. -- **Professional technical support** : Professional technical support services are provided for open source Apache Doris and SelectDB products. - -### Two Product Editions - -To meet the needs of different enterprises, SelectDB currently comes in two editions: - -- **SelectDB Cloud:** A fully managed data warehouse as a service on public clouds. - -- **SelectDB Enterprise:** Delivered as on-premises software, deployed in your IDC or in a VPC on a public cloud. - -SelectDB 1.0 opened for trial applications in July 2022, and dozens of companies have already tried it. - -The SelectDB 2.0 preview is now open for trial applications. Starting from SelectDB 2.0, SelectDB has also officially launched its international site. Customers who want to use AWS, Azure, or GCP should visit the SelectDB International Station; customers who want to use Alibaba Cloud, Tencent Cloud, or Huawei Cloud should visit the SelectDB China Station.
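For concreteness, here is a condensed sketch of the two-step load flow described at the top of this document, distilled from the connector's `SelectdbCopyInto` class: the `/copy/upload` and `/copy/query` endpoints, the 307-redirect handling, and the `'copy.async'='false'` property mirror that implementation, while the host, file name, and target table below are illustrative placeholders; basic auth and error handling are omitted for brevity.

```java
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Map;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;

public class CopyIntoFlowSketch {

  public static void main(String[] args) throws Exception {
    String loadUrl = "example.privatelink.aliyun.com:47057"; // placeholder load host:port
    Path csv = Path.of("/local/airbyte_tmp_users.csv");      // placeholder staged CSV file
    String stageFile = "label_users_1.csv";                  // placeholder internal-stage name

    // Redirect handling is disabled so the 307 target can be followed manually,
    // matching the connector's HttpUtil.
    try (CloseableHttpClient client = HttpClients.custom().disableRedirectHandling().build()) {
      // Step 1a: ask /copy/upload where to PUT the file; SelectDB answers with HTTP 307.
      HttpPut probe = new HttpPut("http://" + loadUrl + "/copy/upload");
      probe.setHeader("fileName", stageFile);
      String redirect;
      try (CloseableHttpResponse r = client.execute(probe)) {
        redirect = r.getFirstHeader("location").getValue();
      }

      // Step 1b: PUT the CSV bytes to the redirected object-storage address.
      HttpPut upload = new HttpPut(redirect);
      upload.setEntity(new ByteArrayEntity(Files.readAllBytes(csv)));
      client.execute(upload).close();

      // Step 2: POST a COPY INTO statement to /copy/query; 'copy.async'='false'
      // makes the load synchronous and transactional.
      String sql = "COPY INTO `db`.`users` FROM @~('{" + stageFile + "}') "
          + "PROPERTIES ('copy.async'='false')";
      HttpPost copy = new HttpPost("http://" + loadUrl + "/copy/query");
      copy.setEntity(new StringEntity(new ObjectMapper().writeValueAsString(Map.of("sql", sql))));
      client.execute(copy).close();
    }
  }
}
```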
\ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-selectdb/build.gradle b/airbyte-integrations/connectors/destination-selectdb/build.gradle deleted file mode 100644 index 0a654ec66f67..000000000000 --- a/airbyte-integrations/connectors/destination-selectdb/build.gradle +++ /dev/null @@ -1,28 +0,0 @@ -plugins { - id 'application' - id 'airbyte-java-connector' -} - -airbyteJavaConnector { - cdkVersionRequired = '0.2.0' - features = ['db-destinations'] - useLocalCdk = false -} - -//remove once upgrading the CDK version to 0.4.x or later -java { - compileJava { - options.compilerArgs.remove("-Werror") - } -} - -airbyteJavaConnector.addCdkDependencies() - -application { - mainClass = 'io.airbyte.integrations.destination.selectdb.SelectdbDestination' -} - -dependencies { - implementation 'org.apache.commons:commons-csv:1.4' - implementation group: 'mysql', name: 'mysql-connector-java', version: '8.0.16' -} diff --git a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/BaseResponse.java b/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/BaseResponse.java deleted file mode 100644 index 864328c37704..000000000000 --- a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/BaseResponse.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.selectdb; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; - -@JsonIgnoreProperties(ignoreUnknown = true) -public class BaseResponse { - - private int code; - private String msg; - private T data; - private int count; - - public int getCode() { - return code; - } - - public String getMsg() { - return msg; - } - - public T getData() { - return data; - } - -} diff --git a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/CopyIntoResp.java b/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/CopyIntoResp.java deleted file mode 100644 index dc2c18f15e65..000000000000 --- a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/CopyIntoResp.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.selectdb; - -/* - * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
- */ - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import java.util.Map; - -@JsonIgnoreProperties(ignoreUnknown = true) -public class CopyIntoResp extends BaseResponse { - - private String code; - private String exception; - private Map result; - - public String getDataCode() { - return code; - } - - public String getException() { - return exception; - } - - public Map getResult() { - return result; - } - -} diff --git a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/LabelInfo.java b/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/LabelInfo.java deleted file mode 100644 index aff5f0ce3ba5..000000000000 --- a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/LabelInfo.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.selectdb; - -import java.util.UUID; - -public class LabelInfo { - - private String prefix; - - private String table; - - public LabelInfo(String labelPrefix, String table) { - this.prefix = labelPrefix; - this.table = table; - } - - public String label() { - return prefix + "_" + table + "_" + UUID.randomUUID() + System.currentTimeMillis(); - } - -} diff --git a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/SelectdbConnectionOptions.java b/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/SelectdbConnectionOptions.java deleted file mode 100644 index 1f3ab90777b2..000000000000 --- a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/SelectdbConnectionOptions.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.selectdb; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.base.Preconditions; - -public class SelectdbConnectionOptions { - - protected final String driverName = "com.mysql.jdbc.Driver"; - protected final String cjDriverName = "com.mysql.cj.jdbc.Driver"; - - private String db; - private static String DB_KEY = "database"; - - private String table; - private static final String TABLE_KEY = "table"; - - private String user; - private static final String USER_KEY = "user_name"; - - private String pwd; - private static final String PWD_KEY = "password"; - - private String loadUrl; - private static final String LOAD_URL_KEY = "load_url"; - - private String jdbcUrl; - private static final String JDBC_URL_KEY = "jdbc_url"; - - private String clusterName; - private static final String CLUSTER_NAME_KEY = "cluster_name"; - - public static SelectdbConnectionOptions getSelectdbConnection(final JsonNode config, String table) { - return new SelectdbConnectionOptions( - config.get(DB_KEY).asText(), - table, - config.get(LOAD_URL_KEY).asText(), - config.get(JDBC_URL_KEY).asText(), - config.get(CLUSTER_NAME_KEY).asText(), - config.get(USER_KEY).asText(), - config.get(PWD_KEY) == null ? 
"" : config.get(PWD_KEY).asText()); - - } - - public SelectdbConnectionOptions(String db, - String table, - String loadUrl, - String jdbcUrl, - String clusterName, - String username, - String password) { - this.db = db; - this.table = table; - this.loadUrl = Preconditions.checkNotNull(loadUrl, "loadUrl is empty"); - this.jdbcUrl = Preconditions.checkNotNull(jdbcUrl, "jdbcUrl is empty"); - this.clusterName = Preconditions.checkNotNull(clusterName, "clusterName is empty"); - this.user = username; - this.pwd = password; - } - - public String getLoadUrl() { - return loadUrl; - } - - public String getJdbcUrl() { - return jdbcUrl; - } - - public String getClusterName() { - return clusterName; - } - - public String getDb() { - return db; - } - - public String getTable() { - return table; - } - - public String getUser() { - return user; - } - - public String getPwd() { - return pwd; - } - - public String getCjDriverName() { - return cjDriverName; - } - - public String getDriverName() { - return driverName; - } - - @Override - public String toString() { - return "SelectdbConnectionOptions{" + - "driverName='" + driverName + '\'' + - ", cjDriverName='" + cjDriverName + '\'' + - ", db='" + db + '\'' + - ", table='" + table + '\'' + - ", user='" + user + '\'' + - ", pwd='" + pwd + '\'' + - ", loadUrl='" + loadUrl + '\'' + - ", jdbcUrl='" + jdbcUrl + '\'' + - ", clusterName='" + clusterName + '\'' + - '}'; - } - -} diff --git a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/SelectdbConsumer.java b/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/SelectdbConsumer.java deleted file mode 100644 index c14c7b2101d5..000000000000 --- a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/SelectdbConsumer.java +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.selectdb; - -import com.fasterxml.jackson.core.io.JsonStringEncoder; -import io.airbyte.cdk.integrations.base.CommitOnStateAirbyteMessageConsumer; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.io.IOException; -import java.nio.file.Files; -import java.util.Map; -import java.util.UUID; -import java.util.function.Consumer; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class SelectdbConsumer extends CommitOnStateAirbyteMessageConsumer { - - private static final Logger LOGGER = LoggerFactory.getLogger(SelectdbConsumer.class); - - private final ConfiguredAirbyteCatalog catalog; - private final Map<String, SelectdbWriteConfig> writeConfigs; - - private JsonStringEncoder jsonEncoder; - - public SelectdbConsumer( - final Map<String, SelectdbWriteConfig> writeConfigs, - final ConfiguredAirbyteCatalog catalog, - final Consumer<AirbyteMessage> outputRecordCollector) { - super(outputRecordCollector); - jsonEncoder = JsonStringEncoder.getInstance(); - this.catalog = catalog; - this.writeConfigs = writeConfigs; - LOGGER.info("initializing SelectdbConsumer."); - } - - @Override - public void commit() throws Exception { - for (final SelectdbWriteConfig writeConfig : writeConfigs.values()) { - writeConfig.getWriter().flush(); - } - } - - @Override - protected void startTracked() throws Exception {} - - @Override - protected void acceptTracked(AirbyteMessage msg) throws Exception { - if (msg.getType() != AirbyteMessage.Type.RECORD) { - return; - } - final AirbyteRecordMessage recordMessage = msg.getRecord(); - if (!writeConfigs.containsKey(recordMessage.getStream())) { - throw new IllegalArgumentException( - String.format( - "Message contained record from a stream that was not in the catalog.
\ncatalog: %s , \nmessage: %s", - Jsons.serialize(catalog), Jsons.serialize(recordMessage))); - } - - writeConfigs.get(recordMessage.getStream()).getWriter().printRecord( - UUID.randomUUID(), - // new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS").format(new Date(recordMessage.getEmittedAt())), - recordMessage.getEmittedAt(), - new String(jsonEncoder.quoteAsString(Jsons.serialize(recordMessage.getData())))); - - } - - @Override - protected void close(boolean hasFailed) throws Exception { - LOGGER.info("finalizing SelectdbConsumer"); - for (final Map.Entry<String, SelectdbWriteConfig> entries : writeConfigs.entrySet()) { - try { - entries.getValue().getWriter().flush(); - entries.getValue().getWriter().close(); - } catch (final Exception e) { - hasFailed = true; - LOGGER.error("failed to close writer for: {}", entries.getKey()); - } - } - - try { - for (final SelectdbWriteConfig value : writeConfigs.values()) { - value.getsci().firstCommit(); - } - } catch (final Exception e) { - hasFailed = true; - final String message = "Failed to upload selectdb stage in destination: "; - LOGGER.error(message + e.getMessage()); - } - try { - if (!hasFailed) { - for (final SelectdbWriteConfig writeConfig : writeConfigs.values()) { - if (writeConfig.getsci().isUpload()) { - writeConfig.getsci().commitTransaction(); - } - LOGGER.info("upload commit (temp file: {}) succeeded", writeConfig.getsci().getPath()); - } - } else { - final String message = "Failed to copy into selectdb in destination"; - LOGGER.error(message); - throw new IOException(message); - } - } finally { - for (final SelectdbWriteConfig writeConfig : writeConfigs.values()) { - Files.deleteIfExists(writeConfig.getsci().getPath()); - writeConfig.getsci().close(); - } - } - - } - -} diff --git a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/SelectdbCopyInto.java b/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/SelectdbCopyInto.java deleted file mode 100644 index a3c5f9507d12..000000000000 --- a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/SelectdbCopyInto.java +++ /dev/null @@ -1,261 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */ - -package io.airbyte.integrations.destination.selectdb; - -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Preconditions; -import io.airbyte.integrations.destination.selectdb.exception.CopyIntoException; -import io.airbyte.integrations.destination.selectdb.exception.UploadException; -import io.airbyte.integrations.destination.selectdb.http.HttpPostBuilder; -import io.airbyte.integrations.destination.selectdb.http.HttpPutBuilder; -import io.airbyte.integrations.destination.selectdb.utils.ResponseUtils; -import java.io.FileInputStream; -import java.io.IOException; -import java.nio.file.Path; -import java.util.*; -import org.apache.http.Header; -import org.apache.http.HttpEntity; -import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.http.entity.ByteArrayEntity; -import org.apache.http.entity.StringEntity; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.util.EntityUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class SelectdbCopyInto { - - private static final Logger LOGGER = LoggerFactory.getLogger(SelectdbCopyInto.class); - private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); - private static final String UPLOAD_URL_PATTERN = "http://%s/copy/upload"; - private static final String COPY_URL_PATTERN = "http://%s/copy/query"; - public static final Character CSV_COLUMN_SEPARATOR = '\t'; - - private final String tableName; - private final String db; - private final String clusterName; - private final String loadUrl; - private final String uploadUrlStr; - private final String jdbcUrlStr; - private final String user; - private final String passwd; - private final Integer maxRetry; - private Boolean isUpload = false; - private final Path path; - private final CloseableHttpClient httpClient; - - private static final int SUCCESS = 0; - private static final String FAIL = "1"; - - private final static String COPY_SYNC = "copy.async"; - private String COPY_INTO_SQL = ""; - private String internalStageFileName = ""; - private Properties copyIntoSqlProp; - - public SelectdbCopyInto( - Path path, - SelectdbConnectionOptions selectdbOptions, - LabelInfo labelInfo, - CloseableHttpClient httpClient, - String...
head) { - this.loadUrl = selectdbOptions.getLoadUrl(); - this.db = selectdbOptions.getDb(); - this.tableName = selectdbOptions.getTable(); - this.clusterName = selectdbOptions.getClusterName(); - this.user = selectdbOptions.getUser(); - this.passwd = selectdbOptions.getPwd(); - this.uploadUrlStr = String.format(UPLOAD_URL_PATTERN, loadUrl); - this.jdbcUrlStr = String.format(COPY_URL_PATTERN, loadUrl); - this.copyIntoSqlProp = new Properties(); - this.maxRetry = 3; - this.path = path; - this.httpClient = httpClient; - - this.internalStageFileName = labelInfo.label() + ".csv"; - List<String> files = new ArrayList<>(); - files.add(this.internalStageFileName); - this.COPY_INTO_SQL = buildCopyIntoSql(files); - } - - public void firstCommit() throws IOException { - Path pathChecked = Preconditions.checkNotNull(path, "upload temp CSV file is empty."); - String uploadAddress = getUploadAddress(); - LOGGER.info("redirect to s3 address:{}", uploadAddress); - try { - HttpPutBuilder putBuilder = new HttpPutBuilder(); - putBuilder.setUrl(uploadAddress) - .setCommonHeader() - .setEntity(new ByteArrayEntity(new FileInputStream(pathChecked.toFile()).readAllBytes())); - - CloseableHttpResponse execute = httpClient.execute(putBuilder.build()); - handlePreCommitResponse(execute); - } catch (Exception e) { - throw new UploadException(e); - } - this.isUpload = true; - } - - private String getUploadAddress() throws IOException { - HttpPutBuilder putBuilder = new HttpPutBuilder(); - putBuilder.setUrl(uploadUrlStr) - .setFileName(this.internalStageFileName) - .setCommonHeader() - .setEmptyEntity() - .baseAuth(user, passwd); - - try (CloseableHttpResponse execute = httpClient.execute(putBuilder.build())) { - int statusCode = execute.getStatusLine().getStatusCode(); - String reason = execute.getStatusLine().getReasonPhrase(); - if (statusCode == 307) { - Header location = execute.getFirstHeader("location"); - return location.getValue(); - } else { - HttpEntity entity = execute.getEntity(); - String result = entity == null ?
null : EntityUtils.toString(entity); - LOGGER.error("Failed to get the redirected address, status {}, reason {}, response {}", statusCode, reason, - result); - throw new RuntimeException("Could not get the redirected address."); - } - } - } - - public Boolean isUpload() { - return this.isUpload; - } - - private String buildCopyIntoSql(List<String> fileList) { - StringBuilder sb = new StringBuilder(); - sb.append("COPY INTO `") - .append(db) - .append("`.`") - .append(tableName) - .append("` FROM @~('{").append(String.join(",", fileList)).append("}') ") - .append("PROPERTIES ("); - - // this copy into is sync - copyIntoSqlProp.put(COPY_SYNC, false); - StringJoiner props = new StringJoiner(","); - for (Map.Entry<Object, Object> entry : copyIntoSqlProp.entrySet()) { - String key = String.valueOf(entry.getKey()); - String value = String.valueOf(entry.getValue()); - String prop = String.format("'%s'='%s'", key, value); - props.add(prop); - } - sb.append(props).append(")"); - return sb.toString(); - } - - // copy into - public void commitTransaction() throws IOException { - long start = System.currentTimeMillis(); - LOGGER.info("commit copy SQL: {}", COPY_INTO_SQL); - int statusCode = -1; - String reasonPhrase = null; - int retry = 0; - Map<String, String> params = new HashMap<>(); - // params.put("cluster", clusterName); - params.put("sql", COPY_INTO_SQL); - boolean success = false; - CloseableHttpResponse response = null; - String loadResult = ""; - while (retry++ <= maxRetry) { - HttpPostBuilder postBuilder = new HttpPostBuilder(); - postBuilder.setUrl(jdbcUrlStr) - .baseAuth(user, passwd) - .setEntity(new StringEntity(OBJECT_MAPPER.writeValueAsString(params))); - try { - response = httpClient.execute(postBuilder.build()); - } catch (IOException e) { - LOGGER.error("commit error : ", e); - continue; - } - statusCode = response.getStatusLine().getStatusCode(); - reasonPhrase = response.getStatusLine().getReasonPhrase(); - if (statusCode != 200) { - LOGGER.warn("commit failed with status {} {}, reason {}", statusCode, loadUrl, reasonPhrase); - continue; - } else if (response.getEntity() != null) { - loadResult = EntityUtils.toString(response.getEntity()); - success = handleCommitResponse(loadResult); - if (success) { - LOGGER.info("commit success cost {}ms, response is {}", System.currentTimeMillis() - start, - loadResult); - break; - } else { - LOGGER.warn("commit failed, retry again"); - } - } - } - - if (!success) { - LOGGER.error("commit error with status {}, reason {}, response {}", statusCode, reasonPhrase, loadResult); - throw new CopyIntoException("commit error with " + COPY_INTO_SQL); - } - } - - public void handlePreCommitResponse(CloseableHttpResponse response) throws IOException { - try { - final int statusCode = response.getStatusLine().getStatusCode(); - if (statusCode == 200 && response.getEntity() != null) { - String loadResult = EntityUtils.toString(response.getEntity()); - if (loadResult == null || loadResult.isBlank()) { - return; - } - LOGGER.info("response result {}", loadResult); - BaseResponse<Map<String, String>> baseResponse = new ObjectMapper().readValue(loadResult, - new TypeReference<BaseResponse<Map<String, String>>>() {}); - if (baseResponse.getCode() == 0) { - return; - } else { - throw new RuntimeException("upload file error: " + baseResponse.getMsg()); - } - } - throw new RuntimeException("upload file error: " + response.getStatusLine().toString()); - } finally { - if (response != null) { - response.close(); - } - } - } - - public boolean handleCommitResponse(String loadResult) throws IOException { - BaseResponse<CopyIntoResp> baseResponse =
OBJECT_MAPPER.readValue(loadResult, - new TypeReference<BaseResponse<CopyIntoResp>>() {}); - if (baseResponse.getCode() == SUCCESS) { - CopyIntoResp dataResp = baseResponse.getData(); - if (FAIL.equals(dataResp.getDataCode())) { - LOGGER.error("copy into execute failed, reason:{}", loadResult); - return false; - } else { - Map<String, String> result = dataResp.getResult(); - if (!result.get("state").equals("FINISHED") && !ResponseUtils.isCommitted(result.get("msg"))) { - LOGGER.error("copy into load failed, reason:{}", loadResult); - return false; - } else { - return true; - } - } - } else { - LOGGER.error("commit failed, reason:{}", loadResult); - return false; - } - } - - public Path getPath() { - return path; - } - - public void close() throws IOException { - if (null != httpClient) { - try { - httpClient.close(); - } catch (IOException e) { - throw new IOException("Closing httpClient failed.", e); - } - } - } - -} diff --git a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/SelectdbDestination.java b/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/SelectdbDestination.java deleted file mode 100644 index 9619f3c19b8d..000000000000 --- a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/SelectdbDestination.java +++ /dev/null @@ -1,138 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.selectdb; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.base.Preconditions; -import io.airbyte.cdk.integrations.BaseConnector; -import io.airbyte.cdk.integrations.base.AirbyteMessageConsumer; -import io.airbyte.cdk.integrations.base.Destination; -import io.airbyte.cdk.integrations.base.IntegrationRunner; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; -import io.airbyte.integrations.destination.selectdb.http.HttpUtil; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus.Status; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import io.airbyte.protocol.models.v0.DestinationSyncMode; -import java.io.FileWriter; -import java.io.IOException; -import java.nio.charset.Charset; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.sql.SQLException; -import java.sql.Statement; -import java.util.HashMap; -import java.util.Map; -import java.util.function.Consumer; -import org.apache.commons.csv.CSVFormat; -import org.apache.commons.csv.CSVPrinter; -import org.apache.commons.io.FileUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class SelectdbDestination extends BaseConnector implements Destination { - - private static final Logger LOGGER = LoggerFactory.getLogger(SelectdbDestination.class); - - private static final StandardNameTransformer namingResolver = new StandardNameTransformer(); - private static HttpUtil http = new HttpUtil(); - static final String DESTINATION_TEMP_PATH_FIELD = "destination_temp_path"; - private SelectdbOperations selectdbOperations; - - public static void main(String[] args) throws Exception { - new IntegrationRunner(new SelectdbDestination()).run(args); - } - - public SelectdbDestination() { - this.selectdbOperations =
new SelectdbOperations(); - } - - @Override - public AirbyteConnectionStatus check(JsonNode config) { - try { - Preconditions.checkNotNull(config); - FileUtils.forceMkdir(getTempPathDir(config).toFile()); - selectdbOperations.getConn(config); - } catch (final Exception e) { - return new AirbyteConnectionStatus().withStatus(Status.FAILED).withMessage(e.getMessage()); - } - return new AirbyteConnectionStatus().withStatus(Status.SUCCEEDED); - } - - @Override - public AirbyteMessageConsumer getConsumer(JsonNode config, - ConfiguredAirbyteCatalog configuredCatalog, - Consumer outputRecordCollector) - throws IOException, SQLException { - final Map writeConfigs = new HashMap<>(); - try { - final Path destinationDir = getTempPathDir(config); - FileUtils.forceMkdir(destinationDir.toFile()); - for (ConfiguredAirbyteStream stream : configuredCatalog.getStreams()) { - - final DestinationSyncMode syncMode = stream.getDestinationSyncMode(); - if (syncMode == null) { - throw new IllegalStateException("Undefined destination sync mode"); - } - - final String streamName = stream.getStream().getName(); - final String tableName = namingResolver.getIdentifier(streamName); - final String tmpTableName = namingResolver.getTmpTableName(streamName); - final Path tmpPath = destinationDir.resolve(tmpTableName + ".csv"); - - Statement stmt = selectdbOperations.getConn(config).createStatement(); - stmt.execute(selectdbOperations.createTableQuery(tableName)); - if (syncMode == DestinationSyncMode.OVERWRITE) { - stmt.execute(selectdbOperations.truncateTable(tableName)); - } - CSVFormat csvFormat = CSVFormat.DEFAULT - .withSkipHeaderRecord() - .withDelimiter(SelectdbCopyInto.CSV_COLUMN_SEPARATOR) - .withQuote(null) - .withHeader( - JavaBaseConstants.COLUMN_NAME_AB_ID, - JavaBaseConstants.COLUMN_NAME_EMITTED_AT, - JavaBaseConstants.COLUMN_NAME_DATA); - final FileWriter fileWriter = new FileWriter(tmpPath.toFile(), Charset.defaultCharset(), false); - final CSVPrinter printer = new CSVPrinter(fileWriter, csvFormat); - SelectdbCopyInto sci = new SelectdbCopyInto( - tmpPath, - SelectdbConnectionOptions.getSelectdbConnection(config, tableName), - new LabelInfo("", tableName), - http.getClient(), - JavaBaseConstants.COLUMN_NAME_AB_ID, - JavaBaseConstants.COLUMN_NAME_EMITTED_AT, - JavaBaseConstants.COLUMN_NAME_DATA); - writeConfigs.put(streamName, new SelectdbWriteConfig(sci, printer, csvFormat)); - } - } catch (SQLException | ClassNotFoundException e) { - LOGGER.error("Exception while creating Selectdb destination table: ", e); - throw new SQLException(e); - } catch (IOException e) { - LOGGER.error("Exception while handling temporary csv files : ", e); - throw new IOException(e); - } finally { - selectdbOperations.closeConn(); - } - return new SelectdbConsumer(writeConfigs, configuredCatalog, outputRecordCollector); - } - - protected Path getTempPathDir(final JsonNode config) { - Path path = Paths.get(DESTINATION_TEMP_PATH_FIELD); - Preconditions.checkNotNull(path); - if (!path.startsWith("/code/local")) { - path = Path.of("/local", path.toString()); - } - final Path normalizePath = path.normalize(); - if (!normalizePath.startsWith("/local")) { - throw new IllegalArgumentException("Copy into destination temp file should be inside the /local directory"); - } - return path; - } - -} diff --git a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/SelectdbOperations.java 
b/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/SelectdbOperations.java deleted file mode 100644 index 05c322a8516d..000000000000 --- a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/SelectdbOperations.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.selectdb; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class SelectdbOperations { - - private static final Logger LOGGER = LoggerFactory.getLogger(SelectdbOperations.class); - - private static final String JDBC_DRIVER = "com.mysql.cj.jdbc.Driver"; - private static final String DB_URL_PATTERN = "jdbc:mysql://%s/%s?rewriteBatchedStatements=true&useUnicode=true&characterEncoding=utf8"; - - // private JsonNode config; - private Connection conn = null; - - public SelectdbOperations() { - // this.config = config; - } - - public Connection getConn(JsonNode config) throws SQLException, ClassNotFoundException { - if (conn == null) { - checkSelectdbAndConnect(config); - } - return conn; - } - - public void closeConn() throws SQLException { - if (conn != null) { - conn.close(); - } - } - - private void checkSelectdbAndConnect(JsonNode config) throws ClassNotFoundException, SQLException { - SelectdbConnectionOptions selectdbConnection = SelectdbConnectionOptions.getSelectdbConnection(config, ""); - String dbUrl = String.format(DB_URL_PATTERN, selectdbConnection.getJdbcUrl(), selectdbConnection.getDb()); - Class.forName(JDBC_DRIVER); - conn = DriverManager.getConnection(dbUrl, selectdbConnection.getUser(), selectdbConnection.getPwd()); - } - - public String truncateTable(String tableName) { - String s = "TRUNCATE TABLE `" + tableName + "`;"; - LOGGER.info("truncate selectdb table SQL : \n " + s); - return s; - } - - protected String createTableQuery(String tableName) { - String s = "CREATE TABLE IF NOT EXISTS `" + tableName + "` ( \n" - + "`" + JavaBaseConstants.COLUMN_NAME_AB_ID + "` varchar(40),\n" - + "`" + JavaBaseConstants.COLUMN_NAME_EMITTED_AT + "` BIGINT,\n" - + "`" + JavaBaseConstants.COLUMN_NAME_DATA + "` String)\n" - + "DUPLICATE KEY(`" + JavaBaseConstants.COLUMN_NAME_AB_ID + "`,`" - + JavaBaseConstants.COLUMN_NAME_EMITTED_AT + "`) \n" - + "DISTRIBUTED BY HASH(`" + JavaBaseConstants.COLUMN_NAME_AB_ID + "`) BUCKETS 16 ;"; - LOGGER.info("create selectdb table SQL : \n " + s); - return s; - } - -} diff --git a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/SelectdbWriteConfig.java b/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/SelectdbWriteConfig.java deleted file mode 100644 index 8c28cc52daeb..000000000000 --- a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/SelectdbWriteConfig.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.selectdb; - -import org.apache.commons.csv.CSVFormat; -import org.apache.commons.csv.CSVPrinter; - -public class SelectdbWriteConfig { - - private final SelectdbCopyInto selectdbCopyInto; - private final CSVPrinter writer; - private final CSVFormat format; - - public SelectdbWriteConfig(SelectdbCopyInto sci, CSVPrinter writer, CSVFormat format) { - this.selectdbCopyInto = sci; - this.writer = writer; - this.format = format; - } - - public SelectdbCopyInto getsci() { - return selectdbCopyInto; - } - - public CSVFormat getFormat() { - return format; - } - - public CSVPrinter getWriter() { - return writer; - } - -} diff --git a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/exception/CopyIntoException.java b/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/exception/CopyIntoException.java deleted file mode 100644 index e3e0fb787081..000000000000 --- a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/exception/CopyIntoException.java +++ /dev/null @@ -1,13 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.selectdb.exception; - -public class CopyIntoException extends SelectdbRuntimeException { - - public CopyIntoException(String message) { - super(message); - } - -} diff --git a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/exception/SelectdbRuntimeException.java b/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/exception/SelectdbRuntimeException.java deleted file mode 100644 index 34f14d4e009e..000000000000 --- a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/exception/SelectdbRuntimeException.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.selectdb.exception; - -/** - * Selectdb runtime exception. - */ -public class SelectdbRuntimeException extends RuntimeException { - - public SelectdbRuntimeException(String message) { - super(message); - } - - public SelectdbRuntimeException(Throwable cause) { - super(cause); - } - -} diff --git a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/exception/UploadException.java b/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/exception/UploadException.java deleted file mode 100644 index 593cf678f329..000000000000 --- a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/exception/UploadException.java +++ /dev/null @@ -1,13 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.selectdb.exception; - -public class UploadException extends SelectdbRuntimeException { - - public UploadException(Exception exception) { - super(exception); - } - -} diff --git a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/http/HttpPostBuilder.java b/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/http/HttpPostBuilder.java deleted file mode 100644 index 4b281a09a5f8..000000000000 --- a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/http/HttpPostBuilder.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.selectdb.http; - -import com.google.common.base.Preconditions; -import java.nio.charset.StandardCharsets; -import java.util.HashMap; -import java.util.Map; -import org.apache.commons.codec.binary.Base64; -import org.apache.http.HttpEntity; -import org.apache.http.HttpHeaders; -import org.apache.http.client.methods.HttpPost; - -/** - * Builder for HttpPost. - */ -public class HttpPostBuilder { - - String url; - Map header; - HttpEntity httpEntity; - - public HttpPostBuilder() { - header = new HashMap<>(); - } - - public HttpPostBuilder setUrl(String url) { - this.url = url; - return this; - } - - public HttpPostBuilder baseAuth(String user, String password) { - final String authInfo = user + ":" + password; - byte[] encoded = Base64.encodeBase64(authInfo.getBytes(StandardCharsets.UTF_8)); - header.put(HttpHeaders.AUTHORIZATION, "Basic " + new String(encoded, StandardCharsets.UTF_8)); - return this; - } - - public HttpPostBuilder setEntity(HttpEntity httpEntity) { - this.httpEntity = httpEntity; - return this; - } - - public HttpPost build() { - Preconditions.checkNotNull(url); - Preconditions.checkNotNull(httpEntity); - HttpPost put = new HttpPost(url); - header.forEach(put::setHeader); - put.setEntity(httpEntity); - return put; - } - -} diff --git a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/http/HttpPutBuilder.java b/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/http/HttpPutBuilder.java deleted file mode 100644 index 585f9e722557..000000000000 --- a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/http/HttpPutBuilder.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.selectdb.http; - -import com.google.common.base.Preconditions; -import java.nio.charset.StandardCharsets; -import java.util.HashMap; -import java.util.Map; -import org.apache.commons.codec.binary.Base64; -import org.apache.http.HttpEntity; -import org.apache.http.HttpHeaders; -import org.apache.http.client.methods.HttpPut; -import org.apache.http.entity.StringEntity; - -public class HttpPutBuilder { - - String url; - Map header; - HttpEntity httpEntity; - - public HttpPutBuilder() { - header = new HashMap<>(); - } - - public HttpPutBuilder setUrl(String url) { - this.url = url; - return this; - } - - public HttpPutBuilder setFileName(String fileName) { - header.put("fileName", fileName); - return this; - } - - public HttpPutBuilder setEmptyEntity() { - try { - this.httpEntity = new StringEntity(""); - } catch (Exception e) { - throw new IllegalArgumentException(e); - } - return this; - } - - public HttpPutBuilder setCommonHeader() { - header.put(HttpHeaders.EXPECT, "100-continue"); - return this; - } - - public HttpPutBuilder baseAuth(String user, String password) { - final String authInfo = user + ":" + password; - byte[] encoded = Base64.encodeBase64(authInfo.getBytes(StandardCharsets.UTF_8)); - header.put(HttpHeaders.AUTHORIZATION, "Basic " + new String(encoded, StandardCharsets.UTF_8)); - return this; - } - - public HttpPutBuilder setEntity(HttpEntity httpEntity) { - this.httpEntity = httpEntity; - return this; - } - - public HttpPut build() { - Preconditions.checkNotNull(url); - Preconditions.checkNotNull(httpEntity); - HttpPut put = new HttpPut(url); - header.forEach(put::setHeader); - put.setEntity(httpEntity); - return put; - } - -} diff --git a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/http/HttpUtil.java b/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/http/HttpUtil.java deleted file mode 100644 index b5f2fc98b6cc..000000000000 --- a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/http/HttpUtil.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.selectdb.http; - -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClientBuilder; -import org.apache.http.impl.client.HttpClients; - -public class HttpUtil { - - private final HttpClientBuilder httpClientBuilder = - HttpClients - .custom() - .disableRedirectHandling(); - - public CloseableHttpClient getClient() { - return httpClientBuilder.build(); - } - -} diff --git a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/utils/ResponseUtils.java b/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/utils/ResponseUtils.java deleted file mode 100644 index 65968cdb0b1a..000000000000 --- a/airbyte-integrations/connectors/destination-selectdb/src/main/java/io/airbyte/integrations/destination/selectdb/utils/ResponseUtils.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.selectdb.utils; - -import java.util.regex.Pattern; - -public class ResponseUtils { - - public static final Pattern LABEL_EXIST_PATTERN = - Pattern.compile("errCode = 2, detailMessage = Label \\[(.*)\\] " + - "has already been used, relate to txn \\[(\\d+)\\]"); - - public static final Pattern COMMITTED_PATTERN = - Pattern.compile("errCode = 2, detailMessage = No files can be copied, matched (\\d+) files, " + - "filtered (\\d+) files because files may be loading or loaded"); - - public static final String RETRY_COMMIT = "submit task failed, queue size is full: SQL submitter with block policy"; - - private ResponseUtils() {} - - public static boolean isCommitted(String msg) { - return COMMITTED_PATTERN.matcher(msg).matches(); - } - -} diff --git a/airbyte-integrations/connectors/destination-selectdb/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-selectdb/src/main/resources/spec.json deleted file mode 100644 index e06e54a5c9ac..000000000000 --- a/airbyte-integrations/connectors/destination-selectdb/src/main/resources/spec.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/destinations/selectdb", - "supportsIncremental": false, - "supportsNormalization": false, - "supportsDBT": false, - "supported_destination_sync_modes": ["append", "overwrite"], - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "SelectDB Destination Spec", - "type": "object", - "required": [ - "load_url", - "jdbc_url", - "cluster_name", - "user_name", - "password", - "database" - ], - "properties": { - "load_url": { - "title": "loadURL", - "description": "load host and port: xxx.privatelink.aliyun.com:47057", - "type": "string", - "order": 0 - }, - "jdbc_url": { - "title": "jdbcURL", - "description": "jdbc host and port: xxx.privatelink.aliyun.com:30523", - "type": "string", - "order": 1 - }, - "cluster_name": { - "title": "ClusterName", - "description": "clusterName of SelectDB", - "type": "string", - "order": 2 - }, - "user_name": { - "title": "UserName", - "description": "Username to use to access the database.", - "type": "string", - "order": 3 - }, - "password": { - "title": "Password", - "description": "Password associated with the username.", - "type": "string", - "airbyte_secret": true, - "order": 4 - }, - "database": { - "title": "DataBase Name", - "description": "Name of the database.", - "type": "string", - "order": 5 - } - } - } -} diff --git a/airbyte-integrations/connectors/destination-selectdb/src/test-integration/java/io/airbyte/integrations/destination/selectdb/SelectdbDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-selectdb/src/test-integration/java/io/airbyte/integrations/destination/selectdb/SelectdbDestinationAcceptanceTest.java deleted file mode 100644 index a3e097408da9..000000000000 --- a/airbyte-integrations/connectors/destination-selectdb/src/test-integration/java/io/airbyte/integrations/destination/selectdb/SelectdbDestinationAcceptanceTest.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
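The patterns in the deleted `ResponseUtils` gate commit/retry decisions on exact server error strings, which is why `isCommitted` uses `matches()` (a full-string match) rather than `find()`. A self-contained check against a message fabricated to fit the documented format:

```java
import java.util.regex.Pattern;

public class CommittedPatternSketch {

  public static void main(String[] args) {
    // Same pattern as ResponseUtils.COMMITTED_PATTERN above.
    final Pattern committedPattern = Pattern.compile(
        "errCode = 2, detailMessage = No files can be copied, matched (\\d+) files, "
            + "filtered (\\d+) files because files may be loading or loaded");
    final String msg = "errCode = 2, detailMessage = No files can be copied, matched 3 files, "
        + "filtered 3 files because files may be loading or loaded";
    System.out.println(committedPattern.matcher(msg).matches()); // true
  }

}
```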
- */ - -package io.airbyte.integrations.destination.selectdb; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; -import io.airbyte.cdk.integrations.standardtest.destination.DestinationAcceptanceTest; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.json.Jsons; -import java.io.IOException; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.sql.*; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import org.apache.commons.lang3.StringEscapeUtils; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class SelectdbDestinationAcceptanceTest extends DestinationAcceptanceTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(SelectdbDestinationAcceptanceTest.class); - - private JsonNode configJson; - - private static final Path RELATIVE_PATH = Path.of("integration_test/test"); - - private static final String JDBC_DRIVER = "com.mysql.cj.jdbc.Driver"; - private static final String DB_URL_PATTERN = "jdbc:mysql://%s?rewriteBatchedStatements=true&useSSL=true&useUnicode=true&characterEncoding=utf8"; - private static Connection conn = null; - - private static final StandardNameTransformer namingResolver = new StandardNameTransformer(); - - @Override - protected String getImageName() { - return "airbyte/destination-selectdb:dev"; - } - - @BeforeAll - public static void getConnect() { - JsonNode config = Jsons.deserialize(IOs.readFile(Paths.get("secrets/config.json"))); - String dbUrl = String.format(DB_URL_PATTERN, config.get("jdbc_url").asText()); - try { - Class.forName(JDBC_DRIVER); - conn = - DriverManager.getConnection(dbUrl, config.get("user_name").asText(), config.get("password") == null ? "" : config.get("password").asText()); - } catch (Exception e) { - e.printStackTrace(); - } - - } - - @AfterAll - public static void closeConnect() throws SQLException { - if (conn != null) { - conn.close(); - } - } - - @Override - protected JsonNode getConfig() { - // TODO: Generate the configuration JSON file to be used for running the destination during the test - // configJson can either be static and read from secrets/config.json directly - // or created in the setup method - configJson = Jsons.deserialize(IOs.readFile(Paths.get("secrets/config.json"))); - return configJson; - } - - @Override - protected JsonNode getFailCheckConfig() { - // TODO return an invalid config which, when used to run the connector's check connection operation, - // should result in a failed connection check - return null; - } - - @Override - protected List<JsonNode> retrieveRecords(TestDestinationEnv testEnv, - String streamName, - String namespace, - JsonNode streamSchema) - throws IOException, SQLException { - // TODO Implement this method to retrieve records which were written to the destination by the connector.
- // Records returned from this method will be compared against records provided to the connector - // to verify they were written correctly - - final String tableName = namingResolver.getIdentifier(streamName); - - String query = String.format( - "SELECT * FROM %s.%s ORDER BY %s ASC;", configJson.get("database").asText(), tableName, - JavaBaseConstants.COLUMN_NAME_EMITTED_AT); - PreparedStatement stmt = conn.prepareStatement(query); - ResultSet resultSet = stmt.executeQuery(); - - List<JsonNode> res = new ArrayList<>(); - while (resultSet.next()) { - String sss = resultSet.getString(JavaBaseConstants.COLUMN_NAME_DATA); - res.add(Jsons.deserialize(StringEscapeUtils.unescapeJava(sss))); - } - stmt.close(); - return res; - } - - @Override - protected void setup(TestDestinationEnv testEnv, HashSet<String> TEST_SCHEMAS) { - // TODO Implement this method to run any setup actions needed before every test case - } - - @Override - protected void tearDown(TestDestinationEnv testEnv) { - // TODO Implement this method to run any cleanup actions needed after every test case - } - - public void testLineBreakCharacters() { - // overrides test with a no-op until we handle full UTF-8 in the destination - } - - public void testSecondSync() throws Exception { - // PubSub cannot overwrite messages; it's always append-only - } - -} diff --git a/airbyte-integrations/connectors/destination-selectdb/src/test/java/io/airbyte/integrations/destination/selectdb/SelectdbDestinationTest.java b/airbyte-integrations/connectors/destination-selectdb/src/test/java/io/airbyte/integrations/destination/selectdb/SelectdbDestinationTest.java deleted file mode 100644 index fdd249676d98..000000000000 --- a/airbyte-integrations/connectors/destination-selectdb/src/test/java/io/airbyte/integrations/destination/selectdb/SelectdbDestinationTest.java +++ /dev/null @@ -1,175 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
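For context, `retrieveRecords` above reads the connector's output back over plain JDBC, ordered by the emit timestamp so record comparison is deterministic. Assuming Airbyte's usual raw-table naming (an assumption here; the actual name comes from `StandardNameTransformer.getRawTableName`), the query for a `users` stream in database `test` would look like this:

```java
public class RawTableQuerySketch {

  public static void main(String[] args) {
    // Illustrative only: "test" and "_airbyte_raw_users" are assumed example names.
    String query = String.format("SELECT * FROM %s.%s ORDER BY %s ASC;",
        "test", "_airbyte_raw_users", "_airbyte_emitted_at");
    System.out.println(query);
    // SELECT * FROM test._airbyte_raw_users ORDER BY _airbyte_emitted_at ASC;
  }

}
```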
- */ - -package io.airbyte.integrations.destination.selectdb; - -import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.*; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import io.airbyte.cdk.integrations.base.AirbyteMessageConsumer; -import io.airbyte.cdk.integrations.base.Destination; -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.resources.MoreResources; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus.Status; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.CatalogHelpers; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.v0.ConnectorSpecification; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.time.Instant; -import java.util.Collections; -import java.util.Set; -import java.util.stream.Collectors; -import org.apache.commons.io.FileUtils; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class SelectdbDestinationTest { - - private static final Instant NOW = Instant.now(); - private static final Path TEST_ROOT = Path.of("/tmp/airbyte_tests"); - private static final String USERS_STREAM_NAME = "users"; - private static final String TASKS_STREAM_NAME = "tasks"; - private static final String USERS_FILE = new StandardNameTransformer().getRawTableName(USERS_STREAM_NAME) + ".csv"; - private static final String TASKS_FILE = new StandardNameTransformer().getRawTableName(TASKS_STREAM_NAME) + ".csv"; - - private static final AirbyteMessage MESSAGE_USERS1 = new AirbyteMessage().withType(AirbyteMessage.Type.RECORD) - .withRecord(new AirbyteRecordMessage().withStream(USERS_STREAM_NAME) - .withData(Jsons.jsonNode(ImmutableMap.builder().put("name", "john").put("id", "10").build())) - .withEmittedAt(NOW.toEpochMilli())); - private static final AirbyteMessage MESSAGE_USERS2 = new AirbyteMessage().withType(AirbyteMessage.Type.RECORD) - .withRecord(new AirbyteRecordMessage().withStream(USERS_STREAM_NAME) - .withData(Jsons.jsonNode(ImmutableMap.builder().put("name", "susan").put("id", "30").build())) - .withEmittedAt(NOW.toEpochMilli())); - private static final AirbyteMessage MESSAGE_TASKS1 = new AirbyteMessage().withType(AirbyteMessage.Type.RECORD) - .withRecord(new AirbyteRecordMessage().withStream(TASKS_STREAM_NAME) - .withData(Jsons.jsonNode(ImmutableMap.builder().put("goal", "game").build())) - .withEmittedAt(NOW.toEpochMilli())); - private static final AirbyteMessage MESSAGE_TASKS2 = new AirbyteMessage().withType(AirbyteMessage.Type.RECORD) - .withRecord(new AirbyteRecordMessage().withStream(TASKS_STREAM_NAME) - .withData(Jsons.jsonNode(ImmutableMap.builder().put("goal", "code").build())) - .withEmittedAt(NOW.toEpochMilli())); - private static final AirbyteMessage MESSAGE_STATE = new AirbyteMessage().withType(AirbyteMessage.Type.STATE) - .withState(new AirbyteStateMessage().withData( - 
Jsons.jsonNode(ImmutableMap.builder().put("checkpoint", "now!").build()))); - - private static final ConfiguredAirbyteCatalog CATALOG = new ConfiguredAirbyteCatalog().withStreams( - Lists.newArrayList( - CatalogHelpers.createConfiguredAirbyteStream(USERS_STREAM_NAME, null, - Field.of("name", JsonSchemaType.STRING), - Field.of("id", JsonSchemaType.STRING)), - CatalogHelpers.createConfiguredAirbyteStream(TASKS_STREAM_NAME, null, - Field.of("goal", JsonSchemaType.STRING)))); - - private Path destinationPath; - private JsonNode config; - - @BeforeEach - void setup() throws IOException { - destinationPath = Files.createTempDirectory(Files.createDirectories(TEST_ROOT), "test"); - config = Jsons.deserialize(IOs.readFile(Paths.get("secrets/config.json"))); - } - - private SelectdbDestination getDestination() { - final SelectdbDestination result = spy(SelectdbDestination.class); - doReturn(destinationPath).when(result).getTempPathDir(any()); - return result; - } - - @Test - void testSpec() throws Exception { - final ConnectorSpecification actual = getDestination().spec(); - final String resourceString = MoreResources.readResource("spec.json"); - final ConnectorSpecification expected = Jsons.deserialize(resourceString, ConnectorSpecification.class); - - assertEquals(expected, actual); - } - - @Test - void testCheckSuccess() { - final AirbyteConnectionStatus actual = getDestination().check(config); - final AirbyteConnectionStatus expected = new AirbyteConnectionStatus().withStatus(Status.SUCCEEDED); - assertEquals(expected, actual); - } - - @Test - void testCheckFailure() throws IOException { - final Path looksLikeADirectoryButIsAFile = destinationPath.resolve("file"); - FileUtils.touch(looksLikeADirectoryButIsAFile.toFile()); - final SelectdbDestination destination = spy(SelectdbDestination.class); - doReturn(looksLikeADirectoryButIsAFile).when(destination).getTempPathDir(any()); - - final AirbyteConnectionStatus actual = destination.check(config); - final AirbyteConnectionStatus expected = new AirbyteConnectionStatus().withStatus(Status.FAILED); - - // the message includes the random file path, so just verify it exists and then remove it when we do - // rest of the comparison. - assertNotNull(actual.getMessage()); - actual.setMessage(null); - assertEquals(expected, actual); - } - - @Test - void testCheckInvalidDestinationFolder() { - - final AirbyteConnectionStatus actual = new SelectdbDestination().check(config); - final AirbyteConnectionStatus expected = new AirbyteConnectionStatus().withStatus(Status.FAILED); - // the message includes the random file path, so just verify it exists and then remove it when we do - // rest of the comparison. - assertNotNull(actual.getMessage()); - actual.setMessage(null); - assertEquals(expected, actual); - } - - @Test - void testWriteSuccess() throws Exception { - SelectdbDestination destination = getDestination(); - destination.check(config); - final AirbyteMessageConsumer consumer = destination.getConsumer(config, CATALOG, - Destination::defaultOutputRecordCollector); - consumer.accept(MESSAGE_USERS1); - consumer.accept(MESSAGE_TASKS1); - consumer.accept(MESSAGE_USERS2); - consumer.accept(MESSAGE_TASKS2); - consumer.accept(MESSAGE_STATE); - consumer.close(); - - } - - @SuppressWarnings("ResultOfMethodCallIgnored") - @Test - void testWriteFailure() throws Exception { - // hack to force an exception to be thrown from within the consumer. 
- final AirbyteMessage spiedMessage = spy(MESSAGE_USERS1); - doThrow(new RuntimeException()).when(spiedMessage).getRecord(); - SelectdbDestination destination = getDestination(); - destination.check(config); - final AirbyteMessageConsumer consumer = spy( - destination.getConsumer(config, CATALOG, Destination::defaultOutputRecordCollector)); - - assertThrows(RuntimeException.class, () -> consumer.accept(spiedMessage)); - consumer.accept(MESSAGE_USERS2); - assertThrows(IOException.class, consumer::close); - - // verify tmp files are cleaned up and no files are output at all - final Set<String> actualFilenames = Files.list(destinationPath).map(Path::getFileName).map(Path::toString) - .collect(Collectors.toSet()); - assertEquals(Collections.emptySet(), actualFilenames); - } - -} diff --git a/airbyte-integrations/connectors/destination-tidb/README.md b/airbyte-integrations/connectors/destination-tidb/README.md deleted file mode 100644 index 0672e49025a0..000000000000 --- a/airbyte-integrations/connectors/destination-tidb/README.md +++ /dev/null @@ -1,72 +0,0 @@ -# Destination TiDB - -This is the repository for the TiDB destination connector in Java. -For information about how to use this connector within Airbyte, see [the User Documentation](https://docs.airbyte.io/integrations/destinations/tidb). - -## Local development - -#### Building via Gradle -From the Airbyte repository root, run: -``` -./gradlew :airbyte-integrations:connectors:destination-tidb:build -``` - -#### Create credentials -**If you are a community contributor**, generate the necessary credentials and place them in `secrets/config.json` conforming to the spec file in `src/main/resources/spec.json`. -Note that the `secrets` directory is git-ignored by default, so there is no danger of accidentally checking in sensitive information. - -**If you are an Airbyte core member**, follow the [instructions](https://docs.airbyte.io/connector-development#using-credentials-in-ci) to set up the credentials. - -### Locally running the connector docker image - -#### Build -Build the connector image via Gradle: - -``` -./gradlew :airbyte-integrations:connectors:destination-tidb:buildConnectorImage -``` -Once built, the docker image name and tag on your host will be `airbyte/destination-tidb:dev`. - -#### Run -Then run any of the connector commands as follows: -``` -docker run --rm airbyte/destination-tidb:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-tidb:dev check --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-tidb:dev discover --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-tidb:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` - -## Testing -We use `JUnit` for Java tests. - -### Unit and Integration Tests -Place unit tests under `src/test/java/io/airbyte/integrations/destination/tidb`. - -#### Acceptance Tests -Airbyte has a standard test suite that all destination connectors must pass. Implement the `TODO`s in -`src/test-integration/java/io/airbyte/integrations/destination/tidb/TiDBDestinationAcceptanceTest.java`. - -### Using gradle to run tests -All commands should be run from the airbyte project root.
-To run unit tests: -``` -./gradlew :airbyte-integrations:connectors:destination-tidb:unitTest -``` -To run acceptance and custom integration tests: -``` -./gradlew :airbyte-integrations:connectors:destination-tidb:integrationTest -``` - -## Dependency Management - -### Publishing a new version of the connector -You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? -1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-tidb test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). -3. Make sure the `metadata.yaml` content is up to date. -4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/destinations/tidb.md`). -5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). -6. Pat yourself on the back for being an awesome contributor. -7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. - diff --git a/airbyte-integrations/connectors/destination-tidb/build.gradle b/airbyte-integrations/connectors/destination-tidb/build.gradle deleted file mode 100644 index e0845bc0ee01..000000000000 --- a/airbyte-integrations/connectors/destination-tidb/build.gradle +++ /dev/null @@ -1,31 +0,0 @@ -plugins { - id 'application' - id 'airbyte-java-connector' -} - -airbyteJavaConnector { - cdkVersionRequired = '0.2.0' - features = ['db-destinations'] - useLocalCdk = false -} - -// remove once upgrading the CDK version to 0.4.x or later -java { - compileJava { - options.compilerArgs.remove("-Werror") - } -} - -airbyteJavaConnector.addCdkDependencies() - -application { - mainClass = 'io.airbyte.integrations.destination.tidb.TiDBDestination' } - -dependencies { - - implementation 'mysql:mysql-connector-java:8.0.30' - testImplementation libs.testcontainers.tidb - - integrationTestJavaImplementation libs.testcontainers.tidb -} diff --git a/airbyte-integrations/connectors/destination-tidb/src/main/java/io/airbyte/integrations/destination/tidb/TiDBDestination.java b/airbyte-integrations/connectors/destination-tidb/src/main/java/io/airbyte/integrations/destination/tidb/TiDBDestination.java deleted file mode 100644 index a4da8be25ac3..000000000000 --- a/airbyte-integrations/connectors/destination-tidb/src/main/java/io/airbyte/integrations/destination/tidb/TiDBDestination.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- */ - -package io.airbyte.integrations.destination.tidb; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.cdk.db.factory.DataSourceFactory; -import io.airbyte.cdk.db.jdbc.JdbcDatabase; -import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.cdk.integrations.base.Destination; -import io.airbyte.cdk.integrations.base.IntegrationRunner; -import io.airbyte.cdk.integrations.base.ssh.SshWrappedDestination; -import io.airbyte.cdk.integrations.destination.jdbc.AbstractJdbcDestination; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.map.MoreMaps; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import java.util.Map; -import javax.sql.DataSource; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class TiDBDestination extends AbstractJdbcDestination implements Destination { - - private static final Logger LOGGER = LoggerFactory.getLogger(TiDBDestination.class); - public static final String DRIVER_CLASS = "com.mysql.cj.jdbc.Driver"; - - static final Map DEFAULT_JDBC_PARAMETERS = ImmutableMap.of( - "allowLoadLocalInfile", "true"); - - static final Map DEFAULT_SSL_JDBC_PARAMETERS = MoreMaps.merge(ImmutableMap.of( - "useSSL", "true", - "requireSSL", "true", - "verifyServerCertificate", "false"), - DEFAULT_JDBC_PARAMETERS); - - public TiDBDestination() { - super(DRIVER_CLASS, new TiDBSQLNameTransformer(), new TiDBSqlOperations()); - } - - public static Destination sshWrappedDestination() { - return new SshWrappedDestination(new TiDBDestination(), JdbcUtils.HOST_LIST_KEY, JdbcUtils.PORT_LIST_KEY); - } - - @Override - public AirbyteConnectionStatus check(JsonNode config) { - final DataSource dataSource = getDataSource(config); - - try { - final JdbcDatabase database = getDatabase(dataSource); - final String outputSchema = getNamingResolver().getIdentifier(config.get(JdbcUtils.DATABASE_KEY).asText()); - attemptSQLCreateAndDropTableOperations(outputSchema, database, getNamingResolver(), getSqlOperations()); - return new AirbyteConnectionStatus().withStatus(AirbyteConnectionStatus.Status.SUCCEEDED); - } catch (final Exception e) { - LOGGER.error("Exception while checking connection: ", e); - return new AirbyteConnectionStatus() - .withStatus(AirbyteConnectionStatus.Status.FAILED) - .withMessage("Could not connect with provided configuration. 
\n" + e.getMessage()); - } finally { - try { - DataSourceFactory.close(dataSource); - } catch (final Exception e) { - LOGGER.warn("Unable to close data source.", e); - } - } - } - - @Override - protected Map getDefaultConnectionProperties(JsonNode config) { - if (config.has(JdbcUtils.SSL_KEY) && config.get(JdbcUtils.SSL_KEY).asBoolean()) { - return DEFAULT_SSL_JDBC_PARAMETERS; - } else { - return DEFAULT_JDBC_PARAMETERS; - } - } - - @Override - public JsonNode toJdbcConfig(JsonNode config) { - final StringBuilder jdbcUrl = new StringBuilder(String.format("jdbc:mysql://%s:%s/%s", - config.get(JdbcUtils.HOST_KEY).asText(), - config.get(JdbcUtils.PORT_KEY).asInt(), - config.get(JdbcUtils.DATABASE_KEY).asText())); - - final ImmutableMap.Builder configBuilder = ImmutableMap.builder() - .put(JdbcUtils.USERNAME_KEY, config.get(JdbcUtils.USERNAME_KEY).asText()) - .put(JdbcUtils.JDBC_URL_KEY, jdbcUrl); - - if (config.has(JdbcUtils.PASSWORD_KEY)) { - configBuilder.put(JdbcUtils.PASSWORD_KEY, config.get(JdbcUtils.PASSWORD_KEY).asText()); - } - if (config.has(JdbcUtils.JDBC_URL_PARAMS_KEY)) { - configBuilder.put(JdbcUtils.JDBC_URL_PARAMS_KEY, config.get(JdbcUtils.JDBC_URL_PARAMS_KEY)); - } - - return Jsons.jsonNode(configBuilder.build()); - } - - public static void main(String[] args) throws Exception { - final Destination destination = TiDBDestination.sshWrappedDestination(); - LOGGER.info("starting destination: {}", TiDBDestination.class); - new IntegrationRunner(destination).run(args); - LOGGER.info("completed destination: {}", TiDBDestination.class); - } - -} diff --git a/airbyte-integrations/connectors/destination-tidb/src/main/java/io/airbyte/integrations/destination/tidb/TiDBSQLNameTransformer.java b/airbyte-integrations/connectors/destination-tidb/src/main/java/io/airbyte/integrations/destination/tidb/TiDBSQLNameTransformer.java deleted file mode 100644 index 5fd93204e645..000000000000 --- a/airbyte-integrations/connectors/destination-tidb/src/main/java/io/airbyte/integrations/destination/tidb/TiDBSQLNameTransformer.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.tidb; - -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; - -/** - * TiDB has some limitations on identifier length. - * https://docs.pingcap.com/tidb/stable/tidb-limitations - *

- * Identifier type | Maximum length (number of characters allowed) - * Database: 64, Table: 64, Column: 64, Index: 64, View: 64, Sequence: 64 - *

- * TiDBSQLNameTransformer is a reference to - * io.airbyte.integrations.destination.mysql.MySQLNameTransformer. - */ -public class TiDBSQLNameTransformer extends StandardNameTransformer { - - @Override - public String applyDefaultCase(final String input) { - return input.toLowerCase(); - } - - // These constants must match those in destination_name_transformer.py - public static final int MAX_TIDB_NAME_LENGTH = 64; - // DBT appends a suffix to table names - public static final int TRUNCATE_DBT_RESERVED_SIZE = 12; - // 4 characters for 1 underscore and a 3-character suffix (e.g. _ab1) - // 4 characters for 1 underscore and a 3-character schema hash - public static final int TRUNCATE_RESERVED_SIZE = 8; - public static final int TRUNCATION_MAX_NAME_LENGTH = MAX_TIDB_NAME_LENGTH - TRUNCATE_DBT_RESERVED_SIZE - TRUNCATE_RESERVED_SIZE; - - @Override - public String getIdentifier(final String name) { - final String identifier = applyDefaultCase(super.getIdentifier(name)); - return truncateName(identifier, TRUNCATION_MAX_NAME_LENGTH); - } - - @Override - public String getTmpTableName(final String streamName) { - final String tmpTableName = applyDefaultCase(super.getTmpTableName(streamName)); - return truncateName(tmpTableName, TRUNCATION_MAX_NAME_LENGTH); - } - - @Override - public String getRawTableName(final String streamName) { - final String rawTableName = applyDefaultCase(super.getRawTableName(streamName)); - return truncateName(rawTableName, TRUNCATION_MAX_NAME_LENGTH); - } - - static String truncateName(final String name, final int maxLength) { - if (name.length() <= maxLength) { - return name; - } - - final int allowedLength = maxLength - 2; - final String prefix = name.substring(0, allowedLength / 2); - final String suffix = name.substring(name.length() - allowedLength / 2); - return prefix + "__" + suffix; - } - -} diff --git a/airbyte-integrations/connectors/destination-tidb/src/main/java/io/airbyte/integrations/destination/tidb/TiDBSqlOperations.java b/airbyte-integrations/connectors/destination-tidb/src/main/java/io/airbyte/integrations/destination/tidb/TiDBSqlOperations.java deleted file mode 100644 index dc10af530a6c..000000000000 --- a/airbyte-integrations/connectors/destination-tidb/src/main/java/io/airbyte/integrations/destination/tidb/TiDBSqlOperations.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.tidb; - -import com.fasterxml.jackson.databind.JsonNode; -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; -import io.airbyte.cdk.db.jdbc.JdbcDatabase; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; -import io.airbyte.cdk.integrations.destination.jdbc.JdbcSqlOperations; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import java.io.File; -import java.io.IOException; -import java.nio.file.Files; -import java.sql.SQLException; -import java.sql.Statement; -import java.util.List; - -@SuppressFBWarnings( - value = {"SQL_NONCONSTANT_STRING_PASSED_TO_EXECUTE"}, - justification = "There is little chance of SQL injection. There is also little need for statement reuse.
The basic statement is more readable than the prepared statement.") -public class TiDBSqlOperations extends JdbcSqlOperations { - - @Override - public void executeTransaction(final JdbcDatabase database, final List<String> queries) throws Exception { - database.executeWithinTransaction(queries); - } - - @Override - public void insertRecordsInternal(final JdbcDatabase database, - final List<AirbyteRecordMessage> records, - final String schemaName, - final String tmpTableName) - throws SQLException { - if (records.isEmpty()) { - return; - } - database.execute(connection -> { - File tmpFile = null; - try { - tmpFile = Files.createTempFile(tmpTableName + "-", ".tmp").toFile(); - writeBatchToFile(tmpFile, records); - String filePath = "'" + tmpFile.getAbsolutePath() + "'"; - String query = String.format( - "LOAD DATA LOCAL INFILE %s INTO TABLE %s.%s FIELDS TERMINATED BY ',' ENCLOSED BY '\"' ESCAPED BY '' LINES TERMINATED BY '\\r\\n'", - filePath, schemaName, tmpTableName); - - try (final Statement stmt = connection.createStatement()) { - stmt.execute(query); - } - - } catch (IOException e) { - throw new SQLException(e); - } catch (Exception e) { - throw new RuntimeException(e); - } finally { - try { - if (tmpFile != null) { - Files.delete(tmpFile.toPath()); - } - } catch (final IOException e) { - throw new RuntimeException(e); - } - } - }); - } - - @Override - public boolean isSchemaRequired() { - return false; - } - - @Override - public String createTableQuery(final JdbcDatabase database, final String schemaName, final String tableName) { - return String.format( - "CREATE TABLE IF NOT EXISTS %s.%s ( \n" - + "%s VARCHAR(256) PRIMARY KEY,\n" - + "%s JSON,\n" - + "%s TIMESTAMP(6) DEFAULT CURRENT_TIMESTAMP(6)\n" - + ");\n", - schemaName, tableName, JavaBaseConstants.COLUMN_NAME_AB_ID, JavaBaseConstants.COLUMN_NAME_DATA, JavaBaseConstants.COLUMN_NAME_EMITTED_AT); - } - - @Override - public void createSchemaIfNotExists(final JdbcDatabase database, final String schemaName) throws Exception { - // TiDB uses databases instead of schemas.
- database.execute(String.format("CREATE DATABASE IF NOT EXISTS %s;\n", schemaName)); - } - - @Override - protected JsonNode formatData(JsonNode data) { - return StandardNameTransformer.formatJsonPath(data); - } - -} diff --git a/airbyte-integrations/connectors/destination-tidb/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-tidb/src/main/resources/spec.json deleted file mode 100644 index 2de2e1bb17ed..000000000000 --- a/airbyte-integrations/connectors/destination-tidb/src/main/resources/spec.json +++ /dev/null @@ -1,65 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/destinations/tidb", - "supportsIncremental": true, - "supportsNormalization": true, - "supportsDBT": true, - "supported_destination_sync_modes": ["overwrite", "append", "append_dedup"], - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "TiDB Destination Spec", - "type": "object", - "required": ["host", "port", "username", "database"], - "additionalProperties": true, - "properties": { - "host": { - "title": "Host", - "description": "Hostname of the database.", - "type": "string", - "order": 0 - }, - "port": { - "title": "Port", - "description": "Port of the database.", - "type": "integer", - "minimum": 0, - "maximum": 65536, - "default": 4000, - "examples": ["4000"], - "order": 1 - }, - "database": { - "title": "Database", - "description": "Name of the database.", - "type": "string", - "order": 2 - }, - "username": { - "title": "User", - "description": "Username to use to access the database.", - "type": "string", - "order": 3 - }, - "password": { - "title": "Password", - "description": "Password associated with the username.", - "type": "string", - "airbyte_secret": true, - "default": "", - "order": 4 - }, - "ssl": { - "title": "SSL Connection", - "description": "Encrypt data using SSL.", - "type": "boolean", - "default": false, - "order": 5 - }, - "jdbc_url_params": { - "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).", - "title": "JDBC URL Params", - "type": "string", - "order": 6 - } - } - } -} diff --git a/airbyte-integrations/connectors/destination-tidb/src/test-integration/java/io/airbyte/integrations/destination/tidb/TiDBDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-tidb/src/test-integration/java/io/airbyte/integrations/destination/tidb/TiDBDestinationAcceptanceTest.java deleted file mode 100644 index 59af74411461..000000000000 --- a/airbyte-integrations/connectors/destination-tidb/src/test-integration/java/io/airbyte/integrations/destination/tidb/TiDBDestinationAcceptanceTest.java +++ /dev/null @@ -1,151 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
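`insertRecordsInternal` in the deleted `TiDBSqlOperations` above buffers each batch into a temporary CSV file and bulk-loads it in a single statement, which is far faster than row-by-row inserts and is why the connector sets `allowLoadLocalInfile=true` in its default JDBC parameters. A sketch of the statement that `String.format` call produces; the schema, table, and file names below are illustrative placeholders:

```java
public class LoadDataSketch {

  public static void main(String[] args) {
    // All three names below are illustrative placeholders.
    String filePath = "'/tmp/_airbyte_tmp_users-123.tmp'";
    String query = String.format(
        "LOAD DATA LOCAL INFILE %s INTO TABLE %s.%s FIELDS TERMINATED BY ',' "
            + "ENCLOSED BY '\"' ESCAPED BY '' LINES TERMINATED BY '\\r\\n'",
        filePath, "test", "_airbyte_tmp_users");
    System.out.println(query);
  }

}
```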
- */ - -package io.airbyte.integrations.destination.tidb; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.cdk.db.Database; -import io.airbyte.cdk.db.factory.DSLContextFactory; -import io.airbyte.cdk.db.factory.DatabaseDriver; -import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; -import io.airbyte.cdk.integrations.standardtest.destination.JdbcDestinationAcceptanceTest; -import io.airbyte.cdk.integrations.standardtest.destination.comparator.TestDataComparator; -import io.airbyte.cdk.integrations.util.HostPortResolver; -import io.airbyte.commons.json.Jsons; -import java.sql.SQLException; -import java.util.HashSet; -import java.util.List; -import java.util.stream.Collectors; -import org.jooq.DSLContext; -import org.jooq.SQLDialect; -import org.testcontainers.containers.GenericContainer; -import org.testcontainers.utility.DockerImageName; - -public class TiDBDestinationAcceptanceTest extends JdbcDestinationAcceptanceTest { - - private final StandardNameTransformer namingResolver = new TiDBSQLNameTransformer(); - private GenericContainer container; - private final String usernameKey = "root"; - private final String passwordKey = ""; - private final String databaseKey = "test"; - private final Boolean sslKey = false; - - @Override - protected String getImageName() { - return "airbyte/destination-tidb:dev"; - } - - @Override - protected boolean implementsNamespaces() { - return true; - } - - @Override - protected boolean supportBasicDataTypeTest() { - return true; - } - - @Override - protected boolean supportArrayDataTypeTest() { - return true; - } - - @Override - protected boolean supportObjectDataTypeTest() { - return true; - } - - @Override - protected TestDataComparator getTestDataComparator() { - return new TiDBTestDataComparator(); - } - - @Override - protected JsonNode getConfig() { - return Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, HostPortResolver.resolveHost(container)) - .put(JdbcUtils.USERNAME_KEY, usernameKey) - .put(JdbcUtils.DATABASE_KEY, databaseKey) - .put(JdbcUtils.PORT_KEY, HostPortResolver.resolvePort(container)) - .put(JdbcUtils.SSL_KEY, sslKey) - .build()); - } - - @Override - protected JsonNode getFailCheckConfig() { - return Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, HostPortResolver.resolveHost(container)) - .put(JdbcUtils.USERNAME_KEY, usernameKey) - .put(JdbcUtils.PASSWORD_KEY, "wrong password") - .put(JdbcUtils.DATABASE_KEY, databaseKey) - .put(JdbcUtils.PORT_KEY, HostPortResolver.resolvePort(container)) - .put(JdbcUtils.SSL_KEY, sslKey) - .build()); - } - - @Override - protected String getDefaultSchema(final JsonNode config) { - if (config.get(JdbcUtils.DATABASE_KEY) == null) { - return null; - } - return config.get(JdbcUtils.DATABASE_KEY).asText(); - } - - @Override - protected List retrieveRecords(TestDestinationEnv testEnv, - String streamName, - String namespace, - JsonNode streamSchema) - throws Exception { - return retrieveRecordsFromTable(namingResolver.getRawTableName(streamName), namespace) - .stream() - .map(r -> r.get(JavaBaseConstants.COLUMN_NAME_DATA)) - .collect(Collectors.toList()); - } - - private List retrieveRecordsFromTable(final String tableName, final String schemaName) throws SQLException { - try (final DSLContext dslContext = DSLContextFactory.create( - usernameKey, - passwordKey, - 
DatabaseDriver.MYSQL.getDriverClassName(), - String.format(DatabaseDriver.MYSQL.getUrlFormatString(), - container.getHost(), - container.getFirstMappedPort(), - databaseKey), - SQLDialect.MYSQL)) { - return new Database(dslContext).query( - ctx -> ctx - .fetch(String.format("SELECT * FROM %s.%s ORDER BY %s ASC;", schemaName, tableName, - JavaBaseConstants.COLUMN_NAME_EMITTED_AT)) - .stream() - .map(this::getJsonFromRecord) - .collect(Collectors.toList())); - } - } - - @Override - protected List retrieveNormalizedRecords(final TestDestinationEnv testEnv, final String streamName, final String namespace) - throws Exception { - final String tableName = namingResolver.getIdentifier(streamName); - final String schema = namingResolver.getIdentifier(namespace); - return retrieveRecordsFromTable(tableName, schema); - } - - @Override - protected void setup(TestDestinationEnv testEnv, HashSet TEST_SCHEMAS) { - container = new GenericContainer(DockerImageName.parse("pingcap/tidb:nightly")) - .withExposedPorts(4000); - container.start(); - } - - @Override - protected void tearDown(TestDestinationEnv testEnv) { - container.stop(); - container.close(); - } - -} diff --git a/airbyte-integrations/connectors/destination-tidb/src/test-integration/java/io/airbyte/integrations/destination/tidb/TiDBTestDataComparator.java b/airbyte-integrations/connectors/destination-tidb/src/test-integration/java/io/airbyte/integrations/destination/tidb/TiDBTestDataComparator.java deleted file mode 100644 index cf65de1e2899..000000000000 --- a/airbyte-integrations/connectors/destination-tidb/src/test-integration/java/io/airbyte/integrations/destination/tidb/TiDBTestDataComparator.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.tidb; - -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; -import io.airbyte.cdk.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; -import java.time.LocalDateTime; -import java.time.format.DateTimeFormatter; -import java.util.ArrayList; -import java.util.List; - -public class TiDBTestDataComparator extends AdvancedTestDataComparator { - - private final StandardNameTransformer namingResolver = new TiDBSQLNameTransformer(); - private final String TIDB_DATATIME_FORMAT = "yyyy-MM-dd HH:mm:ss.S"; - - @Override - protected List resolveIdentifier(final String identifier) { - final List result = new ArrayList<>(); - final String resolved = namingResolver.getIdentifier(identifier); - result.add(identifier); - result.add(resolved); - if (!resolved.startsWith("\"")) { - result.add(resolved.toLowerCase()); - } - return result; - } - - @Override - protected boolean compareDateTimeValues(String expectedValue, String actualValue) { - if (!isDateTimeValue(actualValue)) { - actualValue = LocalDateTime.parse(actualValue, DateTimeFormatter.ofPattern(TIDB_DATATIME_FORMAT)).toString(); - } - return super.compareDateTimeValues(expectedValue, actualValue); - } - - @Override - protected boolean compareBooleanValues(String firstBooleanValue, String secondBooleanValue) { - if (secondBooleanValue.equalsIgnoreCase("true") || secondBooleanValue.equalsIgnoreCase("false")) { - return super.compareBooleanValues(firstBooleanValue, secondBooleanValue); - } else { - return super.compareBooleanValues(firstBooleanValue, String.valueOf(secondBooleanValue.equals("1"))); - } - } - -} diff --git 
a/airbyte-integrations/connectors/destination-tidb/src/test/java/io/airbyte/integrations/destination/tidb/TiDBDestinationTest.java b/airbyte-integrations/connectors/destination-tidb/src/test/java/io/airbyte/integrations/destination/tidb/TiDBDestinationTest.java deleted file mode 100644 index c1fa5e99dd46..000000000000 --- a/airbyte-integrations/connectors/destination-tidb/src/test/java/io/airbyte/integrations/destination/tidb/TiDBDestinationTest.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.tidb; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.cdk.db.factory.DataSourceFactory; -import io.airbyte.cdk.db.factory.DatabaseDriver; -import io.airbyte.cdk.db.jdbc.DefaultJdbcDatabase; -import io.airbyte.cdk.db.jdbc.JdbcDatabase; -import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.cdk.integrations.base.AirbyteMessageConsumer; -import io.airbyte.cdk.integrations.base.Destination; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.Field; -import io.airbyte.protocol.models.JsonSchemaType; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus.Status; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import io.airbyte.protocol.models.v0.CatalogHelpers; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.time.Instant; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; -import java.util.stream.IntStream; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.testcontainers.containers.GenericContainer; -import org.testcontainers.utility.DockerImageName; - -public class TiDBDestinationTest { - - private GenericContainer container; - final String SCHEMA_NAME = "public"; - final String STREAM_NAME = "id_and_name"; - - @BeforeEach - public void setup() { - container = new GenericContainer(DockerImageName.parse("pingcap/tidb:nightly")) - .withExposedPorts(4000); - container.start(); - } - - @AfterEach - public void tearDown() { - container.stop(); - container.close(); - } - - private JsonNode getConfig() { - return Jsons.jsonNode(ImmutableMap.of( - JdbcUtils.HOST_KEY, "127.0.0.1", - JdbcUtils.PORT_KEY, container.getFirstMappedPort(), - JdbcUtils.USERNAME_KEY, "root", - JdbcUtils.DATABASE_KEY, "test")); - } - - @Test - public void sanityTest() throws Exception { - final Destination destination = new TiDBDestination(); - final ConfiguredAirbyteCatalog CATALOG = new ConfiguredAirbyteCatalog().withStreams(List.of( - CatalogHelpers.createConfiguredAirbyteStream( - STREAM_NAME, - SCHEMA_NAME, - Field.of("id", JsonSchemaType.NUMBER), - Field.of("name", JsonSchemaType.STRING)))); - JsonNode config = getConfig(); - final AirbyteMessageConsumer consumer = destination.getConsumer(config, CATALOG, Destination::defaultOutputRecordCollector); - final List expectedRecords = getNRecords(10); - consumer.start(); - expectedRecords.forEach(m -> { - try { - consumer.accept(m); - } catch (final Exception e) { - throw new RuntimeException(e); - } - }); - consumer.accept(new AirbyteMessage() - 
.withType(AirbyteMessage.Type.STATE) - .withState(new AirbyteStateMessage().withData(Jsons.jsonNode(ImmutableMap.of(SCHEMA_NAME + "." + STREAM_NAME, 10))))); - consumer.close(); - final JdbcDatabase database = new DefaultJdbcDatabase( - DataSourceFactory.create( - config.get(JdbcUtils.USERNAME_KEY).asText(), - "", - DatabaseDriver.MYSQL.getDriverClassName(), - String.format(DatabaseDriver.MYSQL.getUrlFormatString(), - config.get(JdbcUtils.HOST_KEY).asText(), - config.get(JdbcUtils.PORT_KEY).asInt(), - config.get(JdbcUtils.DATABASE_KEY).asText()))); - final List actualRecords = database.bufferedResultSetQuery( - connection -> connection.createStatement().executeQuery("SELECT * FROM public._airbyte_raw_id_and_name;"), - JdbcUtils.getDefaultSourceOperations()::rowToJson); - final Map expectedRecordsWithId = new HashMap<>(); - expectedRecords.stream().map(AirbyteMessage::getRecord).map(AirbyteRecordMessage::getData) - .forEach(data -> expectedRecordsWithId.put(data.get("id").asInt(), data)); - actualRecords.stream().map(o -> o.get("_airbyte_data").asText()).map(Jsons::deserialize).forEach(actual -> { - assertTrue(expectedRecordsWithId.containsKey(actual.get("id").asInt())); - assertEquals(expectedRecordsWithId.get(actual.get("id").asInt()), actual); - }); - } - - private List getNRecords(final int n) { - return IntStream.range(0, n) - .boxed() - .map(i -> new AirbyteMessage() - .withType(AirbyteMessage.Type.RECORD) - .withRecord(new AirbyteRecordMessage() - .withStream(STREAM_NAME) - .withNamespace(SCHEMA_NAME) - .withEmittedAt(Instant.now().toEpochMilli()) - .withData(Jsons.jsonNode(ImmutableMap.of("id", i, "name", "human " + i))))) - .collect(Collectors.toList()); - } - - @Test - public void testCheckConnection() throws Exception { - Destination destination = new TiDBDestination(); - assertEquals(Status.SUCCEEDED, destination.check(getConfig()).getStatus()); - } - -} diff --git a/airbyte-integrations/connectors/destination-timeplus/.dockerignore b/airbyte-integrations/connectors/destination-timeplus/.dockerignore deleted file mode 100755 index 40dea8ad1f6f..000000000000 --- a/airbyte-integrations/connectors/destination-timeplus/.dockerignore +++ /dev/null @@ -1,5 +0,0 @@ -* -!Dockerfile -!main.py -!destination_timeplus -!setup.py diff --git a/airbyte-integrations/connectors/destination-timeplus/Dockerfile b/airbyte-integrations/connectors/destination-timeplus/Dockerfile deleted file mode 100755 index 34f3c7492dd3..000000000000 --- a/airbyte-integrations/connectors/destination-timeplus/Dockerfile +++ /dev/null @@ -1,38 +0,0 @@ -FROM python:3.9.11-alpine3.15 as base - -# build and load all requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -# upgrade pip to the latest version -RUN apk --no-cache upgrade \ - && pip install --upgrade pip \ - && apk --no-cache add tzdata build-base - - -COPY setup.py ./ -# install necessary packages to a temporary folder -RUN pip install --prefix=/install . - -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# bash is installed for more convenient debugging. 
-RUN apk --no-cache add bash - -# copy payload code only -COPY main.py ./ -COPY destination_timeplus ./destination_timeplus - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=0.1.0 -LABEL io.airbyte.name=airbyte/destination-timeplus diff --git a/airbyte-integrations/connectors/destination-timeplus/README.md b/airbyte-integrations/connectors/destination-timeplus/README.md deleted file mode 100755 index 6ba14518f631..000000000000 --- a/airbyte-integrations/connectors/destination-timeplus/README.md +++ /dev/null @@ -1,108 +0,0 @@ -# Timeplus Destination - -This is the repository for the Timeplus destination connector, written in Python. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/destinations/timeplus). - -## Local development - -### Prerequisites - -#### Minimum Python version required `= 3.9.0` - -#### Build & Activate Virtual Environment and install dependencies - -From this connector directory, create a virtual environment: - -``` -python -m venv .venv -``` - -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: - -``` -source .venv/bin/activate -pip install -r requirements.txt -``` - -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. - -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. - -#### Create credentials - -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/destinations/timeplus) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `destination_timeplus/spec.json` file. -Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. - -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `destination timeplus test creds` -and place them into `secrets/config.json`. - -### Locally running the connector - -``` -python main.py spec -python main.py check --config secrets/config.json -cat integration_tests/messages.jsonl | python main.py write --config secrets/config.json --catalog integration_tests/configured_catalog.json -``` - -### Locally running the connector docker image - - -#### Build -**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** -```bash -airbyte-ci connectors --name=destination-timeplus build -``` - -An image will be built with the tag `airbyte/destination-timeplus:dev`. - -**Via `docker build`:** -```bash -docker build -t airbyte/destination-timeplus:dev . 
-``` - -#### Run - -Then run any of the connector commands as follows: - -``` -docker run --rm airbyte/destination-timeplus:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-timeplus:dev check --config /secrets/config.json -# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages -cat messages.jsonl | docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-timeplus:dev write --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` - - -## Testing -You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): -```bash -airbyte-ci connectors --name=destination-timeplus test -``` - -### Customizing Acceptance Tests -Customize the `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. -If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py. - -## Dependency Management - -All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. -We split dependencies into two groups: - -- dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list. -- dependencies required for testing go in the `TEST_REQUIREMENTS` list. - -### Publishing a new version of the connector -You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? -1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-timeplus test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). -3. Make sure the `metadata.yaml` content is up to date. -4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/destinations/timeplus.md`). -5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). -6. Pat yourself on the back for being an awesome contributor. -7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. - diff --git a/airbyte-integrations/connectors/destination-timeplus/destination_timeplus/__init__.py b/airbyte-integrations/connectors/destination-timeplus/destination_timeplus/__init__.py deleted file mode 100755 index fa8a30eb633c..000000000000 --- a/airbyte-integrations/connectors/destination-timeplus/destination_timeplus/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-# - - -from .destination import DestinationTimeplus - -__all__ = ["DestinationTimeplus"] diff --git a/airbyte-integrations/connectors/destination-timeplus/destination_timeplus/destination.py b/airbyte-integrations/connectors/destination-timeplus/destination_timeplus/destination.py deleted file mode 100755 index 3cf5c8920e78..000000000000 --- a/airbyte-integrations/connectors/destination-timeplus/destination_timeplus/destination.py +++ /dev/null @@ -1,160 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from logging import getLogger -from typing import Any, Iterable, Mapping - -from airbyte_cdk import AirbyteLogger -from airbyte_cdk.destinations import Destination -from airbyte_cdk.models import ( - AirbyteConnectionStatus, - AirbyteMessage, - AirbyteStream, - ConfiguredAirbyteCatalog, - DestinationSyncMode, - Status, - Type, -) -from timeplus import Environment, Stream - -logger = getLogger("airbyte") - - -class DestinationTimeplus(Destination): - def write( - self, config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog, input_messages: Iterable[AirbyteMessage] - ) -> Iterable[AirbyteMessage]: - """ - Reads the input stream of messages, config, and catalog to write data to the destination. - - This method returns an iterable (typically a generator of AirbyteMessages via yield) containing state messages received - in the input message stream. Outputting a state message means that every AirbyteRecordMessage which came before it has been - successfully persisted to the destination. This is used to ensure fault tolerance in the case that a sync fails before fully completing, - then the source is given the last state message output from this method as the starting point of the next sync. - - :param config: dict of JSON configuration matching the configuration declared in spec.json - :param configured_catalog: The Configured Catalog describing the schema of the data being received and how it should be persisted in the - destination - :param input_messages: The stream of input messages received from the source - :return: Iterable of AirbyteStateMessages wrapped in AirbyteMessage structs - """ - endpoint = config["endpoint"] - apikey = config["apikey"] - if endpoint[-1] == "/": - endpoint = endpoint[0 : len(endpoint) - 1] - env = Environment().address(endpoint).apikey(apikey) - stream_list = Stream(env=env).list() - all_streams = {s.name for s in stream_list} - - # only support "overwrite", "append" - for configured_stream in configured_catalog.streams: - is_overwrite = configured_stream.destination_sync_mode == DestinationSyncMode.overwrite - stream_exists = configured_stream.stream.name in all_streams - logger.info(f"Stream {configured_stream.stream.name} {configured_stream.destination_sync_mode}") - need_delete_stream = False - need_create_stream = False - if is_overwrite: - if stream_exists: - # delete all data in the existing stream and recreate the stream. - need_delete_stream = True - need_create_stream = True - else: - # only need to create the stream - need_create_stream = True - else: - if stream_exists: - # for append mode, just add more data to the existing stream. No need to do anything. - pass - else: - # for append mode, create the stream and append data to it. 
- need_create_stream = True - - if need_delete_stream: - # delete the existing stream - Stream(env=env).name(configured_stream.stream.name).get().delete() - logger.info(f"Stream {configured_stream.stream.name} deleted successfully") - if need_create_stream: - # create a new stream - DestinationTimeplus.create_stream(env, configured_stream.stream) - logger.info(f"Stream {configured_stream.stream.name} created successfully") - - for message in input_messages: - if message.type == Type.STATE: - # Emitting a state message indicates that all records which came before it have been written to the destination. So we flush - # the queue to ensure writes happen, then output the state message to indicate it's safe to checkpoint state - yield message - elif message.type == Type.RECORD: - record = message.record - - # this code is to send data to a single-column stream - # Stream(env=env).name(record.stream).column("raw", "string").ingest(payload=record.data) - - Stream(env=env).name(record.stream).ingest(payload=record.data, format="streaming") - else: - # ignore other message types for now - continue - - @staticmethod - def create_stream(env, stream: AirbyteStream): - # single-column stream - # Stream(env=env).name(stream.name).column('raw','string').create() - - tp_stream = Stream(env=env).name(stream.name.strip()) - for name, v in stream.json_schema["properties"].items(): - tp_stream.column(name.strip(), DestinationTimeplus.type_mapping(v)) - tp_stream.create() - - @staticmethod - def type_mapping(v) -> str: - airbyte_type = v["type"] - if type(airbyte_type) is list: - for t in list(airbyte_type): - if t != "null": - type_def = {"type": t} - if t == "array": - type_def["items"] = v["items"] - return DestinationTimeplus.type_mapping(type_def) - if airbyte_type == "number": - return "float" - elif airbyte_type == "integer": - return "integer" - elif airbyte_type == "boolean": - return "bool" - elif airbyte_type == "object": - return "string" - elif airbyte_type == "array": - return f"array({DestinationTimeplus.type_mapping(v['items'])})" - else: - return "string" - - def check(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> AirbyteConnectionStatus: - """ - Tests if the input configuration can be used to successfully connect to the destination with the needed permissions - e.g: if a provided API token or password can be used to connect and write to the destination.
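- Example config (a sketch; values are hypothetical, shape follows spec.json): {"endpoint": "https://us.timeplus.cloud/workspace_id", "apikey": "<60-character personal API key>"}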
- - :param logger: Logging object to display debug/info/error to the logs - (logs will not be accessible via airbyte UI if they are not passed to this logger) - :param config: Json object containing the configuration of this destination, content of this json is as specified in - the properties of the spec.json file - - :return: AirbyteConnectionStatus indicating a Success or Failure - """ - try: - endpoint = config["endpoint"] - apikey = config["apikey"] - if not endpoint.startswith("http"): - return AirbyteConnectionStatus(status=Status.FAILED, message="Endpoint must start with http or https") - if len(apikey) != 60: - return AirbyteConnectionStatus(status=Status.FAILED, message="API Key must be 60 characters") - if endpoint[-1] == "/": - endpoint = endpoint[0 : len(endpoint) - 1] - env = Environment().address(endpoint).apikey(apikey) - Stream(env=env).list() - logger.info("Successfully connected to " + endpoint) - return AirbyteConnectionStatus(status=Status.SUCCEEDED) - except Exception as e: - return AirbyteConnectionStatus( - status=Status.FAILED, message=f"Fail to connect to Timeplus endpoint with the given API key: {repr(e)}" - ) diff --git a/airbyte-integrations/connectors/destination-timeplus/destination_timeplus/spec.json b/airbyte-integrations/connectors/destination-timeplus/destination_timeplus/spec.json deleted file mode 100755 index 6a56f1b0252e..000000000000 --- a/airbyte-integrations/connectors/destination-timeplus/destination_timeplus/spec.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "documentationUrl": "https://docs.timeplus.com", - "supported_destination_sync_modes": ["overwrite", "append"], - "supportsIncremental": true, - "supportsDBT": false, - "supportsNormalization": false, - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Destination Timeplus", - "type": "object", - "required": ["endpoint", "apikey"], - "additionalProperties": false, - "properties": { - "endpoint": { - "title": "Endpoint", - "description": "Timeplus workspace endpoint", - "type": "string", - "default": "https://us.timeplus.cloud/", - "examples": ["https://us.timeplus.cloud/workspace_id"], - "order": 0 - }, - "apikey": { - "title": "API key", - "description": "Personal API key", - "type": "string", - "airbyte_secret": true, - "order": 1 - } - } - } -} diff --git a/airbyte-integrations/connectors/destination-timeplus/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/destination-timeplus/integration_tests/configured_catalog.json deleted file mode 100644 index 96540519acb1..000000000000 --- a/airbyte-integrations/connectors/destination-timeplus/integration_tests/configured_catalog.json +++ /dev/null @@ -1,263 +0,0 @@ -{ - "streams": [ - { - "stream": { - "name": "airbyte_single_str_col", - "supported_sync_modes": ["full_refresh"], - "source_defined_cursor": false, - "json_schema": { - "type": "object", - "properties": { - "raw": { - "type": "string" - } - } - } - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "airbyte_acceptance_table", - "supported_sync_modes": ["full_refresh"], - "source_defined_cursor": false, - "json_schema": { - "type": "object", - "properties": { - "column1": { - "type": "string" - }, - "column2": { - "type": "number" - }, - "column3": { - "type": "string", - "format": "datetime", - "airbyte_type": "timestamp_without_timezone" - }, - "column4": { - "type": "number" - }, - "column5": { - "type": "array", - "items": { - "type": "integer" - } - } - } - } 
- }, - "sync_mode": "full_refresh", - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "airbyte_test_boolean", - "supported_sync_modes": ["full_refresh"], - "source_defined_cursor": false, - "json_schema": { - "type": "object", - "properties": { - "column1": { - "type": "boolean" - }, - "column2": { - "type": "number" - } - } - } - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "append" - }, - { - "destination_sync_mode": "overwrite", - "stream": { - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "id": { "type": ["null", "integer"] }, - "name": { "type": ["null", "string"] }, - "base_experience": { "type": ["null", "integer"] }, - "height": { "type": ["null", "integer"] }, - "is_default": { "type": ["null", "boolean"] }, - "order": { "type": ["null", "integer"] }, - "weight": { "type": ["null", "integer"] }, - "abilities": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "is_hidden": { "type": ["null", "boolean"] }, - "slot": { "type": ["null", "integer"] }, - "ability": { - "type": ["null", "object"], - "properties": { - "name": { "type": ["null", "string"] }, - "url": { "type": ["null", "string"] } - } - } - } - } - }, - "forms": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "name": { "type": ["null", "string"] }, - "url": { "type": ["null", "string"] } - } - } - }, - "game_indices": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "game_index": { "type": ["null", "integer"] }, - "version": { - "type": ["null", "object"], - "properties": { - "name": { "type": ["null", "string"] }, - "url": { "type": ["null", "string"] } - } - } - } - } - }, - "held_items": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "item": { - "type": ["null", "object"], - "properties": { - "name": { "type": ["null", "string"] }, - "url": { "type": ["null", "string"] } - } - }, - "version_details": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "version": { - "type": ["null", "object"], - "properties": { - "name": { "type": ["null", "string"] }, - "url": { "type": ["null", "string"] } - } - }, - "rarity": { "type": ["null", "integer"] } - } - } - } - } - } - }, - "location_area_encounters": { "type": ["null", "string"] }, - "moves": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "move": { - "type": ["null", "object"], - "properties": { - "name": { "type": ["null", "string"] }, - "url": { "type": ["null", "string"] } - } - }, - "version_group_details": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "move_learn_method": { - "type": ["null", "object"], - "properties": { - "name": { "type": ["null", "string"] }, - "url": { "type": ["null", "string"] } - } - }, - "version_group": { - "type": ["null", "object"], - "properties": { - "name": { "type": ["null", "string"] }, - "url": { "type": ["null", "string"] } - } - }, - "level_learned_at": { "type": ["null", "integer"] } - } - } - } - } - } - }, - "sprites": { - "type": ["null", "object"], - "properties": { - "front_default": { "type": ["null", "string"] }, - "front_shiny": { "type": ["null", "string"] }, - "front_female": { "type": ["null", "string"] }, - "front_shiny_female": { "type": ["null", "string"] }, - "back_default": { "type": ["null", "string"] 
}, - "back_shiny": { "type": ["null", "string"] }, - "back_female": { "type": ["null", "string"] }, - "back_shiny_female": { "type": ["null", "string"] } - } - }, - "species": { - "type": ["null", "object"], - "properties": { - "name": { "type": ["null", "string"] }, - "url": { "type": ["null", "string"] } - } - }, - "stats": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "stat": { - "type": ["null", "object"], - "properties": { - "name": { "type": ["null", "string"] }, - "url": { "type": ["null", "string"] } - } - }, - "effort": { "type": ["null", "integer"] }, - "base_stat": { "type": ["null", "integer"] } - } - } - }, - "types": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "slot": { "type": ["null", "integer"] }, - "type": { - "type": ["null", "object"], - "properties": { - "name": { "type": ["null", "string"] }, - "url": { "type": ["null", "string"] } - } - } - } - } - } - } - }, - "name": "pokemon", - "source_defined_cursor": false, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh" - } - ] -} diff --git a/airbyte-integrations/connectors/destination-timeplus/integration_tests/integration_test.py b/airbyte-integrations/connectors/destination-timeplus/integration_tests/integration_test.py deleted file mode 100755 index e3de7dac9e71..000000000000 --- a/airbyte-integrations/connectors/destination-timeplus/integration_tests/integration_test.py +++ /dev/null @@ -1,74 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import json -import logging -from datetime import datetime -from typing import Any, Mapping - -import pytest -from airbyte_cdk.models import ( - AirbyteMessage, - AirbyteRecordMessage, - AirbyteStream, - ConfiguredAirbyteCatalog, - ConfiguredAirbyteStream, - DestinationSyncMode, - Status, - SyncMode, - Type, -) -from destination_timeplus import DestinationTimeplus - - -@pytest.fixture(name="config") -def config_fixture() -> Mapping[str, Any]: - with open("secrets/config.json", "r") as f: - return json.loads(f.read()) - - -@pytest.fixture(name="configured_catalog") -def configured_catalog_fixture() -> ConfiguredAirbyteCatalog: - stream_schema = {"type": "object", "properties": {"string_col": {"type": "str"}, "int_col": {"type": "integer"}}} - append_stream = ConfiguredAirbyteStream( - stream=AirbyteStream(name="append_stream", json_schema=stream_schema, supported_sync_modes=[SyncMode.incremental]), - sync_mode=SyncMode.incremental, - destination_sync_mode=DestinationSyncMode.append, - ) - - overwrite_stream = ConfiguredAirbyteStream( - stream=AirbyteStream(name="overwrite_stream", json_schema=stream_schema, supported_sync_modes=[SyncMode.incremental]), - sync_mode=SyncMode.incremental, - destination_sync_mode=DestinationSyncMode.overwrite, - ) - - return ConfiguredAirbyteCatalog(streams=[append_stream, overwrite_stream]) - - -def test_check_valid_config(config: Mapping): - outcome = DestinationTimeplus().check(logging.getLogger("airbyte"), config) - assert outcome.status == Status.SUCCEEDED - - -def test_check_invalid_config(): - outcome = DestinationTimeplus().check(logging.getLogger("airbyte"), {"secret_key": "not_a_real_secret"}) - assert outcome.status == Status.FAILED - - -def test_write(config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog): - records = [ - AirbyteMessage( - type=Type.RECORD, - record=AirbyteRecordMessage( - stream="append_stream", - data={ - "string_col": "example", - "int_col": 1, - }, - 
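- # emitted_at is an epoch timestamp in milliseconds, hence the "* 1000" below: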
emitted_at=int(datetime.now().timestamp()) * 1000, - ), - ) - ] - dest = DestinationTimeplus() - dest.write(config, configured_catalog, records) diff --git a/airbyte-integrations/connectors/destination-timeplus/integration_tests/messages.jsonl b/airbyte-integrations/connectors/destination-timeplus/integration_tests/messages.jsonl deleted file mode 100644 index 6db122f96411..000000000000 --- a/airbyte-integrations/connectors/destination-timeplus/integration_tests/messages.jsonl +++ /dev/null @@ -1,5 +0,0 @@ -{"type": "RECORD", "record": {"stream": "airbyte_single_str_col", "data": {"raw": "my_value"}, "emitted_at": 1626172757000}} -{"type": "RECORD", "record": {"stream": "airbyte_acceptance_table", "data": {"column1": "my_value", "column2": 221, "column3": "2021-01-01T20:10:22", "column4": 1.214, "column5": [1,2,3]}, "emitted_at": 1626172757000}} -{"type": "RECORD", "record": {"stream": "airbyte_acceptance_table", "data": {"column1": "my_value2", "column2": 222, "column3": "2021-01-02T22:10:22", "column5": [1,2,null]}, "emitted_at": 1626172757000}} -{"type": "RECORD", "record": {"stream": "airbyte_test_boolean", "data": {"column1": true, "column2": 222}, "emitted_at": 1626172757000}} -{"type": "RECORD", "record": {"stream": "pokemon","data": { "abilities": [ { "ability": { "name": "limber", "url": "https://pokeapi.co/api/v2/ability/7/" }, "is_hidden": false, "slot": 1 }, { "ability": { "name": "imposter", "url": "https://pokeapi.co/api/v2/ability/150/" }, "is_hidden": true, "slot": 3 } ], "base_experience": 101, "forms": [ { "name": "ditto", "url": "https://pokeapi.co/api/v2/pokemon-form/132/" } ], "game_indices": [ { "game_index": 76, "version": { "name": "red", "url": "https://pokeapi.co/api/v2/version/1/" } }, { "game_index": 76, "version": { "name": "blue", "url": "https://pokeapi.co/api/v2/version/2/" } }, { "game_index": 76, "version": { "name": "yellow", "url": "https://pokeapi.co/api/v2/version/3/" } }, { "game_index": 132, "version": { "name": "gold", "url": "https://pokeapi.co/api/v2/version/4/" } }, { "game_index": 132, "version": { "name": "silver", "url": "https://pokeapi.co/api/v2/version/5/" } }, { "game_index": 132, "version": { "name": "crystal", "url": "https://pokeapi.co/api/v2/version/6/" } }, { "game_index": 132, "version": { "name": "ruby", "url": "https://pokeapi.co/api/v2/version/7/" } }, { "game_index": 132, "version": { "name": "sapphire", "url": "https://pokeapi.co/api/v2/version/8/" } }, { "game_index": 132, "version": { "name": "emerald", "url": "https://pokeapi.co/api/v2/version/9/" } }, { "game_index": 132, "version": { "name": "firered", "url": "https://pokeapi.co/api/v2/version/10/" } }, { "game_index": 132, "version": { "name": "leafgreen", "url": "https://pokeapi.co/api/v2/version/11/" } }, { "game_index": 132, "version": { "name": "diamond", "url": "https://pokeapi.co/api/v2/version/12/" } }, { "game_index": 132, "version": { "name": "pearl", "url": "https://pokeapi.co/api/v2/version/13/" } }, { "game_index": 132, "version": { "name": "platinum", "url": "https://pokeapi.co/api/v2/version/14/" } }, { "game_index": 132, "version": { "name": "heartgold", "url": "https://pokeapi.co/api/v2/version/15/" } }, { "game_index": 132, "version": { "name": "soulsilver", "url": "https://pokeapi.co/api/v2/version/16/" } }, { "game_index": 132, "version": { "name": "black", "url": "https://pokeapi.co/api/v2/version/17/" } }, { "game_index": 132, "version": { "name": "white", "url": "https://pokeapi.co/api/v2/version/18/" } }, { "game_index": 132, "version": { "name": 
"black-2", "url": "https://pokeapi.co/api/v2/version/21/" } }, { "game_index": 132, "version": { "name": "white-2", "url": "https://pokeapi.co/api/v2/version/22/" } } ], "height": 3, "held_items": [ { "item": { "name": "metal-powder", "url": "https://pokeapi.co/api/v2/item/234/" }, "version_details": [ { "rarity": 5, "version": { "name": "ruby", "url": "https://pokeapi.co/api/v2/version/7/" } }, { "rarity": 5, "version": { "name": "sapphire", "url": "https://pokeapi.co/api/v2/version/8/" } }, { "rarity": 5, "version": { "name": "emerald", "url": "https://pokeapi.co/api/v2/version/9/" } }, { "rarity": 5, "version": { "name": "firered", "url": "https://pokeapi.co/api/v2/version/10/" } }, { "rarity": 5, "version": { "name": "leafgreen", "url": "https://pokeapi.co/api/v2/version/11/" } }, { "rarity": 5, "version": { "name": "diamond", "url": "https://pokeapi.co/api/v2/version/12/" } }, { "rarity": 5, "version": { "name": "pearl", "url": "https://pokeapi.co/api/v2/version/13/" } }, { "rarity": 5, "version": { "name": "platinum", "url": "https://pokeapi.co/api/v2/version/14/" } }, { "rarity": 5, "version": { "name": "heartgold", "url": "https://pokeapi.co/api/v2/version/15/" } }, { "rarity": 5, "version": { "name": "soulsilver", "url": "https://pokeapi.co/api/v2/version/16/" } }, { "rarity": 5, "version": { "name": "black", "url": "https://pokeapi.co/api/v2/version/17/" } }, { "rarity": 5, "version": { "name": "white", "url": "https://pokeapi.co/api/v2/version/18/" } }, { "rarity": 5, "version": { "name": "black-2", "url": "https://pokeapi.co/api/v2/version/21/" } }, { "rarity": 5, "version": { "name": "white-2", "url": "https://pokeapi.co/api/v2/version/22/" } }, { "rarity": 5, "version": { "name": "x", "url": "https://pokeapi.co/api/v2/version/23/" } }, { "rarity": 5, "version": { "name": "y", "url": "https://pokeapi.co/api/v2/version/24/" } }, { "rarity": 5, "version": { "name": "omega-ruby", "url": "https://pokeapi.co/api/v2/version/25/" } }, { "rarity": 5, "version": { "name": "alpha-sapphire", "url": "https://pokeapi.co/api/v2/version/26/" } }, { "rarity": 5, "version": { "name": "sun", "url": "https://pokeapi.co/api/v2/version/27/" } }, { "rarity": 5, "version": { "name": "moon", "url": "https://pokeapi.co/api/v2/version/28/" } }, { "rarity": 5, "version": { "name": "ultra-sun", "url": "https://pokeapi.co/api/v2/version/29/" } }, { "rarity": 5, "version": { "name": "ultra-moon", "url": "https://pokeapi.co/api/v2/version/30/" } } ] }, { "item": { "name": "quick-powder", "url": "https://pokeapi.co/api/v2/item/251/" }, "version_details": [ { "rarity": 50, "version": { "name": "diamond", "url": "https://pokeapi.co/api/v2/version/12/" } }, { "rarity": 50, "version": { "name": "pearl", "url": "https://pokeapi.co/api/v2/version/13/" } }, { "rarity": 50, "version": { "name": "platinum", "url": "https://pokeapi.co/api/v2/version/14/" } }, { "rarity": 50, "version": { "name": "heartgold", "url": "https://pokeapi.co/api/v2/version/15/" } }, { "rarity": 50, "version": { "name": "soulsilver", "url": "https://pokeapi.co/api/v2/version/16/" } }, { "rarity": 50, "version": { "name": "black", "url": "https://pokeapi.co/api/v2/version/17/" } }, { "rarity": 50, "version": { "name": "white", "url": "https://pokeapi.co/api/v2/version/18/" } }, { "rarity": 50, "version": { "name": "black-2", "url": "https://pokeapi.co/api/v2/version/21/" } }, { "rarity": 50, "version": { "name": "white-2", "url": "https://pokeapi.co/api/v2/version/22/" } }, { "rarity": 50, "version": { "name": "x", "url": 
"https://pokeapi.co/api/v2/version/23/" } }, { "rarity": 50, "version": { "name": "y", "url": "https://pokeapi.co/api/v2/version/24/" } }, { "rarity": 50, "version": { "name": "omega-ruby", "url": "https://pokeapi.co/api/v2/version/25/" } }, { "rarity": 50, "version": { "name": "alpha-sapphire", "url": "https://pokeapi.co/api/v2/version/26/" } }, { "rarity": 50, "version": { "name": "sun", "url": "https://pokeapi.co/api/v2/version/27/" } }, { "rarity": 50, "version": { "name": "moon", "url": "https://pokeapi.co/api/v2/version/28/" } }, { "rarity": 50, "version": { "name": "ultra-sun", "url": "https://pokeapi.co/api/v2/version/29/" } }, { "rarity": 50, "version": { "name": "ultra-moon", "url": "https://pokeapi.co/api/v2/version/30/" } } ] } ], "id": 132, "is_default": true, "location_area_encounters": "https://pokeapi.co/api/v2/pokemon/132/encounters", "moves": [ { "move": { "name": "transform", "url": "https://pokeapi.co/api/v2/move/144/" }, "version_group_details": [ { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "red-blue", "url": "https://pokeapi.co/api/v2/version-group/1/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "yellow", "url": "https://pokeapi.co/api/v2/version-group/2/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "gold-silver", "url": "https://pokeapi.co/api/v2/version-group/3/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "crystal", "url": "https://pokeapi.co/api/v2/version-group/4/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "ruby-sapphire", "url": "https://pokeapi.co/api/v2/version-group/5/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "emerald", "url": "https://pokeapi.co/api/v2/version-group/6/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "firered-leafgreen", "url": "https://pokeapi.co/api/v2/version-group/7/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "diamond-pearl", "url": "https://pokeapi.co/api/v2/version-group/8/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "platinum", "url": "https://pokeapi.co/api/v2/version-group/9/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "heartgold-soulsilver", "url": "https://pokeapi.co/api/v2/version-group/10/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "black-white", "url": "https://pokeapi.co/api/v2/version-group/11/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": 
"https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "colosseum", "url": "https://pokeapi.co/api/v2/version-group/12/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "xd", "url": "https://pokeapi.co/api/v2/version-group/13/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "black-2-white-2", "url": "https://pokeapi.co/api/v2/version-group/14/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "x-y", "url": "https://pokeapi.co/api/v2/version-group/15/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "omega-ruby-alpha-sapphire", "url": "https://pokeapi.co/api/v2/version-group/16/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "sun-moon", "url": "https://pokeapi.co/api/v2/version-group/17/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "ultra-sun-ultra-moon", "url": "https://pokeapi.co/api/v2/version-group/18/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "lets-go-pikachu-lets-go-eevee", "url": "https://pokeapi.co/api/v2/version-group/19/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "sword-shield", "url": "https://pokeapi.co/api/v2/version-group/20/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "scarlet-violet", "url": "https://pokeapi.co/api/v2/version-group/25/" } } ] } ], "name": "ditto", "order": 214, "species": { "name": "ditto", "url": "https://pokeapi.co/api/v2/pokemon-species/132/" }, "sprites": { "back_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/back/132.png", "back_female": null, "back_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/back/shiny/132.png", "back_shiny_female": null, "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/132.png", "front_female": null, "front_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/shiny/132.png", "front_shiny_female": null, "other": { "dream_world": { "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/other/dream-world/132.svg", "front_female": null }, "home": { "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/other/home/132.png", "front_female": null, "front_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/other/home/shiny/132.png", "front_shiny_female": null }, "official-artwork": { "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/other/official-artwork/132.png", "front_shiny": 
"https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/other/official-artwork/shiny/132.png" } }, "versions": { "generation-i": { "red-blue": { "back_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-i/red-blue/back/132.png", "back_gray": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-i/red-blue/back/gray/132.png", "back_transparent": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-i/red-blue/transparent/back/132.png", "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-i/red-blue/132.png", "front_gray": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-i/red-blue/gray/132.png", "front_transparent": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-i/red-blue/transparent/132.png" }, "yellow": { "back_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-i/yellow/back/132.png", "back_gray": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-i/yellow/back/gray/132.png", "back_transparent": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-i/yellow/transparent/back/132.png", "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-i/yellow/132.png", "front_gray": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-i/yellow/gray/132.png", "front_transparent": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-i/yellow/transparent/132.png" } }, "generation-ii": { "crystal": { "back_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/crystal/back/132.png", "back_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/crystal/back/shiny/132.png", "back_shiny_transparent": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/crystal/transparent/back/shiny/132.png", "back_transparent": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/crystal/transparent/back/132.png", "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/crystal/132.png", "front_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/crystal/shiny/132.png", "front_shiny_transparent": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/crystal/transparent/shiny/132.png", "front_transparent": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/crystal/transparent/132.png" }, "gold": { "back_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/gold/back/132.png", "back_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/gold/back/shiny/132.png", "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/gold/132.png", "front_shiny": 
"https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/gold/shiny/132.png", "front_transparent": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/gold/transparent/132.png" }, "silver": { "back_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/silver/back/132.png", "back_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/silver/back/shiny/132.png", "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/silver/132.png", "front_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/silver/shiny/132.png", "front_transparent": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/silver/transparent/132.png" } }, "generation-iii": { "emerald": { "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iii/emerald/132.png", "front_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iii/emerald/shiny/132.png" }, "firered-leafgreen": { "back_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iii/firered-leafgreen/back/132.png", "back_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iii/firered-leafgreen/back/shiny/132.png", "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iii/firered-leafgreen/132.png", "front_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iii/firered-leafgreen/shiny/132.png" }, "ruby-sapphire": { "back_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iii/ruby-sapphire/back/132.png", "back_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iii/ruby-sapphire/back/shiny/132.png", "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iii/ruby-sapphire/132.png", "front_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iii/ruby-sapphire/shiny/132.png" } }, "generation-iv": { "diamond-pearl": { "back_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iv/diamond-pearl/back/132.png", "back_female": null, "back_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iv/diamond-pearl/back/shiny/132.png", "back_shiny_female": null, "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iv/diamond-pearl/132.png", "front_female": null, "front_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iv/diamond-pearl/shiny/132.png", "front_shiny_female": null }, "heartgold-soulsilver": { "back_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iv/heartgold-soulsilver/back/132.png", "back_female": null, "back_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iv/heartgold-soulsilver/back/shiny/132.png", 
"back_shiny_female": null, "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iv/heartgold-soulsilver/132.png", "front_female": null, "front_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iv/heartgold-soulsilver/shiny/132.png", "front_shiny_female": null }, "platinum": { "back_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iv/platinum/back/132.png", "back_female": null, "back_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iv/platinum/back/shiny/132.png", "back_shiny_female": null, "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iv/platinum/132.png", "front_female": null, "front_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iv/platinum/shiny/132.png", "front_shiny_female": null } }, "generation-v": { "black-white": { "animated": { "back_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-v/black-white/animated/back/132.gif", "back_female": null, "back_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-v/black-white/animated/back/shiny/132.gif", "back_shiny_female": null, "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-v/black-white/animated/132.gif", "front_female": null, "front_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-v/black-white/animated/shiny/132.gif", "front_shiny_female": null }, "back_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-v/black-white/back/132.png", "back_female": null, "back_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-v/black-white/back/shiny/132.png", "back_shiny_female": null, "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-v/black-white/132.png", "front_female": null, "front_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-v/black-white/shiny/132.png", "front_shiny_female": null } }, "generation-vi": { "omegaruby-alphasapphire": { "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-vi/omegaruby-alphasapphire/132.png", "front_female": null, "front_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-vi/omegaruby-alphasapphire/shiny/132.png", "front_shiny_female": null }, "x-y": { "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-vi/x-y/132.png", "front_female": null, "front_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-vi/x-y/shiny/132.png", "front_shiny_female": null } }, "generation-vii": { "icons": { "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-vii/icons/132.png", "front_female": null }, "ultra-sun-ultra-moon": { "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-vii/ultra-sun-ultra-moon/132.png", "front_female": null, "front_shiny": 
"https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-vii/ultra-sun-ultra-moon/shiny/132.png", "front_shiny_female": null } }, "generation-viii": { "icons": { "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-viii/icons/132.png", "front_female": null } } } }, "stats": [ { "base_stat": 48, "effort": 1, "stat": { "name": "hp", "url": "https://pokeapi.co/api/v2/stat/1/" } }, { "base_stat": 48, "effort": 0, "stat": { "name": "attack", "url": "https://pokeapi.co/api/v2/stat/2/" } }, { "base_stat": 48, "effort": 0, "stat": { "name": "defense", "url": "https://pokeapi.co/api/v2/stat/3/" } }, { "base_stat": 48, "effort": 0, "stat": { "name": "special-attack", "url": "https://pokeapi.co/api/v2/stat/4/" } }, { "base_stat": 48, "effort": 0, "stat": { "name": "special-defense", "url": "https://pokeapi.co/api/v2/stat/5/" } }, { "base_stat": 48, "effort": 0, "stat": { "name": "speed", "url": "https://pokeapi.co/api/v2/stat/6/" } } ], "types": [ { "slot": 1, "type": { "name": "normal", "url": "https://pokeapi.co/api/v2/type/1/" } } ], "weight": 40 }, "emitted_at": 1673989852906 }} \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-timeplus/main.py b/airbyte-integrations/connectors/destination-timeplus/main.py deleted file mode 100755 index a6f1b6b49d3c..000000000000 --- a/airbyte-integrations/connectors/destination-timeplus/main.py +++ /dev/null @@ -1,11 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import sys - -from destination_timeplus import DestinationTimeplus - -if __name__ == "__main__": - DestinationTimeplus().run(sys.argv[1:]) diff --git a/airbyte-integrations/connectors/destination-timeplus/requirements.txt b/airbyte-integrations/connectors/destination-timeplus/requirements.txt deleted file mode 100755 index d6e1198b1ab1..000000000000 --- a/airbyte-integrations/connectors/destination-timeplus/requirements.txt +++ /dev/null @@ -1 +0,0 @@ --e . diff --git a/airbyte-integrations/connectors/destination-timeplus/setup.py b/airbyte-integrations/connectors/destination-timeplus/setup.py deleted file mode 100755 index c082df533d8c..000000000000 --- a/airbyte-integrations/connectors/destination-timeplus/setup.py +++ /dev/null @@ -1,26 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk", - "timeplus~=1.2.1", -] - -TEST_REQUIREMENTS = ["pytest~=6.2"] - -setup( - name="destination_timeplus", - description="Destination implementation for Timeplus.", - author="Airbyte", - author_email="jove@timeplus.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json"]}, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/destination-timeplus/unit_tests/unit_test.py b/airbyte-integrations/connectors/destination-timeplus/unit_tests/unit_test.py deleted file mode 100755 index 0b6359090af8..000000000000 --- a/airbyte-integrations/connectors/destination-timeplus/unit_tests/unit_test.py +++ /dev/null @@ -1,17 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - -from destination_timeplus import DestinationTimeplus - - -def test_type_mapping(): - expected = { - "float": {"type": "number"}, - "bool": {"type": "boolean"}, - "string": {"type": "string"}, - "integer": {"type": "integer"}, - "array(integer)": {"type": "array", "items": {"type": "integer"}}, - } - for k, v in expected.items(): - assert k == DestinationTimeplus.type_mapping(v) diff --git a/airbyte-integrations/connectors/destination-vectara/.dockerignore b/airbyte-integrations/connectors/destination-vectara/.dockerignore deleted file mode 100644 index f784000e19e2..000000000000 --- a/airbyte-integrations/connectors/destination-vectara/.dockerignore +++ /dev/null @@ -1,5 +0,0 @@ -* -!Dockerfile -!main.py -!destination_vectara -!setup.py diff --git a/airbyte-integrations/connectors/destination-vectara/Dockerfile b/airbyte-integrations/connectors/destination-vectara/Dockerfile deleted file mode 100644 index 9afa4fa81a36..000000000000 --- a/airbyte-integrations/connectors/destination-vectara/Dockerfile +++ /dev/null @@ -1,38 +0,0 @@ -FROM python:3.9.11-alpine3.15 as base - -# build and load all requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -# upgrade pip to the latest version -RUN apk --no-cache upgrade \ - && pip install --upgrade pip \ - && apk --no-cache add tzdata build-base - - -COPY setup.py ./ -# install necessary packages to a temporary folder -RUN pip install --prefix=/install . - -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# bash is installed for more convenient debugging. -RUN apk --no-cache add bash - -# copy payload code only -COPY main.py ./ -COPY destination_vectara ./destination_vectara - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=0.2.0 -LABEL io.airbyte.name=airbyte/destination-vectara diff --git a/airbyte-integrations/connectors/destination-vectara/README.md b/airbyte-integrations/connectors/destination-vectara/README.md deleted file mode 100644 index 2c68229551bc..000000000000 --- a/airbyte-integrations/connectors/destination-vectara/README.md +++ /dev/null @@ -1,123 +0,0 @@ -# Vectara Destination - -This is the repository for the Vectara destination connector, written in Python. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/destinations/vectara). - -## Local development - -### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** - -#### Minimum Python version required `= 3.7.0` - -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` - -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt -``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. - -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. 
`requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. - -#### Building via Gradle -From the Airbyte repository root, run: -``` -./gradlew :airbyte-integrations:connectors:destination-vectara:build -``` - -#### Create credentials -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/destinations/vectara) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `destination_vectara/spec.json` file. -Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. - -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `destination vectara test creds` -and place them into `secrets/config.json`. - -### Locally running the connector -``` -python main.py spec -python main.py check --config secrets/config.json -cat messages.jsonl | python main.py write --config secrets/config.json --catalog integration_tests/configured_catalog.json -``` - -### Locally running the connector docker image - -#### Build -First, make sure you build the latest Docker image: -``` -docker build . -t airbyte/destination-vectara:dev -``` - -You can also build the connector image via Gradle: -``` -./gradlew :airbyte-integrations:connectors:destination-vectara:airbyteDocker -``` -When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in -the Dockerfile. - -#### Run -Then run any of the connector commands as follows: -``` -docker run --rm airbyte/destination-vectara:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-vectara:dev check --config /secrets/config.json -# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages -cat messages.jsonl | docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-vectara:dev write --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` -## Testing -Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. -First install test dependencies into your virtual environment: -``` -pip install .[tests] -``` -### Unit Tests -To run unit tests locally, from the connector directory run: -``` -python -m pytest unit_tests -``` - -### Integration Tests -There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all destination connectors) and custom integration tests (which are specific to this connector). -#### Custom Integration Tests -Place custom tests inside the `integration_tests/` folder, then, from the connector root, run -``` -python -m pytest integration_tests -``` -#### Acceptance Tests -Coming soon. - -### Using Gradle to run tests -All commands should be run from the Airbyte project root.
-To run unit tests: -``` -./gradlew :airbyte-integrations:connectors:destination-vectara:unitTest -``` -To run acceptance and custom integration tests: -``` -./gradlew :airbyte-integrations:connectors:destination-vectara:integrationTest -``` - -## Dependency Management -All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. -We split dependencies between two groups, dependencies that are: -* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. -* required for the testing need to go to `TEST_REQUIREMENTS` list - -### Publishing a new version of the connector -You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? -1. Make sure your changes are passing unit and integration tests. -1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). -1. Create a Pull Request. -1. Pat yourself on the back for being an awesome contributor. -1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/destination-vectara/destination_vectara/__init__.py b/airbyte-integrations/connectors/destination-vectara/destination_vectara/__init__.py deleted file mode 100644 index 1bc53911e4ef..000000000000 --- a/airbyte-integrations/connectors/destination-vectara/destination_vectara/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from .destination import DestinationVectara - -__all__ = ["DestinationVectara"] diff --git a/airbyte-integrations/connectors/destination-vectara/destination_vectara/client.py b/airbyte-integrations/connectors/destination-vectara/destination_vectara/client.py deleted file mode 100644 index 755d30014780..000000000000 --- a/airbyte-integrations/connectors/destination-vectara/destination_vectara/client.py +++ /dev/null @@ -1,199 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import datetime -import json -import traceback -from concurrent.futures import ThreadPoolExecutor -from typing import Any, Mapping - -import backoff -import requests -from destination_vectara.config import VectaraConfig - -METADATA_STREAM_FIELD = "_ab_stream" - - -def user_error(e: Exception) -> bool: - """ - Return True if this exception is caused by user error, False otherwise. - """ - if not isinstance(e, requests.exceptions.RequestException): - return False - return bool(e.response and 400 <= e.response.status_code < 500) - - -class VectaraClient: - - BASE_URL = "https://api.vectara.io/v1" - - def __init__(self, config: VectaraConfig): - if isinstance(config, dict): - config = VectaraConfig.parse_obj(config) - self.customer_id = config.customer_id - self.corpus_name = config.corpus_name - self.client_id = config.oauth2.client_id - self.client_secret = config.oauth2.client_secret - self.parallelize = config.parallelize - self.check() - - def check(self): - """ - Check for an existing corpus in Vectara. - If more than one exists - then return a message - If exactly one exists with this name - ensure that the corpus has the correct metadata fields, and use it. - If not, create it. - """ - try: - jwt_token = self._get_jwt_token() - if not jwt_token: - return "Unable to get JWT Token. Confirm your Client ID and Client Secret." 
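- # Resolve the corpus by name: reuse a unique existing match, fail if the name is ambiguous, otherwise create it with the _ab_stream filter attribute.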
- - list_corpora_response = self._request(endpoint="list-corpora", data={"numResults": 100, "filter": self.corpus_name}) - possible_corpora_ids_names_map = { - corpus.get("id"): corpus.get("name") - for corpus in list_corpora_response.get("corpus") - if corpus.get("name") == self.corpus_name - } - if len(possible_corpora_ids_names_map) > 1: - return f"Multiple Corpora exist with name {self.corpus_name}" - if len(possible_corpora_ids_names_map) == 1: - self.corpus_id = list(possible_corpora_ids_names_map.keys())[0] - else: - data = { - "corpus": { - "name": self.corpus_name, - "filterAttributes": [ - { - "name": METADATA_STREAM_FIELD, - "indexed": True, - "type": "FILTER_ATTRIBUTE_TYPE__TEXT", - "level": "FILTER_ATTRIBUTE_LEVEL__DOCUMENT", - }, - ], - } - } - - create_corpus_response = self._request(endpoint="create-corpus", data=data) - self.corpus_id = create_corpus_response.get("corpusId") - - except Exception as e: - return str(e) + "\n" + "".join(traceback.TracebackException.from_exception(e).format()) - - def _get_jwt_token(self): - """Connect to the server and get a JWT token.""" - token_endpoint = f"https://vectara-prod-{self.customer_id}.auth.us-west-2.amazoncognito.com/oauth2/token" - headers = { - "Content-Type": "application/x-www-form-urlencoded", - } - data = {"grant_type": "client_credentials", "client_id": self.client_id, "client_secret": self.client_secret} - - request_time = datetime.datetime.now().timestamp() - response = requests.request(method="POST", url=token_endpoint, headers=headers, data=data) - response_json = response.json() - - self.jwt_token = response_json.get("access_token") - self.jwt_token_expires_ts = request_time + response_json.get("expires_in") - return self.jwt_token - - @backoff.on_exception(backoff.expo, requests.exceptions.RequestException, max_tries=5, giveup=user_error) - def _request(self, endpoint: str, http_method: str = "POST", params: Mapping[str, Any] = None, data: Mapping[str, Any] = None): - - url = f"{self.BASE_URL}/{endpoint}" - - current_ts = datetime.datetime.now().timestamp() - if self.jwt_token_expires_ts - current_ts <= 60: - self._get_jwt_token() - - headers = { - "Content-Type": "application/json", - "Accept": "application/json", - "Authorization": f"Bearer {self.jwt_token}", - "customer-id": self.customer_id, - "X-source": "airbyte", - } - - response = requests.request(method=http_method, url=url, headers=headers, params=params, data=json.dumps(data)) - response.raise_for_status() - return response.json() - - def delete_doc_by_metadata(self, metadata_field_name, metadata_field_values): - document_ids = [] - for value in metadata_field_values: - data = { - "query": [ - { - "query": "", - "numResults": 100, - "corpusKey": [ - { - "customerId": self.customer_id, - "corpusId": self.corpus_id, - "metadataFilter": f"doc.{metadata_field_name} = '{value}'", - } - ], - } - ] - } - query_documents_response = self._request(endpoint="query", data=data) - document_ids.extend([document.get("id") for document in query_documents_response.get("responseSet")[0].get("document")]) - self.delete_docs_by_id(document_ids=document_ids) - - def delete_docs_by_id(self, document_ids): - for document_id in document_ids: - self._request( - endpoint="delete-doc", data={"customerId": self.customer_id, "corpusId": self.corpus_id, "documentId": document_id} - ) - - def index_document(self, document): - document_section, document_metadata, document_title, document_id = document - if len(document_section) == 0: - return None # Document is empty, so skip it - 
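- # _normalize() JSON-encodes non-scalar metadata values so they can be embedded in metadataJson below.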
document_metadata = self._normalize(document_metadata) - data = { - "customerId": self.customer_id, - "corpusId": self.corpus_id, - "document": { - "documentId": document_id, - "metadataJson": json.dumps(document_metadata), - "title": document_title, - "section": [ - {"text": f"{section_key}: {section_value}"} - for section_key, section_value in document_section.items() - if section_key != METADATA_STREAM_FIELD - ], - }, - } - index_document_response = self._request(endpoint="index", data=data) - return index_document_response - - def index_documents(self, documents): - if self.parallelize: - with ThreadPoolExecutor() as executor: - futures = [executor.submit(self.index_document, doc) for doc in documents] - for future in futures: - try: - response = future.result() - if response is None: - continue - assert ( - response.get("status").get("code") == "OK" - or response.get("status").get("statusDetail") == "Document should have at least one part." - ) - except AssertionError as e: - # Handle the assertion error - pass - else: - for doc in documents: - self.index_document(doc) - - def _normalize(self, metadata: dict) -> dict: - result = {} - for key, value in metadata.items(): - if isinstance(value, (str, int, float, bool)): - result[key] = value - else: - # JSON encode all other types - result[key] = json.dumps(value) - return result diff --git a/airbyte-integrations/connectors/destination-vectara/destination_vectara/config.py b/airbyte-integrations/connectors/destination-vectara/destination_vectara/config.py deleted file mode 100644 index 86ca2dba16f5..000000000000 --- a/airbyte-integrations/connectors/destination-vectara/destination_vectara/config.py +++ /dev/null @@ -1,75 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from typing import List, Optional - -from airbyte_cdk.utils.spec_schema_transformations import resolve_refs -from pydantic import BaseModel, Field - - -class OAuth2(BaseModel): - client_id: str = Field(..., title="OAuth Client ID", description="OAuth2.0 client id", order=0) - client_secret: str = Field(..., title="OAuth Client Secret", description="OAuth2.0 client secret", airbyte_secret=True, order=1) - - class Config: - title = "OAuth2.0 Credentials" - schema_extra = { - "description": "OAuth2.0 credentials used to authenticate admin actions (creating/deleting corpora)", - "group": "auth", - } - - -class VectaraConfig(BaseModel): - oauth2: OAuth2 - customer_id: str = Field( - ..., title="Customer ID", description="Your customer id as it is in the authenticaion url", order=2, group="account" - ) - corpus_name: str = Field(..., title="Corpus Name", description="The Name of Corpus to load data into", order=3, group="account") - - parallelize: Optional[bool] = Field( - default=False, - title="Parallelize", - description="Parallelize indexing into Vectara with multiple threads", - always_show=True, - group="account", - ) - - text_fields: Optional[List[str]] = Field( - default=[], - title="Text fields to index with Vectara", - description="List of fields in the record that should be in the section of the document. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. 
`users.*.name` will access all `name` fields in all entries of the `users` array.", - always_show=True, - examples=["text", "user.name", "users.*.name"], - ) - title_field: Optional[str] = Field( - default="", - title="Text field to use as document title with Vectara", - description="A field that will be used to populate the `title` of each document. The field is applied to all streams in the same way and a non-existing field is ignored; if no title field is set, the document title defaults to `Untitled`. You can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object.", - always_show=True, - examples=["document_key"], - ) - metadata_fields: Optional[List[str]] = Field( - default=[], - title="Fields to store as metadata", - description="List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying metadata fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `name` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.", - always_show=True, - examples=["age", "user"], - ) - - class Config: - title = "Vectara Config" - schema_extra = { - "description": "Configuration to connect to the Vectara instance", - "groups": [ - {"id": "account", "title": "Account"}, - {"id": "auth", "title": "Authentication"}, - ], - } - - @classmethod - def schema(cls): - """We override the schema classmethod to enable some post-processing.""" - schema = super().schema() - schema = resolve_refs(schema) - return schema diff --git a/airbyte-integrations/connectors/destination-vectara/destination_vectara/destination.py b/airbyte-integrations/connectors/destination-vectara/destination_vectara/destination.py deleted file mode 100644 index 6a580655ff91..000000000000 --- a/airbyte-integrations/connectors/destination-vectara/destination_vectara/destination.py +++ /dev/null @@ -1,95 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from typing import Any, Iterable, Mapping - -from airbyte_cdk import AirbyteLogger -from airbyte_cdk.destinations import Destination -from airbyte_cdk.models import ( - AirbyteConnectionStatus, - AirbyteMessage, - ConfiguredAirbyteCatalog, - ConnectorSpecification, - DestinationSyncMode, - Status, - Type, -) -from destination_vectara.client import VectaraClient -from destination_vectara.config import VectaraConfig -from destination_vectara.writer import VectaraWriter - - -class DestinationVectara(Destination): - def write( - self, config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog, input_messages: Iterable[AirbyteMessage] - ) -> Iterable[AirbyteMessage]: - - """ - Reads the input stream of messages, config, and catalog to write data to the destination. - - This method returns an iterable (typically a generator of AirbyteMessages via yield) containing state messages received - in the input message stream.
Outputting a state message means that every AirbyteRecordMessage which came before it has been - successfully persisted to the destination. This is used to ensure fault tolerance in the case that a sync fails before fully completing, - then the source is given the last state message output from this method as the starting point of the next sync. - - :param config: dict of JSON configuration matching the configuration declared in spec.json - :param configured_catalog: The Configured Catalog describing the schema of the data being received and how it should be persisted in the - destination - :param input_messages: The stream of input messages received from the source - :return: Iterable of AirbyteStateMessages wrapped in AirbyteMessage structs - """ - - config_model = VectaraConfig.parse_obj(config) - writer = VectaraWriter( - client=VectaraClient(config_model), - text_fields=config_model.text_fields, - title_field=config_model.title_field, - metadata_fields=config_model.metadata_fields, - catalog=configured_catalog, - ) - - writer.delete_streams_to_overwrite(catalog=configured_catalog) - - for message in input_messages: - if message.type == Type.STATE: - # Emitting a state message indicates that all records which came before it have been written to the destination. So we flush - # the queue to ensure writes happen, then output the state message to indicate it's safe to checkpoint state - writer.flush() - yield message - elif message.type == Type.RECORD: - record = message.record - writer.queue_write_operation(record) - else: - # ignore other message types for now - continue - - # Make sure to flush any records still in the queue - writer.flush() - - def check(self, logger: AirbyteLogger, config: VectaraConfig) -> AirbyteConnectionStatus: - """ - Tests if the input configuration can be used to successfully connect to the destination with the needed permissions - e.g: if a provided API token or password can be used to connect and write to the destination. - - :param logger: Logging object to display debug/info/error to the logs - (logs will not be accessible via airbyte UI if they are not passed to this logger) - :param config: Json object containing the configuration of this destination, content of this json is as specified in - the properties of the spec.json file - - :return: AirbyteConnectionStatus indicating a Success or Failure - """ - client = VectaraClient(config=config) - client_error = client.check() - if client_error: - return AirbyteConnectionStatus(status=Status.FAILED, message="\n".join([client_error])) - else: - return AirbyteConnectionStatus(status=Status.SUCCEEDED) - - def spec(self, *args: Any, **kwargs: Any) -> ConnectorSpecification: - return ConnectorSpecification( - documentationUrl="https://docs.airbyte.com/integrations/destinations/vectara", - supportsIncremental=True, - supported_destination_sync_modes=[DestinationSyncMode.overwrite, DestinationSyncMode.append], - connectionSpecification=VectaraConfig.schema(), - ) diff --git a/airbyte-integrations/connectors/destination-vectara/destination_vectara/writer.py b/airbyte-integrations/connectors/destination-vectara/destination_vectara/writer.py deleted file mode 100644 index 0794b0dc9410..000000000000 --- a/airbyte-integrations/connectors/destination-vectara/destination_vectara/writer.py +++ /dev/null @@ -1,128 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
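
The docstring above spells out Airbyte's checkpointing contract: a state message may only be emitted once every record that preceded it has been persisted. The loop below is a minimal sketch of that flush-then-yield pattern; `writer` is a stand-in for any object with `queue_write_operation`/`flush` methods, mirroring the `VectaraWriter` used here.

```python
from typing import Iterable

from airbyte_cdk.models import AirbyteMessage, Type


def checkpointed_write(writer, input_messages: Iterable[AirbyteMessage]) -> Iterable[AirbyteMessage]:
    for message in input_messages:
        if message.type == Type.STATE:
            writer.flush()  # persist every record buffered so far...
            yield message   # ...then emit the state message as a checkpoint
        elif message.type == Type.RECORD:
            writer.queue_write_operation(message.record)
    writer.flush()  # drain whatever is still buffered at end of stream
```
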
-# - -import uuid -from typing import Any, Dict, List, Mapping, Optional - -import dpath.util -from airbyte_cdk.models import AirbyteRecordMessage, ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, DestinationSyncMode -from airbyte_cdk.models.airbyte_protocol import DestinationSyncMode -from airbyte_cdk.utils.traced_exception import AirbyteTracedException, FailureType -from destination_vectara.client import VectaraClient - -METADATA_STREAM_FIELD = "_ab_stream" - - -class VectaraWriter: - - write_buffer: List[Mapping[str, Any]] = [] - flush_interval = 1000 - - def __init__( - self, - client: VectaraClient, - text_fields: Optional[List[str]], - title_field: Optional[str], - metadata_fields: Optional[List[str]], - catalog: ConfiguredAirbyteCatalog, - ): - self.client = client - self.text_fields = text_fields - self.title_field = title_field - self.metadata_fields = metadata_fields - self.streams = {f"{stream.stream.namespace}_{stream.stream.name}": stream for stream in catalog.streams} - self.ids_to_delete: List[str] = [] - - def delete_streams_to_overwrite(self, catalog: ConfiguredAirbyteCatalog) -> None: - streams_to_overwrite = [ - f"{stream.stream.namespace}_{stream.stream.name}" - for stream in catalog.streams - if stream.destination_sync_mode == DestinationSyncMode.overwrite - ] - if len(streams_to_overwrite): - self.client.delete_doc_by_metadata(metadata_field_name=METADATA_STREAM_FIELD, metadata_field_values=streams_to_overwrite) - - def _delete_documents_to_dedupe(self): - if len(self.ids_to_delete) > 0: - self.client.delete_docs_by_id(document_ids=self.ids_to_delete) - - def queue_write_operation(self, record: AirbyteRecordMessage) -> None: - """Adds messages to the write queue and flushes if the buffer is full""" - - stream_identifier = self._get_stream_id(record=record) - document_section = self._get_document_section(record=record) - document_metadata = self._get_document_metadata(record=record) - document_title = self._get_document_title(record=record) - primary_key = self._get_record_primary_key(record=record) - - if primary_key: - document_id = f"Stream_{stream_identifier}_Key_{primary_key}" - if self.streams[stream_identifier].destination_sync_mode == DestinationSyncMode.append_dedup: - self.ids_to_delete.append(document_id) - else: - document_id = str(uuid.uuid4().int) - - self.write_buffer.append((document_section, document_metadata, document_title, document_id)) - if len(self.write_buffer) == self.flush_interval: - self.flush() - - def flush(self) -> None: - """Flush all documents in Queue to Vectara""" - self._delete_documents_to_dedupe() - self.client.index_documents(self.write_buffer) - self.write_buffer.clear() - self.ids_to_delete.clear() - - def _get_document_section(self, record: AirbyteRecordMessage): - relevant_fields = self._extract_relevant_fields(record, self.text_fields) - if len(relevant_fields) == 0: - text_fields = ", ".join(self.text_fields) if self.text_fields else "all fields" - raise AirbyteTracedException( - internal_message="No text fields found in record", - message=f"Record {str(record.data)[:250]}... does not contain any of the configured text fields: {text_fields}. 
Please check your processing configuration; at least one text field must be set in each record.", - failure_type=FailureType.config_error, - ) - document_section = relevant_fields - return document_section - - def _extract_relevant_fields(self, record: AirbyteRecordMessage, fields: Optional[List[str]]) -> Dict[str, Any]: - relevant_fields = {} - if fields and len(fields) > 0: - for field in fields: - values = dpath.util.values(record.data, field, separator=".") - if values and len(values) > 0: - relevant_fields[field] = values if len(values) > 1 else values[0] - else: - relevant_fields = record.data - return relevant_fields - - def _get_document_metadata(self, record: AirbyteRecordMessage) -> Dict[str, Any]: - document_metadata = self._extract_relevant_fields(record, self.metadata_fields) - document_metadata[METADATA_STREAM_FIELD] = self._get_stream_id(record) - return document_metadata - - def _get_document_title(self, record: AirbyteRecordMessage) -> str: - title = "Untitled" - if self.title_field: - title = dpath.util.get(record.data, self.title_field) - return title - - def _get_stream_id(self, record: AirbyteRecordMessage) -> str: - return f"{record.namespace}_{record.stream}" - - def _get_record_primary_key(self, record: AirbyteRecordMessage) -> Optional[str]: - stream_identifier = self._get_stream_id(record) - current_stream: ConfiguredAirbyteStream = self.streams[stream_identifier] - - if not current_stream.primary_key: - return None - - primary_key = [] - for key in current_stream.primary_key: - try: - primary_key.append(str(dpath.util.get(record.data, key))) - except KeyError: - primary_key.append("__not_found__") - stringified_primary_key = "_".join(primary_key) - return f"{stream_identifier}_{stringified_primary_key}" diff --git a/airbyte-integrations/connectors/destination-vectara/integration_tests/integration_test.py b/airbyte-integrations/connectors/destination-vectara/integration_tests/integration_test.py deleted file mode 100644 index 052006303d85..000000000000 --- a/airbyte-integrations/connectors/destination-vectara/integration_tests/integration_test.py +++ /dev/null @@ -1,127 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
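
`_extract_relevant_fields` above is what gives the `text_fields`/`metadata_fields` descriptions their dot-notation and wildcard semantics, by delegating to `dpath` with `.` as the separator. A small illustration with hypothetical record data:

```python
import dpath.util

record = {"user": {"name": "Ada"}, "users": [{"name": "Grace"}, {"name": "Edsger"}]}

# Dot notation reaches into nested objects.
print(dpath.util.values(record, "user.name", separator="."))     # ['Ada']

# A wildcard fans out over every entry of the `users` array; matches come back
# as a list, which _extract_relevant_fields keeps when there is more than one
# match and unwraps when there is exactly one.
print(dpath.util.values(record, "users.*.name", separator="."))  # ['Grace', 'Edsger']
```
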
-# - -import json -import logging -import unittest -from typing import Any, Dict - -from airbyte_cdk.models import ( - AirbyteMessage, - AirbyteRecordMessage, - AirbyteStateMessage, - AirbyteStream, - ConfiguredAirbyteCatalog, - ConfiguredAirbyteStream, - DestinationSyncMode, - Status, - SyncMode, - Type, -) -from destination_vectara.client import VectaraClient -from destination_vectara.destination import DestinationVectara - - -class VectaraIntegrationTest(unittest.TestCase): - def _get_configured_catalog(self, destination_mode: DestinationSyncMode) -> ConfiguredAirbyteCatalog: - stream_schema = {"type": "object", "properties": {"str_col": {"type": "string"}, "int_col": {"type": "integer"}}} - - overwrite_stream = ConfiguredAirbyteStream( - stream=AirbyteStream( - name="mystream", json_schema=stream_schema, supported_sync_modes=[SyncMode.incremental, SyncMode.full_refresh] - ), - primary_key=[["int_col"]], - sync_mode=SyncMode.incremental, - destination_sync_mode=destination_mode, - ) - - return ConfiguredAirbyteCatalog(streams=[overwrite_stream]) - - def _state(self, data: Dict[str, Any]) -> AirbyteMessage: - return AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(data=data)) - - def _record(self, stream: str, str_value: str, int_value: int) -> AirbyteMessage: - return AirbyteMessage( - type=Type.RECORD, record=AirbyteRecordMessage(stream=stream, data={"str_col": str_value, "int_col": int_value}, emitted_at=0) - ) - - def _clean(self): - self._client.delete_doc_by_metadata(metadata_field_name="_ab_stream", metadata_field_values=["None_mystream"]) - - def setUp(self): - with open("secrets/config.json", "r") as f: - self.config = json.loads(f.read()) - self._client = VectaraClient(self.config) - self._clean() - - def tearDown(self): - self._clean() - - def test_check_valid_config(self): - outcome = DestinationVectara().check(logging.getLogger("airbyte"), self.config) - assert outcome.status == Status.SUCCEEDED - - def test_check_invalid_config(self): - outcome = DestinationVectara().check( - logging.getLogger("airbyte"), - { - "oauth2": {"client_id": "myclientid", "client_secret": "myclientsecret"}, - "corpus_name": "teststore", - "customer_id": "123456", - "text_fields": [], - "metadata_fields": [], - "title_field": "", - }, - ) - assert outcome.status == Status.FAILED - - def _query_index(self, query="Everything", num_results=100): - return self._client._request( - "query", - data={ - "query": [ - { - "query": query, - "numResults": num_results, - "corpusKey": [ - { - "customerId": self._client.customer_id, - "corpusId": self._client.corpus_id, - } - ], - } - ] - }, - )["responseSet"][0] - - def test_write(self): - # validate corpus starts empty - initial_result = self._query_index()["document"] - assert len(initial_result) == 0 - - catalog = self._get_configured_catalog(DestinationSyncMode.overwrite) - first_state_message = self._state({"state": "1"}) - first_record_chunk = [self._record("mystream", f"Dogs are number {i}", i) for i in range(5)] - - # initial sync - destination = DestinationVectara() - list(destination.write(self.config, catalog, [*first_record_chunk, first_state_message])) - assert len(self._query_index()["document"]) == 5 - - # incrementally update a doc - incremental_catalog = self._get_configured_catalog(DestinationSyncMode.append_dedup) - list(destination.write(self.config, incremental_catalog, [self._record("mystream", "Cats are nice", 2), first_state_message])) - assert len(self._query_index()["document"]) == 5 - - # use semantic search - result =
self._query_index("Feline animals", 1) - assert result["document"] == [ - { - "id": "Stream_None_mystream_Key_None_mystream_2", - "metadata": [ - {"name": "int_col", "value": "2"}, - {"name": "_ab_stream", "value": "None_mystream"}, - {"name": "title", "value": "Cats are nice"}, - ], - } - ] diff --git a/airbyte-integrations/connectors/destination-vectara/main.py b/airbyte-integrations/connectors/destination-vectara/main.py deleted file mode 100644 index 289b411fb318..000000000000 --- a/airbyte-integrations/connectors/destination-vectara/main.py +++ /dev/null @@ -1,11 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import sys - -from destination_vectara import DestinationVectara - -if __name__ == "__main__": - DestinationVectara().run(sys.argv[1:]) diff --git a/airbyte-integrations/connectors/destination-vectara/requirements.txt b/airbyte-integrations/connectors/destination-vectara/requirements.txt deleted file mode 100644 index d6e1198b1ab1..000000000000 --- a/airbyte-integrations/connectors/destination-vectara/requirements.txt +++ /dev/null @@ -1 +0,0 @@ --e . diff --git a/airbyte-integrations/connectors/destination-vectara/setup.py b/airbyte-integrations/connectors/destination-vectara/setup.py deleted file mode 100644 index ab10a8c60fb9..000000000000 --- a/airbyte-integrations/connectors/destination-vectara/setup.py +++ /dev/null @@ -1,25 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk==0.57.8", -] - -TEST_REQUIREMENTS = ["pytest~=6.2"] - -setup( - name="destination_vectara", - description="Destination implementation for Vectara.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json"]}, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/destination-vectara/unit_tests/__init__.py b/airbyte-integrations/connectors/destination-vectara/unit_tests/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/airbyte-integrations/connectors/destination-xata/.dockerignore b/airbyte-integrations/connectors/destination-xata/.dockerignore deleted file mode 100644 index 40370594ddc6..000000000000 --- a/airbyte-integrations/connectors/destination-xata/.dockerignore +++ /dev/null @@ -1,5 +0,0 @@ -* -!Dockerfile -!main.py -!destination_xata -!setup.py diff --git a/airbyte-integrations/connectors/destination-xata/Dockerfile b/airbyte-integrations/connectors/destination-xata/Dockerfile deleted file mode 100644 index a2ac681f7b79..000000000000 --- a/airbyte-integrations/connectors/destination-xata/Dockerfile +++ /dev/null @@ -1,38 +0,0 @@ -FROM python:3.9.11-alpine3.15 as base - -# build and load all requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -# upgrade pip to the latest version -RUN apk --no-cache upgrade \ - && pip install --upgrade pip \ - && apk --no-cache add tzdata build-base - - -COPY setup.py ./ -# install necessary packages to a temporary folder -RUN pip install --prefix=/install . - -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# bash is installed for more convenient debugging. 
-RUN apk --no-cache add bash - -# copy payload code only -COPY main.py ./ -COPY destination_xata ./destination_xata - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=0.1.1 -LABEL io.airbyte.name=airbyte/destination-xata diff --git a/airbyte-integrations/connectors/destination-xata/README.md b/airbyte-integrations/connectors/destination-xata/README.md deleted file mode 100644 index e6153ac20ba1..000000000000 --- a/airbyte-integrations/connectors/destination-xata/README.md +++ /dev/null @@ -1,99 +0,0 @@ -# Xata Destination - -This is the repository for the Xata destination connector, written in Python. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/destinations/xata). - -## Local development - -### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** - -#### Minimum Python version required `= 3.7.0` - -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` - -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt -``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. - -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. - -#### Create credentials -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/destinations/xata) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `destination_xata/spec.json` file. -Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. - -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `destination xata test creds` -and place them into `secrets/config.json`. - -### Locally running the connector -``` -python main.py spec -python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json -``` - -### Locally running the connector docker image - - -#### Build -**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** -```bash -airbyte-ci connectors --name=destination-xata build -``` - -An image will be built with the tag `airbyte/destination-xata:dev`. - -**Via `docker build`:** -```bash -docker build -t airbyte/destination-xata:dev . 
-``` - -#### Run -Then run any of the connector commands as follows: -``` -docker run --rm airbyte/destination-xata:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-xata:dev check --config /secrets/config.json -# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages -cat messages.jsonl | docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-xata:dev write --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` - -## Testing -You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): -```bash -airbyte-ci connectors --name=destination-xata test -``` - -### Customizing Acceptance Tests -Customize the `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. -If your connector requires creating or destroying resources during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`. - -## Dependency Management -All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. -We split dependencies into two groups: -* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list. -* dependencies required for testing go in the `TEST_REQUIREMENTS` list. - -### Publishing a new version of the connector -You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? -1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-xata test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). -3. Make sure the `metadata.yaml` content is up to date. -4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/destinations/xata.md`). -5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). -6. Pat yourself on the back for being an awesome contributor. -7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. - diff --git a/airbyte-integrations/connectors/destination-xata/bootstrap.md b/airbyte-integrations/connectors/destination-xata/bootstrap.md deleted file mode 100644 index bac35e3ae53c..000000000000 --- a/airbyte-integrations/connectors/destination-xata/bootstrap.md +++ /dev/null @@ -1 +0,0 @@ -# Xata Destination Connector diff --git a/airbyte-integrations/connectors/destination-xata/destination_xata/__init__.py b/airbyte-integrations/connectors/destination-xata/destination_xata/__init__.py deleted file mode 100644 index d03079997c13..000000000000 --- a/airbyte-integrations/connectors/destination-xata/destination_xata/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-# - - -from .destination import DestinationXata - -__all__ = ["DestinationXata"] diff --git a/airbyte-integrations/connectors/destination-xata/destination_xata/destination.py b/airbyte-integrations/connectors/destination-xata/destination_xata/destination.py deleted file mode 100644 index a9698c49c446..000000000000 --- a/airbyte-integrations/connectors/destination-xata/destination_xata/destination.py +++ /dev/null @@ -1,79 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import logging -from typing import Any, Iterable, Mapping - -from airbyte_cdk import AirbyteLogger -from airbyte_cdk.destinations import Destination -from airbyte_cdk.models import AirbyteConnectionStatus, AirbyteMessage, ConfiguredAirbyteCatalog, Status, Type -from xata.client import XataClient -from xata.helpers import BulkProcessor - -__version__ = "0.0.1" - -logger = logging.getLogger("airbyte") - - -class DestinationXata(Destination): - def write( - self, config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog, input_messages: Iterable[AirbyteMessage] - ) -> Iterable[AirbyteMessage]: - """ - Reads the input stream of messages, config, and catalog to write data to the destination. - - This method returns an iterable (typically a generator of AirbyteMessages via yield) containing state messages received - in the input message stream. Outputting a state message means that every AirbyteRecordMessage which came before it has been - successfully persisted to the destination. This is used to ensure fault tolerance in the case that a sync fails before fully completing, - then the source is given the last state message output from this method as the starting point of the next sync. - - :param config: dict of JSON configuration matching the configuration declared in spec.json - :param configured_catalog: The Configured Catalog describing the schema of the data being received and how it should be persisted in the - destination - :param input_messages: The stream of input messages received from the source - :return: Iterable of AirbyteStateMessages wrapped in AirbyteMessage structs - """ - - xata = XataClient(api_key=config["api_key"], db_url=config["db_url"]) - xata.set_header("user-agent", f"airbyte/destination-xata:{__version__}") - - bp = BulkProcessor(xata) - count = 0 - for message in input_messages: - if message.type == Type.RECORD: - # Put record to processing queue - bp.put_record(message.record.stream, message.record.data) - count += 1 - if message.type == Type.STATE: - yield message - bp.flush_queue() - logger.info(bp.get_stats()) - if count != bp.get_stats()["total"] or bp.get_stats()["failed_batches"] != 0: - raise Exception( - "inconsistency found, expected %d records pushed, actual: %d with %d failures." - % (count, bp.get_stats()["total"], bp.get_stats()["failed_batches"]) - ) - - def check(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> AirbyteConnectionStatus: - """ - Tests if the input configuration can be used to successfully connect to the destination with the needed permissions - e.g: if a provided API token or password can be used to connect and write to the destination. 
- - :param logger: Logging object to display debug/info/error to the logs - (logs will not be accessible via airbyte UI if they are not passed to this logger) - :param config: Json object containing the configuration of this destination, content of this json is as specified in - the properties of the spec.json file - - :return: AirbyteConnectionStatus indicating a Success or Failure - """ - try: - xata = XataClient(api_key=config["api_key"], db_url=config["db_url"]) - xata.set_header("user-agent", f"airbyte/destination-xata:{__version__}") - - r = xata.users().getUser() - if r.status_code != 200: - raise Exception("Invalid connection parameters.") - return AirbyteConnectionStatus(status=Status.SUCCEEDED) - except Exception as e: - return AirbyteConnectionStatus(status=Status.FAILED, message=f"An exception occurred: {repr(e)}") diff --git a/airbyte-integrations/connectors/destination-xata/destination_xata/spec.json b/airbyte-integrations/connectors/destination-xata/destination_xata/spec.json deleted file mode 100644 index 6e73b6cec519..000000000000 --- a/airbyte-integrations/connectors/destination-xata/destination_xata/spec.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/destinations/xata", - "supported_destination_sync_modes": ["append"], - "supportsIncremental": false, - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Destination Xata", - "type": "object", - "required": ["api_key", "db_url"], - "additionalProperties": true, - "properties": { - "api_key": { - "title": "API Key", - "description": "API Key to connect.", - "type": "string", - "order": 0, - "airbyte_secret": true - }, - "db_url": { - "title": "Database URL", - "description": "URL pointing to your workspace.", - "type": "string", - "order": 1, - "example": "https://my-workspace-abc123.us-east-1.xata.sh/db/nyc-taxi-fares:main" - } - } - } -} diff --git a/airbyte-integrations/connectors/destination-xata/integration_tests/integration_test.py b/airbyte-integrations/connectors/destination-xata/integration_tests/integration_test.py deleted file mode 100644 index b98d151d31d3..000000000000 --- a/airbyte-integrations/connectors/destination-xata/integration_tests/integration_test.py +++ /dev/null @@ -1,120 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
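
The `check` implementation above reduces to a single authenticated call. A standalone sketch of the same credential test, assuming a `secrets/config.json` containing the `api_key` and `db_url` fields from the spec:

```python
import json

from xata.client import XataClient

with open("secrets/config.json") as f:
    config = json.load(f)

client = XataClient(api_key=config["api_key"], db_url=config["db_url"])

# getUser() is a cheap authenticated call; a 200 response proves the credentials work.
response = client.users().getUser()
print("credentials OK" if response.status_code == 200 else f"check failed: {response.status_code}")
```
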
-# - -import json -from typing import Any, Mapping -from unittest.mock import Mock - -import pytest -from airbyte_cdk.models import ( - AirbyteMessage, - AirbyteRecordMessage, - AirbyteStream, - ConfiguredAirbyteCatalog, - ConfiguredAirbyteStream, - DestinationSyncMode, - Status, - SyncMode, - Type, -) -from destination_xata import DestinationXata -from xata.client import XataClient - - -@pytest.fixture(name="config") -def config_fixture() -> Mapping[str, Any]: - with open("secrets/config.json", "r") as f: - return json.loads(f.read()) - - -@pytest.fixture(name="configured_catalog") -def configured_catalog_fixture() -> ConfiguredAirbyteCatalog: - stream_schema = {"type": "object", "properties": {"string_col": {"type": "string"}, "int_col": {"type": "integer"}}} - - append_stream = ConfiguredAirbyteStream( - stream=AirbyteStream(name="append_stream", json_schema=stream_schema, supported_sync_modes=[SyncMode.incremental]), - sync_mode=SyncMode.incremental, - destination_sync_mode=DestinationSyncMode.append, - ) - # TODO implement overwrite - """ - overwrite_stream = ConfiguredAirbyteStream( - stream=AirbyteStream(name="overwrite_stream", json_schema=stream_schema, supported_sync_modes=[SyncMode.incremental]), - sync_mode=SyncMode.incremental, - destination_sync_mode=DestinationSyncMode.overwrite, - ) - """ - return ConfiguredAirbyteCatalog(streams=[append_stream]) - - -def test_check_valid_config(config: Mapping): - outcome = DestinationXata().check(logger=Mock(), config=config) - assert outcome.status == Status.SUCCEEDED - - -def test_check_invalid_config(): - with open("integration_tests/invalid_config.json") as f: - config = json.load(f) - outcome = DestinationXata().check(logger=Mock(), config=config) - assert outcome.status == Status.FAILED - - -def test_write(config: Mapping): - test_schema = {"type": "object", "properties": {"str_col": {"type": "string"}, "int_col": {"type": "integer"}}} - - test_stream = ConfiguredAirbyteStream( - stream=AirbyteStream(name="test_stream", json_schema=test_schema, supported_sync_modes=[SyncMode.incremental]), - sync_mode=SyncMode.incremental, - destination_sync_mode=DestinationSyncMode.append, - ) - - records = [ - AirbyteMessage( - type=Type.RECORD, - record=AirbyteRecordMessage( - stream="test_stream", - data={ - "str_col": "example", - "int_col": 1, - }, - emitted_at=0, - ), - ) - ] - - # setup Xata workspace - xata = XataClient(api_key=config["api_key"], db_url=config["db_url"]) - db_name = xata.get_config()["dbName"] - # database exists? - assert xata.databases().getDatabaseMetadata(db_name).status_code == 200, f"database '{db_name}' does not exist." - assert xata.table().createTable("test_stream").status_code == 201, "could not create table; if it already exists, please delete it."
- assert ( - xata.table() - .setTableSchema( - "test_stream", - { - "columns": [ - {"name": "str_col", "type": "string"}, - {"name": "int_col", "type": "int"}, - ] - }, - ) - .status_code - == 200 - ), "failed to set table schema" - - dest = DestinationXata() - list(dest.write(config=config, configured_catalog=test_stream, input_messages=records)) - - # fetch record - records = xata.data().queryTable("test_stream", {}) - assert records.status_code == 200 - assert len(records.json()["records"]) == 1 - - proof = records.json()["records"][0] - assert proof["str_col"] == "example" - assert proof["int_col"] == 1 - - # cleanup - assert xata.table().deleteTable("test_stream").status_code == 200 diff --git a/airbyte-integrations/connectors/destination-xata/integration_tests/invalid_config.json b/airbyte-integrations/connectors/destination-xata/integration_tests/invalid_config.json deleted file mode 100644 index 36bd35acc0b5..000000000000 --- a/airbyte-integrations/connectors/destination-xata/integration_tests/invalid_config.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "api_key": "husenvasen", - "database_url": "https://invalid" -} diff --git a/airbyte-integrations/connectors/destination-xata/main.py b/airbyte-integrations/connectors/destination-xata/main.py deleted file mode 100644 index 76e7d8f087c0..000000000000 --- a/airbyte-integrations/connectors/destination-xata/main.py +++ /dev/null @@ -1,11 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import sys - -from destination_xata import DestinationXata - -if __name__ == "__main__": - DestinationXata().run(sys.argv[1:]) diff --git a/airbyte-integrations/connectors/destination-xata/requirements.txt b/airbyte-integrations/connectors/destination-xata/requirements.txt deleted file mode 100644 index d6e1198b1ab1..000000000000 --- a/airbyte-integrations/connectors/destination-xata/requirements.txt +++ /dev/null @@ -1 +0,0 @@ --e . diff --git a/airbyte-integrations/connectors/destination-xata/sample_files/configured_catalog.json b/airbyte-integrations/connectors/destination-xata/sample_files/configured_catalog.json deleted file mode 100644 index f526611d3df1..000000000000 --- a/airbyte-integrations/connectors/destination-xata/sample_files/configured_catalog.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "streams": [ - { - "stream": { - "name": "issues", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "append" - } - ] -} diff --git a/airbyte-integrations/connectors/destination-xata/setup.py b/airbyte-integrations/connectors/destination-xata/setup.py deleted file mode 100644 index 5fcb33e94fbb..000000000000 --- a/airbyte-integrations/connectors/destination-xata/setup.py +++ /dev/null @@ -1,23 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = ["airbyte-cdk", "xata==0.10.1"] - -TEST_REQUIREMENTS = ["pytest~=6.2"] - -setup( - name="destination_xata", - description="Destination implementation for Xata.io", - author="Philip Krauss", - author_email="support@xata.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json"]}, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/destination-xata/unit_tests/unit_test.py b/airbyte-integrations/connectors/destination-xata/unit_tests/unit_test.py deleted file mode 100644 index 51726247685a..000000000000 --- a/airbyte-integrations/connectors/destination-xata/unit_tests/unit_test.py +++ /dev/null @@ -1,28 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import unittest - -from xata.client import XataClient -from xata.helpers import BulkProcessor - - -class DestinationConnectorXataTestCase(unittest.TestCase): - def test_request(self): - xata = XataClient(db_url="https://unit_tests-mock.results-store.xata.sh/db/mock-db", api_key="mock-key") - bp = BulkProcessor(xata, thread_pool_size=1, batch_size=2, flush_interval=1) - stats = bp.get_stats() - - assert "total" in stats - assert "queue" in stats - assert "failed_batches" in stats - assert "tables" in stats - - assert stats["total"] == 0 - assert stats["queue"] == 0 - assert stats["failed_batches"] == 0 - - -if __name__ == "__main__": - unittest.main() diff --git a/airbyte-integrations/connectors/destination-yugabytedb/README.md b/airbyte-integrations/connectors/destination-yugabytedb/README.md deleted file mode 100644 index cf5c9b91fc61..000000000000 --- a/airbyte-integrations/connectors/destination-yugabytedb/README.md +++ /dev/null @@ -1,72 +0,0 @@ -# Destination Yugabytedb - -This is the repository for the Yugabytedb destination connector in Java. -For information about how to use this connector within Airbyte, see [the User Documentation](https://docs.airbyte.io/integrations/destinations/yugabytedb). - -## Local development - -#### Building via Gradle -From the Airbyte repository root, run: -``` -./gradlew :airbyte-integrations:connectors:destination-yugabytedb:build -``` - -#### Create credentials -**If you are a community contributor**, generate the necessary credentials and place them in `secrets/config.json` conforming to the spec file in `src/main/resources/spec.json`. -Note that the `secrets` directory is git-ignored by default, so there is no danger of accidentally checking in sensitive information. - -**If you are an Airbyte core member**, follow the [instructions](https://docs.airbyte.io/connector-development#using-credentials-in-ci) to set up the credentials. - -### Locally running the connector docker image - -#### Build -Build the connector image via Gradle: - -``` -./gradlew :airbyte-integrations:connectors:destination-yugabytedb:buildConnectorImage -``` -Once built, the docker image name and tag on your host will be `airbyte/destination-yugabytedb:dev`.
- -#### Run -Then run any of the connector commands as follows: -``` -docker run --rm airbyte/destination-yugabytedb:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-yugabytedb:dev check --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-yugabytedb:dev discover --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-yugabytedb:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` - -## Testing -We use `JUnit` for Java tests. - -### Unit and Integration Tests -Place unit tests under `src/test/java/io/airbyte/integrations/destination/yugabytedb`. - -#### Acceptance Tests -Airbyte has a standard test suite that all destination connectors must pass. Implement the `TODO`s in -`src/test-integration/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbDestinationAcceptanceTest.java`. - -### Using Gradle to run tests -All commands should be run from the Airbyte project root. -To run unit tests: -``` -./gradlew :airbyte-integrations:connectors:destination-yugabytedb:unitTest -``` -To run acceptance and custom integration tests: -``` -./gradlew :airbyte-integrations:connectors:destination-yugabytedb:integrationTest -``` - -## Dependency Management - -### Publishing a new version of the connector -You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? -1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-yugabytedb test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). -3. Make sure the `metadata.yaml` content is up to date. -4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/destinations/yugabytedb.md`). -5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). -6. Pat yourself on the back for being an awesome contributor. -7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
- diff --git a/airbyte-integrations/connectors/destination-yugabytedb/bootstrap.md b/airbyte-integrations/connectors/destination-yugabytedb/bootstrap.md deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/airbyte-integrations/connectors/destination-yugabytedb/build.gradle b/airbyte-integrations/connectors/destination-yugabytedb/build.gradle deleted file mode 100644 index 2186a1b5d8ee..000000000000 --- a/airbyte-integrations/connectors/destination-yugabytedb/build.gradle +++ /dev/null @@ -1,33 +0,0 @@ -plugins { - id 'application' - id 'airbyte-java-connector' -} - -airbyteJavaConnector { - cdkVersionRequired = '0.2.0' - features = ['db-destinations'] - useLocalCdk = false -} - -//remove once upgrading the CDK version to 0.4.x or later -java { - compileJava { - options.compilerArgs.remove("-Werror") - } -} - -airbyteJavaConnector.addCdkDependencies() - -application { - mainClass = 'io.airbyte.integrations.destination.yugabytedb.YugabytedbDestination' -} - -dependencies { - - implementation 'com.yugabyte:jdbc-yugabytedb:42.3.5-yb-1' - - testImplementation "org.assertj:assertj-core:3.21.0" - testImplementation "org.junit.jupiter:junit-jupiter:5.8.1" - testImplementation "org.testcontainers:junit-jupiter:1.17.5" - testImplementation "org.testcontainers:jdbc:1.17.5" -} diff --git a/airbyte-integrations/connectors/destination-yugabytedb/docker-compose.yml b/airbyte-integrations/connectors/destination-yugabytedb/docker-compose.yml deleted file mode 100644 index d8763350fa30..000000000000 --- a/airbyte-integrations/connectors/destination-yugabytedb/docker-compose.yml +++ /dev/null @@ -1,36 +0,0 @@ -version: "3" - -# Note: add mount points at /mnt/master and /mnt/tserver for persistence - -services: - yb-master: - image: yugabytedb/yugabyte:latest - container_name: yb-master-n1 - command: - [ - "/home/yugabyte/bin/yb-master", - "--fs_data_dirs=/mnt/master", - "--master_addresses=yb-master-n1:7100", - "--rpc_bind_addresses=yb-master-n1:7100", - "--replication_factor=1", - ] - ports: - - "7000:7000" - - yb-tserver: - image: yugabytedb/yugabyte:latest - container_name: yb-tserver-n1 - command: - [ - "/home/yugabyte/bin/yb-tserver", - "--fs_data_dirs=/mnt/tserver", - "--start_pgsql_proxy", - "--rpc_bind_addresses=yb-tserver-n1:9100", - "--tserver_master_addrs=yb-master-n1:7100", - ] - ports: - - "9042:9042" - - "5433:5433" - - "9000:9000" - depends_on: - - yb-master diff --git a/airbyte-integrations/connectors/destination-yugabytedb/src/main/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbDestination.java b/airbyte-integrations/connectors/destination-yugabytedb/src/main/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbDestination.java deleted file mode 100644 index 2ae3fc7c423e..000000000000 --- a/airbyte-integrations/connectors/destination-yugabytedb/src/main/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbDestination.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
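
The compose file above exposes YSQL on host port 5433. Because YSQL speaks the PostgreSQL wire protocol, the cluster can be smoke-tested from Python with `psycopg2`; the `yugabyte`/`yugabyte` credentials below are YugabyteDB's stock defaults and an assumption if the cluster has been customized.

```python
import psycopg2

# Stock defaults for a fresh YugabyteDB cluster; adjust if yours differs.
conn = psycopg2.connect(host="localhost", port=5433, dbname="yugabyte", user="yugabyte", password="yugabyte")

with conn, conn.cursor() as cur:
    cur.execute("SELECT version();")
    print(cur.fetchone()[0])  # YSQL reports itself as a PostgreSQL-compatible server
```
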
- */ - -package io.airbyte.integrations.destination.yugabytedb; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.cdk.db.factory.DatabaseDriver; -import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.cdk.integrations.base.IntegrationRunner; -import io.airbyte.cdk.integrations.destination.jdbc.AbstractJdbcDestination; -import io.airbyte.commons.json.Jsons; -import java.util.Collections; -import java.util.Map; -import java.util.Optional; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class YugabytedbDestination extends AbstractJdbcDestination { - - private static final Logger LOGGER = LoggerFactory.getLogger(YugabytedbDestination.class); - - public static final String DRIVER_CLASS = DatabaseDriver.YUGABYTEDB.getDriverClassName(); - - public YugabytedbDestination() { - super(DRIVER_CLASS, new YugabytedbNamingTransformer(), new YugabytedbSqlOperations()); - } - - public static void main(String[] args) throws Exception { - LOGGER.info("starting destination: {}", YugabytedbDestination.class); - new IntegrationRunner(new YugabytedbDestination()).run(args); - LOGGER.info("completed destination: {}", YugabytedbDestination.class); - } - - @Override - protected Map getDefaultConnectionProperties(JsonNode config) { - return Collections.emptyMap(); - } - - @Override - public JsonNode toJdbcConfig(JsonNode config) { - String schema = - Optional.ofNullable(config.get(JdbcUtils.SCHEMA_KEY)).map(JsonNode::asText).orElse("public"); - - String jdbcUrl = "jdbc:yugabytedb://" + config.get(JdbcUtils.HOST_KEY).asText() + ":" - + config.get(JdbcUtils.PORT_KEY).asText() + "/" - + config.get(JdbcUtils.DATABASE_KEY).asText(); - - ImmutableMap.Builder configBuilder = ImmutableMap.builder() - .put(JdbcUtils.USERNAME_KEY, config.get(JdbcUtils.USERNAME_KEY).asText()) - .put(JdbcUtils.JDBC_URL_KEY, jdbcUrl) - .put(JdbcUtils.SCHEMA_KEY, schema); - - if (config.has(JdbcUtils.PASSWORD_KEY)) { - configBuilder.put(JdbcUtils.PASSWORD_KEY, config.get(JdbcUtils.PASSWORD_KEY).asText()); - } - - if (config.has(JdbcUtils.JDBC_URL_PARAMS_KEY)) { - configBuilder.put(JdbcUtils.JDBC_URL_PARAMS_KEY, config.get(JdbcUtils.JDBC_URL_PARAMS_KEY).asText()); - } - - return Jsons.jsonNode(configBuilder.build()); - } - -} diff --git a/airbyte-integrations/connectors/destination-yugabytedb/src/main/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbNamingTransformer.java b/airbyte-integrations/connectors/destination-yugabytedb/src/main/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbNamingTransformer.java deleted file mode 100644 index 2485c777308b..000000000000 --- a/airbyte-integrations/connectors/destination-yugabytedb/src/main/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbNamingTransformer.java +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
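
For readers skimming the Java, `toJdbcConfig` above is a pure mapping from the connector spec to JDBC properties. A rough Python equivalent of the same mapping, with field names taken from the code (`public` being the schema fallback):

```python
def to_jdbc_config(config: dict) -> dict:
    jdbc_config = {
        "username": config["username"],
        "jdbc_url": f"jdbc:yugabytedb://{config['host']}:{config['port']}/{config['database']}",
        "schema": config.get("schema", "public"),
    }
    # password and jdbc_url_params are only forwarded when present.
    for optional_key in ("password", "jdbc_url_params"):
        if optional_key in config:
            jdbc_config[optional_key] = config[optional_key]
    return jdbc_config
```
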
- */ - -package io.airbyte.integrations.destination.yugabytedb; - -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; - -public class YugabytedbNamingTransformer extends StandardNameTransformer { - - @Override - public String applyDefaultCase(final String input) { - return input.toLowerCase(); - } - -} diff --git a/airbyte-integrations/connectors/destination-yugabytedb/src/main/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbSqlOperations.java b/airbyte-integrations/connectors/destination-yugabytedb/src/main/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbSqlOperations.java deleted file mode 100644 index bb876f884d55..000000000000 --- a/airbyte-integrations/connectors/destination-yugabytedb/src/main/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbSqlOperations.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.yugabytedb; - -import com.yugabyte.copy.CopyManager; -import com.yugabyte.core.BaseConnection; -import io.airbyte.cdk.db.jdbc.JdbcDatabase; -import io.airbyte.cdk.integrations.destination.jdbc.JdbcSqlOperations; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import java.io.BufferedReader; -import java.io.File; -import java.io.FileReader; -import java.io.IOException; -import java.io.UncheckedIOException; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.util.List; - -public class YugabytedbSqlOperations extends JdbcSqlOperations { - - @Override - protected void insertRecordsInternal(JdbcDatabase database, - List records, - String schemaName, - String tableName) - throws Exception { - - if (records.isEmpty()) { - return; - } - - File tempFile = null; - try { - tempFile = Files.createTempFile(tableName + "-", ".tmp").toFile(); - writeBatchToFile(tempFile, records); - - File finalTempFile = tempFile; - database.execute(connection -> { - - var copyManager = new CopyManager(connection.unwrap(BaseConnection.class)); - var sql = String.format("COPY %s.%s FROM STDIN DELIMITER ',' CSV", schemaName, tableName); - - try (var bufferedReader = new BufferedReader(new FileReader(finalTempFile, StandardCharsets.UTF_8))) { - copyManager.copyIn(sql, bufferedReader); - } catch (IOException e) { - throw new UncheckedIOException(e); - } - }); - } finally { - if (tempFile != null) { - Files.delete(tempFile.toPath()); - } - } - } - -} diff --git a/airbyte-integrations/connectors/destination-yugabytedb/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-yugabytedb/src/main/resources/spec.json deleted file mode 100644 index d9861baabe3c..000000000000 --- a/airbyte-integrations/connectors/destination-yugabytedb/src/main/resources/spec.json +++ /dev/null @@ -1,65 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.io/integrations/destinations/yugabytedb", - "supportsIncremental": true, - "supportsNormalization": false, - "supportsDBT": false, - "supported_destination_sync_modes": ["overwrite", "append"], - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Yugabytedb destination spec", - "type": "object", - "required": ["host", "port", "username", "database", "schema"], - "additionalProperties": true, - "properties": { - "host": { - "title": "Host", - "description": "The Hostname of the database.", - "type": "string", - "order": 0 - }, - "port": { - "title": "Port", - "description": "The Port of the database.", - "type": "integer", 
- "minimum": 0, - "maximum": 65536, - "default": 3306, - "examples": ["3306"], - "order": 1 - }, - "database": { - "title": "Database", - "description": "Name of the database.", - "type": "string", - "order": 2 - }, - "username": { - "title": "Username", - "description": "The Username which is used to access the database.", - "type": "string", - "order": 4 - }, - "schema": { - "title": "Default Schema", - "description": "The default schema tables are written to if the source does not specify a namespace. The usual value for this field is \"public\".", - "type": "string", - "examples": ["public"], - "default": "public", - "order": 3 - }, - "password": { - "title": "Password", - "description": "The Password associated with the username.", - "type": "string", - "airbyte_secret": true, - "order": 5 - }, - "jdbc_url_params": { - "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).", - "title": "JDBC URL Params", - "type": "string", - "order": 6 - } - } - } -} diff --git a/airbyte-integrations/connectors/destination-yugabytedb/src/test-integration/java/io/airbyte/integrations/destination/yugabytedb/YugabyteDataSource.java b/airbyte-integrations/connectors/destination-yugabytedb/src/test-integration/java/io/airbyte/integrations/destination/yugabytedb/YugabyteDataSource.java deleted file mode 100644 index f7cea140311f..000000000000 --- a/airbyte-integrations/connectors/destination-yugabytedb/src/test-integration/java/io/airbyte/integrations/destination/yugabytedb/YugabyteDataSource.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.yugabytedb; - -import io.airbyte.cdk.db.factory.DataSourceFactory; -import io.airbyte.cdk.db.factory.DatabaseDriver; -import java.util.Collections; -import javax.sql.DataSource; - -public class YugabyteDataSource { - - private YugabyteDataSource() { - - } - - static DataSource getInstance(String host, int port, String database, String username, String password) { - String jdbcUrl = "jdbc:yugabytedb://" + host + ":" + port + "/" + database; - return DataSourceFactory.create( - username, - password, - DatabaseDriver.YUGABYTEDB.getDriverClassName(), - jdbcUrl, - Collections.emptyMap()); - } - -} diff --git a/airbyte-integrations/connectors/destination-yugabytedb/src/test-integration/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbContainerInitializr.java b/airbyte-integrations/connectors/destination-yugabytedb/src/test-integration/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbContainerInitializr.java deleted file mode 100644 index fe81ca9572ca..000000000000 --- a/airbyte-integrations/connectors/destination-yugabytedb/src/test-integration/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbContainerInitializr.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.yugabytedb; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.testcontainers.containers.JdbcDatabaseContainer; -import org.testcontainers.utility.DockerImageName; - -public class YugabytedbContainerInitializr { - - private static final Logger LOGGER = LoggerFactory.getLogger(YugabytedbContainerInitializr.class); - - private static YugabytedbContainer yugabytedbContainer; - - private YugabytedbContainerInitializr() { - - } - - public static YugabytedbContainer initContainer() { - if (yugabytedbContainer == null) { - yugabytedbContainer = new YugabytedbContainer(); - } - yugabytedbContainer.start(); - return yugabytedbContainer; - } - - static class YugabytedbContainer extends JdbcDatabaseContainer { - - private static final int YUGABYTE_PORT = 5433; - - public YugabytedbContainer() { - super(DockerImageName.parse("yugabytedb/yugabyte:2.15.2.0-b87")); - - this.setCommand("bin/yugabyted", "start", "--daemon=false"); - this.addExposedPort(YUGABYTE_PORT); - - } - - @Override - public String getDriverClassName() { - return "com.yugabyte.Driver"; - } - - @Override - public String getJdbcUrl() { - String params = constructUrlParameters("?", "&"); - return "jdbc:yugabytedb://" + getHost() + ":" + getMappedPort(YUGABYTE_PORT) + "/yugabyte" + params; - } - - @Override - public String getDatabaseName() { - return "yugabyte"; - } - - @Override - public String getUsername() { - return "yugabyte"; - } - - @Override - public String getPassword() { - return "yugabyte"; - } - - @Override - protected String getTestQueryString() { - return "SELECT 1"; - } - - } - -} diff --git a/airbyte-integrations/connectors/destination-yugabytedb/src/test-integration/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-yugabytedb/src/test-integration/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbDestinationAcceptanceTest.java deleted file mode 100644 index ef4e3b8a80da..000000000000 --- a/airbyte-integrations/connectors/destination-yugabytedb/src/test-integration/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbDestinationAcceptanceTest.java +++ /dev/null @@ -1,154 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.yugabytedb; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.cdk.db.jdbc.DefaultJdbcDatabase; -import io.airbyte.cdk.db.jdbc.JdbcDatabase; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; -import io.airbyte.cdk.integrations.standardtest.destination.JdbcDestinationAcceptanceTest; -import io.airbyte.cdk.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; -import io.airbyte.cdk.integrations.standardtest.destination.comparator.TestDataComparator; -import io.airbyte.commons.json.Jsons; -import java.sql.SQLException; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.TestInstance; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@TestInstance(TestInstance.Lifecycle.PER_CLASS) -public class YugabytedbDestinationAcceptanceTest extends JdbcDestinationAcceptanceTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(YugabytedbDestinationAcceptanceTest.class); - - private YugabytedbContainerInitializr.YugabytedbContainer yugabytedbContainer; - - private final StandardNameTransformer namingResolver = new StandardNameTransformer(); - - private JsonNode jsonConfig; - - private JdbcDatabase database; - - private static final Set<String> cleanupTables = new HashSet<>(); - - @BeforeAll - void initContainer() { - yugabytedbContainer = YugabytedbContainerInitializr.initContainer(); - } - - @Override - protected String getImageName() { - return "airbyte/destination-yugabytedb:dev"; - } - - @Override - protected void setup(final TestDestinationEnv testEnv, final HashSet<String> TEST_SCHEMAS) throws Exception { - jsonConfig = Jsons.jsonNode(ImmutableMap.builder() - .put("host", yugabytedbContainer.getHost()) - .put("port", yugabytedbContainer.getMappedPort(5433)) - .put("database", yugabytedbContainer.getDatabaseName()) - .put("username", yugabytedbContainer.getUsername()) - .put("password", yugabytedbContainer.getPassword()) - .put("schema", "public") - .build()); - - database = new DefaultJdbcDatabase(YugabyteDataSource.getInstance( - yugabytedbContainer.getHost(), - yugabytedbContainer.getMappedPort(5433), - yugabytedbContainer.getDatabaseName(), - yugabytedbContainer.getUsername(), - yugabytedbContainer.getPassword())); - - } - - @Override - protected void tearDown(final TestDestinationEnv testEnv) throws Exception { - database.execute(connection -> { - final var statement = connection.createStatement(); - cleanupTables.forEach(tb -> { - try { - statement.execute("DROP TABLE " + tb + ";"); - } catch (final SQLException e) { - throw new RuntimeException(e); - } - }); - }); - cleanupTables.clear(); - } - - @Override - protected JsonNode getConfig() { - return jsonConfig; - } - - @Override - protected JsonNode getFailCheckConfig() { - return Jsons.jsonNode(ImmutableMap.builder() - .put("host", yugabytedbContainer.getHost()) - .put("port", yugabytedbContainer.getMappedPort(5433)) - .put("database", yugabytedbContainer.getDatabaseName()) - .put("username", "usr") - .put("password", "pw") - .put("schema", "public") - .build()); - } - - @Override - protected boolean implementsNamespaces() { - return true; - } - - @Override - protected TestDataComparator getTestDataComparator() { - return new AdvancedTestDataComparator(); - } - - @Override - protected boolean supportBasicDataTypeTest() { - return true; - } - - @Override - protected boolean supportArrayDataTypeTest() { - return true; - } - - @Override - protected boolean supportObjectDataTypeTest() { - return true; - } - - @Override - protected List<JsonNode> retrieveRecords(final TestDestinationEnv testEnv, - final String streamName, - final String namespace, - final JsonNode streamSchema) - throws SQLException { - - final String tableName = namingResolver.getRawTableName(streamName); - final String schemaName = namingResolver.getNamespace(namespace); - cleanupTables.add(schemaName + "." + tableName); - return retrieveRecordsFromTable(tableName, schemaName); - } - - private List<JsonNode> retrieveRecordsFromTable(final String tableName, final String schemaName) - throws SQLException { - - return database.bufferedResultSetQuery( - connection -> { - final var statement = connection.createStatement(); - return statement.executeQuery( - String.format("SELECT * FROM %s.%s ORDER BY %s ASC;", schemaName, tableName, - JavaBaseConstants.COLUMN_NAME_EMITTED_AT)); - }, - rs -> Jsons.deserialize(rs.getString(JavaBaseConstants.COLUMN_NAME_DATA))); - } - -} diff --git a/airbyte-integrations/connectors/destination-yugabytedb/src/test/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbDestinationTest.java b/airbyte-integrations/connectors/destination-yugabytedb/src/test/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbDestinationTest.java deleted file mode 100644 index 308638334c98..000000000000 --- a/airbyte-integrations/connectors/destination-yugabytedb/src/test/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbDestinationTest.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.yugabytedb; - -import static org.assertj.core.api.Assertions.assertThat; - -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.json.Jsons; -import java.util.Collections; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class YugabytedbDestinationTest { - - private YugabytedbDestination yugabytedbDestination; - - @BeforeEach - void setup() { - yugabytedbDestination = new YugabytedbDestination(); - } - - @Test - void testToJdbcConfig() { - - var config = Jsons.jsonNode(ImmutableMap.builder() - .put("host", "localhost") - .put("port", 5433) - .put("database", "yugabyte") - .put("username", "yugabyte") - .put("password", "yugabyte") - .put("schema", "public") - .build()); - - var jdbcConfig = yugabytedbDestination.toJdbcConfig(config); - - assertThat(jdbcConfig.get("schema").asText()).isEqualTo("public"); - assertThat(jdbcConfig.get("username").asText()).isEqualTo("yugabyte"); - assertThat(jdbcConfig.get("password").asText()).isEqualTo("yugabyte"); - assertThat(jdbcConfig.get("jdbc_url").asText()).isEqualTo("jdbc:yugabytedb://localhost:5433/yugabyte"); - - } - - @Test - void testGetDefaultConnectionProperties() { - - var map = yugabytedbDestination.getDefaultConnectionProperties(Jsons.jsonNode(Collections.emptyMap())); - - assertThat(map).isEmpty(); - - } - -} diff --git a/airbyte-integrations/connectors/destination-yugabytedb/src/test/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbNamingTransformerTest.java b/airbyte-integrations/connectors/destination-yugabytedb/src/test/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbNamingTransformerTest.java deleted file mode 100644 index a05a8a1f5514..000000000000 ---
a/airbyte-integrations/connectors/destination-yugabytedb/src/test/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbNamingTransformerTest.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.yugabytedb; - -import static org.assertj.core.api.Assertions.assertThat; - -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class YugabytedbNamingTransformerTest { - - private YugabytedbNamingTransformer yugabytedbNamingTransformer; - - @BeforeEach - void setup() { - yugabytedbNamingTransformer = new YugabytedbNamingTransformer(); - } - - @Test - void testApplyDefaultCase() { - - var defaultCase = yugabytedbNamingTransformer.applyDefaultCase("DEFAULT_CASE"); - - assertThat(defaultCase).isEqualTo("default_case"); - - } - -} From 72f6362ddd3eb7444fa68b3920bce7e7776d3681 Mon Sep 17 00:00:00 2001 From: Anton Karpets Date: Fri, 1 Mar 2024 18:35:11 +0200 Subject: [PATCH 049/172] =?UTF-8?q?=F0=9F=8F=A5Source=20Twilio:=20fix=20CA?= =?UTF-8?q?T=20(#35751)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../connectors/source-twilio/README.md | 2 +- .../integration_tests/expected_records.jsonl | 16 ++++++++-------- .../source-twilio/unit_tests/test_streams.py | 11 +++++++++++ 3 files changed, 20 insertions(+), 9 deletions(-) diff --git a/airbyte-integrations/connectors/source-twilio/README.md b/airbyte-integrations/connectors/source-twilio/README.md index 21f3011ac732..b4d9f466f181 100644 --- a/airbyte-integrations/connectors/source-twilio/README.md +++ b/airbyte-integrations/connectors/source-twilio/README.md @@ -30,7 +30,7 @@ See `sample_files/sample_config.json` for a sample config file. 
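A minimal sketch of what `secrets/config.json` might contain (the field names here are assumptions based on the connector's spec, and the values are placeholders rather than working credentials):

```json
{
  "account_sid": "ACxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
  "auth_token": "<your Twilio auth token>",
  "start_date": "2023-01-01T00:00:00Z"
}
```

The `read` command below points at `integration_tests/constant_records_catalog.json`, presumably a catalog restricted to streams whose records stay stable between runs.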
poetry run source-twilio spec poetry run source-twilio check --config secrets/config.json poetry run source-twilio discover --config secrets/config.json -poetry run source-twilio read --config secrets/config.json --catalog sample_files/configured_catalog.json +poetry run source-twilio read --config secrets/config.json --catalog integration_tests/constant_records_catalog.json ``` ### Running unit tests diff --git a/airbyte-integrations/connectors/source-twilio/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-twilio/integration_tests/expected_records.jsonl index ba744d425a95..b0476fbfb2d0 100644 --- a/airbyte-integrations/connectors/source-twilio/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-twilio/integration_tests/expected_records.jsonl @@ -7,10 +7,10 @@ {"stream": "available_phone_number_countries", "data": {"country_code": "AU", "country": "Australia", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/AU.json", "beta": false, "subresource_uris": {"local": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/AU/Local.json", "toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/AU/TollFree.json", "mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/AU/Mobile.json"}}, "emitted_at": 1691419684730} {"stream": "available_phone_number_countries", "data": {"country_code": "BE", "country": "Belgium", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/BE.json", "beta": false, "subresource_uris": {"toll_free": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/BE/TollFree.json"}}, "emitted_at": 1691419684732} {"stream": "available_phone_number_countries", "data": {"country_code": "SE", "country": "Sweden", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/SE.json", "beta": false, "subresource_uris": {"mobile": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/AvailablePhoneNumbers/SE/Mobile.json"}}, "emitted_at": 1691419684732} -{"stream": "calls", "data": { "date_updated": "2023-06-15T19:57:59Z", "price_unit": "USD", "parent_call_sid": null, "caller_name": null, "duration": 13, "from": "+12056890337", "to": "+12056561170", "annotation": null, "answered_by": null, "sid": "CAb70f3b70167dd8d4ee2e1dc15db64e02", "queue_time": 0, "price": -0.0085, "api_version": "2010-04-01", "status": "completed", "direction": "inbound", "start_time": "2023-06-15T19:57:46Z", "date_created": "2023-06-15T19:57:46Z", "from_formatted": "(205) 689-0337", "group_sid": null, "trunk_sid": "", "forwarded_from": "+12056561170", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAb70f3b70167dd8d4ee2e1dc15db64e02.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "end_time": "2023-06-15T19:57:59Z", "to_formatted": "(205) 656-1170", "phone_number_sid": "PNe40bd7f3ac343b32fd51275d2d5b3dcc", "subresource_uris": { "feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAb70f3b70167dd8d4ee2e1dc15db64e02/Feedback.json", "user_defined_messages": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAb70f3b70167dd8d4ee2e1dc15db64e02/UserDefinedMessages.json", "notifications": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAb70f3b70167dd8d4ee2e1dc15db64e02/Notifications.json", "recordings": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAb70f3b70167dd8d4ee2e1dc15db64e02/Recordings.json", "streams": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAb70f3b70167dd8d4ee2e1dc15db64e02/Streams.json", "payments": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAb70f3b70167dd8d4ee2e1dc15db64e02/Payments.json", "user_defined_message_subscriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAb70f3b70167dd8d4ee2e1dc15db64e02/UserDefinedMessageSubscriptions.json", "siprec": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAb70f3b70167dd8d4ee2e1dc15db64e02/Siprec.json", "events": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAb70f3b70167dd8d4ee2e1dc15db64e02/Events.json", "feedback_summaries": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/FeedbackSummary.json" } }, "emitted_at": 1694639568884} -{"stream": "calls", "data": { "date_updated": "2023-03-15T11:35:20Z", "price_unit": "USD", "parent_call_sid": null, "caller_name": "", "duration": 0, "from": "+12056561170", "to": "+14156236785", "annotation": null, "answered_by": null, "sid": "CA651f21262d4308879ea685e704dd0384", "queue_time": 0, "price": null, "api_version": "2010-04-01", "status": "busy", "direction": "outbound-api", "start_time": "2023-03-15T11:35:03Z", "date_created": "2023-03-15T11:35:03Z", "from_formatted": "(205) 656-1170", "group_sid": null, "trunk_sid": "", "forwarded_from": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA651f21262d4308879ea685e704dd0384.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "end_time": "2023-03-15T11:35:20Z", "to_formatted": "(415) 623-6785", "phone_number_sid": "PNe40bd7f3ac343b32fd51275d2d5b3dcc", "subresource_uris": { "feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA651f21262d4308879ea685e704dd0384/Feedback.json", "user_defined_messages": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA651f21262d4308879ea685e704dd0384/UserDefinedMessages.json", "notifications": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA651f21262d4308879ea685e704dd0384/Notifications.json", "recordings": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA651f21262d4308879ea685e704dd0384/Recordings.json", "streams": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA651f21262d4308879ea685e704dd0384/Streams.json", "payments": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA651f21262d4308879ea685e704dd0384/Payments.json", "user_defined_message_subscriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA651f21262d4308879ea685e704dd0384/UserDefinedMessageSubscriptions.json", "siprec": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA651f21262d4308879ea685e704dd0384/Siprec.json", "events": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA651f21262d4308879ea685e704dd0384/Events.json", "feedback_summaries": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/FeedbackSummary.json" } }, "emitted_at": 1694639568886} -{"stream": "calls", "data": { "date_updated": "2023-02-16T14:37:32Z", "price_unit": "USD", "parent_call_sid": null, "caller_name": null, "duration": 14, "from": "+380636306253", "to": "+13603004201", "annotation": null, "answered_by": null, "sid": "CA9121cd06fb7a1c0c96664c089621c979", "queue_time": 0, "price": -0.0085, "api_version": "2010-04-01", "status": "completed", "direction": 
"inbound", "start_time": "2023-02-16T14:37:18Z", "date_created": "2023-02-16T14:37:18Z", "from_formatted": "+380636306253", "group_sid": null, "trunk_sid": "", "forwarded_from": "+13603004201", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9121cd06fb7a1c0c96664c089621c979.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "end_time": "2023-02-16T14:37:32Z", "to_formatted": "(360) 300-4201", "phone_number_sid": "PN1fe31291fa81c17bf71cd128bc649e68", "subresource_uris": { "feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9121cd06fb7a1c0c96664c089621c979/Feedback.json", "user_defined_messages": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9121cd06fb7a1c0c96664c089621c979/UserDefinedMessages.json", "notifications": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9121cd06fb7a1c0c96664c089621c979/Notifications.json", "recordings": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9121cd06fb7a1c0c96664c089621c979/Recordings.json", "streams": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9121cd06fb7a1c0c96664c089621c979/Streams.json", "payments": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9121cd06fb7a1c0c96664c089621c979/Payments.json", "user_defined_message_subscriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9121cd06fb7a1c0c96664c089621c979/UserDefinedMessageSubscriptions.json", "siprec": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9121cd06fb7a1c0c96664c089621c979/Siprec.json", "events": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9121cd06fb7a1c0c96664c089621c979/Events.json", "feedback_summaries": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/FeedbackSummary.json" } }, "emitted_at": 1694639568887} -{"stream": "conferences", "data": {"status": "completed", "reason_conference_ended": "last-participant-left", "date_updated": "2022-09-23T14:44:41Z", "region": "us1", "friendly_name": "test_conference", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Conferences/CFca0fa08200f55a6d60779d18b644a675.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "call_sid_ending_conference": "CA8858f240bdccfb3393def1682c2dbdf0", "sid": "CFca0fa08200f55a6d60779d18b644a675", "date_created": "2022-09-23T14:44:11Z", "api_version": "2010-04-01", "subresource_uris": {"participants": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Conferences/CFca0fa08200f55a6d60779d18b644a675/Participants.json", "recordings": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Conferences/CFca0fa08200f55a6d60779d18b644a675/Recordings.json"}}, "emitted_at": 1691419855153} +{"stream": "calls", "data": {"date_updated": "2023-06-15T19:57:59Z", "price_unit": "USD", "parent_call_sid": null, "caller_name": null, "duration": 13, "from": "+12056890337", "to": "+12056561170", "annotation": null, "answered_by": null, "sid": "CAb70f3b70167dd8d4ee2e1dc15db64e02", "queue_time": 0, "price": -0.0085, "api_version": "2010-04-01", "status": "completed", "direction": "inbound", "start_time": "2023-06-15T19:57:46Z", "date_created": "2023-06-15T19:57:46Z", "from_formatted": "(205) 689-0337", "group_sid": null, "trunk_sid": "", "forwarded_from": "+12056561170", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAb70f3b70167dd8d4ee2e1dc15db64e02.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "end_time": "2023-06-15T19:57:59Z", "to_formatted": "(205) 
656-1170", "phone_number_sid": "PNe40bd7f3ac343b32fd51275d2d5b3dcc", "subresource_uris": {"feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAb70f3b70167dd8d4ee2e1dc15db64e02/Feedback.json", "user_defined_messages": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAb70f3b70167dd8d4ee2e1dc15db64e02/UserDefinedMessages.json", "notifications": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAb70f3b70167dd8d4ee2e1dc15db64e02/Notifications.json", "recordings": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAb70f3b70167dd8d4ee2e1dc15db64e02/Recordings.json", "streams": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAb70f3b70167dd8d4ee2e1dc15db64e02/Streams.json", "payments": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAb70f3b70167dd8d4ee2e1dc15db64e02/Payments.json", "user_defined_message_subscriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAb70f3b70167dd8d4ee2e1dc15db64e02/UserDefinedMessageSubscriptions.json", "siprec": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAb70f3b70167dd8d4ee2e1dc15db64e02/Siprec.json", "events": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CAb70f3b70167dd8d4ee2e1dc15db64e02/Events.json", "feedback_summaries": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/FeedbackSummary.json"}}, "emitted_at": 1694639568884} +{"stream": "calls", "data": {"date_updated": "2023-03-15T11:35:20Z", "price_unit": "USD", "parent_call_sid": null, "caller_name": "", "duration": 0, "from": "+12056561170", "to": "+14156236785", "annotation": null, "answered_by": null, "sid": "CA651f21262d4308879ea685e704dd0384", "queue_time": 0, "price": null, "api_version": "2010-04-01", "status": "busy", "direction": "outbound-api", "start_time": "2023-03-15T11:35:03Z", "date_created": "2023-03-15T11:35:03Z", "from_formatted": "(205) 656-1170", "group_sid": null, "trunk_sid": "", "forwarded_from": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA651f21262d4308879ea685e704dd0384.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "end_time": "2023-03-15T11:35:20Z", "to_formatted": "(415) 623-6785", "phone_number_sid": "PNe40bd7f3ac343b32fd51275d2d5b3dcc", "subresource_uris": {"feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA651f21262d4308879ea685e704dd0384/Feedback.json", "user_defined_messages": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA651f21262d4308879ea685e704dd0384/UserDefinedMessages.json", "notifications": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA651f21262d4308879ea685e704dd0384/Notifications.json", "recordings": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA651f21262d4308879ea685e704dd0384/Recordings.json", "streams": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA651f21262d4308879ea685e704dd0384/Streams.json", "payments": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA651f21262d4308879ea685e704dd0384/Payments.json", "user_defined_message_subscriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA651f21262d4308879ea685e704dd0384/UserDefinedMessageSubscriptions.json", "siprec": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA651f21262d4308879ea685e704dd0384/Siprec.json", "events": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA651f21262d4308879ea685e704dd0384/Events.json", "feedback_summaries": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/FeedbackSummary.json"}}, "emitted_at": 1694639568886} +{"stream": "calls", "data": {"date_updated": "2023-02-16T14:37:32Z", "price_unit": "USD", "parent_call_sid": null, "caller_name": null, "duration": 14, "from": "+380636306253", "to": "+13603004201", "annotation": null, "answered_by": null, "sid": "CA9121cd06fb7a1c0c96664c089621c979", "queue_time": 0, "price": -0.0085, "api_version": "2010-04-01", "status": "completed", "direction": "inbound", "start_time": "2023-02-16T14:37:18Z", "date_created": "2023-02-16T14:37:18Z", "from_formatted": "+380636306253", "group_sid": null, "trunk_sid": "", "forwarded_from": "+13603004201", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9121cd06fb7a1c0c96664c089621c979.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "end_time": "2023-02-16T14:37:32Z", "to_formatted": "(360) 300-4201", "phone_number_sid": "PN1fe31291fa81c17bf71cd128bc649e68", "subresource_uris": {"feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9121cd06fb7a1c0c96664c089621c979/Feedback.json", "user_defined_messages": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9121cd06fb7a1c0c96664c089621c979/UserDefinedMessages.json", "notifications": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9121cd06fb7a1c0c96664c089621c979/Notifications.json", "recordings": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9121cd06fb7a1c0c96664c089621c979/Recordings.json", "streams": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9121cd06fb7a1c0c96664c089621c979/Streams.json", "payments": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9121cd06fb7a1c0c96664c089621c979/Payments.json", "user_defined_message_subscriptions": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9121cd06fb7a1c0c96664c089621c979/UserDefinedMessageSubscriptions.json", "siprec": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9121cd06fb7a1c0c96664c089621c979/Siprec.json", "events": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/CA9121cd06fb7a1c0c96664c089621c979/Events.json", "feedback_summaries": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Calls/FeedbackSummary.json"}}, "emitted_at": 1694639568887} +{"stream": "conferences", "data": {"status": "completed", "reason_conference_ended": "last-participant-left", "date_updated": "2023-02-16T10:09:23Z", "region": "us1", "friendly_name": "Conference4", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Conferences/CF582c8fbd75e1fa02301b553711f87e7f.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "call_sid_ending_conference": "CA9c8671884602ca5e64895a917ad1ba90", "sid": "CF582c8fbd75e1fa02301b553711f87e7f", "date_created": "2023-02-16T10:09:11Z", "api_version": "2010-04-01", "subresource_uris": {"participants": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Conferences/CF582c8fbd75e1fa02301b553711f87e7f/Participants.json", "recordings": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Conferences/CF582c8fbd75e1fa02301b553711f87e7f/Recordings.json"}}, "emitted_at": 1709305079075} {"stream": "conferences", "data": {"status": "completed", "reason_conference_ended": "last-participant-left", "date_updated": "2023-02-15T14:49:37Z", "region": "us1", "friendly_name": "Conference2", "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Conferences/CF15e8707d15e02c1af88809b159ff8b42.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "call_sid_ending_conference": "CA04ae9210566d36c425bae2087736f6ac", "sid": "CF15e8707d15e02c1af88809b159ff8b42", "date_created": "2023-02-15T14:49:21Z", "api_version": "2010-04-01", "subresource_uris": {"participants": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Conferences/CF15e8707d15e02c1af88809b159ff8b42/Participants.json", "recordings": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Conferences/CF15e8707d15e02c1af88809b159ff8b42/Recordings.json"}}, "emitted_at": 1691419855509} {"stream": "conferences", "data": {"status": "completed", "reason_conference_ended": "last-participant-left", "date_updated": "2023-02-16T09:57:39Z", "region": "us1", "friendly_name": "Conference2", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Conferences/CF33199d5a9a0b202b3bd9558438a052d8.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "call_sid_ending_conference": "CAf8464ca5eda3ab7cc3e2d86cdb3c720f", "sid": "CF33199d5a9a0b202b3bd9558438a052d8", "date_created": "2023-02-16T09:57:11Z", "api_version": "2010-04-01", "subresource_uris": {"participants": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Conferences/CF33199d5a9a0b202b3bd9558438a052d8/Participants.json", "recordings": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Conferences/CF33199d5a9a0b202b3bd9558438a052d8/Recordings.json"}}, "emitted_at": 1691419855510} {"stream": "conversations", "data": {"unique_name": null, "date_updated": "2023-03-21T13:39:44Z", "friendly_name": "Friendly Conversation", "timers": {}, "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "url": "https://conversations.twilio.com/v1/Conversations/CH0ed7b4c3498e455a96fa09fcccee720e", "state": "active", "date_created": "2023-03-21T13:39:44Z", "messaging_service_sid": "MGfdf707ca9a7e03496ad79dc64e5e543e", "sid": "CH0ed7b4c3498e455a96fa09fcccee720e", "attributes": "{}", "bindings": null, "chat_service_sid": "IS5fcc074f7ead44c99a0a24a374a7e19f", "links": {"participants": "https://conversations.twilio.com/v1/Conversations/CH0ed7b4c3498e455a96fa09fcccee720e/Participants", "messages": "https://conversations.twilio.com/v1/Conversations/CH0ed7b4c3498e455a96fa09fcccee720e/Messages", "webhooks": "https://conversations.twilio.com/v1/Conversations/CH0ed7b4c3498e455a96fa09fcccee720e/Webhooks"}}, "emitted_at": 1691419856305} @@ -22,12 +22,12 @@ {"stream": "incoming_phone_numbers", "data": {"origin": "twilio", "status": "in-use", "address_requirements": "none", "date_updated": "2023-03-27T07:57:03Z", "voice_url": "https://handler.twilio.com/twiml/EH5793263d703ad674bbcdeb31ac80e359", "sms_application_sid": "", "voice_fallback_method": "POST", "emergency_address_status": "unregistered", "identity_sid": null, "emergency_status": "Active", "voice_application_sid": "", "capabilities": {"fax": false, "voice": true, "sms": true, "mms": true}, "api_version": "2010-04-01", "sid": "PNf2eb05a16e73094f891b01076b830a6a", "status_callback_method": "POST", "voice_fallback_url": "", "phone_number": "+16508997708", "emergency_address_sid": null, "beta": false, "address_sid": "AD07820b628d536f40af85140c67e108f0", "sms_url": "https://webhooks.twilio.com/v1/Accounts/ACdade166c12e160e9ed0a6088226718fb/Flows/FWbd726b7110b21294a9f27a47f4ab0080", "voice_method": "POST", "voice_caller_id_lookup": false, "friendly_name": "Test phone number 8", "uri": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/IncomingPhoneNumbers/PNf2eb05a16e73094f891b01076b830a6a.json", "sms_fallback_url": "", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "sms_method": "POST", "trunk_sid": null, "sms_fallback_method": "POST", "date_created": "2023-02-16T14:31:29Z", "bundle_sid": null, "status_callback": "", "subresource_uris": {"assigned_add_ons": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/IncomingPhoneNumbers/PNf2eb05a16e73094f891b01076b830a6a/AssignedAddOns.json"}}, "emitted_at": 1691419867845} {"stream": "incoming_phone_numbers", "data": {"origin": "twilio", "status": "in-use", "address_requirements": "none", "date_updated": "2023-03-27T07:58:14Z", "voice_url": "https://handler.twilio.com/twiml/EHb6471af720e8b66baa14e7226227893b", "sms_application_sid": "", "voice_fallback_method": "POST", "emergency_address_status": "unregistered", "identity_sid": null, "emergency_status": "Active", "voice_application_sid": "", "capabilities": {"fax": false, "voice": true, "sms": true, "mms": true}, "api_version": "2010-04-01", "sid": "PNd74715bab1be123cc9004f03b85bb067", "status_callback_method": "POST", "voice_fallback_url": "", "phone_number": "+14246220939", "emergency_address_sid": null, "beta": false, "address_sid": "AD0164001bc0f84d9bc29e17378fe47c20", "sms_url": "https://webhooks.twilio.com/v1/Accounts/ACdade166c12e160e9ed0a6088226718fb/Flows/FWbd726b7110b21294a9f27a47f4ab0080", "voice_method": "POST", "voice_caller_id_lookup": false, "friendly_name": "Test phone number 9", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/IncomingPhoneNumbers/PNd74715bab1be123cc9004f03b85bb067.json", "sms_fallback_url": "", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "sms_method": "POST", "trunk_sid": null, "sms_fallback_method": "POST", "date_created": "2023-02-16T14:34:00Z", "bundle_sid": null, "status_callback": "", "subresource_uris": {"assigned_add_ons": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/IncomingPhoneNumbers/PNd74715bab1be123cc9004f03b85bb067/AssignedAddOns.json"}}, "emitted_at": 1691419867848} {"stream": "incoming_phone_numbers", "data": {"origin": "twilio", "status": "in-use", "address_requirements": "none", "date_updated": "2023-03-27T07:58:40Z", "voice_url": "https://handler.twilio.com/twiml/EHb77bc7c1f889b6c9fe5202d0463edfc4", "sms_application_sid": "", "voice_fallback_method": "POST", "emergency_address_status": "unregistered", "identity_sid": null, "emergency_status": "Active", "voice_application_sid": "", "capabilities": {"fax": false, "voice": true, "sms": true, "mms": true}, "api_version": "2010-04-01", "sid": "PN99400a65bf5a4305d5420060842d4d2c", "status_callback_method": "POST", "voice_fallback_url": "", "phone_number": "+19125901057", "emergency_address_sid": null, "beta": false, "address_sid": "AD0e69bf9110f766787a88f99b507c9eeb", "sms_url": "https://webhooks.twilio.com/v1/Accounts/ACdade166c12e160e9ed0a6088226718fb/Flows/FWbd726b7110b21294a9f27a47f4ab0080", "voice_method": "POST", "voice_caller_id_lookup": false, "friendly_name": "Test phone number 2", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/IncomingPhoneNumbers/PN99400a65bf5a4305d5420060842d4d2c.json", "sms_fallback_url": "", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "sms_method": "POST", "trunk_sid": null, "sms_fallback_method": "POST", "date_created": "2023-02-15T09:31:24Z", "bundle_sid": null, "status_callback": "", "subresource_uris": {"assigned_add_ons": 
"/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/IncomingPhoneNumbers/PN99400a65bf5a4305d5420060842d4d2c/AssignedAddOns.json"}}, "emitted_at": 1691419867849} -{"stream": "message_media", "data": { "sid": "ME66ee8039997ee13231f5bd4a9121162c", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "parent_sid": "MMf491b7a98d00cdf54afc20b1839cea4e", "content_type": "image/png", "date_created": "2023-07-19T07:03:14Z", "date_updated": "2023-07-19T07:03:14Z", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MMf491b7a98d00cdf54afc20b1839cea4e/Media/ME66ee8039997ee13231f5bd4a9121162c.json" }, "emitted_at": 1691419887396} +{"stream": "message_media", "data": {"sid": "ME66ee8039997ee13231f5bd4a9121162c", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "parent_sid": "MMf491b7a98d00cdf54afc20b1839cea4e", "content_type": "image/png", "date_created": "2023-07-19T07:03:14Z", "date_updated": "2023-07-19T07:03:14Z", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MMf491b7a98d00cdf54afc20b1839cea4e/Media/ME66ee8039997ee13231f5bd4a9121162c.json"}, "emitted_at": 1691419887396} {"stream": "message_media", "data": {"sid": "ME34324546a4398b36fc96fd36500038c3", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "parent_sid": "MM56662e159d1a5d1f1c6e2d43202b7940", "content_type": "image/png", "date_created": "2023-02-14T14:02:28Z", "date_updated": "2023-02-14T14:02:28Z", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MM56662e159d1a5d1f1c6e2d43202b7940/Media/ME34324546a4398b36fc96fd36500038c3.json"}, "emitted_at": 1691419915272} {"stream": "message_media", "data": {"sid": "ME45c86c927aa3eb6749bac07b9bc6f418", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "parent_sid": "MM5e9241ae9a444f8061b28e3de05fe818", "content_type": "image/png", "date_created": "2023-02-14T14:02:59Z", "date_updated": "2023-02-14T14:02:59Z", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MM5e9241ae9a444f8061b28e3de05fe818/Media/ME45c86c927aa3eb6749bac07b9bc6f418.json"}, "emitted_at": 1691419915437} -{"stream": "messages", "data": { "body": "Hi there, Test 4!", "num_segments": 1, "direction": "outbound-api", "from": "+12056561170", "date_updated": "2023-02-14T16:03:17Z", "price": -0.02, "error_message": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MM70ec51fd8ba9408302cdd16b98a47c81.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "num_media": 1, "to": "+14156236785", "date_created": "2023-02-14T14:03:37Z", "status": "sent", "sid": "MM70ec51fd8ba9408302cdd16b98a47c81", "date_sent": "2023-02-14T14:03:38Z", "messaging_service_sid": null, "error_code": null, "price_unit": "USD", "api_version": "2010-04-01", "subresource_uris": { "media": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MM70ec51fd8ba9408302cdd16b98a47c81/Media.json", "feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MM70ec51fd8ba9408302cdd16b98a47c81/Feedback.json" } }, "emitted_at": 1691419956845} -{"stream": "messages", "data": {"body": "Test", "num_segments": 1, "direction": "inbound", "from": "+12025502908", "date_updated": "2022-12-16T18:58:29Z", "price": -0.0079, "error_message": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/SMf73a453514f0a1d8bd4d0121713a8be9.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "num_media": 0, "to": "+12056561170", "date_created": "2022-12-16T18:58:28Z", "status": "received", "sid": 
"SMf73a453514f0a1d8bd4d0121713a8be9", "date_sent": "2022-12-16T18:58:29Z", "messaging_service_sid": null, "error_code": null, "price_unit": "USD", "api_version": "2010-04-01", "subresource_uris": {"media": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/SMf73a453514f0a1d8bd4d0121713a8be9/Media.json", "feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/SMf73a453514f0a1d8bd4d0121713a8be9/Feedback.json"}}, "emitted_at": 1691419957399} -{"stream": "messages", "data": {"body": "Airbyte", "num_segments": 1, "direction": "inbound", "from": "+12025502908", "date_updated": "2022-12-16T18:58:47Z", "price": -0.0079, "error_message": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/SM0ae34204de318609bd2801af5396442d.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "num_media": 0, "to": "+12056561170", "date_created": "2022-12-16T18:58:47Z", "status": "received", "sid": "SM0ae34204de318609bd2801af5396442d", "date_sent": "2022-12-16T18:58:47Z", "messaging_service_sid": null, "error_code": null, "price_unit": "USD", "api_version": "2010-04-01", "subresource_uris": {"media": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/SM0ae34204de318609bd2801af5396442d/Media.json", "feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/SM0ae34204de318609bd2801af5396442d/Feedback.json"}}, "emitted_at": 1691419957401} +{"stream": "messages", "data": {"body": "Hi there, Test 3!", "num_segments": 1, "direction": "outbound-api", "from": "+12056561170", "date_updated": "2023-02-14T15:53:24Z", "price": -0.02, "error_message": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MM863b367b7d0725532b80a161c9dab4e5.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "num_media": 1, "to": "+14156236785", "date_created": "2023-02-14T14:03:42Z", "status": "sent", "sid": "MM863b367b7d0725532b80a161c9dab4e5", "date_sent": "2023-02-14T14:03:43Z", "messaging_service_sid": null, "error_code": null, "price_unit": "USD", "api_version": "2010-04-01", "subresource_uris": {"media": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MM863b367b7d0725532b80a161c9dab4e5/Media.json", "feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MM863b367b7d0725532b80a161c9dab4e5/Feedback.json"}}, "emitted_at": 1709305631427} +{"stream": "messages", "data": {"body": "Hi there, Test 2!", "num_segments": 1, "direction": "outbound-api", "from": "+12056561170", "date_updated": "2023-02-14T15:54:04Z", "price": -0.02, "error_message": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MM5a93ae7d20c07ceae87cd2649485ba72.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "num_media": 1, "to": "+14156236785", "date_created": "2023-02-14T14:03:47Z", "status": "sent", "sid": "MM5a93ae7d20c07ceae87cd2649485ba72", "date_sent": "2023-02-14T14:03:48Z", "messaging_service_sid": null, "error_code": null, "price_unit": "USD", "api_version": "2010-04-01", "subresource_uris": {"media": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MM5a93ae7d20c07ceae87cd2649485ba72/Media.json", "feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MM5a93ae7d20c07ceae87cd2649485ba72/Feedback.json"}}, "emitted_at": 1709305631428} +{"stream": "messages", "data": {"body": "Hi there, Test 1!", "num_segments": 1, "direction": "outbound-api", "from": "+12056561170", "date_updated": "2023-02-14T16:12:21Z", "price": -0.02, 
"error_message": null, "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MM89c1a0785d96b5faa30d65aa644c70b4.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "num_media": 1, "to": "+14156236785", "date_created": "2023-02-14T14:03:52Z", "status": "sent", "sid": "MM89c1a0785d96b5faa30d65aa644c70b4", "date_sent": "2023-02-14T14:03:53Z", "messaging_service_sid": null, "error_code": null, "price_unit": "USD", "api_version": "2010-04-01", "subresource_uris": {"media": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MM89c1a0785d96b5faa30d65aa644c70b4/Media.json", "feedback": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/Messages/MM89c1a0785d96b5faa30d65aa644c70b4/Feedback.json"}}, "emitted_at": 1709305631429} {"stream": "outgoing_caller_ids", "data": {"phone_number": "+14153597503", "date_updated": "2020-11-17T04:17:37Z", "friendly_name": "(415) 359-7503", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/OutgoingCallerIds/PN16ba111c0df5756cfe37044ed0ee3136.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "sid": "PN16ba111c0df5756cfe37044ed0ee3136", "date_created": "2020-11-17T04:17:37Z"}, "emitted_at": 1691419960444} {"stream": "outgoing_caller_ids", "data": {"phone_number": "+18023494963", "date_updated": "2020-12-11T04:28:02Z", "friendly_name": "(802) 349-4963", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/OutgoingCallerIds/PN726d635f970c30193cd12e7b994510a1.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "sid": "PN726d635f970c30193cd12e7b994510a1", "date_created": "2020-12-11T04:28:02Z"}, "emitted_at": 1691419960446} {"stream": "outgoing_caller_ids", "data": {"phone_number": "+14156236785", "date_updated": "2023-02-15T15:33:09Z", "friendly_name": "Slack sms channel", "uri": "/2010-04-01/Accounts/ACdade166c12e160e9ed0a6088226718fb/OutgoingCallerIds/PNbb9c658169cfd057a46cdce9dc00afa3.json", "account_sid": "ACdade166c12e160e9ed0a6088226718fb", "sid": "PNbb9c658169cfd057a46cdce9dc00afa3", "date_created": "2023-02-14T12:11:53Z"}, "emitted_at": 1691419960447} diff --git a/airbyte-integrations/connectors/source-twilio/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-twilio/unit_tests/test_streams.py index 10d64697b22e..309355222b86 100644 --- a/airbyte-integrations/connectors/source-twilio/unit_tests/test_streams.py +++ b/airbyte-integrations/connectors/source-twilio/unit_tests/test_streams.py @@ -145,6 +145,17 @@ def test_request_params(self, stream_cls, next_page_token, expected): result = stream.request_params(stream_state=None, next_page_token=next_page_token) assert result == expected + @pytest.mark.parametrize( + "original_value, field_schema, expected_value", + [ + ("Fri, 11 Dec 2020 04:28:40 +0000", {"format": "date-time"}, "2020-12-11T04:28:40Z"), + ("2020-12-11T04:28:40Z", {"format": "date-time"}, "2020-12-11T04:28:40Z"), + ("some_string", {}, "some_string"), + ] + ) + def test_transform_function(self, original_value, field_schema, expected_value): + assert Accounts.custom_transform_function(original_value, field_schema) == expected_value + class TestIncrementalTwilioStream: From e54f5debbdd773682da898ad8dc99bd45e358fb0 Mon Sep 17 00:00:00 2001 From: Anton Karpets Date: Fri, 1 Mar 2024 18:54:13 +0200 Subject: [PATCH 050/172] =?UTF-8?q?=F0=9F=8F=A5Source=20Sendgrid:=20fix=20?= =?UTF-8?q?CAT=20(#35753)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../source-sendgrid/unit_tests/unit_test.py | 29 
+++++++++++++++++-- 1 file changed, 26 insertions(+), 3 deletions(-) diff --git a/airbyte-integrations/connectors/source-sendgrid/unit_tests/unit_test.py b/airbyte-integrations/connectors/source-sendgrid/unit_tests/unit_test.py index 750a1db4c8b3..eb663f30825b 100644 --- a/airbyte-integrations/connectors/source-sendgrid/unit_tests/unit_test.py +++ b/airbyte-integrations/connectors/source-sendgrid/unit_tests/unit_test.py @@ -17,17 +17,21 @@ from source_sendgrid.source import SourceSendgrid from source_sendgrid.streams import ( Blocks, + Bounces, Campaigns, Contacts, GlobalSuppressions, + InvalidEmails, Lists, Segments, SendgridStream, SendgridStreamIncrementalMixin, SendgridStreamOffsetPagination, + SpamReports, SuppressionGroupMembers, SuppressionGroups, Templates, + UnsubscribeGroups, ) FAKE_NOW = pendulum.DateTime(2022, 1, 1, tzinfo=pendulum.timezone("utc")) @@ -36,9 +40,10 @@ @pytest.fixture(name="sendgrid_stream") def sendgrid_stream_fixture(mocker) -> SendgridStream: - # Wipe the internal list of abstract methods to allow instantiating the abstract class without implementing its abstract methods + # Wipe the internal list of abstract methods to allow instantiating + # the abstract class without implementing its abstract methods mocker.patch("source_sendgrid.streams.SendgridStream.__abstractmethods__", set()) - # Mypy yells at us because we're init'ing an abstract class + # Mypy yells at us because we're initializing an abstract class return SendgridStream() # type: ignore @@ -130,6 +135,10 @@ def test_read_records( [SuppressionGroupMembers, "asm/suppressions"], [SuppressionGroups, "asm/groups"], [GlobalSuppressions, "suppression/unsubscribes"], + [Bounces, "suppression/bounces"], + [InvalidEmails, "suppression/invalid_emails"], + [SpamReports, "suppression/spam_reports"], + [UnsubscribeGroups, "asm/groups"], ), ) def test_path(stream_class, expected): @@ -144,7 +153,7 @@ def test_path(stream_class, expected): (SuppressionGroupMembers, 401, False), ), ) -def test_should_retry_on_permission_error(requests_mock, stream_class, status, expected): +def test_should_retry_on_permission_error(stream_class, status, expected): stream = stream_class(Mock()) response_mock = MagicMock() response_mock.status_code = status @@ -211,3 +220,17 @@ def test_read_chunks_pd(): list(stream.read_with_chunks(path="file_not_exist.csv", file_encoding="utf-8")) with pytest.raises(FileNotFoundError): list(stream.read_with_chunks(path="file_not_exist.csv", file_encoding="utf-8")) + + +@pytest.mark.parametrize( + "current_stream_state, latest_record, expected_state", + ( + ({}, {"created": "7270247822"}, {"created": "7270247822"}), + ({"created": "7270247899"}, {"created": "7270247822"}, {"created": "7270247899"}), + ({"created": "7270247822"}, {"created": "7270247899"}, {"created": "7270247899"}), + ), +) +def test_get_updated_state(current_stream_state, latest_record, expected_state): + stream = Blocks(Mock()) + assert stream.get_updated_state(current_stream_state, latest_record) == expected_state + From 0f000b99f7527905af7121381341c07336b4c156 Mon Sep 17 00:00:00 2001 From: Evan Tahler Date: Fri, 1 Mar 2024 10:01:49 -0800 Subject: [PATCH 051/172] Archive connectors already hidden with legacy state (#35752) --- .../connectors/source-appstore-singer/metadata.yaml | 4 ++-- airbyte-integrations/connectors/source-courier/metadata.yaml | 4 ++-- airbyte-integrations/connectors/source-dv-360/metadata.yaml | 4 ++-- .../connectors/source-kustomer-singer/metadata.yaml | 4 ++-- 
airbyte-integrations/connectors/source-recurly/metadata.yaml | 4 ++-- .../connectors/source-search-metrics/metadata.yaml | 4 ++-- .../connectors/source-talkdesk-explore/metadata.yaml | 4 ++-- airbyte-integrations/connectors/source-zuora/metadata.yaml | 4 ++-- 8 files changed, 16 insertions(+), 16 deletions(-) diff --git a/airbyte-integrations/connectors/source-appstore-singer/metadata.yaml b/airbyte-integrations/connectors/source-appstore-singer/metadata.yaml index c0aad524053e..ef46dd04f093 100644 --- a/airbyte-integrations/connectors/source-appstore-singer/metadata.yaml +++ b/airbyte-integrations/connectors/source-appstore-singer/metadata.yaml @@ -13,7 +13,7 @@ data: enabled: false # TODO: Set enabled=true after `airbyte-lib-validate-source` is passing. packageName: airbyte-source-appstore-singer - registries: # Removed from registries due to LEGACY STATE + registries: cloud: enabled: false oss: @@ -25,5 +25,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: community + supportLevel: archived metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-courier/metadata.yaml b/airbyte-integrations/connectors/source-courier/metadata.yaml index 06a78d2cfc9d..be6f7586c5ad 100644 --- a/airbyte-integrations/connectors/source-courier/metadata.yaml +++ b/airbyte-integrations/connectors/source-courier/metadata.yaml @@ -12,7 +12,7 @@ data: pypi: enabled: true packageName: airbyte-source-courier - registries: # Removed from registries due to LEGACY STATE + registries: cloud: enabled: false oss: @@ -25,5 +25,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: community + supportLevel: archived metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-dv-360/metadata.yaml b/airbyte-integrations/connectors/source-dv-360/metadata.yaml index 21de388e91c2..51026a80ab88 100644 --- a/airbyte-integrations/connectors/source-dv-360/metadata.yaml +++ b/airbyte-integrations/connectors/source-dv-360/metadata.yaml @@ -12,7 +12,7 @@ data: pypi: enabled: true packageName: airbyte-source-dv-360 - registries: # Removed from registries due to LEGACY STATE + registries: cloud: enabled: false oss: @@ -24,5 +24,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: community + supportLevel: archived metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-kustomer-singer/metadata.yaml b/airbyte-integrations/connectors/source-kustomer-singer/metadata.yaml index 0aa460d17c75..948cb8adc902 100644 --- a/airbyte-integrations/connectors/source-kustomer-singer/metadata.yaml +++ b/airbyte-integrations/connectors/source-kustomer-singer/metadata.yaml @@ -13,7 +13,7 @@ data: enabled: false # TODO: Set enabled=true after `airbyte-lib-validate-source` is passing. 
packageName: airbyte-source-kustomer-singer - registries: # Removed from registries due to LEGACY STATE + registries: cloud: enabled: false oss: @@ -25,5 +25,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: community + supportLevel: archived metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-recurly/metadata.yaml b/airbyte-integrations/connectors/source-recurly/metadata.yaml index 0a8afd004e8d..ba3b93c18dfc 100644 --- a/airbyte-integrations/connectors/source-recurly/metadata.yaml +++ b/airbyte-integrations/connectors/source-recurly/metadata.yaml @@ -16,7 +16,7 @@ data: name: Recurly registries: cloud: - enabled: true + enabled: false oss: enabled: true releaseStage: alpha @@ -24,7 +24,7 @@ data: pypi: enabled: true packageName: airbyte-source-recurly - supportLevel: community + supportLevel: archived tags: - language:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-search-metrics/metadata.yaml b/airbyte-integrations/connectors/source-search-metrics/metadata.yaml index 58b77c1ed967..7258ec160d1b 100644 --- a/airbyte-integrations/connectors/source-search-metrics/metadata.yaml +++ b/airbyte-integrations/connectors/source-search-metrics/metadata.yaml @@ -12,7 +12,7 @@ data: pypi: enabled: true packageName: airbyte-source-search-metrics - registries: # Removed from registries due to LEGACY STATE + registries: cloud: enabled: false oss: @@ -24,5 +24,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: community + supportLevel: archived metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-talkdesk-explore/metadata.yaml b/airbyte-integrations/connectors/source-talkdesk-explore/metadata.yaml index 915acd44f1a4..6d9238de6442 100644 --- a/airbyte-integrations/connectors/source-talkdesk-explore/metadata.yaml +++ b/airbyte-integrations/connectors/source-talkdesk-explore/metadata.yaml @@ -12,7 +12,7 @@ data: pypi: enabled: true packageName: airbyte-source-talkdesk-explore - registries: # Removed from registries due to LEGACY STATE + registries: cloud: enabled: false oss: @@ -24,5 +24,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: community + supportLevel: archived metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-zuora/metadata.yaml b/airbyte-integrations/connectors/source-zuora/metadata.yaml index 2070c12d1591..7a4d4b53dfb6 100644 --- a/airbyte-integrations/connectors/source-zuora/metadata.yaml +++ b/airbyte-integrations/connectors/source-zuora/metadata.yaml @@ -16,13 +16,13 @@ data: pypi: enabled: true packageName: airbyte-source-zuora - registries: # Removed from registries due to LEGACY STATE + registries: cloud: enabled: false oss: enabled: false releaseStage: alpha - supportLevel: community + supportLevel: archived tags: - language:python metadataSpecVersion: "1.0" From 9e848c4fd1fa0ce34b3e0bc52e818c3f0e204a82 Mon Sep 17 00:00:00 2001 From: Evan Tahler Date: Fri, 1 Mar 2024 10:22:28 -0800 Subject: [PATCH 052/172] Archive the code of 8 sources with legacy state (#35757) --- .../source-appstore-singer/.dockerignore | 6 - .../source-appstore-singer/.gitignore | 1 - .../source-appstore-singer/Dockerfile | 38 - .../source-appstore-singer/README.md | 67 - .../connectors/source-appstore-singer/main.py | 8 - .../source-appstore-singer/requirements.txt | 2 - .../sample_files/catalog.json | 424 ---- .../sample_files/configured_catalog.json | 436 ---- .../sample_files/sample_config.json | 7 - .../source-appstore-singer/setup.py | 49 - 
.../source_appstore_singer/__init__.py | 3 - .../source_appstore_singer/run.py | 14 - .../source_appstore_singer/source.py | 113 - .../source_appstore_singer/spec.json | 41 - .../unit_tests/unit_test.py | 7 - .../connectors/source-courier/.dockerignore | 6 - .../connectors/source-courier/Dockerfile | 38 - .../connectors/source-courier/README.md | 67 - .../connectors/source-courier/__init__.py | 3 - .../source-courier/acceptance-test-config.yml | 23 - .../integration_tests/__init__.py | 3 - .../integration_tests/acceptance.py | 16 - .../integration_tests/configured_catalog.json | 13 - .../integration_tests/expected_records.jsonl | 160 -- .../integration_tests/invalid_config.json | 3 - .../integration_tests/sample_config.json | 3 - .../connectors/source-courier/main.py | 8 - .../source-courier/requirements.txt | 1 - .../connectors/source-courier/setup.py | 46 - .../source-courier/source_courier/__init__.py | 8 - .../source_courier/manifest.yaml | 152 -- .../source-courier/source_courier/run.py | 14 - .../schemas/message_history.json | 100 - .../source_courier/schemas/message_info.json | 137 -- .../schemas/message_output.json | 38 - .../source_courier/schemas/messages.json | 137 -- .../source-courier/source_courier/source.py | 18 - .../source-courier/source_courier/spec.yaml | 13 - .../connectors/source-dv-360/.dockerignore | 6 - .../connectors/source-dv-360/BOOTSTRAP.md | 17 - .../connectors/source-dv-360/Dockerfile | 38 - .../connectors/source-dv-360/README.md | 99 - .../source-dv-360/acceptance-test-config.yml | 28 - .../integration_tests/__init__.py | 3 - .../integration_tests/abnormal_state.json | 5 - .../integration_tests/acceptance.py | 16 - .../integration_tests/catalog.json | 2016 ----------------- .../integration_tests/configured_catalog.json | 100 - .../integration_tests/invalid_config.json | 12 - .../integration_tests/sample_config.json | 13 - .../integration_tests/sample_state.json | 5 - .../connectors/source-dv-360/main.py | 8 - .../connectors/source-dv-360/requirements.txt | 1 - .../connectors/source-dv-360/setup.py | 40 - .../source-dv-360/source_dv_360/__init__.py | 8 - .../source-dv-360/source_dv_360/fields.py | 557 ----- .../source_dv_360/queries/query_template.json | 33 - .../source-dv-360/source_dv_360/run.py | 14 - .../schemas/audience_composition.json | 51 - .../source_dv_360/schemas/floodlight.json | 177 -- .../source_dv_360/schemas/reach.json | 114 - .../source_dv_360/schemas/standard.json | 1506 ------------ .../schemas/unique_reach_audience.json | 144 -- .../source-dv-360/source_dv_360/source.py | 140 -- .../source-dv-360/source_dv_360/spec.json | 74 - .../source-dv-360/source_dv_360/streams.py | 398 ---- .../source-dv-360/unit_tests/conftest.py | 13 - .../source-dv-360/unit_tests/test_fields.py | 53 - .../source-dv-360/unit_tests/test_source.py | 39 - .../source-dv-360/unit_tests/test_streams.py | 234 -- .../source-dv-360/unit_tests/unit_test.py | 7 - .../source-kustomer-singer/.dockerignore | 7 - .../source-kustomer-singer/.gitignore | 1 - .../source-kustomer-singer/Dockerfile | 40 - .../source-kustomer-singer/README.md | 99 - .../acceptance-test-config.yml | 24 - .../integration_tests/__init__.py | 0 .../integration_tests/abnormal_state.json | 13 - .../integration_tests/acceptance.py | 13 - .../integration_tests/configured_catalog.json | 1590 ------------- .../integration_tests/invalid_config.json | 4 - .../integration_tests/sample_state.json | 13 - .../connectors/source-kustomer-singer/main.py | 8 - .../source-kustomer-singer/requirements.txt | 2 - 
.../source-kustomer-singer/setup.py | 88 - .../source_kustomer_singer/__init__.py | 26 - .../source_kustomer_singer/run.py | 13 - .../source_kustomer_singer/source.py | 53 - .../source_kustomer_singer/spec.json | 24 - .../unit_tests/unit_test.py | 7 - .../connectors/source-recurly/.dockerignore | 6 - .../connectors/source-recurly/README.md | 104 - .../source-recurly/acceptance-test-config.yml | 40 - .../integration_tests/acceptance.py | 14 - .../integration_tests/configured_catalog.json | 205 -- .../integration_tests/future_state.json | 114 - .../integration_tests/invalid_config.json | 3 - .../integration_tests/sample_config.json | 3 - .../connectors/source-recurly/main.py | 8 - .../connectors/source-recurly/poetry.lock | 1045 --------- .../connectors/source-recurly/pyproject.toml | 29 - .../sample_files/configured_catalog.json | 16 - .../sample_files/sample_catalog.json | 102 - .../sample_files/sample_config.json | 3 - .../source-recurly/source_recurly/__init__.py | 3 - .../source-recurly/source_recurly/run.py | 14 - .../schemas/account_coupon_redemptions.json | 46 - .../source_recurly/schemas/account_notes.json | 30 - .../source_recurly/schemas/accounts.json | 182 -- .../source_recurly/schemas/add_ons.json | 151 -- .../source_recurly/schemas/billing_infos.json | 3 - .../source_recurly/schemas/coupons.json | 3 - .../schemas/credit_payments.json | 123 - .../source_recurly/schemas/export_dates.json | 13 - .../source_recurly/schemas/invoices.json | 377 --- .../source_recurly/schemas/line_items.json | 3 - .../schemas/measured_units.json | 41 - .../source_recurly/schemas/plans.json | 191 -- .../schemas/shared/account_details.json | 35 - .../schemas/shared/billing_infos.json | 213 -- .../schemas/shared/coupon_redemptions.json | 85 - .../schemas/shared/coupons.json | 194 -- .../schemas/shared/external_accounts.json | 24 - .../schemas/shared/line_items.json | 293 --- .../schemas/shared/shipping_addresses.json | 91 - .../schemas/shared/tax_info.json | 44 - .../schemas/shared/unique_coupons.json | 66 - .../source_recurly/schemas/shared/users.json | 33 - .../schemas/shipping_addresses.json | 3 - .../schemas/shipping_methods.json | 54 - .../source_recurly/schemas/subscriptions.json | 368 --- .../source_recurly/schemas/transactions.json | 345 --- .../schemas/unique_coupons.json | 3 - .../source-recurly/source_recurly/source.py | 80 - .../source-recurly/source_recurly/spec.json | 33 - .../source-recurly/source_recurly/streams.py | 337 --- .../source-recurly/unit_tests/__init__.py | 0 .../source-recurly/unit_tests/test_streams.py | 203 -- .../source-search-metrics/.dockerignore | 7 - .../source-search-metrics/Dockerfile | 38 - .../source-search-metrics/README.md | 100 - .../acceptance-test-config.yml | 34 - .../integration_tests/__init__.py | 3 - .../integration_tests/abnormal_state.json | 11 - .../integration_tests/acceptance.py | 13 - .../integration_tests/catalog.json | 24 - .../integration_tests/configured_catalog.json | 208 -- .../integration_tests/invalid_config.json | 7 - .../integration_tests/sample_config.json | 7 - .../integration_tests/sample_state.json | 11 - .../connectors/source-search-metrics/main.py | 8 - .../source-search-metrics/requirements.txt | 1 - .../connectors/source-search-metrics/setup.py | 46 - .../source_search_metrics/__init__.py | 8 - .../source_search_metrics/run.py | 14 - .../source_search_metrics/schemas/TODO.md | 25 - .../schemas/benchmark_rankings_s7.json | 21 - .../schemas/competitor_rankings_s7.json | 21 - .../schemas/count_domain_keyword.json | 12 - 
.../schemas/distribution_keywords_s7.json | 21 - .../schemas/keyword_potentials_s7.json | 27 - .../schemas/list_competitors.json | 33 - .../schemas/list_competitors_relevancy.json | 66 - .../schemas/list_losers_s7.json | 44 - .../schemas/list_market_share_s7.json | 29 - .../list_position_spread_historic_s7.json | 135 -- .../schemas/list_rankings_analysis_s7.json | 38 - .../schemas/list_rankings_domain.json | 88 - .../schemas/list_rankings_historic_s7.json | 87 - .../schemas/list_seo_visibility_country.json | 12 - .../list_seo_visibility_historic_s7.json | 27 - .../schemas/list_serp_spread_s7.json | 87 - .../schemas/list_winners_s7.json | 44 - .../schemas/marketshare_value_s7.json | 38 - .../schemas/projects.json | 27 - .../schemas/seo_visibility_value_s7.json | 33 - .../schemas/serp_spread_value_s7.json | 29 - .../schemas/tag_potentials_s7.json | 36 - .../source_search_metrics/schemas/tags.json | 33 - .../schemas/url_rankings_s7.json | 87 - .../source_search_metrics/source.py | 360 --- .../source_search_metrics/spec.json | 71 - .../source_search_metrics/utils.py | 14 - .../unit_tests/unit_test.py | 8 - .../source-talkdesk-explore/.dockerignore | 6 - .../source-talkdesk-explore/Dockerfile | 38 - .../source-talkdesk-explore/README.md | 99 - .../acceptance-test-config.yml | 21 - .../source-talkdesk-explore/bootstrap.md | 23 - .../integration_tests/__init__.py | 3 - .../integration_tests/acceptance.py | 16 - .../integration_tests/configured_catalog.json | 76 - .../integration_tests/invalid_config.json | 6 - .../integration_tests/sample_config.json | 3 - .../integration_tests/sample_state.json | 57 - .../source-talkdesk-explore/main.py | 8 - .../source-talkdesk-explore/requirements.txt | 2 - .../sample_files/configured_catalog.json | 93 - .../source-talkdesk-explore/setup.py | 46 - .../source_talkdesk_explore/__init__.py | 8 - .../source_talkdesk_explore/run.py | 14 - .../schemas/calls.json | 162 -- .../schemas/contacts.json | 180 -- .../schemas/ring_attempts.json | 54 - .../schemas/studio_flow_execution.json | 62 - .../schemas/user_status.json | 50 - .../source_talkdesk_explore/source.py | 50 - .../source_talkdesk_explore/spec.json | 43 - .../source_talkdesk_explore/streams.py | 248 -- .../source_talkdesk_explore/talkdesk_auth.py | 44 - .../unit_tests/unit_test.py | 7 - .../connectors/source-zuora/.dockerignore | 7 - .../connectors/source-zuora/BOOTSTRAP.md | 19 - .../connectors/source-zuora/Dockerfile | 37 - .../connectors/source-zuora/README.md | 100 - .../source-zuora/acceptance-test-config.yml | 27 - .../integration_tests/__init__.py | 0 .../integration_tests/abnormal_state.json | 173 -- .../integration_tests/acceptance.py | 14 - .../integration_tests/configured_catalog.json | 739 ------ .../integration_tests/integration_test.py | 241 -- .../integration_tests/invalid_config.json | 8 - .../connectors/source-zuora/main.py | 8 - .../connectors/source-zuora/requirements.txt | 1 - .../connectors/source-zuora/setup.py | 46 - .../source-zuora/source_zuora/__init__.py | 27 - .../source-zuora/source_zuora/run.py | 14 - .../source-zuora/source_zuora/source.py | 534 ----- .../source-zuora/source_zuora/spec.json | 66 - .../source-zuora/source_zuora/zuora_auth.py | 38 - .../source_zuora/zuora_endpoint.py | 26 - .../source-zuora/source_zuora/zuora_errors.py | 77 - .../source_zuora/zuora_excluded_streams.py | 16 - .../source-zuora/unit_tests/unit_test.py | 8 - 234 files changed, 21331 deletions(-) delete mode 100644 airbyte-integrations/connectors/source-appstore-singer/.dockerignore delete mode 100644 
airbyte-integrations/connectors/source-appstore-singer/.gitignore delete mode 100644 airbyte-integrations/connectors/source-appstore-singer/Dockerfile delete mode 100644 airbyte-integrations/connectors/source-appstore-singer/README.md delete mode 100644 airbyte-integrations/connectors/source-appstore-singer/main.py delete mode 100644 airbyte-integrations/connectors/source-appstore-singer/requirements.txt delete mode 100644 airbyte-integrations/connectors/source-appstore-singer/sample_files/catalog.json delete mode 100644 airbyte-integrations/connectors/source-appstore-singer/sample_files/configured_catalog.json delete mode 100644 airbyte-integrations/connectors/source-appstore-singer/sample_files/sample_config.json delete mode 100644 airbyte-integrations/connectors/source-appstore-singer/setup.py delete mode 100644 airbyte-integrations/connectors/source-appstore-singer/source_appstore_singer/__init__.py delete mode 100644 airbyte-integrations/connectors/source-appstore-singer/source_appstore_singer/run.py delete mode 100644 airbyte-integrations/connectors/source-appstore-singer/source_appstore_singer/source.py delete mode 100644 airbyte-integrations/connectors/source-appstore-singer/source_appstore_singer/spec.json delete mode 100644 airbyte-integrations/connectors/source-appstore-singer/unit_tests/unit_test.py delete mode 100644 airbyte-integrations/connectors/source-courier/.dockerignore delete mode 100644 airbyte-integrations/connectors/source-courier/Dockerfile delete mode 100644 airbyte-integrations/connectors/source-courier/README.md delete mode 100644 airbyte-integrations/connectors/source-courier/__init__.py delete mode 100644 airbyte-integrations/connectors/source-courier/acceptance-test-config.yml delete mode 100644 airbyte-integrations/connectors/source-courier/integration_tests/__init__.py delete mode 100644 airbyte-integrations/connectors/source-courier/integration_tests/acceptance.py delete mode 100644 airbyte-integrations/connectors/source-courier/integration_tests/configured_catalog.json delete mode 100644 airbyte-integrations/connectors/source-courier/integration_tests/expected_records.jsonl delete mode 100644 airbyte-integrations/connectors/source-courier/integration_tests/invalid_config.json delete mode 100644 airbyte-integrations/connectors/source-courier/integration_tests/sample_config.json delete mode 100644 airbyte-integrations/connectors/source-courier/main.py delete mode 100644 airbyte-integrations/connectors/source-courier/requirements.txt delete mode 100644 airbyte-integrations/connectors/source-courier/setup.py delete mode 100644 airbyte-integrations/connectors/source-courier/source_courier/__init__.py delete mode 100644 airbyte-integrations/connectors/source-courier/source_courier/manifest.yaml delete mode 100644 airbyte-integrations/connectors/source-courier/source_courier/run.py delete mode 100644 airbyte-integrations/connectors/source-courier/source_courier/schemas/message_history.json delete mode 100644 airbyte-integrations/connectors/source-courier/source_courier/schemas/message_info.json delete mode 100644 airbyte-integrations/connectors/source-courier/source_courier/schemas/message_output.json delete mode 100644 airbyte-integrations/connectors/source-courier/source_courier/schemas/messages.json delete mode 100644 airbyte-integrations/connectors/source-courier/source_courier/source.py delete mode 100644 airbyte-integrations/connectors/source-courier/source_courier/spec.yaml delete mode 100644 airbyte-integrations/connectors/source-dv-360/.dockerignore 
delete mode 100644 airbyte-integrations/connectors/source-dv-360/BOOTSTRAP.md delete mode 100644 airbyte-integrations/connectors/source-dv-360/Dockerfile delete mode 100644 airbyte-integrations/connectors/source-dv-360/README.md delete mode 100644 airbyte-integrations/connectors/source-dv-360/acceptance-test-config.yml delete mode 100644 airbyte-integrations/connectors/source-dv-360/integration_tests/__init__.py delete mode 100644 airbyte-integrations/connectors/source-dv-360/integration_tests/abnormal_state.json delete mode 100644 airbyte-integrations/connectors/source-dv-360/integration_tests/acceptance.py delete mode 100644 airbyte-integrations/connectors/source-dv-360/integration_tests/catalog.json delete mode 100644 airbyte-integrations/connectors/source-dv-360/integration_tests/configured_catalog.json delete mode 100644 airbyte-integrations/connectors/source-dv-360/integration_tests/invalid_config.json delete mode 100644 airbyte-integrations/connectors/source-dv-360/integration_tests/sample_config.json delete mode 100644 airbyte-integrations/connectors/source-dv-360/integration_tests/sample_state.json delete mode 100644 airbyte-integrations/connectors/source-dv-360/main.py delete mode 100644 airbyte-integrations/connectors/source-dv-360/requirements.txt delete mode 100644 airbyte-integrations/connectors/source-dv-360/setup.py delete mode 100644 airbyte-integrations/connectors/source-dv-360/source_dv_360/__init__.py delete mode 100644 airbyte-integrations/connectors/source-dv-360/source_dv_360/fields.py delete mode 100644 airbyte-integrations/connectors/source-dv-360/source_dv_360/queries/query_template.json delete mode 100644 airbyte-integrations/connectors/source-dv-360/source_dv_360/run.py delete mode 100644 airbyte-integrations/connectors/source-dv-360/source_dv_360/schemas/audience_composition.json delete mode 100644 airbyte-integrations/connectors/source-dv-360/source_dv_360/schemas/floodlight.json delete mode 100644 airbyte-integrations/connectors/source-dv-360/source_dv_360/schemas/reach.json delete mode 100644 airbyte-integrations/connectors/source-dv-360/source_dv_360/schemas/standard.json delete mode 100644 airbyte-integrations/connectors/source-dv-360/source_dv_360/schemas/unique_reach_audience.json delete mode 100644 airbyte-integrations/connectors/source-dv-360/source_dv_360/source.py delete mode 100644 airbyte-integrations/connectors/source-dv-360/source_dv_360/spec.json delete mode 100644 airbyte-integrations/connectors/source-dv-360/source_dv_360/streams.py delete mode 100644 airbyte-integrations/connectors/source-dv-360/unit_tests/conftest.py delete mode 100644 airbyte-integrations/connectors/source-dv-360/unit_tests/test_fields.py delete mode 100644 airbyte-integrations/connectors/source-dv-360/unit_tests/test_source.py delete mode 100644 airbyte-integrations/connectors/source-dv-360/unit_tests/test_streams.py delete mode 100644 airbyte-integrations/connectors/source-dv-360/unit_tests/unit_test.py delete mode 100644 airbyte-integrations/connectors/source-kustomer-singer/.dockerignore delete mode 100644 airbyte-integrations/connectors/source-kustomer-singer/.gitignore delete mode 100644 airbyte-integrations/connectors/source-kustomer-singer/Dockerfile delete mode 100644 airbyte-integrations/connectors/source-kustomer-singer/README.md delete mode 100644 airbyte-integrations/connectors/source-kustomer-singer/acceptance-test-config.yml delete mode 100644 airbyte-integrations/connectors/source-kustomer-singer/integration_tests/__init__.py delete mode 100644 
airbyte-integrations/connectors/source-kustomer-singer/integration_tests/abnormal_state.json delete mode 100644 airbyte-integrations/connectors/source-kustomer-singer/integration_tests/acceptance.py delete mode 100644 airbyte-integrations/connectors/source-kustomer-singer/integration_tests/configured_catalog.json delete mode 100644 airbyte-integrations/connectors/source-kustomer-singer/integration_tests/invalid_config.json delete mode 100644 airbyte-integrations/connectors/source-kustomer-singer/integration_tests/sample_state.json delete mode 100644 airbyte-integrations/connectors/source-kustomer-singer/main.py delete mode 100644 airbyte-integrations/connectors/source-kustomer-singer/requirements.txt delete mode 100644 airbyte-integrations/connectors/source-kustomer-singer/setup.py delete mode 100644 airbyte-integrations/connectors/source-kustomer-singer/source_kustomer_singer/__init__.py delete mode 100644 airbyte-integrations/connectors/source-kustomer-singer/source_kustomer_singer/run.py delete mode 100644 airbyte-integrations/connectors/source-kustomer-singer/source_kustomer_singer/source.py delete mode 100644 airbyte-integrations/connectors/source-kustomer-singer/source_kustomer_singer/spec.json delete mode 100644 airbyte-integrations/connectors/source-kustomer-singer/unit_tests/unit_test.py delete mode 100644 airbyte-integrations/connectors/source-recurly/.dockerignore delete mode 100644 airbyte-integrations/connectors/source-recurly/README.md delete mode 100644 airbyte-integrations/connectors/source-recurly/acceptance-test-config.yml delete mode 100644 airbyte-integrations/connectors/source-recurly/integration_tests/acceptance.py delete mode 100644 airbyte-integrations/connectors/source-recurly/integration_tests/configured_catalog.json delete mode 100644 airbyte-integrations/connectors/source-recurly/integration_tests/future_state.json delete mode 100644 airbyte-integrations/connectors/source-recurly/integration_tests/invalid_config.json delete mode 100644 airbyte-integrations/connectors/source-recurly/integration_tests/sample_config.json delete mode 100644 airbyte-integrations/connectors/source-recurly/main.py delete mode 100644 airbyte-integrations/connectors/source-recurly/poetry.lock delete mode 100644 airbyte-integrations/connectors/source-recurly/pyproject.toml delete mode 100644 airbyte-integrations/connectors/source-recurly/sample_files/configured_catalog.json delete mode 100644 airbyte-integrations/connectors/source-recurly/sample_files/sample_catalog.json delete mode 100644 airbyte-integrations/connectors/source-recurly/sample_files/sample_config.json delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/__init__.py delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/run.py delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/account_coupon_redemptions.json delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/account_notes.json delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/accounts.json delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/add_ons.json delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/billing_infos.json delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/coupons.json delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/credit_payments.json 
delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/export_dates.json delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/invoices.json delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/line_items.json delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/measured_units.json delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/plans.json delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/account_details.json delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/billing_infos.json delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/coupon_redemptions.json delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/coupons.json delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/external_accounts.json delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/line_items.json delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/shipping_addresses.json delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/tax_info.json delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/unique_coupons.json delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/users.json delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shipping_addresses.json delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shipping_methods.json delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/subscriptions.json delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/transactions.json delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/unique_coupons.json delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/source.py delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/spec.json delete mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/streams.py delete mode 100644 airbyte-integrations/connectors/source-recurly/unit_tests/__init__.py delete mode 100644 airbyte-integrations/connectors/source-recurly/unit_tests/test_streams.py delete mode 100644 airbyte-integrations/connectors/source-search-metrics/.dockerignore delete mode 100644 airbyte-integrations/connectors/source-search-metrics/Dockerfile delete mode 100644 airbyte-integrations/connectors/source-search-metrics/README.md delete mode 100644 airbyte-integrations/connectors/source-search-metrics/acceptance-test-config.yml delete mode 100644 airbyte-integrations/connectors/source-search-metrics/integration_tests/__init__.py delete mode 100644 airbyte-integrations/connectors/source-search-metrics/integration_tests/abnormal_state.json delete mode 100644 airbyte-integrations/connectors/source-search-metrics/integration_tests/acceptance.py delete mode 100644 airbyte-integrations/connectors/source-search-metrics/integration_tests/catalog.json delete mode 100644 
airbyte-integrations/connectors/source-search-metrics/integration_tests/configured_catalog.json delete mode 100644 airbyte-integrations/connectors/source-search-metrics/integration_tests/invalid_config.json delete mode 100644 airbyte-integrations/connectors/source-search-metrics/integration_tests/sample_config.json delete mode 100644 airbyte-integrations/connectors/source-search-metrics/integration_tests/sample_state.json delete mode 100644 airbyte-integrations/connectors/source-search-metrics/main.py delete mode 100644 airbyte-integrations/connectors/source-search-metrics/requirements.txt delete mode 100644 airbyte-integrations/connectors/source-search-metrics/setup.py delete mode 100644 airbyte-integrations/connectors/source-search-metrics/source_search_metrics/__init__.py delete mode 100644 airbyte-integrations/connectors/source-search-metrics/source_search_metrics/run.py delete mode 100644 airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/TODO.md delete mode 100644 airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/benchmark_rankings_s7.json delete mode 100644 airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/competitor_rankings_s7.json delete mode 100644 airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/count_domain_keyword.json delete mode 100644 airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/distribution_keywords_s7.json delete mode 100644 airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/keyword_potentials_s7.json delete mode 100644 airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_competitors.json delete mode 100644 airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_competitors_relevancy.json delete mode 100644 airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_losers_s7.json delete mode 100644 airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_market_share_s7.json delete mode 100644 airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_position_spread_historic_s7.json delete mode 100644 airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_rankings_analysis_s7.json delete mode 100644 airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_rankings_domain.json delete mode 100644 airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_rankings_historic_s7.json delete mode 100644 airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_seo_visibility_country.json delete mode 100644 airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_seo_visibility_historic_s7.json delete mode 100644 airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_serp_spread_s7.json delete mode 100644 airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_winners_s7.json delete mode 100644 airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/marketshare_value_s7.json delete mode 100644 airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/projects.json delete mode 100644 
airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/seo_visibility_value_s7.json delete mode 100644 airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/serp_spread_value_s7.json delete mode 100644 airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/tag_potentials_s7.json delete mode 100644 airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/tags.json delete mode 100644 airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/url_rankings_s7.json delete mode 100644 airbyte-integrations/connectors/source-search-metrics/source_search_metrics/source.py delete mode 100644 airbyte-integrations/connectors/source-search-metrics/source_search_metrics/spec.json delete mode 100644 airbyte-integrations/connectors/source-search-metrics/source_search_metrics/utils.py delete mode 100644 airbyte-integrations/connectors/source-search-metrics/unit_tests/unit_test.py delete mode 100644 airbyte-integrations/connectors/source-talkdesk-explore/.dockerignore delete mode 100644 airbyte-integrations/connectors/source-talkdesk-explore/Dockerfile delete mode 100644 airbyte-integrations/connectors/source-talkdesk-explore/README.md delete mode 100644 airbyte-integrations/connectors/source-talkdesk-explore/acceptance-test-config.yml delete mode 100644 airbyte-integrations/connectors/source-talkdesk-explore/bootstrap.md delete mode 100644 airbyte-integrations/connectors/source-talkdesk-explore/integration_tests/__init__.py delete mode 100644 airbyte-integrations/connectors/source-talkdesk-explore/integration_tests/acceptance.py delete mode 100644 airbyte-integrations/connectors/source-talkdesk-explore/integration_tests/configured_catalog.json delete mode 100644 airbyte-integrations/connectors/source-talkdesk-explore/integration_tests/invalid_config.json delete mode 100644 airbyte-integrations/connectors/source-talkdesk-explore/integration_tests/sample_config.json delete mode 100644 airbyte-integrations/connectors/source-talkdesk-explore/integration_tests/sample_state.json delete mode 100644 airbyte-integrations/connectors/source-talkdesk-explore/main.py delete mode 100644 airbyte-integrations/connectors/source-talkdesk-explore/requirements.txt delete mode 100644 airbyte-integrations/connectors/source-talkdesk-explore/sample_files/configured_catalog.json delete mode 100644 airbyte-integrations/connectors/source-talkdesk-explore/setup.py delete mode 100644 airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/__init__.py delete mode 100644 airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/run.py delete mode 100644 airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/schemas/calls.json delete mode 100644 airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/schemas/contacts.json delete mode 100644 airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/schemas/ring_attempts.json delete mode 100644 airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/schemas/studio_flow_execution.json delete mode 100644 airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/schemas/user_status.json delete mode 100644 airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/source.py delete mode 100644 
airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/spec.json delete mode 100644 airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/streams.py delete mode 100644 airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/talkdesk_auth.py delete mode 100644 airbyte-integrations/connectors/source-talkdesk-explore/unit_tests/unit_test.py delete mode 100644 airbyte-integrations/connectors/source-zuora/.dockerignore delete mode 100644 airbyte-integrations/connectors/source-zuora/BOOTSTRAP.md delete mode 100644 airbyte-integrations/connectors/source-zuora/Dockerfile delete mode 100644 airbyte-integrations/connectors/source-zuora/README.md delete mode 100644 airbyte-integrations/connectors/source-zuora/acceptance-test-config.yml delete mode 100644 airbyte-integrations/connectors/source-zuora/integration_tests/__init__.py delete mode 100644 airbyte-integrations/connectors/source-zuora/integration_tests/abnormal_state.json delete mode 100644 airbyte-integrations/connectors/source-zuora/integration_tests/acceptance.py delete mode 100644 airbyte-integrations/connectors/source-zuora/integration_tests/configured_catalog.json delete mode 100644 airbyte-integrations/connectors/source-zuora/integration_tests/integration_test.py delete mode 100644 airbyte-integrations/connectors/source-zuora/integration_tests/invalid_config.json delete mode 100644 airbyte-integrations/connectors/source-zuora/main.py delete mode 100644 airbyte-integrations/connectors/source-zuora/requirements.txt delete mode 100644 airbyte-integrations/connectors/source-zuora/setup.py delete mode 100644 airbyte-integrations/connectors/source-zuora/source_zuora/__init__.py delete mode 100644 airbyte-integrations/connectors/source-zuora/source_zuora/run.py delete mode 100644 airbyte-integrations/connectors/source-zuora/source_zuora/source.py delete mode 100644 airbyte-integrations/connectors/source-zuora/source_zuora/spec.json delete mode 100644 airbyte-integrations/connectors/source-zuora/source_zuora/zuora_auth.py delete mode 100644 airbyte-integrations/connectors/source-zuora/source_zuora/zuora_endpoint.py delete mode 100644 airbyte-integrations/connectors/source-zuora/source_zuora/zuora_errors.py delete mode 100644 airbyte-integrations/connectors/source-zuora/source_zuora/zuora_excluded_streams.py delete mode 100644 airbyte-integrations/connectors/source-zuora/unit_tests/unit_test.py diff --git a/airbyte-integrations/connectors/source-appstore-singer/.dockerignore b/airbyte-integrations/connectors/source-appstore-singer/.dockerignore deleted file mode 100644 index fabf490b08f8..000000000000 --- a/airbyte-integrations/connectors/source-appstore-singer/.dockerignore +++ /dev/null @@ -1,6 +0,0 @@ -* -!Dockerfile -!main.py -!source_appstore_singer -!setup.py -!secrets diff --git a/airbyte-integrations/connectors/source-appstore-singer/.gitignore b/airbyte-integrations/connectors/source-appstore-singer/.gitignore deleted file mode 100644 index 29fffc6a50cc..000000000000 --- a/airbyte-integrations/connectors/source-appstore-singer/.gitignore +++ /dev/null @@ -1 +0,0 @@ -NEW_SOURCE_CHECKLIST.md diff --git a/airbyte-integrations/connectors/source-appstore-singer/Dockerfile b/airbyte-integrations/connectors/source-appstore-singer/Dockerfile deleted file mode 100644 index 2527cdf48fbb..000000000000 --- a/airbyte-integrations/connectors/source-appstore-singer/Dockerfile +++ /dev/null @@ -1,38 +0,0 @@ -FROM python:3.9.11-alpine3.15 as base - -# build and load all 
requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -# upgrade pip to the latest version -RUN apk --no-cache upgrade \ - && pip install --upgrade pip \ - && apk --no-cache add tzdata build-base libffi-dev openssl-dev - - -COPY setup.py ./ -# install necessary packages to a temporary folder -RUN pip install --prefix=/install . - -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# bash is installed for more convenient debugging. -RUN apk --no-cache add bash - -# copy payload code only -COPY main.py ./ -COPY source_appstore_singer ./source_appstore_singer - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=0.2.6 -LABEL io.airbyte.name=airbyte/source-appstore-singer diff --git a/airbyte-integrations/connectors/source-appstore-singer/README.md b/airbyte-integrations/connectors/source-appstore-singer/README.md deleted file mode 100644 index 1967ca8fb8f7..000000000000 --- a/airbyte-integrations/connectors/source-appstore-singer/README.md +++ /dev/null @@ -1,67 +0,0 @@ -# Appstore Singer Source - -This is the repository for the Appstore source connector, built on the Singer tap implementation. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/sources/appstore). - -## Local development - -#### Create credentials -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/appstore) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_appstore_singer/spec.json` file. -Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. - -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source appstore-singer test creds` -and place them into `secrets/config.json`. - -### Locally running the connector docker image - - -#### Build -**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** -```bash -airbyte-ci connectors --name source-appstore-singer build -``` - -An image will be built with the tag `airbyte/source-appstore-singer:dev`. - -**Via `docker build`:** -```bash -docker build -t airbyte/source-appstore-singer:dev .
-``` - -#### Run -Then run any of the connector commands as follows: -``` -docker run --rm airbyte/source-appstore-singer:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-appstore-singer:dev check --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-appstore-singer:dev discover --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-appstore-singer:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` - -## Testing -You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): -```bash -airbyte-ci connectors --name=source-appstore-singer test -``` - -### Customizing Acceptance Tests -Customize the `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. -If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`. - -## Dependency Management -All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. -We split dependencies into two groups: -* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list. -* dependencies required for testing go in the `TEST_REQUIREMENTS` list - -### Publishing a new version of the connector -You've checked out the repo, implemented a million-dollar feature, and you're ready to share your changes with the world. Now what? -1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-appstore-singer test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). -3. Make sure the `metadata.yaml` content is up to date. -4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/sources/appstore.md`). -5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). -6. Pat yourself on the back for being an awesome contributor. -7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. - diff --git a/airbyte-integrations/connectors/source-appstore-singer/main.py b/airbyte-integrations/connectors/source-appstore-singer/main.py deleted file mode 100644 index 34e585afeeed..000000000000 --- a/airbyte-integrations/connectors/source-appstore-singer/main.py +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from source_appstore_singer.run import run - -if __name__ == "__main__": - run() diff --git a/airbyte-integrations/connectors/source-appstore-singer/requirements.txt b/airbyte-integrations/connectors/source-appstore-singer/requirements.txt deleted file mode 100644 index 7b9114ed5867..000000000000 --- a/airbyte-integrations/connectors/source-appstore-singer/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -# This file is autogenerated -- only edit if you know what you are doing.
Use setup.py for declaring dependencies. --e . diff --git a/airbyte-integrations/connectors/source-appstore-singer/sample_files/catalog.json b/airbyte-integrations/connectors/source-appstore-singer/sample_files/catalog.json deleted file mode 100644 index 0a659fc7263f..000000000000 --- a/airbyte-integrations/connectors/source-appstore-singer/sample_files/catalog.json +++ /dev/null @@ -1,424 +0,0 @@ -{ - "streams": [ - { - "name": "sales_report", - "supported_sync_modes": ["incremental"], - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "_line_id": { - "type": ["null", "integer"] - }, - "_time_extracted": { - "type": ["null", "string"], - "format": "date-time" - }, - "_api_report_date": { - "type": ["null", "string"], - "format": "date" - }, - "provider": { - "type": ["null", "string"] - }, - "provider_country": { - "type": ["null", "string"] - }, - "sku": { - "type": ["null", "string"] - }, - "developer": { - "type": ["null", "string"] - }, - "title": { - "type": ["null", "string"] - }, - "version": { - "type": ["null", "string"] - }, - "product_type_identifier": { - "type": ["null", "string"] - }, - "units": { - "type": ["null", "integer"] - }, - "developer_proceeds": { - "type": ["null", "string"] - }, - "begin_date": { - "type": ["null", "string"], - "format": "date" - }, - "end_date": { - "type": ["null", "string"], - "format": "date" - }, - "customer_currency": { - "type": ["null", "string"] - }, - "country_code": { - "type": ["null", "string"] - }, - "currency_of_proceeds": { - "type": ["null", "string"] - }, - "apple_identifier": { - "type": ["null", "string"] - }, - "customer_price": { - "type": ["null", "string"] - }, - "promo_code": { - "type": ["null", "string"] - }, - "parent_identifier": { - "type": ["null", "string"] - }, - "subscription": { - "type": ["null", "string"] - }, - "period": { - "type": ["null", "string"] - }, - "category": { - "type": ["null", "string"] - }, - "cmb": { - "type": ["null", "string"] - }, - "device": { - "type": ["null", "string"] - }, - "supported_platforms": { - "type": ["null", "string"] - }, - "proceeds_reason": { - "type": ["null", "string"] - }, - "preserved_pricing": { - "type": ["null", "string"] - }, - "client": { - "type": ["null", "string"] - }, - "order_type": { - "type": ["null", "string"] - } - } - } - }, - { - "name": "subscriber_report", - "supported_sync_modes": ["incremental"], - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "_line_id": { - "type": ["null", "integer"] - }, - "_time_extracted": { - "type": ["null", "string"], - "format": "date-time" - }, - "_api_report_date": { - "type": ["null", "string"], - "format": "date" - }, - "event_date": { - "type": ["null", "string"], - "format": "date" - }, - "app_name": { - "type": ["null", "string"] - }, - "app_apple_id": { - "type": ["null", "string"] - }, - "subscription_name": { - "type": ["null", "string"] - }, - "subscription_apple_id": { - "type": ["null", "string"] - }, - "subscription_group_id": { - "type": ["null", "string"] - }, - "standard_subscription_duration": { - "type": ["null", "string"] - }, - "promotional_offer_name": { - "type": ["null", "string"] - }, - "promotional_offer_id": { - "type": ["null", "string"] - }, - "subscription_offer_type": { - "type": ["null", "string"] - }, - "subscription_offer_duration": { - "type": ["null", "string"] - }, - "marketing_opt_in_duration": { - "type": ["null", "string"] - }, - "customer_price": { - 
"type": ["null", "string"] - }, - "customer_currency": { - "type": ["null", "string"] - }, - "developer_proceeds": { - "type": ["null", "string"] - }, - "proceeds_currency": { - "type": ["null", "string"] - }, - "preserved_pricing": { - "type": ["null", "string"] - }, - "proceeds_reason": { - "type": ["null", "string"] - }, - "client": { - "type": ["null", "string"] - }, - "device": { - "type": ["null", "string"] - }, - "country": { - "type": ["null", "string"] - }, - "subscriber_id": { - "type": ["null", "string"] - }, - "subscriber_id_reset": { - "type": ["null", "string"] - }, - "refund": { - "type": ["null", "string"] - }, - "purchase_date": { - "type": ["null", "string"], - "format": "date" - }, - "units": { - "type": ["null", "integer"] - } - } - } - }, - { - "name": "subscription_event_report", - "supported_sync_modes": ["incremental"], - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "_line_id": { - "type": ["null", "integer"] - }, - "_time_extracted": { - "type": ["null", "string"], - "format": "date-time" - }, - "_api_report_date": { - "type": ["null", "string"], - "format": "date" - }, - "event_date": { - "type": ["null", "string"], - "format": "date" - }, - "event": { - "type": ["null", "string"] - }, - "app_name": { - "type": ["null", "string"] - }, - "app_apple_id": { - "type": ["null", "string"] - }, - "subscription_name": { - "type": ["null", "string"] - }, - "subscription_apple_id": { - "type": ["null", "string"] - }, - "subscription_group_id": { - "type": ["null", "string"] - }, - "standard_subscription_duration": { - "type": ["null", "string"] - }, - "promotional_offer_name": { - "type": ["null", "string"] - }, - "promotional_offer_id": { - "type": ["null", "string"] - }, - "subscription_offer_type": { - "type": ["null", "string"] - }, - "subscription_offer_duration": { - "type": ["null", "string"] - }, - "marketing_opt_in": { - "type": ["null", "string"] - }, - "marketing_opt_in_duration": { - "type": ["null", "string"] - }, - "preserved_pricing": { - "type": ["null", "string"] - }, - "proceeds_reason": { - "type": ["null", "string"] - }, - "consecutive_paid_periods": { - "type": ["null", "integer"] - }, - "original_start_date": { - "type": ["null", "string"], - "format": "date" - }, - "client": { - "type": ["null", "string"] - }, - "device": { - "type": ["null", "string"] - }, - "state": { - "type": ["null", "string"] - }, - "country": { - "type": ["null", "string"] - }, - "previous_subscription_name": { - "type": ["null", "string"] - }, - "previous_subscription_apple_id": { - "type": ["null", "string"] - }, - "days_before_canceling": { - "type": ["null", "string"] - }, - "cancellation_reason": { - "type": ["null", "string"] - }, - "days_canceled": { - "type": ["null", "integer"] - }, - "quantity": { - "type": ["null", "integer"] - } - } - } - }, - { - "name": "subscription_report", - "supported_sync_modes": ["incremental"], - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "_line_id": { - "type": ["null", "integer"] - }, - "_time_extracted": { - "type": ["null", "string"], - "format": "date-time" - }, - "_api_report_date": { - "type": ["null", "string"], - "format": "date" - }, - "app_name": { - "type": ["null", "string"] - }, - "app_apple_id": { - "type": ["null", "string"] - }, - "subscription_name": { - "type": ["null", "string"] - }, - "subscription_apple_id": { - "type": ["null", "string"] - }, - "subscription_group_id": { - "type": 
["null", "string"] - }, - "standard_subscription_duration": { - "type": ["null", "string"] - }, - "promotional_offer_name": { - "type": ["null", "string"] - }, - "promotional_offer_id": { - "type": ["null", "string"] - }, - "customer_price": { - "type": ["null", "string"] - }, - "customer_currency": { - "type": ["null", "string"] - }, - "developer_proceeds": { - "type": ["null", "string"] - }, - "proceeds_currency": { - "type": ["null", "string"] - }, - "preserved_pricing": { - "type": ["null", "string"] - }, - "proceeds_reason": { - "type": ["null", "string"] - }, - "client": { - "type": ["null", "string"] - }, - "device": { - "type": ["null", "string"] - }, - "state": { - "type": ["null", "string"] - }, - "country": { - "type": ["null", "string"] - }, - "active_standard_price_subscriptions": { - "type": ["null", "integer"] - }, - "active_free_trial_introductory_offer_subscriptions": { - "type": ["null", "integer"] - }, - "active_pay_up_front_introductory_offer_subscriptions": { - "type": ["null", "integer"] - }, - "active_pay_as_you_go_introductory_offer_subscriptions": { - "type": ["null", "integer"] - }, - "free_trial_promotional_offer_subscriptions": { - "type": ["null", "integer"] - }, - "pay_up_front_promotional_offer_subscriptions": { - "type": ["null", "integer"] - }, - "pay_as_you_go_promotional_offer_subscriptions": { - "type": ["null", "integer"] - }, - "marketing_opt_ins": { - "type": ["null", "integer"] - }, - "billing_retry": { - "type": ["null", "integer"] - }, - "grace_period": { - "type": ["null", "integer"] - } - } - } - } - ] -} diff --git a/airbyte-integrations/connectors/source-appstore-singer/sample_files/configured_catalog.json b/airbyte-integrations/connectors/source-appstore-singer/sample_files/configured_catalog.json deleted file mode 100644 index d7c356228cd7..000000000000 --- a/airbyte-integrations/connectors/source-appstore-singer/sample_files/configured_catalog.json +++ /dev/null @@ -1,436 +0,0 @@ -{ - "streams": [ - { - "sync_mode": "incremental", - "destination_sync_mode": "append", - "stream": { - "name": "sales_report", - "supported_sync_modes": ["incremental"], - "json_schema": { - "type": "object", - "properties": { - "_line_id": { - "type": ["null", "integer"] - }, - "_time_extracted": { - "type": ["null", "string"], - "format": "date-time" - }, - "_api_report_date": { - "type": ["null", "string"], - "format": "date" - }, - "provider": { - "type": ["null", "string"] - }, - "provider_country": { - "type": ["null", "string"] - }, - "sku": { - "type": ["null", "string"] - }, - "developer": { - "type": ["null", "string"] - }, - "title": { - "type": ["null", "string"] - }, - "version": { - "type": ["null", "string"] - }, - "product_type_identifier": { - "type": ["null", "string"] - }, - "units": { - "type": ["null", "integer"] - }, - "developer_proceeds": { - "type": ["null", "string"] - }, - "begin_date": { - "type": ["null", "string"], - "format": "date" - }, - "end_date": { - "type": ["null", "string"], - "format": "date" - }, - "customer_currency": { - "type": ["null", "string"] - }, - "country_code": { - "type": ["null", "string"] - }, - "currency_of_proceeds": { - "type": ["null", "string"] - }, - "apple_identifier": { - "type": ["null", "string"] - }, - "customer_price": { - "type": ["null", "string"] - }, - "promo_code": { - "type": ["null", "string"] - }, - "parent_identifier": { - "type": ["null", "string"] - }, - "subscription": { - "type": ["null", "string"] - }, - "period": { - "type": ["null", "string"] - }, - "category": { - "type": ["null", 
"string"] - }, - "cmb": { - "type": ["null", "string"] - }, - "device": { - "type": ["null", "string"] - }, - "supported_platforms": { - "type": ["null", "string"] - }, - "proceeds_reason": { - "type": ["null", "string"] - }, - "preserved_pricing": { - "type": ["null", "string"] - }, - "client": { - "type": ["null", "string"] - }, - "order_type": { - "type": ["null", "string"] - } - } - } - } - }, - { - "sync_mode": "incremental", - "destination_sync_mode": "append", - "stream": { - "name": "subscriber_report", - "supported_sync_modes": ["incremental"], - "json_schema": { - "type": "object", - "properties": { - "_line_id": { - "type": ["null", "integer"] - }, - "_time_extracted": { - "type": ["null", "string"], - "format": "date-time" - }, - "_api_report_date": { - "type": ["null", "string"], - "format": "date" - }, - "event_date": { - "type": ["null", "string"], - "format": "date" - }, - "app_name": { - "type": ["null", "string"] - }, - "app_apple_id": { - "type": ["null", "string"] - }, - "subscription_name": { - "type": ["null", "string"] - }, - "subscription_apple_id": { - "type": ["null", "string"] - }, - "subscription_group_id": { - "type": ["null", "string"] - }, - "standard_subscription_duration": { - "type": ["null", "string"] - }, - "promotional_offer_name": { - "type": ["null", "string"] - }, - "promotional_offer_id": { - "type": ["null", "string"] - }, - "subscription_offer_type": { - "type": ["null", "string"] - }, - "subscription_offer_duration": { - "type": ["null", "string"] - }, - "marketing_opt_in_duration": { - "type": ["null", "string"] - }, - "customer_price": { - "type": ["null", "string"] - }, - "customer_currency": { - "type": ["null", "string"] - }, - "developer_proceeds": { - "type": ["null", "string"] - }, - "proceeds_currency": { - "type": ["null", "string"] - }, - "preserved_pricing": { - "type": ["null", "string"] - }, - "proceeds_reason": { - "type": ["null", "string"] - }, - "client": { - "type": ["null", "string"] - }, - "device": { - "type": ["null", "string"] - }, - "country": { - "type": ["null", "string"] - }, - "subscriber_id": { - "type": ["null", "string"] - }, - "subscriber_id_reset": { - "type": ["null", "string"] - }, - "refund": { - "type": ["null", "string"] - }, - "purchase_date": { - "type": ["null", "string"], - "format": "date" - }, - "units": { - "type": ["null", "integer"] - } - } - } - } - }, - { - "sync_mode": "incremental", - "destination_sync_mode": "append", - "stream": { - "name": "subscription_event_report", - "supported_sync_modes": ["incremental"], - "json_schema": { - "type": "object", - "properties": { - "_line_id": { - "type": ["null", "integer"] - }, - "_time_extracted": { - "type": ["null", "string"], - "format": "date-time" - }, - "_api_report_date": { - "type": ["null", "string"], - "format": "date" - }, - "event_date": { - "type": ["null", "string"], - "format": "date" - }, - "event": { - "type": ["null", "string"] - }, - "app_name": { - "type": ["null", "string"] - }, - "app_apple_id": { - "type": ["null", "string"] - }, - "subscription_name": { - "type": ["null", "string"] - }, - "subscription_apple_id": { - "type": ["null", "string"] - }, - "subscription_group_id": { - "type": ["null", "string"] - }, - "standard_subscription_duration": { - "type": ["null", "string"] - }, - "promotional_offer_name": { - "type": ["null", "string"] - }, - "promotional_offer_id": { - "type": ["null", "string"] - }, - "subscription_offer_type": { - "type": ["null", "string"] - }, - "subscription_offer_duration": { - "type": ["null", 
"string"] - }, - "marketing_opt_in": { - "type": ["null", "string"] - }, - "marketing_opt_in_duration": { - "type": ["null", "string"] - }, - "preserved_pricing": { - "type": ["null", "string"] - }, - "proceeds_reason": { - "type": ["null", "string"] - }, - "consecutive_paid_periods": { - "type": ["null", "integer"] - }, - "original_start_date": { - "type": ["null", "string"], - "format": "date" - }, - "client": { - "type": ["null", "string"] - }, - "device": { - "type": ["null", "string"] - }, - "state": { - "type": ["null", "string"] - }, - "country": { - "type": ["null", "string"] - }, - "previous_subscription_name": { - "type": ["null", "string"] - }, - "previous_subscription_apple_id": { - "type": ["null", "string"] - }, - "days_before_canceling": { - "type": ["null", "string"] - }, - "cancellation_reason": { - "type": ["null", "string"] - }, - "days_canceled": { - "type": ["null", "integer"] - }, - "quantity": { - "type": ["null", "integer"] - } - } - } - } - }, - { - "sync_mode": "incremental", - "destination_sync_mode": "append", - "stream": { - "name": "subscription_report", - "supported_sync_modes": ["incremental"], - "json_schema": { - "type": "object", - "properties": { - "_line_id": { - "type": ["null", "integer"] - }, - "_time_extracted": { - "type": ["null", "string"], - "format": "date-time" - }, - "_api_report_date": { - "type": ["null", "string"], - "format": "date" - }, - "app_name": { - "type": ["null", "string"] - }, - "app_apple_id": { - "type": ["null", "string"] - }, - "subscription_name": { - "type": ["null", "string"] - }, - "subscription_apple_id": { - "type": ["null", "string"] - }, - "subscription_group_id": { - "type": ["null", "string"] - }, - "standard_subscription_duration": { - "type": ["null", "string"] - }, - "promotional_offer_name": { - "type": ["null", "string"] - }, - "promotional_offer_id": { - "type": ["null", "string"] - }, - "customer_price": { - "type": ["null", "string"] - }, - "customer_currency": { - "type": ["null", "string"] - }, - "developer_proceeds": { - "type": ["null", "string"] - }, - "proceeds_currency": { - "type": ["null", "string"] - }, - "preserved_pricing": { - "type": ["null", "string"] - }, - "proceeds_reason": { - "type": ["null", "string"] - }, - "client": { - "type": ["null", "string"] - }, - "device": { - "type": ["null", "string"] - }, - "state": { - "type": ["null", "string"] - }, - "country": { - "type": ["null", "string"] - }, - "active_standard_price_subscriptions": { - "type": ["null", "integer"] - }, - "active_free_trial_introductory_offer_subscriptions": { - "type": ["null", "integer"] - }, - "active_pay_up_front_introductory_offer_subscriptions": { - "type": ["null", "integer"] - }, - "active_pay_as_you_go_introductory_offer_subscriptions": { - "type": ["null", "integer"] - }, - "free_trial_promotional_offer_subscriptions": { - "type": ["null", "integer"] - }, - "pay_up_front_promotional_offer_subscriptions": { - "type": ["null", "integer"] - }, - "pay_as_you_go_promotional_offer_subscriptions": { - "type": ["null", "integer"] - }, - "marketing_opt_ins": { - "type": ["null", "integer"] - }, - "billing_retry": { - "type": ["null", "integer"] - }, - "grace_period": { - "type": ["null", "integer"] - } - } - } - } - } - ] -} diff --git a/airbyte-integrations/connectors/source-appstore-singer/sample_files/sample_config.json b/airbyte-integrations/connectors/source-appstore-singer/sample_files/sample_config.json deleted file mode 100644 index 6f2e99506c97..000000000000 --- 
a/airbyte-integrations/connectors/source-appstore-singer/sample_files/sample_config.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "key_id": "", - "private_key": "-----BEGIN PRIVATE KEY-----\nfirst_row_of_private_key\nsecond_row_of_private_key\n...etc...-----END PRIVATE KEY-----\n", - "issuer_id": "", - "vendor": "", - "start_date": "2020-11-22T20:32:05Z" -} diff --git a/airbyte-integrations/connectors/source-appstore-singer/setup.py b/airbyte-integrations/connectors/source-appstore-singer/setup.py deleted file mode 100644 index ecf51e5403a8..000000000000 --- a/airbyte-integrations/connectors/source-appstore-singer/setup.py +++ /dev/null @@ -1,49 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk~=0.1", - "appstoreconnect==0.9.0", - "pyjwt==1.6.4", - "tap-appstore @ https://github.com/airbytehq/tap-appstore/tarball/v0.2.1-airbyte", -] - -TEST_REQUIREMENTS = [ - "requests-mock~=1.9.3", - "pytest~=6.1", - "pytest-mock~=3.6.1", -] - -setup( - entry_points={ - "console_scripts": [ - "source-appstore-singer=source_appstore_singer.run:run", - ], - }, - name="source_appstore_singer", - description="Source implementation for Appstore, built on the Singer tap implementation.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={ - "": [ - # Include yaml files in the package (if any) - "*.yml", - "*.yaml", - # Include all json files in the package, up to 4 levels deep - "*.json", - "*/*.json", - "*/*/*.json", - "*/*/*/*.json", - "*/*/*/*/*.json", - ] - }, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/source-appstore-singer/source_appstore_singer/__init__.py b/airbyte-integrations/connectors/source-appstore-singer/source_appstore_singer/__init__.py deleted file mode 100644 index ad0c7b94e882..000000000000 --- a/airbyte-integrations/connectors/source-appstore-singer/source_appstore_singer/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .source import SourceAppstoreSinger - -__all__ = ["SourceAppstoreSinger"] diff --git a/airbyte-integrations/connectors/source-appstore-singer/source_appstore_singer/run.py b/airbyte-integrations/connectors/source-appstore-singer/source_appstore_singer/run.py deleted file mode 100644 index ef9f845e8d81..000000000000 --- a/airbyte-integrations/connectors/source-appstore-singer/source_appstore_singer/run.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import sys - -from airbyte_cdk.entrypoint import launch -from source_appstore_singer import SourceAppstoreSinger - - -def run(): - source = SourceAppstoreSinger() - launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-appstore-singer/source_appstore_singer/source.py b/airbyte-integrations/connectors/source-appstore-singer/source_appstore_singer/source.py deleted file mode 100644 index b7abdb9070db..000000000000 --- a/airbyte-integrations/connectors/source-appstore-singer/source_appstore_singer/source.py +++ /dev/null @@ -1,113 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-#
-
-
-import json
-from datetime import date, timedelta
-from typing import Dict
-
-from airbyte_cdk.logger import AirbyteLogger
-from airbyte_cdk.models import Status, SyncMode
-from airbyte_cdk.models.airbyte_protocol import AirbyteConnectionStatus
-from airbyte_cdk.sources.singer.singer_helpers import SyncModeInfo
-from airbyte_cdk.sources.singer.source import SingerSource
-from appstoreconnect import Api
-
-
-class SourceAppstoreSinger(SingerSource):
-    TAP_CMD = "tap-appstore"
-
-    def check_config(self, logger: AirbyteLogger, config_path: str, config: json) -> AirbyteConnectionStatus:
-        """
-        Tests whether the input configuration can be used to successfully connect to the integration,
-        e.g. whether a provided Stripe API token can be used to connect to the Stripe API.
-
-        :param logger: Logging object to display debug/info/error to the logs
-            (logs will not be accessible via airbyte UI if they are not passed to this logger)
-        :param config_path: Path to the file containing the JSON configuration
-        :param config: JSON object containing the configuration of this source; its content matches
-            the properties of the spec.json file
-
-        :return: AirbyteConnectionStatus indicating a Success or Failure
-        """
-        try:
-            # If an app on the appstore does not support subscriptions or sales, it cannot pull the relevant reports.
-            # However, the Appstore API does not express this via clear error messages. Instead, it throws an unrelated
-            # error, in this case "invalid vendor ID". There is no way to distinguish whether this error is due to invalid
-            # credentials or due to the account not supporting this kind of report. So to "check connection" we see whether
-            # any of the reports can be pulled and, if so, return success. If no reports can be pulled, we display the
-            # exception messages generated for all reports and return failure.
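-            # The loop below implements that strategy: request one report of each type for a recent
-            # date and collect the per-stream errors so they can be surfaced together if all fail.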
-            api_fields_to_test = {
-                "subscription_event_report": {
-                    "reportType": "SUBSCRIPTION_EVENT",
-                    "frequency": "DAILY",
-                    "reportSubType": "SUMMARY",
-                    "version": "1_2",
-                },
-                "subscriber_report": {"reportType": "SUBSCRIBER", "frequency": "DAILY", "reportSubType": "DETAILED", "version": "1_2"},
-                "subscription_report": {"reportType": "SUBSCRIPTION", "frequency": "DAILY", "reportSubType": "SUMMARY", "version": "1_2"},
-                "sales_report": {"reportType": "SALES", "frequency": "DAILY", "reportSubType": "SUMMARY", "version": "1_0"},
-            }
-
-            api = Api(config["key_id"], config["key_file"], config["issuer_id"])
-            stream_to_error = {}
-            for stream, params in api_fields_to_test.items():
-                test_date = date.today() - timedelta(days=2)
-                report_filters = {"reportDate": test_date.strftime("%Y-%m-%d"), "vendorNumber": f"{config['vendor']}"}
-                report_filters.update(params)
-                try:
-                    rep_tsv = api.download_sales_and_trends_reports(filters=report_filters)
-                    if isinstance(rep_tsv, dict):
-                        raise Exception(f"An exception occurred: Received a JSON response instead of the report: {rep_tsv}")
-                except Exception as e:
-                    logger.warn(f"Unable to download {stream}: {e}")
-                    stream_to_error[stream] = e
-
-            # All streams have failed
-            if len(stream_to_error) == len(api_fields_to_test):
-                message = "\n".join([f"Unable to access {stream} due to error: {e}" for stream, e in stream_to_error.items()])
-                return AirbyteConnectionStatus(status=Status.FAILED, message=message)
-
-            return AirbyteConnectionStatus(status=Status.SUCCEEDED)
-        except Exception as e:
-            logger.warn(e)
-            return AirbyteConnectionStatus(status=Status.FAILED, message=f"An exception occurred: {str(e)}")
-
-    def discover_cmd(self, logger: AirbyteLogger, config_path: str) -> str:
-        """
-        Return the string commands to invoke the tap with the --discover flag and the right configuration options
-        """
-        return f"{self.TAP_CMD} -c {config_path} --discover"
-
-    def read_cmd(self, logger: AirbyteLogger, config_path: str, catalog_path: str, state_path: str = None) -> str:
-        """
-        Return the string commands to invoke the tap with the right configuration options to read data from the source
-        """
-        config_option = f"--config {config_path}"
-        properties_option = f"--properties {catalog_path}"
-        state_option = f"--state {state_path}" if state_path else ""
-        return f"{self.TAP_CMD} {config_option} {properties_option} {state_option}"
-
-    def transform_config(self, raw_config: json) -> json:
-        """
-        Write the private key to a file and add its path to the config, since tap-appstore
-        reads the key from a file rather than from an inline string.
-        """
-        # path where we will write the private key.
-        keyfile_path = "/tmp/keyfile.p8"
-
-        # write the private key to a file.
-        private_key = raw_config["private_key"]
-        with open(keyfile_path, "w") as fh:
-            fh.write(private_key)
-
-        # add the path of the key file in the config for tap-appstore to use.
-        raw_config["key_file"] = keyfile_path
-
-        # remove private_key because we shouldn't need it for anything else in the config.
- del raw_config["private_key"] - - return raw_config - - def get_sync_mode_overrides(self) -> Dict[str, SyncModeInfo]: - streams = ["sales_report", "subscriber_report", "subscription_report", "subscription_event_report"] - return {s: SyncModeInfo(supported_sync_modes=[SyncMode.incremental], source_defined_cursor=True) for s in streams} diff --git a/airbyte-integrations/connectors/source-appstore-singer/source_appstore_singer/spec.json b/airbyte-integrations/connectors/source-appstore-singer/source_appstore_singer/spec.json deleted file mode 100644 index 408f65f29d44..000000000000 --- a/airbyte-integrations/connectors/source-appstore-singer/source_appstore_singer/spec.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/sources/appstore", - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Source Appstore Singer Spec", - "type": "object", - "required": ["key_id", "private_key", "issuer_id", "vendor", "start_date"], - "additionalProperties": false, - "properties": { - "key_id": { - "type": "string", - "title": "Key ID", - "description": "Appstore Key ID. See the docs for more information on how to obtain this key." - }, - "private_key": { - "type": "string", - "title": "Private Key", - "description": "Appstore Private Key. See the docs for more information on how to obtain this key.", - "airbyte_secret": true, - "multiline": true - }, - "issuer_id": { - "type": "string", - "title": "Issuer ID", - "description": "Appstore Issuer ID. See the docs for more information on how to obtain this ID." - }, - "vendor": { - "type": "string", - "title": "Vendor ID", - "description": "Appstore Vendor ID. See the docs for more information on how to obtain this ID." - }, - "start_date": { - "type": "string", - "title": "Start Date", - "description": "UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.", - "examples": ["2020-11-16T00:00:00Z"], - "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" - } - } - } -} diff --git a/airbyte-integrations/connectors/source-appstore-singer/unit_tests/unit_test.py b/airbyte-integrations/connectors/source-appstore-singer/unit_tests/unit_test.py deleted file mode 100644 index 219ae0142c72..000000000000 --- a/airbyte-integrations/connectors/source-appstore-singer/unit_tests/unit_test.py +++ /dev/null @@ -1,7 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-#
-
-
-def test_example_method():
-    assert True
diff --git a/airbyte-integrations/connectors/source-courier/.dockerignore b/airbyte-integrations/connectors/source-courier/.dockerignore
deleted file mode 100644
index b40b5ec059ef..000000000000
--- a/airbyte-integrations/connectors/source-courier/.dockerignore
+++ /dev/null
@@ -1,6 +0,0 @@
-*
-!Dockerfile
-!main.py
-!source_courier
-!setup.py
-!secrets
diff --git a/airbyte-integrations/connectors/source-courier/Dockerfile b/airbyte-integrations/connectors/source-courier/Dockerfile
deleted file mode 100644
index 0b10dba1b45d..000000000000
--- a/airbyte-integrations/connectors/source-courier/Dockerfile
+++ /dev/null
@@ -1,38 +0,0 @@
-FROM python:3.9.11-alpine3.15 as base
-
-# build and load all requirements
-FROM base as builder
-WORKDIR /airbyte/integration_code
-
-# upgrade pip to the latest version
-RUN apk --no-cache upgrade \
-    && pip install --upgrade pip \
-    && apk --no-cache add tzdata build-base
-
-
-COPY setup.py ./
-# install necessary packages to a temporary folder
-RUN pip install --prefix=/install .
-
-# build a clean environment
-FROM base
-WORKDIR /airbyte/integration_code
-
-# copy all loaded and built libraries to a pure basic image
-COPY --from=builder /install /usr/local
-# add default timezone settings
-COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime
-RUN echo "Etc/UTC" > /etc/timezone
-
-# bash is installed for more convenient debugging.
-RUN apk --no-cache add bash
-
-# copy payload code only
-COPY main.py ./
-COPY source_courier ./source_courier
-
-ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py"
-ENTRYPOINT ["python", "/airbyte/integration_code/main.py"]
-
-LABEL io.airbyte.version=0.1.0
-LABEL io.airbyte.name=airbyte/source-courier
diff --git a/airbyte-integrations/connectors/source-courier/README.md b/airbyte-integrations/connectors/source-courier/README.md
deleted file mode 100644
index a6678343f478..000000000000
--- a/airbyte-integrations/connectors/source-courier/README.md
+++ /dev/null
@@ -1,67 +0,0 @@
-# Courier Source
-
-This is the repository for the Courier configuration-based source connector.
-For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/courier).
-
-## Local development
-
-#### Create credentials
-**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/courier)
-to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_courier/spec.yaml` file.
-Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information.
-See `integration_tests/sample_config.json` for a sample config file.
-
-**If you are an Airbyte core member**, copy the credentials from Lastpass under the secret name `source courier test creds`
-and place them into `secrets/config.json`.
-
-### Locally running the connector docker image
-
-
-#### Build
-**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):**
-```bash
-airbyte-ci connectors --name=source-courier build
-```
-
-An image will be built with the tag `airbyte/source-courier:dev`.
-
-**Via `docker build`:**
-```bash
-docker build -t airbyte/source-courier:dev .
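-# The :dev tag above is what the run commands below assume; any other tag (hypothetical example) builds the same way:
-# docker build -t airbyte/source-courier:custom-tag .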
-```
-
-#### Run
-Then run any of the connector commands as follows:
-```
-docker run --rm airbyte/source-courier:dev spec
-docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-courier:dev check --config /secrets/config.json
-docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-courier:dev discover --config /secrets/config.json
-docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-courier:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
-```
-
-## Testing
-You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md):
-```bash
-airbyte-ci connectors --name=source-courier test
-```
-
-### Customizing acceptance tests
-Customize the `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information.
-If your connector requires creating or destroying resources during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`.
-
-## Dependency Management
-All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
-We split dependencies into two groups:
-* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list.
-* dependencies required for testing go in the `TEST_REQUIREMENTS` list.
-
-### Publishing a new version of the connector
-You've checked out the repo, implemented a million-dollar feature, and you're ready to share your changes with the world. Now what?
-1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-courier test`
-2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors).
-3. Make sure the `metadata.yaml` content is up to date.
-4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/sources/courier.md`).
-5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention).
-6. Pat yourself on the back for being an awesome contributor.
-7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
-
diff --git a/airbyte-integrations/connectors/source-courier/__init__.py b/airbyte-integrations/connectors/source-courier/__init__.py
deleted file mode 100644
index c941b3045795..000000000000
--- a/airbyte-integrations/connectors/source-courier/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
diff --git a/airbyte-integrations/connectors/source-courier/acceptance-test-config.yml b/airbyte-integrations/connectors/source-courier/acceptance-test-config.yml
deleted file mode 100644
index c81243abdfbb..000000000000
--- a/airbyte-integrations/connectors/source-courier/acceptance-test-config.yml
+++ /dev/null
@@ -1,23 +0,0 @@
-# See [Connector Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/connector-acceptance-tests-reference)
-# for more information about how to configure these tests
-connector_image: airbyte/source-courier:dev
-tests:
-  spec:
-    - spec_path: "source_courier/spec.yaml"
-  connection:
-    - config_path: "secrets/config.json"
-      status: "succeed"
-    - config_path: "integration_tests/invalid_config.json"
-      status: "failed"
-  discovery:
-    - config_path: "secrets/config.json"
-  basic_read:
-    - config_path: "secrets/config.json"
-      configured_catalog_path: "integration_tests/configured_catalog.json"
-      timeout_seconds: 3600
-      expect_records:
-        path: "integration_tests/expected_records.jsonl"
-  full_refresh:
-    - config_path: "secrets/config.json"
-      configured_catalog_path: "integration_tests/configured_catalog.json"
-      timeout_seconds: 3600
diff --git a/airbyte-integrations/connectors/source-courier/integration_tests/__init__.py b/airbyte-integrations/connectors/source-courier/integration_tests/__init__.py
deleted file mode 100644
index c941b3045795..000000000000
--- a/airbyte-integrations/connectors/source-courier/integration_tests/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
diff --git a/airbyte-integrations/connectors/source-courier/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-courier/integration_tests/acceptance.py
deleted file mode 100644
index 9e6409236281..000000000000
--- a/airbyte-integrations/connectors/source-courier/integration_tests/acceptance.py
+++ /dev/null
@@ -1,16 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
-
-
-import pytest
-
-pytest_plugins = ("connector_acceptance_test.plugin",)
-
-
-@pytest.fixture(scope="session", autouse=True)
-def connector_setup():
-    """This fixture is a placeholder for external resources that acceptance tests might require."""
-    # TODO: set up test dependencies if needed;
otherwise remove the TODO comments - yield - # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-courier/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-courier/integration_tests/configured_catalog.json deleted file mode 100644 index 316b0fe1754d..000000000000 --- a/airbyte-integrations/connectors/source-courier/integration_tests/configured_catalog.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "streams": [ - { - "stream": { - "name": "messages", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - } - ] -} diff --git a/airbyte-integrations/connectors/source-courier/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-courier/integration_tests/expected_records.jsonl deleted file mode 100644 index 0d88ebb0b1e0..000000000000 --- a/airbyte-integrations/connectors/source-courier/integration_tests/expected_records.jsonl +++ /dev/null @@ -1,160 +0,0 @@ -{"stream":"messages","data":{"enqueued":1666121735936,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634f0008-bec7b59a56bca8de101130db","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","recipient":"marcos@airbyte.io","recipientId":"anon_IjXLXfhjax5bN0OH6u7a5","sent":1666121737211,"status":"SENT"},"emitted_at":1667947171536} -{"stream":"messages","data":{"enqueued":1666121725434,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634efffd-c0dba647b6f2b35f9f3bcb33","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","recipient":"marcos@airbyte.io","recipientId":"anon_olEjEpmTQpF4aF6ZQsxLX","sent":1666121726852,"status":"SENT"},"emitted_at":1667947171536} -{"stream":"messages","data":{"enqueued":1666121724248,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634efffc-e532825f9425164998e14dc4","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","recipient":"marcos@airbyte.io","recipientId":"anon_02ZUW5cK9GlX9I168W5XO","sent":1666121725788,"status":"SENT"},"emitted_at":1667947171537} -{"stream":"messages","data":{"enqueued":1666121690699,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634effda-77b4ad7bf93c58f0a9700e0c","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","recipient":"marcos@airbyte.io","recipientId":"anon_l-qjdf3fR99wdkY-EIi26","sent":1666121692149,"status":"SENT"},"emitted_at":1667947171537} -{"stream":"messages","data":{"enqueued":1666121685251,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634effd5-cce28d39137f340e58ac1aff","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","recipient":"marcos@airbyte.io","recipientId":"anon_rlH5Ftz575KDsgPbAvW1G","sent":1666121688519,"status":"SENT"},"emitted_at":1667947171538} -{"stream":"messages","data":{"enqueued":1666121683846,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634effd4-1172e40fd51beec78ee3173d","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","recipient":"marcos@airbyte.io","recipientId":"anon_st0uBid48Owwr5MUpuYWE","sent":1666121685697,"status":"SENT"},"emitted_at":1667947171538} -{"stream":"messages","data":{"enqueued":1666121682503,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634effd2-75f7518b8633e5dd765b7484","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","recipient":"marcos@airbyte.io","recipientId":"anon_4sLvpFy1g3ngksqOwuaMh","sent":1666121683805,"status":"SENT"},"emitted_at":1667947171538} 
-{"stream":"messages","data":{"enqueued":1666121680370,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634effd0-5765da642b7eadb4f534953b","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","recipient":"marcos@airbyte.io","recipientId":"anon_qskL2Vf7-m6pVKM7F1Xe8","sent":1666121682269,"status":"SENT"},"emitted_at":1667947171539} -{"stream":"messages","data":{"enqueued":1666118182536,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634ef226-a611b43243f7cdbe2a5115b3","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","opened":1666119196057,"recipient":"marcosmarxm@gmail.com","recipientId":"anon_b4KEGwKFKwKCPRf7DHoux","sent":1666118184073,"status":"OPENED"},"emitted_at":1667947171539} -{"stream":"messages","data":{"enqueued":1666118180791,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634ef225-8b222f3539887a52ac69cc26","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","opened":1666119193320,"recipient":"marcosmarxm@gmail.com","recipientId":"anon_eNQjQGZSTf0LU0Z_uCMSM","sent":1666118182302,"status":"OPENED"},"emitted_at":1667947171540} -{"stream":"messages","data":{"enqueued":1666118150572,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634ef206-8620ab19a54eeab221126dad","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","recipient":"marcosmarxm@gmail.com","recipientId":"anon_OqdvsDBUNBR-SKWiBqzrO","sent":1666118151976,"status":"SENT"},"emitted_at":1667947173106} -{"stream":"messages","data":{"enqueued":1666117975292,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634ef157-ebfbd09d59d116c4ce45c89e","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","opened":1667501919456,"recipient":"integration-test@airbyte.io","recipientId":"anon_fzpKb-gaY14Sg7D72yXFX","sent":1666117976893,"status":"OPENED"},"emitted_at":1667947173107} -{"stream":"messages","data":{"enqueued":1666117146240,"id":"1-634eee1a-9130394a2b4ada99aa21567b","opened":1667501920954,"recipient":"integration-test@airbyte.io","recipientId":"anon_39GMHrCYlLlW6vOucqnRl","sent":1666117147345,"status":"OPENED"},"emitted_at":1667947173108} -{"stream":"message_info","data":{"enqueued":1666121735936,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634f0008-bec7b59a56bca8de101130db","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","providers":[{"channel":{"name":"","template":"99cd000d-1492-45ce-aa58-1a735478ec65"},"provider":"gmail","providerResponse":{"status":200,"statusText":"OK","data":{"id":"183ec98228ea52fa","threadId":"183ec98228ea52fa","labelIds":["UNREAD","SENT","INBOX"]},"headers":{"content-type":"application/json; charset=UTF-8","vary":"X-Origin, Referer, Origin,Accept-Encoding","date":"Tue, 18 Oct 2022 19:35:37 GMT","server":"ESF","cache-control":"private","x-xss-protection":"0","x-frame-options":"SAMEORIGIN","x-content-type-options":"nosniff","alt-svc":"h3=\":443\"; ma=2592000,h3-29=\":443\"; ma=2592000,h3-Q050=\":443\"; ma=2592000,h3-Q046=\":443\"; ma=2592000,h3-Q043=\":443\"; ma=2592000,quic=\":443\"; ma=2592000; v=\"46,43\"","accept-ranges":"none","connection":"close","transfer-encoding":"chunked"}},"sent":1666121737211,"status":"SENT"}],"recipient":"marcos@airbyte.io","recipientId":"anon_IjXLXfhjax5bN0OH6u7a5","sent":1666121737211,"status":"SENT"},"emitted_at":1667947367464} 
-{"stream":"message_info","data":{"enqueued":1666121725434,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634efffd-c0dba647b6f2b35f9f3bcb33","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","providers":[{"channel":{"name":"","template":"99cd000d-1492-45ce-aa58-1a735478ec65"},"provider":"gmail","providerResponse":{"status":200,"statusText":"OK","data":{"id":"183ec97f813b7d89","threadId":"183ec97f813b7d89","labelIds":["UNREAD","SENT","INBOX"]},"headers":{"content-type":"application/json; charset=UTF-8","vary":"X-Origin, Referer, Origin,Accept-Encoding","date":"Tue, 18 Oct 2022 19:35:26 GMT","server":"ESF","cache-control":"private","x-xss-protection":"0","x-frame-options":"SAMEORIGIN","x-content-type-options":"nosniff","alt-svc":"h3=\":443\"; ma=2592000,h3-29=\":443\"; ma=2592000,h3-Q050=\":443\"; ma=2592000,h3-Q046=\":443\"; ma=2592000,h3-Q043=\":443\"; ma=2592000,quic=\":443\"; ma=2592000; v=\"46,43\"","accept-ranges":"none","connection":"close","transfer-encoding":"chunked"}},"sent":1666121726852,"status":"SENT"}],"recipient":"marcos@airbyte.io","recipientId":"anon_olEjEpmTQpF4aF6ZQsxLX","sent":1666121726852,"status":"SENT"},"emitted_at":1667947367837} -{"stream":"message_info","data":{"enqueued":1666121724248,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634efffc-e532825f9425164998e14dc4","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","providers":[{"channel":{"name":"","template":"99cd000d-1492-45ce-aa58-1a735478ec65"},"provider":"gmail","providerResponse":{"status":200,"statusText":"OK","data":{"id":"183ec97f7fb4fad1","threadId":"183ec97f7fb4fad1","labelIds":["UNREAD","SENT","INBOX"]},"headers":{"content-type":"application/json; charset=UTF-8","vary":"X-Origin, Referer, Origin,Accept-Encoding","date":"Tue, 18 Oct 2022 19:35:25 GMT","server":"ESF","cache-control":"private","x-xss-protection":"0","x-frame-options":"SAMEORIGIN","x-content-type-options":"nosniff","alt-svc":"h3=\":443\"; ma=2592000,h3-29=\":443\"; ma=2592000,h3-Q050=\":443\"; ma=2592000,h3-Q046=\":443\"; ma=2592000,h3-Q043=\":443\"; ma=2592000,quic=\":443\"; ma=2592000; v=\"46,43\"","accept-ranges":"none","connection":"close","transfer-encoding":"chunked"}},"sent":1666121725788,"status":"SENT"}],"recipient":"marcos@airbyte.io","recipientId":"anon_02ZUW5cK9GlX9I168W5XO","sent":1666121725788,"status":"SENT"},"emitted_at":1667947368375} -{"stream":"message_info","data":{"enqueued":1666121690699,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634effda-77b4ad7bf93c58f0a9700e0c","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","providers":[{"channel":{"name":"","template":"99cd000d-1492-45ce-aa58-1a735478ec65"},"provider":"gmail","providerResponse":{"status":200,"statusText":"OK","data":{"id":"183ec9772444bd1c","threadId":"183ec9772444bd1c","labelIds":["UNREAD","SENT","INBOX"]},"headers":{"content-type":"application/json; charset=UTF-8","vary":"X-Origin, Referer, Origin,Accept-Encoding","date":"Tue, 18 Oct 2022 19:34:52 GMT","server":"ESF","cache-control":"private","x-xss-protection":"0","x-frame-options":"SAMEORIGIN","x-content-type-options":"nosniff","alt-svc":"h3=\":443\"; ma=2592000,h3-29=\":443\"; ma=2592000,h3-Q050=\":443\"; ma=2592000,h3-Q046=\":443\"; ma=2592000,h3-Q043=\":443\"; ma=2592000,quic=\":443\"; ma=2592000; v=\"46,43\"","accept-ranges":"none","connection":"close","transfer-encoding":"chunked"}},"sent":1666121692149,"status":"SENT"}],"recipient":"marcos@airbyte.io","recipientId":"anon_l-qjdf3fR99wdkY-EIi26","sent":1666121692149,"status":"SENT"},"emitted_at":1667947368778} 
-{"stream":"message_info","data":{"enqueued":1666121685251,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634effd5-cce28d39137f340e58ac1aff","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","providers":[{"channel":{"name":"","template":"99cd000d-1492-45ce-aa58-1a735478ec65"},"provider":"gmail","providerResponse":{"status":200,"statusText":"OK","data":{"id":"183ec976476c31cd","threadId":"183ec976476c31cd","labelIds":["UNREAD","SENT","INBOX"]},"headers":{"content-type":"application/json; charset=UTF-8","vary":"X-Origin, Referer, Origin,Accept-Encoding","date":"Tue, 18 Oct 2022 19:34:48 GMT","server":"ESF","cache-control":"private","x-xss-protection":"0","x-frame-options":"SAMEORIGIN","x-content-type-options":"nosniff","alt-svc":"h3=\":443\"; ma=2592000,h3-29=\":443\"; ma=2592000,h3-Q050=\":443\"; ma=2592000,h3-Q046=\":443\"; ma=2592000,h3-Q043=\":443\"; ma=2592000,quic=\":443\"; ma=2592000; v=\"46,43\"","accept-ranges":"none","connection":"close","transfer-encoding":"chunked"}},"sent":1666121688519,"status":"SENT"}],"recipient":"marcos@airbyte.io","recipientId":"anon_rlH5Ftz575KDsgPbAvW1G","sent":1666121688519,"status":"SENT"},"emitted_at":1667947369153} -{"stream":"message_info","data":{"enqueued":1666121683846,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634effd4-1172e40fd51beec78ee3173d","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","providers":[{"channel":{"name":"","template":"99cd000d-1492-45ce-aa58-1a735478ec65"},"provider":"gmail","providerResponse":{"status":200,"statusText":"OK","data":{"id":"183ec975a05099d8","threadId":"183ec975a05099d8","labelIds":["UNREAD","SENT","INBOX"]},"headers":{"content-type":"application/json; charset=UTF-8","vary":"X-Origin, Referer, Origin,Accept-Encoding","date":"Tue, 18 Oct 2022 19:34:45 GMT","server":"ESF","cache-control":"private","x-xss-protection":"0","x-frame-options":"SAMEORIGIN","x-content-type-options":"nosniff","alt-svc":"h3=\":443\"; ma=2592000,h3-29=\":443\"; ma=2592000,h3-Q050=\":443\"; ma=2592000,h3-Q046=\":443\"; ma=2592000,h3-Q043=\":443\"; ma=2592000,quic=\":443\"; ma=2592000; v=\"46,43\"","accept-ranges":"none","connection":"close","transfer-encoding":"chunked"}},"sent":1666121685697,"status":"SENT"}],"recipient":"marcos@airbyte.io","recipientId":"anon_st0uBid48Owwr5MUpuYWE","sent":1666121685697,"status":"SENT"},"emitted_at":1667947369610} -{"stream":"message_info","data":{"enqueued":1666121682503,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634effd2-75f7518b8633e5dd765b7484","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","providers":[{"channel":{"name":"","template":"99cd000d-1492-45ce-aa58-1a735478ec65"},"provider":"gmail","providerResponse":{"status":200,"statusText":"OK","data":{"id":"183ec975386d6917","threadId":"183ec975386d6917","labelIds":["UNREAD","SENT","INBOX"]},"headers":{"content-type":"application/json; charset=UTF-8","vary":"X-Origin, Referer, Origin,Accept-Encoding","date":"Tue, 18 Oct 2022 19:34:43 GMT","server":"ESF","cache-control":"private","x-xss-protection":"0","x-frame-options":"SAMEORIGIN","x-content-type-options":"nosniff","alt-svc":"h3=\":443\"; ma=2592000,h3-29=\":443\"; ma=2592000,h3-Q050=\":443\"; ma=2592000,h3-Q046=\":443\"; ma=2592000,h3-Q043=\":443\"; ma=2592000,quic=\":443\"; ma=2592000; v=\"46,43\"","accept-ranges":"none","connection":"close","transfer-encoding":"chunked"}},"sent":1666121683805,"status":"SENT"}],"recipient":"marcos@airbyte.io","recipientId":"anon_4sLvpFy1g3ngksqOwuaMh","sent":1666121683805,"status":"SENT"},"emitted_at":1667947370017} 
-{"stream":"message_info","data":{"enqueued":1666121680370,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634effd0-5765da642b7eadb4f534953b","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","providers":[{"channel":{"name":"","template":"99cd000d-1492-45ce-aa58-1a735478ec65"},"provider":"gmail","providerResponse":{"status":200,"statusText":"OK","data":{"id":"183ec974d3e4ab1a","threadId":"183ec974d3e4ab1a","labelIds":["UNREAD","SENT","INBOX"]},"headers":{"content-type":"application/json; charset=UTF-8","vary":"X-Origin, Referer, Origin,Accept-Encoding","date":"Tue, 18 Oct 2022 19:34:42 GMT","server":"ESF","cache-control":"private","x-xss-protection":"0","x-frame-options":"SAMEORIGIN","x-content-type-options":"nosniff","alt-svc":"h3=\":443\"; ma=2592000,h3-29=\":443\"; ma=2592000,h3-Q050=\":443\"; ma=2592000,h3-Q046=\":443\"; ma=2592000,h3-Q043=\":443\"; ma=2592000,quic=\":443\"; ma=2592000; v=\"46,43\"","accept-ranges":"none","connection":"close","transfer-encoding":"chunked"}},"sent":1666121682269,"status":"SENT"}],"recipient":"marcos@airbyte.io","recipientId":"anon_qskL2Vf7-m6pVKM7F1Xe8","sent":1666121682269,"status":"SENT"},"emitted_at":1667947370431} -{"stream":"message_info","data":{"enqueued":1666118182536,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634ef226-a611b43243f7cdbe2a5115b3","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","opened":1666119196057,"providers":[{"channel":{"name":"","template":"99cd000d-1492-45ce-aa58-1a735478ec65"},"opened":1666119196057,"provider":"gmail","sent":1666118184073,"status":"OPENED"}],"recipient":"marcosmarxm@gmail.com","recipientId":"anon_b4KEGwKFKwKCPRf7DHoux","sent":1666118184073,"status":"OPENED"},"emitted_at":1667947370798} -{"stream":"message_info","data":{"enqueued":1666118180791,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634ef225-8b222f3539887a52ac69cc26","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","opened":1666119193320,"providers":[{"channel":{"name":"","template":"99cd000d-1492-45ce-aa58-1a735478ec65"},"opened":1666119193320,"provider":"gmail","sent":1666118182302,"status":"OPENED"}],"recipient":"marcosmarxm@gmail.com","recipientId":"anon_eNQjQGZSTf0LU0Z_uCMSM","sent":1666118182302,"status":"OPENED"},"emitted_at":1667947371019} -{"stream":"message_info","data":{"enqueued":1666118150572,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634ef206-8620ab19a54eeab221126dad","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","providers":[{"channel":{"name":"","template":"99cd000d-1492-45ce-aa58-1a735478ec65"},"provider":"gmail","providerResponse":{"status":200,"statusText":"OK","data":{"id":"183ec616dd6676bd","threadId":"183ec616dd6676bd","labelIds":["SENT"]},"headers":{"content-type":"application/json; charset=UTF-8","vary":"X-Origin, Referer, Origin,Accept-Encoding","date":"Tue, 18 Oct 2022 18:35:51 GMT","server":"ESF","cache-control":"private","x-xss-protection":"0","x-frame-options":"SAMEORIGIN","x-content-type-options":"nosniff","alt-svc":"h3=\":443\"; ma=2592000,h3-29=\":443\"; ma=2592000,h3-Q050=\":443\"; ma=2592000,h3-Q046=\":443\"; ma=2592000,h3-Q043=\":443\"; ma=2592000,quic=\":443\"; ma=2592000; v=\"46,43\"","accept-ranges":"none","connection":"close","transfer-encoding":"chunked"}},"sent":1666118151976,"status":"SENT"}],"recipient":"marcosmarxm@gmail.com","recipientId":"anon_OqdvsDBUNBR-SKWiBqzrO","sent":1666118151976,"status":"SENT"},"emitted_at":1667947372536} 
-{"stream":"message_info","data":{"enqueued":1666117975292,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634ef157-ebfbd09d59d116c4ce45c89e","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","opened":1667501919456,"providers":[{"channel":{"name":"","template":"99cd000d-1492-45ce-aa58-1a735478ec65"},"opened":1667501919456,"provider":"gmail","sent":1666117976893,"status":"OPENED"}],"recipient":"integration-test@airbyte.io","recipientId":"anon_fzpKb-gaY14Sg7D72yXFX","sent":1666117976893,"status":"OPENED"},"emitted_at":1667947373077} -{"stream":"message_info","data":{"enqueued":1666117146240,"id":"1-634eee1a-9130394a2b4ada99aa21567b","opened":1667501920954,"providers":[{"channel":{"key":"gmail"},"opened":1667501920954,"provider":"gmail","sent":1666117147345,"status":"OPENED"}],"recipient":"integration-test@airbyte.io","recipientId":"anon_39GMHrCYlLlW6vOucqnRl","sent":1666117147345,"status":"OPENED"},"emitted_at":1667947373418} -{"stream":"message_history","data":{"ts":1666121735945,"type":"ENQUEUED","message_id":"1-634f0008-bec7b59a56bca8de101130db"},"emitted_at":1667947495264} -{"stream":"message_history","data":{"merged_profile":{"email":"marcos@airbyte.io"},"received_profile":{"email":"marcos@airbyte.io"},"ts":1666121736184,"type":"PROFILE_LOADED","message_id":"1-634f0008-bec7b59a56bca8de101130db"},"emitted_at":1667947495266} -{"stream":"message_history","data":{"event_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","notification_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","ts":1666121736300,"type":"MAPPED","message_id":"1-634f0008-bec7b59a56bca8de101130db"},"emitted_at":1667947495268} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"output":{"text":"/messages/1-634f0008-bec7b59a56bca8de101130db/output/83a0c138-2328-4d3d-93f9-7467a7e4395c/text","subject":"/messages/1-634f0008-bec7b59a56bca8de101130db/output/83a0c138-2328-4d3d-93f9-7467a7e4395c/subject","html":"/messages/1-634f0008-bec7b59a56bca8de101130db/output/83a0c138-2328-4d3d-93f9-7467a7e4395c/html"},"ts":1666121736733,"type":"RENDERED","message_id":"1-634f0008-bec7b59a56bca8de101130db"},"emitted_at":1667947495269} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666121737211,"type":"SENT","message_id":"1-634f0008-bec7b59a56bca8de101130db"},"emitted_at":1667947495270} -{"stream":"message_history","data":{"ts":1666121725442,"type":"ENQUEUED","message_id":"1-634efffd-c0dba647b6f2b35f9f3bcb33"},"emitted_at":1667947495870} -{"stream":"message_history","data":{"merged_profile":{"email":"marcos@airbyte.io"},"received_profile":{"email":"marcos@airbyte.io"},"ts":1666121725618,"type":"PROFILE_LOADED","message_id":"1-634efffd-c0dba647b6f2b35f9f3bcb33"},"emitted_at":1667947495871} -{"stream":"message_history","data":{"event_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","notification_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","ts":1666121725667,"type":"MAPPED","message_id":"1-634efffd-c0dba647b6f2b35f9f3bcb33"},"emitted_at":1667947495872} 
-{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"output":{"text":"/messages/1-634efffd-c0dba647b6f2b35f9f3bcb33/output/d8d7bb5c-f74f-4dc6-b40e-4e560b8d0e40/text","subject":"/messages/1-634efffd-c0dba647b6f2b35f9f3bcb33/output/d8d7bb5c-f74f-4dc6-b40e-4e560b8d0e40/subject","html":"/messages/1-634efffd-c0dba647b6f2b35f9f3bcb33/output/d8d7bb5c-f74f-4dc6-b40e-4e560b8d0e40/html"},"ts":1666121726110,"type":"RENDERED","message_id":"1-634efffd-c0dba647b6f2b35f9f3bcb33"},"emitted_at":1667947495873} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666121726852,"type":"SENT","message_id":"1-634efffd-c0dba647b6f2b35f9f3bcb33"},"emitted_at":1667947495875} -{"stream":"message_history","data":{"ts":1666121724260,"type":"ENQUEUED","message_id":"1-634efffc-e532825f9425164998e14dc4"},"emitted_at":1667947496108} -{"stream":"message_history","data":{"merged_profile":{"email":"marcos@airbyte.io"},"received_profile":{"email":"marcos@airbyte.io"},"ts":1666121724484,"type":"PROFILE_LOADED","message_id":"1-634efffc-e532825f9425164998e14dc4"},"emitted_at":1667947496108} -{"stream":"message_history","data":{"event_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","notification_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","ts":1666121724555,"type":"MAPPED","message_id":"1-634efffc-e532825f9425164998e14dc4"},"emitted_at":1667947496109} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"output":{"text":"/messages/1-634efffc-e532825f9425164998e14dc4/output/b869a222-8301-45e2-a21b-e34db3952918/text","subject":"/messages/1-634efffc-e532825f9425164998e14dc4/output/b869a222-8301-45e2-a21b-e34db3952918/subject","html":"/messages/1-634efffc-e532825f9425164998e14dc4/output/b869a222-8301-45e2-a21b-e34db3952918/html"},"ts":1666121725015,"type":"RENDERED","message_id":"1-634efffc-e532825f9425164998e14dc4"},"emitted_at":1667947496110} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666121725788,"type":"SENT","message_id":"1-634efffc-e532825f9425164998e14dc4"},"emitted_at":1667947496110} -{"stream":"message_history","data":{"ts":1666121690745,"type":"ENQUEUED","message_id":"1-634effda-77b4ad7bf93c58f0a9700e0c"},"emitted_at":1667947496380} -{"stream":"message_history","data":{"merged_profile":{"email":"marcos@airbyte.io"},"received_profile":{"email":"marcos@airbyte.io"},"ts":1666121690962,"type":"PROFILE_LOADED","message_id":"1-634effda-77b4ad7bf93c58f0a9700e0c"},"emitted_at":1667947496381} -{"stream":"message_history","data":{"event_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","notification_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","ts":1666121691007,"type":"MAPPED","message_id":"1-634effda-77b4ad7bf93c58f0a9700e0c"},"emitted_at":1667947496382} 
-{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"output":{"text":"/messages/1-634effda-77b4ad7bf93c58f0a9700e0c/output/111c6056-156a-4cdc-bd66-372e9e521750/text","subject":"/messages/1-634effda-77b4ad7bf93c58f0a9700e0c/output/111c6056-156a-4cdc-bd66-372e9e521750/subject","html":"/messages/1-634effda-77b4ad7bf93c58f0a9700e0c/output/111c6056-156a-4cdc-bd66-372e9e521750/html"},"ts":1666121691654,"type":"RENDERED","message_id":"1-634effda-77b4ad7bf93c58f0a9700e0c"},"emitted_at":1667947496384} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666121692149,"type":"SENT","message_id":"1-634effda-77b4ad7bf93c58f0a9700e0c"},"emitted_at":1667947496385} -{"stream":"message_history","data":{"ts":1666121685259,"type":"ENQUEUED","message_id":"1-634effd5-cce28d39137f340e58ac1aff"},"emitted_at":1667947496687} -{"stream":"message_history","data":{"merged_profile":{"email":"marcos@airbyte.io"},"received_profile":{"email":"marcos@airbyte.io"},"ts":1666121685491,"type":"PROFILE_LOADED","message_id":"1-634effd5-cce28d39137f340e58ac1aff"},"emitted_at":1667947496687} -{"stream":"message_history","data":{"event_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","notification_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","ts":1666121685537,"type":"MAPPED","message_id":"1-634effd5-cce28d39137f340e58ac1aff"},"emitted_at":1667947496688} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"output":{"text":"/messages/1-634effd5-cce28d39137f340e58ac1aff/output/3fa6953a-5ef2-4b45-bd10-09c685768a27/text","subject":"/messages/1-634effd5-cce28d39137f340e58ac1aff/output/3fa6953a-5ef2-4b45-bd10-09c685768a27/subject","html":"/messages/1-634effd5-cce28d39137f340e58ac1aff/output/3fa6953a-5ef2-4b45-bd10-09c685768a27/html"},"ts":1666121686024,"type":"RENDERED","message_id":"1-634effd5-cce28d39137f340e58ac1aff"},"emitted_at":1667947496688} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666121688519,"type":"SENT","message_id":"1-634effd5-cce28d39137f340e58ac1aff"},"emitted_at":1667947496689} -{"stream":"message_history","data":{"ts":1666121683855,"type":"ENQUEUED","message_id":"1-634effd4-1172e40fd51beec78ee3173d"},"emitted_at":1667947496937} -{"stream":"message_history","data":{"merged_profile":{"email":"marcos@airbyte.io"},"received_profile":{"email":"marcos@airbyte.io"},"ts":1666121684334,"type":"PROFILE_LOADED","message_id":"1-634effd4-1172e40fd51beec78ee3173d"},"emitted_at":1667947496940} -{"stream":"message_history","data":{"event_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","notification_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","ts":1666121684385,"type":"MAPPED","message_id":"1-634effd4-1172e40fd51beec78ee3173d"},"emitted_at":1667947496941} 
-{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"output":{"text":"/messages/1-634effd4-1172e40fd51beec78ee3173d/output/5251854d-361d-44aa-944a-bd9898edb7bb/text","subject":"/messages/1-634effd4-1172e40fd51beec78ee3173d/output/5251854d-361d-44aa-944a-bd9898edb7bb/subject","html":"/messages/1-634effd4-1172e40fd51beec78ee3173d/output/5251854d-361d-44aa-944a-bd9898edb7bb/html"},"ts":1666121685065,"type":"RENDERED","message_id":"1-634effd4-1172e40fd51beec78ee3173d"},"emitted_at":1667947496941} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666121685697,"type":"SENT","message_id":"1-634effd4-1172e40fd51beec78ee3173d"},"emitted_at":1667947496943} -{"stream":"message_history","data":{"ts":1666121682514,"type":"ENQUEUED","message_id":"1-634effd2-75f7518b8633e5dd765b7484"},"emitted_at":1667947497173} -{"stream":"message_history","data":{"merged_profile":{"email":"marcos@airbyte.io"},"received_profile":{"email":"marcos@airbyte.io"},"ts":1666121682751,"type":"PROFILE_LOADED","message_id":"1-634effd2-75f7518b8633e5dd765b7484"},"emitted_at":1667947497175} -{"stream":"message_history","data":{"event_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","notification_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","ts":1666121682875,"type":"MAPPED","message_id":"1-634effd2-75f7518b8633e5dd765b7484"},"emitted_at":1667947497176} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"output":{"text":"/messages/1-634effd2-75f7518b8633e5dd765b7484/output/12b2f13c-6280-4fa7-9557-f6b838d93757/text","subject":"/messages/1-634effd2-75f7518b8633e5dd765b7484/output/12b2f13c-6280-4fa7-9557-f6b838d93757/subject","html":"/messages/1-634effd2-75f7518b8633e5dd765b7484/output/12b2f13c-6280-4fa7-9557-f6b838d93757/html"},"ts":1666121683336,"type":"RENDERED","message_id":"1-634effd2-75f7518b8633e5dd765b7484"},"emitted_at":1667947497178} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666121683805,"type":"SENT","message_id":"1-634effd2-75f7518b8633e5dd765b7484"},"emitted_at":1667947497179} -{"stream":"message_history","data":{"ts":1666121680380,"type":"ENQUEUED","message_id":"1-634effd0-5765da642b7eadb4f534953b"},"emitted_at":1667947497397} -{"stream":"message_history","data":{"merged_profile":{"email":"marcos@airbyte.io"},"received_profile":{"email":"marcos@airbyte.io"},"ts":1666121680814,"type":"PROFILE_LOADED","message_id":"1-634effd0-5765da642b7eadb4f534953b"},"emitted_at":1667947497399} -{"stream":"message_history","data":{"event_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","notification_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","ts":1666121680916,"type":"MAPPED","message_id":"1-634effd0-5765da642b7eadb4f534953b"},"emitted_at":1667947497401} 
-{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"output":{"text":"/messages/1-634effd0-5765da642b7eadb4f534953b/output/f315b4ad-e291-4958-beed-3a671cf6f247/text","subject":"/messages/1-634effd0-5765da642b7eadb4f534953b/output/f315b4ad-e291-4958-beed-3a671cf6f247/subject","html":"/messages/1-634effd0-5765da642b7eadb4f534953b/output/f315b4ad-e291-4958-beed-3a671cf6f247/html"},"ts":1666121681280,"type":"RENDERED","message_id":"1-634effd0-5765da642b7eadb4f534953b"},"emitted_at":1667947497402} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666121682269,"type":"SENT","message_id":"1-634effd0-5765da642b7eadb4f534953b"},"emitted_at":1667947497403} -{"stream":"message_history","data":{"ts":1666118182542,"type":"ENQUEUED","message_id":"1-634ef226-a611b43243f7cdbe2a5115b3"},"emitted_at":1667947498020} -{"stream":"message_history","data":{"merged_profile":{"email":"marcosmarxm@gmail.com"},"received_profile":{"email":"marcosmarxm@gmail.com"},"ts":1666118182749,"type":"PROFILE_LOADED","message_id":"1-634ef226-a611b43243f7cdbe2a5115b3"},"emitted_at":1667947498022} -{"stream":"message_history","data":{"event_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","notification_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","ts":1666118182789,"type":"MAPPED","message_id":"1-634ef226-a611b43243f7cdbe2a5115b3"},"emitted_at":1667947498023} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"output":{"text":"/messages/1-634ef226-a611b43243f7cdbe2a5115b3/output/a925e133-dd34-4f77-96f3-789098a76b9a/text","subject":"/messages/1-634ef226-a611b43243f7cdbe2a5115b3/output/a925e133-dd34-4f77-96f3-789098a76b9a/subject","html":"/messages/1-634ef226-a611b43243f7cdbe2a5115b3/output/a925e133-dd34-4f77-96f3-789098a76b9a/html"},"ts":1666118183370,"type":"RENDERED","message_id":"1-634ef226-a611b43243f7cdbe2a5115b3"},"emitted_at":1667947498024} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666118184073,"type":"SENT","message_id":"1-634ef226-a611b43243f7cdbe2a5115b3"},"emitted_at":1667947498026} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666119196057,"type":"OPENED","message_id":"1-634ef226-a611b43243f7cdbe2a5115b3"},"emitted_at":1667947498027} -{"stream":"message_history","data":{"ts":1666118180798,"type":"ENQUEUED","message_id":"1-634ef225-8b222f3539887a52ac69cc26"},"emitted_at":1667947498637} -{"stream":"message_history","data":{"merged_profile":{"email":"marcosmarxm@gmail.com"},"received_profile":{"email":"marcosmarxm@gmail.com"},"ts":1666118181051,"type":"PROFILE_LOADED","message_id":"1-634ef225-8b222f3539887a52ac69cc26"},"emitted_at":1667947498638} -{"stream":"message_history","data":{"event_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","notification_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","ts":1666118181204,"type":"MAPPED","message_id":"1-634ef225-8b222f3539887a52ac69cc26"},"emitted_at":1667947498638} 
-{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"output":{"text":"/messages/1-634ef225-8b222f3539887a52ac69cc26/output/f0593540-235f-447b-8526-b1ec321fb0a1/text","subject":"/messages/1-634ef225-8b222f3539887a52ac69cc26/output/f0593540-235f-447b-8526-b1ec321fb0a1/subject","html":"/messages/1-634ef225-8b222f3539887a52ac69cc26/output/f0593540-235f-447b-8526-b1ec321fb0a1/html"},"ts":1666118181718,"type":"RENDERED","message_id":"1-634ef225-8b222f3539887a52ac69cc26"},"emitted_at":1667947498639} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666118182302,"type":"SENT","message_id":"1-634ef225-8b222f3539887a52ac69cc26"},"emitted_at":1667947498639} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666119193320,"type":"OPENED","message_id":"1-634ef225-8b222f3539887a52ac69cc26"},"emitted_at":1667947498640} -{"stream":"message_history","data":{"ts":1666118150579,"type":"ENQUEUED","message_id":"1-634ef206-8620ab19a54eeab221126dad"},"emitted_at":1667947500596} -{"stream":"message_history","data":{"merged_profile":{"email":"marcosmarxm@gmail.com"},"received_profile":{"email":"marcosmarxm@gmail.com"},"ts":1666118150805,"type":"PROFILE_LOADED","message_id":"1-634ef206-8620ab19a54eeab221126dad"},"emitted_at":1667947500597} -{"stream":"message_history","data":{"event_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","notification_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","ts":1666118150939,"type":"MAPPED","message_id":"1-634ef206-8620ab19a54eeab221126dad"},"emitted_at":1667947500597} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"output":{"text":"/messages/1-634ef206-8620ab19a54eeab221126dad/output/6cd79f99-a0d4-429e-b66a-6c99047e0777/text","subject":"/messages/1-634ef206-8620ab19a54eeab221126dad/output/6cd79f99-a0d4-429e-b66a-6c99047e0777/subject","html":"/messages/1-634ef206-8620ab19a54eeab221126dad/output/6cd79f99-a0d4-429e-b66a-6c99047e0777/html"},"ts":1666118151367,"type":"RENDERED","message_id":"1-634ef206-8620ab19a54eeab221126dad"},"emitted_at":1667947500598} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666118151976,"type":"SENT","message_id":"1-634ef206-8620ab19a54eeab221126dad"},"emitted_at":1667947500598} -{"stream":"message_history","data":{"ts":1666117975299,"type":"ENQUEUED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502116} -{"stream":"message_history","data":{"merged_profile":{"email":"integration-test@airbyte.io"},"received_profile":{"email":"integration-test@airbyte.io"},"ts":1666117975590,"type":"PROFILE_LOADED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502117} -{"stream":"message_history","data":{"event_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","notification_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","ts":1666117975701,"type":"MAPPED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502118} 
-{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"output":{"text":"/messages/1-634ef157-ebfbd09d59d116c4ce45c89e/output/bcbffbd9-7daa-473c-ac11-7b49be7035e3/text","subject":"/messages/1-634ef157-ebfbd09d59d116c4ce45c89e/output/bcbffbd9-7daa-473c-ac11-7b49be7035e3/subject","html":"/messages/1-634ef157-ebfbd09d59d116c4ce45c89e/output/bcbffbd9-7daa-473c-ac11-7b49be7035e3/html"},"ts":1666117976071,"type":"RENDERED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502119} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666117976893,"type":"SENT","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502119} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666117986671,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502120} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666117988580,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502121} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666118081296,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502122} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666118563186,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502122} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666118758257,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502123} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666118935356,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502124} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666119128407,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502124} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666119448182,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502125} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666119725709,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502125} 
-{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666121775742,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502126} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666128609009,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502127} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666128611530,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502127} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666128649996,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502128} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666134970418,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502128} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666134974510,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502129} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666148305097,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502130} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666160528537,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502130} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666161400073,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502131} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666163704656,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502131} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666163788243,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502132} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666163931562,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502133} 
-{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666171803626,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502133} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666174474590,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502134} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666249893593,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502134} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666355531786,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502135} -{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1667501919456,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502136} -{"stream":"message_history","data":{"ts":1666117146246,"type":"ENQUEUED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503332} -{"stream":"message_history","data":{"merged_profile":{"email":"integration-test@airbyte.io"},"received_profile":{"email":"integration-test@airbyte.io"},"ts":1666117146354,"type":"PROFILE_LOADED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503333} -{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"output":{"text":"/messages/1-634eee1a-9130394a2b4ada99aa21567b/output/d8895731-341c-4c0f-ab5b-8959170de7d3/text","subject":"/messages/1-634eee1a-9130394a2b4ada99aa21567b/output/d8895731-341c-4c0f-ab5b-8959170de7d3/subject","html":"/messages/1-634eee1a-9130394a2b4ada99aa21567b/output/d8895731-341c-4c0f-ab5b-8959170de7d3/html"},"ts":1666117146764,"type":"RENDERED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503333} -{"stream":"message_history","data":{"channel":{"id":"","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666117147345,"type":"SENT","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503334} -{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666117158183,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503335} -{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666117159506,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503335} -{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666117161996,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503336} 
-{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666117180676,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503336} -{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666117181352,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503338} -{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666117185871,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503338} -{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666117440248,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503339} -{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666118058308,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503340} -{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666118098458,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503340} -{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666118565173,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503341} -{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666118934619,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503341} -{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666119130253,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503342} -{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666119252052,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503342} -{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666119725701,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503343} -{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666128615007,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503343} -{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666128650024,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503344} -{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666134970833,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503344} 
-{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666134972436,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503345} -{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666148314052,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503345} -{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666160528529,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503345} -{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666161412122,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503346} -{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666163817661,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503346} -{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666163949680,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503347} -{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666171803621,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503347} -{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666174499363,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503348} -{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666249874814,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503348} -{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666355531518,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503349} -{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666369358702,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503349} -{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1667501920954,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503350} -{"stream":"message_output","data":{"channel":"email","channel_id":"99cd000d-1492-45ce-aa58-1a735478ec65","content":{"text":"Hey Airbyters,\n\nThis is an example of a email sent through Courier. Updated Message.\n\nCheers,\nThe Courier Team","subject":"Hi, Airbyters","html":"\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n

\n \n \n \n "},"message_id":"1-634f0008-bec7b59a56bca8de101130db"},"emitted_at":1667947633162} -{"stream":"message_output","data":{"channel":"email","channel_id":"99cd000d-1492-45ce-aa58-1a735478ec65","content":{"text":"Hey Airbyters,\n\nThis is an example of a email sent through Courier. Updated Message.\n\nCheers,\nThe Courier Team","subject":"Hi, Airbyters","html":"\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n "},"message_id":"1-634efffd-c0dba647b6f2b35f9f3bcb33"},"emitted_at":1667947633430} -{"stream":"message_output","data":{"channel":"email","channel_id":"99cd000d-1492-45ce-aa58-1a735478ec65","content":{"text":"Hey Airbyters,\n\nThis is an example of a email sent through Courier. Updated Message.\n\nCheers,\nThe Courier Team","subject":"Hi, Airbyters","html":"\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n "},"message_id":"1-634efffc-e532825f9425164998e14dc4"},"emitted_at":1667947633814} -{"stream":"message_output","data":{"channel":"email","channel_id":"99cd000d-1492-45ce-aa58-1a735478ec65","content":{"text":"Hey Airbyters,\n\nThis is an example of a email sent through Courier. Updated Message.\n\nCheers,\nThe Courier Team","subject":"Hi, Airbyters","html":"\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n "},"message_id":"1-634effda-77b4ad7bf93c58f0a9700e0c"},"emitted_at":1667947634210} -{"stream":"message_output","data":{"channel":"email","channel_id":"99cd000d-1492-45ce-aa58-1a735478ec65","content":{"text":"Hey Airbyters,\n\nThis is an example of a email sent through Courier. Updated Message.\n\nCheers,\nThe Courier Team","subject":"Hi, Airbyters","html":"\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n "},"message_id":"1-634effd5-cce28d39137f340e58ac1aff"},"emitted_at":1667947634616} -{"stream":"message_output","data":{"channel":"email","channel_id":"99cd000d-1492-45ce-aa58-1a735478ec65","content":{"text":"Hey Airbyters,\n\nThis is an example of a email sent through Courier. Updated Message.\n\nCheers,\nThe Courier Team","subject":"Hi, Airbyters","html":"\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n "},"message_id":"1-634effd4-1172e40fd51beec78ee3173d"},"emitted_at":1667947634800} -{"stream":"message_output","data":{"channel":"email","channel_id":"99cd000d-1492-45ce-aa58-1a735478ec65","content":{"text":"Hey Airbyters,\n\nThis is an example of a email sent through Courier. Updated Message.\n\nCheers,\nThe Courier Team","subject":"Hi, Airbyters","html":"\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n "},"message_id":"1-634effd2-75f7518b8633e5dd765b7484"},"emitted_at":1667947634989} -{"stream":"message_output","data":{"channel":"email","channel_id":"99cd000d-1492-45ce-aa58-1a735478ec65","content":{"text":"Hey Airbyters,\n\nThis is an example of a email sent through Courier. Updated Message.\n\nCheers,\nThe Courier Team","subject":"Hi, Airbyters","html":"\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n "},"message_id":"1-634effd0-5765da642b7eadb4f534953b"},"emitted_at":1667947635543} -{"stream":"message_output","data":{"channel":"email","channel_id":"99cd000d-1492-45ce-aa58-1a735478ec65","content":{"text":"Hey Airbyters,\n\nThis is an example of a email sent through Courier. 
Updated Message.\n\nCheers,\nThe Courier Team","subject":"Hi, Airbyters","html":"\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n "},"message_id":"1-634ef226-a611b43243f7cdbe2a5115b3"},"emitted_at":1667947635742} -{"stream":"message_output","data":{"channel":"email","channel_id":"99cd000d-1492-45ce-aa58-1a735478ec65","content":{"text":"Hey Airbyters,\n\nThis is an example of a email sent through Courier. Updated Message.\n\nCheers,\nThe Courier Team","subject":"Hi, Airbyters","html":"\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n "},"message_id":"1-634ef225-8b222f3539887a52ac69cc26"},"emitted_at":1667947635927} -{"stream":"message_output","data":{"channel":"email","channel_id":"99cd000d-1492-45ce-aa58-1a735478ec65","content":{"text":"Hey Airbyters,\n\nThis is an example of a email sent through Courier. Updated Message.\n\nCheers,\nThe Courier Team","subject":"Hi, Airbyters","html":"\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n "},"message_id":"1-634ef206-8620ab19a54eeab221126dad"},"emitted_at":1667947637465} -{"stream":"message_output","data":{"channel":"email","channel_id":"99cd000d-1492-45ce-aa58-1a735478ec65","content":{"text":"Hey Airbyters,\n\nThis is an example of a email sent through Courier\n\nCheers,\nThe Courier Team","subject":"Hi, Airbyters","html":"\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n "},"message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947638315} -{"stream":"message_output","data":{"channel":"email","content":{"text":"Want to hear a joke? How did the T-Rex feel after a set of bicep curls? Dino-sore!\n","subject":"Welcome to Courier!","html":"\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n "},"message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947639031} diff --git a/airbyte-integrations/connectors/source-courier/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-courier/integration_tests/invalid_config.json deleted file mode 100644 index c238e30eb7ed..000000000000 --- a/airbyte-integrations/connectors/source-courier/integration_tests/invalid_config.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "api_key": "bad_pk_123" -} diff --git a/airbyte-integrations/connectors/source-courier/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-courier/integration_tests/sample_config.json deleted file mode 100644 index f40906e82ed9..000000000000 --- a/airbyte-integrations/connectors/source-courier/integration_tests/sample_config.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "api_key": "example_pk_123" -} diff --git a/airbyte-integrations/connectors/source-courier/main.py b/airbyte-integrations/connectors/source-courier/main.py deleted file mode 100644 index d4de8fbdc917..000000000000 --- a/airbyte-integrations/connectors/source-courier/main.py +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from source_courier.run import run - -if __name__ == "__main__": - run() diff --git a/airbyte-integrations/connectors/source-courier/requirements.txt b/airbyte-integrations/connectors/source-courier/requirements.txt deleted file mode 100644 index d6e1198b1ab1..000000000000 --- a/airbyte-integrations/connectors/source-courier/requirements.txt +++ /dev/null @@ -1 +0,0 @@ --e . 
diff --git a/airbyte-integrations/connectors/source-courier/setup.py b/airbyte-integrations/connectors/source-courier/setup.py deleted file mode 100644 index 9bb4390f1913..000000000000 --- a/airbyte-integrations/connectors/source-courier/setup.py +++ /dev/null @@ -1,46 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk~=0.1", -] - -TEST_REQUIREMENTS = [ - "requests-mock~=1.9.3", - "pytest~=6.1", - "pytest-mock~=3.6.1", -] - -setup( - entry_points={ - "console_scripts": [ - "source-courier=source_courier.run:run", - ], - }, - name="source_courier", - description="Source implementation for Courier.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={ - "": [ - # Include yaml files in the package (if any) - "*.yml", - "*.yaml", - # Include all json files in the package, up to 4 levels deep - "*.json", - "*/*.json", - "*/*/*.json", - "*/*/*/*.json", - "*/*/*/*/*.json", - ] - }, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/source-courier/source_courier/__init__.py b/airbyte-integrations/connectors/source-courier/source_courier/__init__.py deleted file mode 100644 index 3d7fa7aec3d4..000000000000 --- a/airbyte-integrations/connectors/source-courier/source_courier/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from .source import SourceCourier - -__all__ = ["SourceCourier"] diff --git a/airbyte-integrations/connectors/source-courier/source_courier/manifest.yaml b/airbyte-integrations/connectors/source-courier/source_courier/manifest.yaml deleted file mode 100644 index 4eab957a85ce..000000000000 --- a/airbyte-integrations/connectors/source-courier/source_courier/manifest.yaml +++ /dev/null @@ -1,152 +0,0 @@ -version: "0.29.0" - -definitions: - schema_loader: - type: JsonFileSchemaLoader - file_path: "./source_courier/schemas/{{ parameters['name'] }}.json" - - root_selector: - type: RecordSelector - extractor: - type: DpathExtractor - field_path: [] - - results_selector: - type: RecordSelector - extractor: - type: DpathExtractor - field_path: - - results - - requester: - type: HttpRequester - url_base: "https://api.courier.com" - http_method: "GET" - authenticator: - type: BearerAuthenticator - api_token: "{{ config['api_key'] }}" - - cursor_paginator: - type: DefaultPaginator - page_token_option: - type: RequestOption - field_name: cursor - inject_into: request_parameter - page_size_option: - inject_into: request_parameter - field_name: page_size - pagination_strategy: - type: "CursorPagination" - cursor_value: "{{ response.paging.cursor }}" - page_size: 1 - - retriever: - type: SimpleRetriever - record_selector: - $ref: "#/definitions/results_selector" - - base_stream: - type: DeclarativeStream - primary_key: "id" - schema_loader: - $ref: "#/definitions/schema_loader" - retriever: - $ref: "#/definitions/retriever" - requester: - $ref: "#/definitions/requester" - record_selector: - $ref: "#/definitions/root_selector" - - ## MESSAGES API ## - messages_stream: - $ref: "#/definitions/base_stream" - primary_key: id - $parameters: - name: "messages" - path: "/messages" - retriever: - $ref: "#/definitions/retriever" - record_selector: - $ref: "#/definitions/results_selector" - requester: - $ref: "#/definitions/requester" - path: "/messages" - paginator: - $ref: 
"#/definitions/cursor_paginator" - - ## MESSAGE INFO / HISTORY / OUTPUT STREAMS ## - message_id_transformer: - type: AddFields - fields: - - path: ["message_id"] - value: "{{ stream_slice.id }}" - message_partition_router: - type: SubstreamPartitionRouter - parent_stream_configs: - - stream: "#/definitions/messages_stream" - parent_key: id - partition_field: id - - message_info_stream: - $ref: "#/definitions/base_stream" - type: DeclarativeStream - primary_key: id - $parameters: - name: message_info - retriever: - $ref: "#/definitions/retriever" - requester: - $ref: "#/definitions/requester" - path: "/messages/{{ stream_slice.id }}" - partition_router: - $ref: "#/definitions/message_partition_router" - record_selector: - $ref: "#/definitions/root_selector" - - message_history_stream: - type: DeclarativeStream - $parameters: - name: message_history - primary_key: message_id - schema_loader: - $ref: "#/definitions/schema_loader" - retriever: - $ref: "#/definitions/retriever" - record_selector: - $ref: "#/definitions/results_selector" - requester: - $ref: "#/definitions/requester" - path: "/messages/{{ stream_slice.id }}/history" - partition_router: - $ref: "#/definitions/message_partition_router" - transformations: - - $ref: "#/definitions/message_id_transformer" - - message_output_stream: - type: DeclarativeStream - $parameters: - name: message_output - primary_key: message_id - schema_loader: - $ref: "#/definitions/schema_loader" - retriever: - $ref: "#/definitions/retriever" - record_selector: - $ref: "#/definitions/results_selector" - requester: - $ref: "#/definitions/requester" - path: "/messages/{{ stream_slice.id }}/output" - partition_router: - $ref: "#/definitions/message_partition_router" - transformations: - - $ref: "#/definitions/message_id_transformer" - -streams: - - "#/definitions/messages_stream" - - "#/definitions/message_info_stream" - - "#/definitions/message_history_stream" - - "#/definitions/message_output_stream" - -check: - type: CheckStream - stream_names: ["messages"] diff --git a/airbyte-integrations/connectors/source-courier/source_courier/run.py b/airbyte-integrations/connectors/source-courier/source_courier/run.py deleted file mode 100644 index c6e85fbde238..000000000000 --- a/airbyte-integrations/connectors/source-courier/source_courier/run.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - - -import sys - -from airbyte_cdk.entrypoint import launch -from source_courier import SourceCourier - - -def run(): - source = SourceCourier() - launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-courier/source_courier/schemas/message_history.json b/airbyte-integrations/connectors/source-courier/source_courier/schemas/message_history.json deleted file mode 100644 index 0ad455ed0361..000000000000 --- a/airbyte-integrations/connectors/source-courier/source_courier/schemas/message_history.json +++ /dev/null @@ -1,100 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "message_id": { - "type": "string" - }, - "ts": { - "type": "number" - }, - "type": { - "type": "string" - }, - "data": { - "type": "object", - "properties": {}, - "additionalProperties": true - }, - "event": { - "type": "string" - }, - "profile": { - "type": "object", - "properties": {}, - "additionalProperties": true - }, - "override": { - "type": "object", - "properties": {}, - "additionalProperties": true - }, - "recipient": { - "type": "string" - }, - "event_id": { - "type": "string" - }, - "notification_id": { - "type": "string" - }, - "merged_profile": { - "type": "object", - "properties": {}, - "additionalProperties": true - }, - "received_profile": { - "type": "object", - "properties": {}, - "additionalProperties": true - }, - "stored_profile": { - "type": "object", - "properties": {}, - "additionalProperties": true - }, - "output": { - "type": "object", - "properties": {}, - "additionalProperties": true - }, - "channel": { - "type": "object", - "properties": { - "id": { - "type": "string" - }, - "label": { - "type": "string" - } - } - }, - "integration": { - "type": "object", - "properties": { - "id": { - "type": "string" - }, - "provider": { - "type": "string" - } - } - }, - "reason": { - "type": "string", - "enum": [ - "FILTERED", - "NO_CHANNELS", - "NO_PROVIDERS", - "PROVIDER_ERROR", - "UNPUBLISHED", - "UNDELIVERABLE", - "UNSUBSCRIBED" - ] - }, - "reasonCode": { - "type": "string", - "enum": ["HARD", "SOFT"] - } - } -} diff --git a/airbyte-integrations/connectors/source-courier/source_courier/schemas/message_info.json b/airbyte-integrations/connectors/source-courier/source_courier/schemas/message_info.json deleted file mode 100644 index db5b636fb048..000000000000 --- a/airbyte-integrations/connectors/source-courier/source_courier/schemas/message_info.json +++ /dev/null @@ -1,137 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "id": { - "type": "string" - }, - "idempotencyKey": { - "type": "string" - }, - "listId": { - "type": "string" - }, - "listMessageId": { - "type": "string" - }, - "status": { - "type": "string", - "enum": [ - "CLICKED", - "DELIVERED", - "ENQUEUED", - "OPENED", - "SENT", - "UNDELIVERABLE", - "UNMAPPED", - "UNROUTABLE" - ] - }, - "enqueued": { - "type": "number" - }, - "sent": { - "type": "number" - }, - "delivered": { - "type": "number" - }, - "opened": { - "type": "number" - }, - "clicked": { - "type": "number" - }, - "recipient": { - "type": "string" - }, - "event": { - "type": "string" - }, - "notification": { - "type": "string" - }, - "error": { - "type": "string" - }, - "reason": { - "type": "string", - "enum": [ - "FILTERED", - "NO_CHANNELS", - "NO_PROVIDERS", - "PROVIDER_ERROR", - "UNPUBLISHED", - "UNDELIVERABLE", - "UNSUBSCRIBED" - ] - }, - "reasonCode": { - "type": "string", - "enum": ["HARD", "SOFT"] - }, - "reasonDetails": { - "type": 
"string" - }, - "runId": { - "type": "string" - }, - "providers": { - "type": "array", - "items": { - "type": "object", - "properties": { - "channel": { - "type": "object", - "properties": { - "key": { - "type": "string" - }, - "name": { - "type": "string" - }, - "template": { - "type": "string" - } - } - }, - "clicked": { - "type": "number" - }, - "delivered": { - "type": "number" - }, - "error": { - "type": "string" - }, - "provider": { - "type": "string" - }, - "reference": { - "type": "object", - "properties": {}, - "additionalProperties": true - }, - "sent": { - "type": "number" - }, - "status": { - "type": "string", - "enum": [ - "CLICKED", - "DELIVERED", - "ENQUEUED", - "FILTERED", - "OPENED", - "SENT", - "SIMULATED", - "UNDELIVERABLE", - "UNMAPPED", - "UNROUTABLE" - ] - } - } - } - } - } -} diff --git a/airbyte-integrations/connectors/source-courier/source_courier/schemas/message_output.json b/airbyte-integrations/connectors/source-courier/source_courier/schemas/message_output.json deleted file mode 100644 index 11f5ffdd9b2f..000000000000 --- a/airbyte-integrations/connectors/source-courier/source_courier/schemas/message_output.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "properties": { - "message_id": { - "type": "string" - }, - "channel": { - "type": "string" - }, - "channel_id": { - "type": "string" - }, - "content": { - "properties": { - "blocks": { - "type": "array" - }, - "body": { - "type": "string" - }, - "html": { - "type": "string" - }, - "subject": { - "type": "string" - }, - "text": { - "type": "string" - }, - "title": { - "type": "string" - } - }, - "type": "object" - } - }, - "type": "object" -} diff --git a/airbyte-integrations/connectors/source-courier/source_courier/schemas/messages.json b/airbyte-integrations/connectors/source-courier/source_courier/schemas/messages.json deleted file mode 100644 index db5b636fb048..000000000000 --- a/airbyte-integrations/connectors/source-courier/source_courier/schemas/messages.json +++ /dev/null @@ -1,137 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "id": { - "type": "string" - }, - "idempotencyKey": { - "type": "string" - }, - "listId": { - "type": "string" - }, - "listMessageId": { - "type": "string" - }, - "status": { - "type": "string", - "enum": [ - "CLICKED", - "DELIVERED", - "ENQUEUED", - "OPENED", - "SENT", - "UNDELIVERABLE", - "UNMAPPED", - "UNROUTABLE" - ] - }, - "enqueued": { - "type": "number" - }, - "sent": { - "type": "number" - }, - "delivered": { - "type": "number" - }, - "opened": { - "type": "number" - }, - "clicked": { - "type": "number" - }, - "recipient": { - "type": "string" - }, - "event": { - "type": "string" - }, - "notification": { - "type": "string" - }, - "error": { - "type": "string" - }, - "reason": { - "type": "string", - "enum": [ - "FILTERED", - "NO_CHANNELS", - "NO_PROVIDERS", - "PROVIDER_ERROR", - "UNPUBLISHED", - "UNDELIVERABLE", - "UNSUBSCRIBED" - ] - }, - "reasonCode": { - "type": "string", - "enum": ["HARD", "SOFT"] - }, - "reasonDetails": { - "type": "string" - }, - "runId": { - "type": "string" - }, - "providers": { - "type": "array", - "items": { - "type": "object", - "properties": { - "channel": { - "type": "object", - "properties": { - "key": { - "type": "string" - }, - "name": { - "type": "string" - }, - "template": { - "type": "string" - } - } - }, - "clicked": { - "type": "number" - }, - "delivered": { - "type": "number" - }, - "error": { - "type": "string" - }, - 
"provider": { - "type": "string" - }, - "reference": { - "type": "object", - "properties": {}, - "additionalProperties": true - }, - "sent": { - "type": "number" - }, - "status": { - "type": "string", - "enum": [ - "CLICKED", - "DELIVERED", - "ENQUEUED", - "FILTERED", - "OPENED", - "SENT", - "SIMULATED", - "UNDELIVERABLE", - "UNMAPPED", - "UNROUTABLE" - ] - } - } - } - } - } -} diff --git a/airbyte-integrations/connectors/source-courier/source_courier/source.py b/airbyte-integrations/connectors/source-courier/source_courier/source.py deleted file mode 100644 index cf660db85034..000000000000 --- a/airbyte-integrations/connectors/source-courier/source_courier/source.py +++ /dev/null @@ -1,18 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource - -""" -This file provides the necessary constructs to interpret a provided declarative YAML configuration file into -source connector. - -WARNING: Do not modify this file. -""" - - -# Declarative Source -class SourceCourier(YamlDeclarativeSource): - def __init__(self): - super().__init__(**{"path_to_yaml": "manifest.yaml"}) diff --git a/airbyte-integrations/connectors/source-courier/source_courier/spec.yaml b/airbyte-integrations/connectors/source-courier/source_courier/spec.yaml deleted file mode 100644 index c9f1b5925f01..000000000000 --- a/airbyte-integrations/connectors/source-courier/source_courier/spec.yaml +++ /dev/null @@ -1,13 +0,0 @@ -documentationUrl: https://docs.airbyte.io/integrations/sources/courier -connectionSpecification: - $schema: http://json-schema.org/draft-07/schema# - title: Courier Source Spec - type: object - required: - - api_key - additionalProperties: true - properties: - api_key: - type: string - description: Courier API Key to retrieve your data. - airbyte_secret: true diff --git a/airbyte-integrations/connectors/source-dv-360/.dockerignore b/airbyte-integrations/connectors/source-dv-360/.dockerignore deleted file mode 100644 index 8a6afc671730..000000000000 --- a/airbyte-integrations/connectors/source-dv-360/.dockerignore +++ /dev/null @@ -1,6 +0,0 @@ -* -!Dockerfile -!main.py -!source_dv_360 -!setup.py -!secrets diff --git a/airbyte-integrations/connectors/source-dv-360/BOOTSTRAP.md b/airbyte-integrations/connectors/source-dv-360/BOOTSTRAP.md deleted file mode 100644 index 7951ef8f6eec..000000000000 --- a/airbyte-integrations/connectors/source-dv-360/BOOTSTRAP.md +++ /dev/null @@ -1,17 +0,0 @@ -# Display & Video 360 - -Google DoubleClick Bid Manager (DBM) is the API that enables developers to manage Queries and retrieve Reports from Display & Video 360. - -DoubleClick Bid Manager API `v1.1` is the latest available and recommended version. - -[Link](https://developers.google.com/bid-manager/v1.1) to the official documentation. - -[Getting started with the API](https://developers.google.com/bid-manager/guides/getting-started-api) - -**Workflow of the API**: -* In order to fetch data from the DBM API, it is necessary to first build a [query](https://developers.google.com/bid-manager/v1.1/queries) that gets created in the [user interface (UI)](https://www.google.com/ddm/bidmanager/). -* Once the query is created it can be executed, and the resulting [report](https://developers.google.com/bid-manager/v1.1/reports) can be found and downloaded in the UI. - -**Filters and Metrics**: Dimensions are referred to as Filters in DV360. 
All available dimensions and metrics can be found [here](https://developers.google.com/bid-manager/v1.1/filters-metrics). - -**Note**: It is recommended in the reporting [best practices](https://developers.google.com/bid-manager/guides/scheduled-reports/best-practices) to first build the desired report in the UI to avoid any errors, since there are several limitations and requirements pertaining to reporting types, filters, dimensions, and metrics (such as valid combinations of metrics and dimensions). \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-dv-360/Dockerfile b/airbyte-integrations/connectors/source-dv-360/Dockerfile deleted file mode 100644 index 33d9e3f0e5d6..000000000000 --- a/airbyte-integrations/connectors/source-dv-360/Dockerfile +++ /dev/null @@ -1,38 +0,0 @@ -FROM python:3.7.11-alpine3.14 as base - -# build and load all requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -# upgrade pip to the latest version -RUN apk --no-cache upgrade \ - && pip install --upgrade pip \ - && apk --no-cache add tzdata build-base - - -COPY setup.py ./ -# install necessary packages to a temporary folder -RUN pip install --prefix=/install . - -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# bash is installed for more convenient debugging. -RUN apk --no-cache add bash - -# copy payload code only -COPY main.py ./ -COPY source_dv_360 ./source_dv_360 - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=0.1.0 -LABEL io.airbyte.name=airbyte/source-dv-360 diff --git a/airbyte-integrations/connectors/source-dv-360/README.md b/airbyte-integrations/connectors/source-dv-360/README.md deleted file mode 100644 index 024e835dfa63..000000000000 --- a/airbyte-integrations/connectors/source-dv-360/README.md +++ /dev/null @@ -1,99 +0,0 @@ -# DISPLAY & VIDEO 360 Source - -This is the repository for the DISPLAY & VIDEO 360 source connector, written in Python. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/dv360). - -## Local development - -### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** - -#### Minimum Python version required `= 3.7.0` - -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` - -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt -``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. - -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`.
-If this is mumbo jumbo to you, don't worry about it: just put your deps in `setup.py`, install using `pip install -r requirements.txt`, and everything -should work as you expect. - -#### Create credentials -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/dv360) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_dv_360/spec.json` file. -Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. - -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source dv360 test creds` -and place them into `secrets/config.json`. - -### Locally running the connector -``` -python main.py spec -python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json -``` - -### Locally running the connector docker image - - -#### Build -**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** -```bash -airbyte-ci connectors --name=source-dv-360 build -``` - -An image will be built with the tag `airbyte/source-dv-360:dev`. - -**Via `docker build`:** -```bash -docker build -t airbyte/source-dv-360:dev . -``` - -#### Run -Then run any of the connector commands as follows: -``` -docker run --rm airbyte/source-dv-360:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-dv-360:dev check --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-dv-360:dev discover --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-dv-360:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` - -## Testing -You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): -```bash -airbyte-ci connectors --name=source-dv-360 test -``` - -### Customizing Acceptance Tests -Customize the `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. -If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`. - -## Dependency Management -All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. -We split dependencies between two groups: -* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list. -* dependencies required for testing go in the `TEST_REQUIREMENTS` list. - -### Publishing a new version of the connector -You've checked out the repo, implemented a million-dollar feature, and you're ready to share your changes with the world. Now what? -1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-dv-360 test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value.
Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). -3. Make sure the `metadata.yaml` content is up to date. -4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/sources/dv-360.md`). -5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). -6. Pat yourself on the back for being an awesome contributor. -7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. - diff --git a/airbyte-integrations/connectors/source-dv-360/acceptance-test-config.yml b/airbyte-integrations/connectors/source-dv-360/acceptance-test-config.yml deleted file mode 100644 index d92e6a06e982..000000000000 --- a/airbyte-integrations/connectors/source-dv-360/acceptance-test-config.yml +++ /dev/null @@ -1,28 +0,0 @@ -# See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) -# for more information about how to configure these tests -connector_image: airbyte/source-dv-360:dev -tests: - spec: - - spec_path: "source_dv_360/spec.json" - connection: - - config_path: "secrets/config.json" - status: "succeed" - - config_path: "integration_tests/invalid_config.json" - status: "failed" - discovery: - - config_path: "secrets/config.json" - basic_read: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" - empty_streams: [] - # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file - # expect_records: - # path: "integration_tests/expected_records.jsonl" - # exact_order: no - incremental: # TODO if your connector does not implement incremental sync, remove this block - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" - future_state_path: "integration_tests/abnormal_state.json" - full_refresh: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-dv-360/integration_tests/__init__.py b/airbyte-integrations/connectors/source-dv-360/integration_tests/__init__.py deleted file mode 100644 index 46b7376756ec..000000000000 --- a/airbyte-integrations/connectors/source-dv-360/integration_tests/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# -# Copyright (c) 2021 Airbyte, Inc., all rights reserved. -# diff --git a/airbyte-integrations/connectors/source-dv-360/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-dv-360/integration_tests/abnormal_state.json deleted file mode 100644 index 24ce726cb18d..000000000000 --- a/airbyte-integrations/connectors/source-dv-360/integration_tests/abnormal_state.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "standard": { - "date": "2224-01-01" - } -} diff --git a/airbyte-integrations/connectors/source-dv-360/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-dv-360/integration_tests/acceptance.py deleted file mode 100644 index 43ce950d77ca..000000000000 --- a/airbyte-integrations/connectors/source-dv-360/integration_tests/acceptance.py +++ /dev/null @@ -1,16 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-# - - -import pytest - -pytest_plugins = ("connector_acceptance_test.plugin",) - - -@pytest.fixture(scope="session", autouse=True) -def connector_setup(): - """This fixture is a placeholder for external resources that acceptance test might require.""" - # TODO: setup test dependencies - yield - # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-dv-360/integration_tests/catalog.json b/airbyte-integrations/connectors/source-dv-360/integration_tests/catalog.json deleted file mode 100644 index 33819136327b..000000000000 --- a/airbyte-integrations/connectors/source-dv-360/integration_tests/catalog.json +++ /dev/null @@ -1,2016 +0,0 @@ -{ - "type": "CATALOG", - "catalog": { - "streams": [ - { - "name": "reach", - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "advertiser": { - "type": ["null", "string"] - }, - "advertiser_id": { - "type": ["null", "string"] - }, - "advertiser_integration_code": { - "type": ["null", "string"] - }, - "advertiser_status": { - "type": ["null", "string"] - }, - "app_url": { - "type": ["null", "string"] - }, - "app_url_excluded": { - "type": ["null", "string"] - }, - "campaign": { - "type": ["null", "string"] - }, - "campaign_id": { - "type": ["null", "string"] - }, - "cm_placement_id": { - "type": ["null", "string"] - }, - "country": { - "type": ["null", "string"] - }, - "country_id": { - "type": ["null", "string"] - }, - "creative": { - "type": ["null", "string"] - }, - "creative_id": { - "type": ["null", "string"] - }, - "creative_integration_code": { - "type": ["null", "string"] - }, - "creative_source": { - "type": ["null", "string"] - }, - "creative_status": { - "type": ["null", "string"] - }, - "date": { - "type": ["null", "string"] - }, - "insertion_order": { - "type": ["null", "string"] - }, - "insertion_order_id": { - "type": ["null", "string"] - }, - "insertion_order_integration_code": { - "type": ["null", "string"] - }, - "insertion_order_status": { - "type": ["null", "string"] - }, - "inventory_source": { - "type": ["null", "string"] - }, - "line_item": { - "type": ["null", "string"] - }, - "line_item_id": { - "type": ["null", "string"] - }, - "line_item_integration_code": { - "type": ["null", "string"] - }, - "line_item_status": { - "type": ["null", "string"] - }, - "partner": { - "type": ["null", "string"] - }, - "partner_id": { - "type": ["null", "string"] - }, - "partner_status": { - "type": ["null", "string"] - }, - "targeted_data_providers": { - "type": ["null", "string"] - }, - "cookie_reach_average_impression_frequency": { - "type": ["null", "string"] - }, - "cookie_reach_impression_reach": { - "type": ["null", "string"] - }, - "unique_reach_average_impression_frequency": { - "type": ["null", "string"] - }, - "unique_reach_click_reach": { - "type": ["null", "string"] - }, - "unique_reach_impression_reach": { - "type": ["null", "string"] - }, - "unique_reach_total_reach": { - "type": ["null", "string"] - } - } - }, - "supported_sync_modes": ["full_refresh", "incremental"] - }, - { - "name": "standard", - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "active_view_custom_metric_id ": { - "type": ["null", "string"] - }, - "active_view_custom_metric_name ": { - "type": ["null", "string"] - }, - "ad_position ": { - "type": ["null", "string"] - }, - "ad_type ": { - "type": ["null", "string"] - }, - "advertiser ": { - "type": ["null", "string"] - }, - "advertiser_currency ": { - "type": 
["null", "string"] - }, - "advertiser_id ": { - "type": ["null", "string"] - }, - "advertiser_integration_code ": { - "type": ["null", "string"] - }, - "advertiser_status ": { - "type": ["null", "string"] - }, - "advertiser_time_zone ": { - "type": ["null", "string"] - }, - "algorithm ": { - "type": ["null", "string"] - }, - "algorithm_id ": { - "type": ["null", "string"] - }, - "amp_page_request ": { - "type": ["null", "string"] - }, - "app_url ": { - "type": ["null", "string"] - }, - "app_url_excluded ": { - "type": ["null", "string"] - }, - "app_url_id ": { - "type": ["null", "string"] - }, - "attributed_userlist ": { - "type": ["null", "string"] - }, - "attributed_userlist_cost ": { - "type": ["null", "string"] - }, - "attributed_userlist_id ": { - "type": ["null", "string"] - }, - "attributed_userlist_type ": { - "type": ["null", "string"] - }, - "attribution_model ": { - "type": ["null", "string"] - }, - "audience_list ": { - "type": ["null", "string"] - }, - "audience_list_cost ": { - "type": ["null", "string"] - }, - "audience_list_id ": { - "type": ["null", "string"] - }, - "audience_list_type ": { - "type": ["null", "string"] - }, - "audience_name ": { - "type": ["null", "string"] - }, - "audience_type ": { - "type": ["null", "string"] - }, - "authorized_seller_state ": { - "type": ["null", "string"] - }, - "billable_outcome ": { - "type": ["null", "string"] - }, - "brand_lift_type ": { - "type": ["null", "string"] - }, - "browser_id ": { - "type": ["null", "string"] - }, - "budget_segment_description ": { - "type": ["null", "string"] - }, - "campaign ": { - "type": ["null", "string"] - }, - "campaign_id ": { - "type": ["null", "string"] - }, - "category ": { - "type": ["null", "string"] - }, - "channel ": { - "type": ["null", "string"] - }, - "channel_id ": { - "type": ["null", "string"] - }, - "channel_type ": { - "type": ["null", "string"] - }, - "city ": { - "type": ["null", "string"] - }, - "city_id ": { - "type": ["null", "string"] - }, - "cm_placement_id ": { - "type": ["null", "string"] - }, - "companion_creative ": { - "type": ["null", "string"] - }, - "companion_creative_id ": { - "type": ["null", "string"] - }, - "companion_creative_size ": { - "type": ["null", "string"] - }, - "country ": { - "type": ["null", "string"] - }, - "country_id ": { - "type": ["null", "string"] - }, - "creative ": { - "type": ["null", "string"] - }, - "creative_asset ": { - "type": ["null", "string"] - }, - "creative_attributes ": { - "type": ["null", "string"] - }, - "creative_height ": { - "type": ["null", "string"] - }, - "creative_id ": { - "type": ["null", "string"] - }, - "creative_integration_code ": { - "type": ["null", "string"] - }, - "creative_rendered_in_amp ": { - "type": ["null", "string"] - }, - "creative_size ": { - "type": ["null", "string"] - }, - "creative_source ": { - "type": ["null", "string"] - }, - "creative_status ": { - "type": ["null", "string"] - }, - "creative_type ": { - "type": ["null", "string"] - }, - "creative_width ": { - "type": ["null", "string"] - }, - "data_provider ": { - "type": ["null", "string"] - }, - "data_provider_id ": { - "type": ["null", "string"] - }, - "date ": { - "type": ["null", "string"] - }, - "day_of_week ": { - "type": ["null", "string"] - }, - "detailed_demographics ": { - "type": ["null", "string"] - }, - "detailed_demographics_id ": { - "type": ["null", "string"] - }, - "device ": { - "type": ["null", "string"] - }, - "device_make ": { - "type": ["null", "string"] - }, - "device_model ": { - "type": ["null", "string"] - }, - 
"device_type ": { - "type": ["null", "string"] - }, - "digital_content_label ": { - "type": ["null", "string"] - }, - "dma ": { - "type": ["null", "string"] - }, - "dma_code ": { - "type": ["null", "string"] - }, - "exchange ": { - "type": ["null", "string"] - }, - "exchange_code ": { - "type": ["null", "string"] - }, - "exchange_id ": { - "type": ["null", "string"] - }, - "extension ": { - "type": ["null", "string"] - }, - "extension_status ": { - "type": ["null", "string"] - }, - "extension_type ": { - "type": ["null", "string"] - }, - "floodlight_activity ": { - "type": ["null", "string"] - }, - "floodlight_activity_id ": { - "type": ["null", "string"] - }, - "format ": { - "type": ["null", "string"] - }, - "gmail_age ": { - "type": ["null", "string"] - }, - "gmail_city ": { - "type": ["null", "string"] - }, - "gmail_country ": { - "type": ["null", "string"] - }, - "gmail_device_type ": { - "type": ["null", "string"] - }, - "gmail_gender ": { - "type": ["null", "string"] - }, - "gmail_region ": { - "type": ["null", "string"] - }, - "gmail_remarketing_list ": { - "type": ["null", "string"] - }, - "household_income ": { - "type": ["null", "string"] - }, - "impression_counting_method ": { - "type": ["null", "string"] - }, - "insertion_order ": { - "type": ["null", "string"] - }, - "insertion_order_daily_frequency ": { - "type": ["null", "string"] - }, - "insertion_order_id ": { - "type": ["null", "string"] - }, - "insertion_order_integration_code ": { - "type": ["null", "string"] - }, - "insertion_order_status ": { - "type": ["null", "string"] - }, - "interest ": { - "type": ["null", "string"] - }, - "inventory_commitment_type ": { - "type": ["null", "string"] - }, - "inventory_delivery_method ": { - "type": ["null", "string"] - }, - "inventory_rate_type ": { - "type": ["null", "string"] - }, - "inventory_source ": { - "type": ["null", "string"] - }, - "inventory_source_group ": { - "type": ["null", "string"] - }, - "inventory_source_group_id ": { - "type": ["null", "string"] - }, - "inventory_source_id ": { - "type": ["null", "string"] - }, - "inventory_source_id_external ": { - "type": ["null", "string"] - }, - "inventory_source_type ": { - "type": ["null", "string"] - }, - "isp_or_carrier ": { - "type": ["null", "string"] - }, - "isp_or_carrier_id ": { - "type": ["null", "string"] - }, - "keyword ": { - "type": ["null", "string"] - }, - "life_event ": { - "type": ["null", "string"] - }, - "life_events ": { - "type": ["null", "string"] - }, - "line_item ": { - "type": ["null", "string"] - }, - "line_item_daily_frequency ": { - "type": ["null", "string"] - }, - "line_item_id ": { - "type": ["null", "string"] - }, - "line_item_integration_code ": { - "type": ["null", "string"] - }, - "line_item_lifetime_frequency ": { - "type": ["null", "string"] - }, - "line_item_status ": { - "type": ["null", "string"] - }, - "line_item_type ": { - "type": ["null", "string"] - }, - "max_video_duration ": { - "type": ["null", "string"] - }, - "measurement_source ": { - "type": ["null", "string"] - }, - "month ": { - "type": ["null", "string"] - }, - "operating_system ": { - "type": ["null", "string"] - }, - "partner ": { - "type": ["null", "string"] - }, - "partner_currency ": { - "type": ["null", "string"] - }, - "partner_id ": { - "type": ["null", "string"] - }, - "partner_status ": { - "type": ["null", "string"] - }, - "platform ": { - "type": ["null", "string"] - }, - "playback_method ": { - "type": ["null", "string"] - }, - "position_in_content ": { - "type": ["null", "string"] - }, - 
"public_inventory ": { - "type": ["null", "string"] - }, - "publisher_property ": { - "type": ["null", "string"] - }, - "publisher_property_id ": { - "type": ["null", "string"] - }, - "publisher_property_section ": { - "type": ["null", "string"] - }, - "publisher_property_section_id ": { - "type": ["null", "string"] - }, - "refund_reason ": { - "type": ["null", "string"] - }, - "region ": { - "type": ["null", "string"] - }, - "region_id ": { - "type": ["null", "string"] - }, - "rewarded ": { - "type": ["null", "string"] - }, - "sensitive_category ": { - "type": ["null", "string"] - }, - "served_pixel_density ": { - "type": ["null", "string"] - }, - "targeted_data_providers ": { - "type": ["null", "string"] - }, - "time_of_day ": { - "type": ["null", "string"] - }, - "time_to_conversion ": { - "type": ["null", "string"] - }, - "variant_id ": { - "type": ["null", "string"] - }, - "variant_name ": { - "type": ["null", "string"] - }, - "variant_version ": { - "type": ["null", "string"] - }, - "verification_video_player_size ": { - "type": ["null", "string"] - }, - "verification_video_position ": { - "type": ["null", "string"] - }, - "video_continuous_play ": { - "type": ["null", "string"] - }, - "video_player_size ": { - "type": ["null", "string"] - }, - "video_skippable_support ": { - "type": ["null", "string"] - }, - "week ": { - "type": ["null", "string"] - }, - "year ": { - "type": ["null", "string"] - }, - "zip_code ": { - "type": ["null", "string"] - }, - "zip_code_id ": { - "type": ["null", "string"] - }, - "pct_clicks_leading_to_conversions ": { - "type": ["null", "string"] - }, - "pct_impressions_leading_to_conversions ": { - "type": ["null", "string"] - }, - "pct_impressions_with_positive_custom_value ": { - "type": ["null", "string"] - }, - "active_view_pct_audible_and_visible_at_completion ": { - "type": ["null", "string"] - }, - "active_view_pct_audible_and_visible_at_first_quartile ": { - "type": ["null", "string"] - }, - "active_view_pct_audible_and_visible_at_midpoint ": { - "type": ["null", "string"] - }, - "active_view_pct_audible_and_visible_at_start ": { - "type": ["null", "string"] - }, - "active_view_pct_audible_and_visible_at_third_quartile ": { - "type": ["null", "string"] - }, - "active_view_pct_audible_impressions ": { - "type": ["null", "string"] - }, - "active_view_pct_full_screen ": { - "type": ["null", "string"] - }, - "active_view_pct_fully_on_screen_2_sec ": { - "type": ["null", "string"] - }, - "active_view_pct_in_background ": { - "type": ["null", "string"] - }, - "active_view_pct_measurable_impressions ": { - "type": ["null", "string"] - }, - "active_view_pct_of_ad_played ": { - "type": ["null", "string"] - }, - "active_view_pct_of_completed_impressions_audible_and_visible ": { - "type": ["null", "string"] - }, - "active_view_pct_of_completed_impressions_visible ": { - "type": ["null", "string"] - }, - "active_view_pct_of_first_quartile_impressions_audible_and_visible ": { - "type": ["null", "string"] - }, - "active_view_pct_of_first_quartile_impressions_visible ": { - "type": ["null", "string"] - }, - "active_view_pct_of_midpoint_impressions_audible_and_visible ": { - "type": ["null", "string"] - }, - "active_view_pct_of_midpoint_impressions_visible ": { - "type": ["null", "string"] - }, - "active_view_pct_of_third_quartile_impressions_audible_and_visible ": { - "type": ["null", "string"] - }, - "active_view_pct_of_third_quartile_impressions_visible ": { - "type": ["null", "string"] - }, - "active_view_pct_play_time_audible ": { - "type": ["null", "string"] 
- }, - "active_view_pct_play_time_audible_and_visible ": { - "type": ["null", "string"] - }, - "active_view_pct_play_time_visible ": { - "type": ["null", "string"] - }, - "active_view_pct_viewable_impressions ": { - "type": ["null", "string"] - }, - "active_view_pct_visible_10_seconds ": { - "type": ["null", "string"] - }, - "active_view_pct_visible_at_completion ": { - "type": ["null", "string"] - }, - "active_view_pct_visible_at_first_quartile ": { - "type": ["null", "string"] - }, - "active_view_pct_visible_at_midpoint ": { - "type": ["null", "string"] - }, - "active_view_pct_visible_at_start ": { - "type": ["null", "string"] - }, - "active_view_pct_visible_at_third_quartile ": { - "type": ["null", "string"] - }, - "active_view_audible_and_fully_on_screen_for_half_of_duration_15_sec_cap_impressions ": { - "type": ["null", "string"] - }, - "active_view_audible_and_fully_on_screen_for_half_of_duration_15_sec_cap_measurable_impressions ": { - "type": ["null", "string"] - }, - "active_view_audible_and_fully_on_screen_for_half_of_duration_15_sec_cap_rate ": { - "type": ["null", "string"] - }, - "active_view_audible_and_fully_on_screen_for_half_of_duration_trueview_impressions ": { - "type": ["null", "string"] - }, - "active_view_audible_and_fully_on_screen_for_half_of_duration_trueview_measurable_impressions ": { - "type": ["null", "string"] - }, - "active_view_audible_and_fully_on_screen_for_half_of_duration_trueview_rate ": { - "type": ["null", "string"] - }, - "active_view_average_viewable_time_seconds ": { - "type": ["null", "string"] - }, - "active_view_custom_metric_measurable_impressions ": { - "type": ["null", "string"] - }, - "active_view_custom_metric_viewable_impressions ": { - "type": ["null", "string"] - }, - "active_view_custom_metric_viewable_rate ": { - "type": ["null", "string"] - }, - "active_view_eligible_impressions ": { - "type": ["null", "string"] - }, - "active_view_impression_distribution_not_measurable ": { - "type": ["null", "string"] - }, - "active_view_impression_distribution_not_viewable ": { - "type": ["null", "string"] - }, - "active_view_impression_distribution_viewable ": { - "type": ["null", "string"] - }, - "active_view_impressions_audible_and_visible_at_completion ": { - "type": ["null", "string"] - }, - "active_view_impressions_visible_10_seconds ": { - "type": ["null", "string"] - }, - "active_view_measurable_impressions ": { - "type": ["null", "string"] - }, - "active_view_not_measurable_impressions ": { - "type": ["null", "string"] - }, - "active_view_not_viewable_impressions ": { - "type": ["null", "string"] - }, - "active_view_viewable_impressions ": { - "type": ["null", "string"] - }, - "adlingo_fee_advertiser_currency ": { - "type": ["null", "string"] - }, - "adloox_fee_advertiser_currency ": { - "type": ["null", "string"] - }, - "adloox_fee_partner_currency ": { - "type": ["null", "string"] - }, - "adloox_fee_usd ": { - "type": ["null", "string"] - }, - "adloox_pre_bid_fee_advertiser_currency ": { - "type": ["null", "string"] - }, - "adloox_pre_bid_fee_partner_currency ": { - "type": ["null", "string"] - }, - "adloox_pre_bid_fee_usd ": { - "type": ["null", "string"] - }, - "adsafe_fee_advertiser_currency ": { - "type": ["null", "string"] - }, - "adsafe_fee_partner_currency ": { - "type": ["null", "string"] - }, - "adsafe_fee_usd ": { - "type": ["null", "string"] - }, - "adxpose_fee_advertiser_currency ": { - "type": ["null", "string"] - }, - "adxpose_fee_partner_currency ": { - "type": ["null", "string"] - }, - "adxpose_fee_usd ": { - "type": 
["null", "string"] - }, - "agency_trading_desk_fee_advertiser_currency ": { - "type": ["null", "string"] - }, - "agency_trading_desk_fee_partner_currency ": { - "type": ["null", "string"] - }, - "agency_trading_desk_fee_usd ": { - "type": ["null", "string"] - }, - "aggregate_knowledge_fee_advertiser_currency ": { - "type": ["null", "string"] - }, - "aggregate_knowledge_fee_partner_currency ": { - "type": ["null", "string"] - }, - "aggregate_knowledge_fee_usd ": { - "type": ["null", "string"] - }, - "audio_client_cost_ecpcl_advertiser_currency ": { - "type": ["null", "string"] - }, - "audio_media_cost_ecpcl_advertiser_currency ": { - "type": ["null", "string"] - }, - "audio_mutes_audio ": { - "type": ["null", "string"] - }, - "audio_mutes_video ": { - "type": ["null", "string"] - }, - "audio_revenue_ecpcl_advertiser_currency ": { - "type": ["null", "string"] - }, - "audio_unmutes_audio ": { - "type": ["null", "string"] - }, - "audio_unmutes_video ": { - "type": ["null", "string"] - }, - "average_display_time ": { - "type": ["null", "string"] - }, - "average_interaction_time ": { - "type": ["null", "string"] - }, - "begin_to_render_eligible_impressions ": { - "type": ["null", "string"] - }, - "begin_to_render_impressions ": { - "type": ["null", "string"] - }, - "billable_cost_advertiser_currency ": { - "type": ["null", "string"] - }, - "billable_cost_partner_currency ": { - "type": ["null", "string"] - }, - "billable_cost_usd ": { - "type": ["null", "string"] - }, - "billable_impressions ": { - "type": ["null", "string"] - }, - "click_rate_ctr ": { - "type": ["null", "string"] - }, - "clicks ": { - "type": ["null", "string"] - }, - "client_cost_advertiser_currency ": { - "type": ["null", "string"] - }, - "client_cost_ecpa_advertiser_currency ": { - "type": ["null", "string"] - }, - "client_cost_ecpa_pc_advertiser_currency ": { - "type": ["null", "string"] - }, - "client_cost_ecpa_pv_advertiser_currency ": { - "type": ["null", "string"] - }, - "client_cost_ecpc_advertiser_currency ": { - "type": ["null", "string"] - }, - "client_cost_ecpm_advertiser_currency ": { - "type": ["null", "string"] - }, - "client_cost_viewable_ecpm_advertiser_currency ": { - "type": ["null", "string"] - }, - "cm_post_click_revenue ": { - "type": ["null", "string"] - }, - "cm_post_click_revenue__cross_environment ": { - "type": ["null", "string"] - }, - "cm_post_view_revenue ": { - "type": ["null", "string"] - }, - "cm_post_view_revenue__cross_environment ": { - "type": ["null", "string"] - }, - "companion_clicks_audio ": { - "type": ["null", "string"] - }, - "companion_clicks_video ": { - "type": ["null", "string"] - }, - "companion_impressions_audio ": { - "type": ["null", "string"] - }, - "companion_impressions_video ": { - "type": ["null", "string"] - }, - "complete_listens_audio ": { - "type": ["null", "string"] - }, - "complete_views_video ": { - "type": ["null", "string"] - }, - "completion_rate_audio ": { - "type": ["null", "string"] - }, - "completion_rate_video ": { - "type": ["null", "string"] - }, - "comscore_vce_in_doubleclick_fee_advertiser_currency ": { - "type": ["null", "string"] - }, - "comscore_vce_in_doubleclick_fee_partner_currency ": { - "type": ["null", "string"] - }, - "comscore_vce_in_doubleclick_fee_usd ": { - "type": ["null", "string"] - }, - "conversions_per_1000_impressions ": { - "type": ["null", "string"] - }, - "cookie_unconsented_clicks ": { - "type": ["null", "string"] - }, - "counters ": { - "type": ["null", "string"] - }, - "cpm_fee_1_advertiser_currency ": { - "type": ["null", 
"string"] - }, - "cpm_fee_1_partner_currency ": { - "type": ["null", "string"] - }, - "cpm_fee_1_usd ": { - "type": ["null", "string"] - }, - "cpm_fee_2_advertiser_currency ": { - "type": ["null", "string"] - }, - "cpm_fee_2_partner_currency ": { - "type": ["null", "string"] - }, - "cpm_fee_2_usd ": { - "type": ["null", "string"] - }, - "cpm_fee_3_advertiser_currency ": { - "type": ["null", "string"] - }, - "cpm_fee_3_partner_currency ": { - "type": ["null", "string"] - }, - "cpm_fee_3_usd ": { - "type": ["null", "string"] - }, - "cpm_fee_4_advertiser_currency ": { - "type": ["null", "string"] - }, - "cpm_fee_4_partner_currency ": { - "type": ["null", "string"] - }, - "cpm_fee_4_usd ": { - "type": ["null", "string"] - }, - "cpm_fee_5_advertiser_currency ": { - "type": ["null", "string"] - }, - "cpm_fee_5_partner_currency ": { - "type": ["null", "string"] - }, - "cpm_fee_5_usd ": { - "type": ["null", "string"] - }, - "custom_fee_1_advertiser_currency ": { - "type": ["null", "string"] - }, - "custom_fee_2_advertiser_currency ": { - "type": ["null", "string"] - }, - "custom_fee_3_advertiser_currency ": { - "type": ["null", "string"] - }, - "custom_fee_4_advertiser_currency ": { - "type": ["null", "string"] - }, - "custom_fee_5_advertiser_currency ": { - "type": ["null", "string"] - }, - "data_fees_advertiser_currency ": { - "type": ["null", "string"] - }, - "data_fees_partner_currency ": { - "type": ["null", "string"] - }, - "data_fees_usd ": { - "type": ["null", "string"] - }, - "data_management_platform_fee_advertiser_currency ": { - "type": ["null", "string"] - }, - "data_management_platform_fee_partner_currency ": { - "type": ["null", "string"] - }, - "data_management_platform_fee_usd ": { - "type": ["null", "string"] - }, - "doubleverify_fee_advertiser_currency ": { - "type": ["null", "string"] - }, - "doubleverify_fee_partner_currency ": { - "type": ["null", "string"] - }, - "doubleverify_fee_usd ": { - "type": ["null", "string"] - }, - "doubleverify_pre_bid_fee_advertiser_currency ": { - "type": ["null", "string"] - }, - "doubleverify_pre_bid_fee_partner_currency ": { - "type": ["null", "string"] - }, - "doubleverify_pre_bid_fee_usd ": { - "type": ["null", "string"] - }, - "engagement_rate ": { - "type": ["null", "string"] - }, - "engagements ": { - "type": ["null", "string"] - }, - "estimated_cpm_for_impressions_with_custom_value_advertiser_currency ": { - "type": ["null", "string"] - }, - "estimated_total_cost_for_impressions_with_custom_value_advertiser_currency ": { - "type": ["null", "string"] - }, - "evidon_fee_advertiser_currency ": { - "type": ["null", "string"] - }, - "evidon_fee_partner_currency ": { - "type": ["null", "string"] - }, - "evidon_fee_usd ": { - "type": ["null", "string"] - }, - "exits ": { - "type": ["null", "string"] - }, - "expansions ": { - "type": ["null", "string"] - }, - "first_quartile_audio ": { - "type": ["null", "string"] - }, - "first_quartile_views_video ": { - "type": ["null", "string"] - }, - "fullscreens_video ": { - "type": ["null", "string"] - }, - "general_invalid_traffic_givt_active_view_eligible_impressions ": { - "type": ["null", "string"] - }, - "general_invalid_traffic_givt_active_view_measurable_impressions ": { - "type": ["null", "string"] - }, - "general_invalid_traffic_givt_active_view_viewable_impressions ": { - "type": ["null", "string"] - }, - "general_invalid_traffic_givt_begin_to_render_impressions ": { - "type": ["null", "string"] - }, - "general_invalid_traffic_givt_clicks ": { - "type": ["null", "string"] - }, - 
"general_invalid_traffic_givt_impressions ": { - "type": ["null", "string"] - }, - "general_invalid_traffic_givt_tracked_ads ": { - "type": ["null", "string"] - }, - "gmail_conversions ": { - "type": ["null", "string"] - }, - "gmail_post_click_conversions ": { - "type": ["null", "string"] - }, - "gmail_post_view_conversions ": { - "type": ["null", "string"] - }, - "impression_custom_value_cost ": { - "type": ["null", "string"] - }, - "impressions ": { - "type": ["null", "string"] - }, - "impressions_with_custom_value ": { - "type": ["null", "string"] - }, - "impressions_with_positive_custom_value ": { - "type": ["null", "string"] - }, - "integral_ad_science_pre_bid_fee_advertiser_currency ": { - "type": ["null", "string"] - }, - "integral_ad_science_pre_bid_fee_partner_currency ": { - "type": ["null", "string"] - }, - "integral_ad_science_pre_bid_fee_usd ": { - "type": ["null", "string"] - }, - "integral_ad_science_video_fee_advertiser_currency ": { - "type": ["null", "string"] - }, - "integral_ad_science_video_fee_partner_currency ": { - "type": ["null", "string"] - }, - "integral_ad_science_video_fee_usd ": { - "type": ["null", "string"] - }, - "interactive_impressions ": { - "type": ["null", "string"] - }, - "invalid_active_view_eligible_impressions ": { - "type": ["null", "string"] - }, - "invalid_active_view_measurable_impressions ": { - "type": ["null", "string"] - }, - "invalid_active_view_viewable_impressions ": { - "type": ["null", "string"] - }, - "invalid_begin_to_render_impressions ": { - "type": ["null", "string"] - }, - "invalid_clicks ": { - "type": ["null", "string"] - }, - "invalid_impressions ": { - "type": ["null", "string"] - }, - "invalid_tracked_ads ": { - "type": ["null", "string"] - }, - "media_cost_advertiser_currency ": { - "type": ["null", "string"] - }, - "media_cost_partner_currency ": { - "type": ["null", "string"] - }, - "media_cost_usd ": { - "type": ["null", "string"] - }, - "media_cost_ecpa_advertiser_currency ": { - "type": ["null", "string"] - }, - "media_cost_ecpa_partner_currency ": { - "type": ["null", "string"] - }, - "media_cost_ecpa_pc_advertiser_currency ": { - "type": ["null", "string"] - }, - "media_cost_ecpa_pv_advertiser_currency ": { - "type": ["null", "string"] - }, - "media_cost_ecpa_usd ": { - "type": ["null", "string"] - }, - "media_cost_ecpc_advertiser_currency ": { - "type": ["null", "string"] - }, - "media_cost_ecpc_partner_currency ": { - "type": ["null", "string"] - }, - "media_cost_ecpc_pc_partner_currency ": { - "type": ["null", "string"] - }, - "media_cost_ecpc_pc_usd ": { - "type": ["null", "string"] - }, - "media_cost_ecpc_pv_partner_currency ": { - "type": ["null", "string"] - }, - "media_cost_ecpc_pv_usd ": { - "type": ["null", "string"] - }, - "media_cost_ecpc_usd ": { - "type": ["null", "string"] - }, - "media_cost_ecpm_advertiser_currency ": { - "type": ["null", "string"] - }, - "media_cost_ecpm_partner_currency ": { - "type": ["null", "string"] - }, - "media_cost_ecpm_usd ": { - "type": ["null", "string"] - }, - "media_cost_viewable_ecpm_advertiser_currency ": { - "type": ["null", "string"] - }, - "media_cost_viewable_ecpm_partner_currency ": { - "type": ["null", "string"] - }, - "media_cost_viewable_ecpm_usd ": { - "type": ["null", "string"] - }, - "media_fee_1_advertiser_currency ": { - "type": ["null", "string"] - }, - "media_fee_1_partner_currency ": { - "type": ["null", "string"] - }, - "media_fee_1_usd ": { - "type": ["null", "string"] - }, - "media_fee_2_advertiser_currency ": { - "type": ["null", "string"] - }, - 
"media_fee_2_partner_currency ": { - "type": ["null", "string"] - }, - "media_fee_2_usd ": { - "type": ["null", "string"] - }, - "media_fee_3_advertiser_currency ": { - "type": ["null", "string"] - }, - "media_fee_3_partner_currency ": { - "type": ["null", "string"] - }, - "media_fee_3_usd ": { - "type": ["null", "string"] - }, - "media_fee_4_advertiser_currency ": { - "type": ["null", "string"] - }, - "media_fee_4_partner_currency ": { - "type": ["null", "string"] - }, - "media_fee_4_usd ": { - "type": ["null", "string"] - }, - "media_fee_5_advertiser_currency ": { - "type": ["null", "string"] - }, - "media_fee_5_partner_currency ": { - "type": ["null", "string"] - }, - "media_fee_5_usd ": { - "type": ["null", "string"] - }, - "mediacost_data_fee_advertiser_currency ": { - "type": ["null", "string"] - }, - "mediacost_data_fee_partner_currency ": { - "type": ["null", "string"] - }, - "mediacost_data_fee_usd ": { - "type": ["null", "string"] - }, - "midpoint_audio ": { - "type": ["null", "string"] - }, - "midpoint_views_video ": { - "type": ["null", "string"] - }, - "moat_video_fee_advertiser_currency ": { - "type": ["null", "string"] - }, - "moat_video_fee_partner_currency ": { - "type": ["null", "string"] - }, - "moat_video_fee_usd ": { - "type": ["null", "string"] - }, - "nielsen_digital_ad_ratings_fee_advertiser_currency ": { - "type": ["null", "string"] - }, - "nielsen_digital_ad_ratings_fee_partner_currency ": { - "type": ["null", "string"] - }, - "nielsen_digital_ad_ratings_fee_usd ": { - "type": ["null", "string"] - }, - "pauses_audio ": { - "type": ["null", "string"] - }, - "pauses_video ": { - "type": ["null", "string"] - }, - "platform_fee_advertiser_currency ": { - "type": ["null", "string"] - }, - "platform_fee_partner_currency ": { - "type": ["null", "string"] - }, - "platform_fee_usd ": { - "type": ["null", "string"] - }, - "platform_fee_rate ": { - "type": ["null", "string"] - }, - "post_click_conversions ": { - "type": ["null", "string"] - }, - "post_view_conversions ": { - "type": ["null", "string"] - }, - "post_view_conversions__cross_environment ": { - "type": ["null", "string"] - }, - "premium_fee_advertiser_currency ": { - "type": ["null", "string"] - }, - "profit_advertiser_currency ": { - "type": ["null", "string"] - }, - "profit_partner_currency ": { - "type": ["null", "string"] - }, - "profit_usd ": { - "type": ["null", "string"] - }, - "profit_ecpm_advertiser_currency ": { - "type": ["null", "string"] - }, - "profit_ecpm_partner_currency ": { - "type": ["null", "string"] - }, - "profit_ecpm_usd ": { - "type": ["null", "string"] - }, - "profit_margin ": { - "type": ["null", "string"] - }, - "profit_viewable_ecpm_advertiser_currency ": { - "type": ["null", "string"] - }, - "profit_viewable_ecpm_partner_currency ": { - "type": ["null", "string"] - }, - "profit_viewable_ecpm_usd ": { - "type": ["null", "string"] - }, - "programmatic_guaranteed_impressions_passed_due_to_frequency ": { - "type": ["null", "string"] - }, - "programmatic_guaranteed_savings_re_invested_due_to_frequency_advertiser_currency ": { - "type": ["null", "string"] - }, - "refund_billable_cost_advertiser_currency ": { - "type": ["null", "string"] - }, - "refund_media_cost_advertiser_currency ": { - "type": ["null", "string"] - }, - "refund_platform_fee_advertiser_currency ": { - "type": ["null", "string"] - }, - "revenue_advertiser_currency ": { - "type": ["null", "string"] - }, - "revenue_partner_currency ": { - "type": ["null", "string"] - }, - "revenue_usd ": { - "type": ["null", "string"] - }, - 
"revenue_ecpa_advertiser_currency ": { - "type": ["null", "string"] - }, - "revenue_ecpa_partner_currency ": { - "type": ["null", "string"] - }, - "revenue_ecpa_pc_advertiser_currency ": { - "type": ["null", "string"] - }, - "revenue_ecpa_pc_partner_currency ": { - "type": ["null", "string"] - }, - "revenue_ecpa_pc_usd ": { - "type": ["null", "string"] - }, - "revenue_ecpa_pv_advertiser_currency ": { - "type": ["null", "string"] - }, - "revenue_ecpa_pv_partner_currency ": { - "type": ["null", "string"] - }, - "revenue_ecpa_pv_usd ": { - "type": ["null", "string"] - }, - "revenue_ecpa_usd ": { - "type": ["null", "string"] - }, - "revenue_ecpc_advertiser_currency ": { - "type": ["null", "string"] - }, - "revenue_ecpc_partner_currency ": { - "type": ["null", "string"] - }, - "revenue_ecpc_usd ": { - "type": ["null", "string"] - }, - "revenue_ecpe_advertiser_currency ": { - "type": ["null", "string"] - }, - "revenue_ecpe_partner_currency ": { - "type": ["null", "string"] - }, - "revenue_ecpe_usd ": { - "type": ["null", "string"] - }, - "revenue_ecpm_advertiser_currency ": { - "type": ["null", "string"] - }, - "revenue_ecpm_partner_currency ": { - "type": ["null", "string"] - }, - "revenue_ecpm_usd ": { - "type": ["null", "string"] - }, - "revenue_ecpv_advertiser_currency ": { - "type": ["null", "string"] - }, - "revenue_ecpv_partner_currency ": { - "type": ["null", "string"] - }, - "revenue_ecpv_usd ": { - "type": ["null", "string"] - }, - "revenue_viewable_ecpm_advertiser_currency ": { - "type": ["null", "string"] - }, - "revenue_viewable_ecpm_partner_currency ": { - "type": ["null", "string"] - }, - "revenue_viewable_ecpm_usd ": { - "type": ["null", "string"] - }, - "rich_media_engagements ": { - "type": ["null", "string"] - }, - "scrolls ": { - "type": ["null", "string"] - }, - "shoplocal_fee_advertiser_currency ": { - "type": ["null", "string"] - }, - "shoplocal_fee_partner_currency ": { - "type": ["null", "string"] - }, - "shoplocal_fee_usd ": { - "type": ["null", "string"] - }, - "skips_video ": { - "type": ["null", "string"] - }, - "starts_audio ": { - "type": ["null", "string"] - }, - "starts_video ": { - "type": ["null", "string"] - }, - "stops_audio ": { - "type": ["null", "string"] - }, - "teracent_fee_advertiser_currency ": { - "type": ["null", "string"] - }, - "teracent_fee_partner_currency ": { - "type": ["null", "string"] - }, - "teracent_fee_usd ": { - "type": ["null", "string"] - }, - "third_party_ad_server_fee_advertiser_currency ": { - "type": ["null", "string"] - }, - "third_party_ad_server_fee_partner_currency ": { - "type": ["null", "string"] - }, - "third_party_ad_server_fee_usd ": { - "type": ["null", "string"] - }, - "third_quartile_audio ": { - "type": ["null", "string"] - }, - "third_quartile_views_video ": { - "type": ["null", "string"] - }, - "timers ": { - "type": ["null", "string"] - }, - "total_conversions ": { - "type": ["null", "string"] - }, - "total_conversions__cross_environment ": { - "type": ["null", "string"] - }, - "total_display_time ": { - "type": ["null", "string"] - }, - "total_impression_custom_value ": { - "type": ["null", "string"] - }, - "total_interaction_time ": { - "type": ["null", "string"] - }, - "total_media_cost_advertiser_currency ": { - "type": ["null", "string"] - }, - "total_media_cost_partner_currency ": { - "type": ["null", "string"] - }, - "total_media_cost_usd ": { - "type": ["null", "string"] - }, - "total_media_cost_ecpa_advertiser_currency ": { - "type": ["null", "string"] - }, - "total_media_cost_ecpa_partner_currency ": { - 
"type": ["null", "string"] - }, - "total_media_cost_ecpa_pc_advertiser_currency ": { - "type": ["null", "string"] - }, - "total_media_cost_ecpa_pc_partner_currency ": { - "type": ["null", "string"] - }, - "total_media_cost_ecpa_pc_usd ": { - "type": ["null", "string"] - }, - "total_media_cost_ecpa_pv_advertiser_currency ": { - "type": ["null", "string"] - }, - "total_media_cost_ecpa_pv_partner_currency ": { - "type": ["null", "string"] - }, - "total_media_cost_ecpa_pv_usd ": { - "type": ["null", "string"] - }, - "total_media_cost_ecpa_usd ": { - "type": ["null", "string"] - }, - "total_media_cost_ecpc_advertiser_currency ": { - "type": ["null", "string"] - }, - "total_media_cost_ecpc_partner_currency ": { - "type": ["null", "string"] - }, - "total_media_cost_ecpc_usd ": { - "type": ["null", "string"] - }, - "total_media_cost_ecpm_advertiser_currency ": { - "type": ["null", "string"] - }, - "total_media_cost_ecpm_partner_currency ": { - "type": ["null", "string"] - }, - "total_media_cost_ecpm_usd ": { - "type": ["null", "string"] - }, - "total_media_cost_viewable_ecpm_advertiser_currency ": { - "type": ["null", "string"] - }, - "total_media_cost_viewable_ecpm_partner_currency ": { - "type": ["null", "string"] - }, - "total_media_cost_viewable_ecpm_usd ": { - "type": ["null", "string"] - }, - "total_video_media_cost_ecpcv_advertiser_currency ": { - "type": ["null", "string"] - }, - "total_video_media_cost_ecpcv_partner_currency ": { - "type": ["null", "string"] - }, - "total_video_media_cost_ecpcv_usd ": { - "type": ["null", "string"] - }, - "tracked_ads ": { - "type": ["null", "string"] - }, - "trueview_general_invalid_traffic_givt_views ": { - "type": ["null", "string"] - }, - "trueview_invalid_views ": { - "type": ["null", "string"] - }, - "trustmetrics_fee_advertiser_currency ": { - "type": ["null", "string"] - }, - "trustmetrics_fee_partner_currency ": { - "type": ["null", "string"] - }, - "trustmetrics_fee_usd ": { - "type": ["null", "string"] - }, - "verifiable_impressions ": { - "type": ["null", "string"] - }, - "video_client_cost_ecpcv_advertiser_currency ": { - "type": ["null", "string"] - }, - "video_media_cost_ecpcv_advertiser_currency ": { - "type": ["null", "string"] - }, - "video_media_cost_ecpcv_partner_currency ": { - "type": ["null", "string"] - }, - "video_media_cost_ecpcv_usd ": { - "type": ["null", "string"] - }, - "vizu_fee_advertiser_currency ": { - "type": ["null", "string"] - }, - "vizu_fee_partner_currency ": { - "type": ["null", "string"] - }, - "vizu_fee_usd ": { - "type": ["null", "string"] - }, - "youtube_view_rate ": { - "type": ["null", "string"] - }, - "youtube_views ": { - "type": ["null", "string"] - } - } - }, - "supported_sync_modes": ["full_refresh", "incremental"] - }, - { - "name": "audience_composition", - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "audience_list": { - "type": ["null", "string"] - }, - "date": { - "type": ["null", "string"] - }, - "eligible_cookies_on_first_party_audience_list": { - "type": ["null", "string"] - }, - "eligible_cookies_on_third_party_audience_list_and_interest": { - "type": ["null", "string"] - }, - "first_party_audience_list": { - "type": ["null", "string"] - }, - "first_party_audience_list_cost": { - "type": ["null", "string"] - }, - "first_party_audience_list_id": { - "type": ["null", "string"] - }, - "first_party_audience_list_type": { - "type": ["null", "string"] - }, - "match_ratio": { - "type": ["null", "string"] - }, - 
"third_party_audience_list": { - "type": ["null", "string"] - }, - "third_party_audience_list_cost": { - "type": ["null", "string"] - }, - "third_party_audience_list_id": { - "type": ["null", "string"] - }, - "third_party_audience_list_type": { - "type": ["null", "string"] - }, - "potential_impressions": { - "type": ["null", "string"] - }, - "unique_cookies_with_impressions": { - "type": ["null", "string"] - } - } - }, - "supported_sync_modes": ["full_refresh", "incremental"] - }, - { - "name": "floodlight", - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "advertiser": { - "type": ["null", "string"] - }, - "advertiser_currency": { - "type": ["null", "string"] - }, - "advertiser_id": { - "type": ["null", "string"] - }, - "advertiser_integration_code": { - "type": ["null", "string"] - }, - "advertiser_status": { - "type": ["null", "string"] - }, - "advertiser_time_zone": { - "type": ["null", "string"] - }, - "app_url": { - "type": ["null", "string"] - }, - "app_url_excluded": { - "type": ["null", "string"] - }, - "app_url_id": { - "type": ["null", "string"] - }, - "campaign": { - "type": ["null", "string"] - }, - "campaign_id": { - "type": ["null", "string"] - }, - "category": { - "type": ["null", "string"] - }, - "cm_placement_id": { - "type": ["null", "string"] - }, - "creative": { - "type": ["null", "string"] - }, - "creative_asset": { - "type": ["null", "string"] - }, - "creative_attributes": { - "type": ["null", "string"] - }, - "creative_height": { - "type": ["null", "string"] - }, - "creative_id": { - "type": ["null", "string"] - }, - "creative_integration_code": { - "type": ["null", "string"] - }, - "creative_rendered_in_amp": { - "type": ["null", "string"] - }, - "creative_size": { - "type": ["null", "string"] - }, - "creative_source": { - "type": ["null", "string"] - }, - "creative_status": { - "type": ["null", "string"] - }, - "creative_type": { - "type": ["null", "string"] - }, - "creative_width": { - "type": ["null", "string"] - }, - "date": { - "type": ["null", "string"] - }, - "day_of_week": { - "type": ["null", "string"] - }, - "exchange": { - "type": ["null", "string"] - }, - "exchange_code": { - "type": ["null", "string"] - }, - "exchange_id": { - "type": ["null", "string"] - }, - "floodlight_activity": { - "type": ["null", "string"] - }, - "floodlight_activity_id": { - "type": ["null", "string"] - }, - "insertion_order": { - "type": ["null", "string"] - }, - "insertion_order_integration_code": { - "type": ["null", "string"] - }, - "insertion_order_status": { - "type": ["null", "string"] - }, - "line_item": { - "type": ["null", "string"] - }, - "line_item_id": { - "type": ["null", "string"] - }, - "line_item_integration_code": { - "type": ["null", "string"] - }, - "line_item_status": { - "type": ["null", "string"] - }, - "line_item_type": { - "type": ["null", "string"] - }, - "month": { - "type": ["null", "string"] - }, - "order_id": { - "type": ["null", "string"] - }, - "partner": { - "type": ["null", "string"] - }, - "partner_currency": { - "type": ["null", "string"] - }, - "partner_id": { - "type": ["null", "string"] - }, - "partner_status": { - "type": ["null", "string"] - }, - "targeted_data_providers": { - "type": ["null", "string"] - }, - "year": { - "type": ["null", "string"] - }, - "cm_post_click_revenue": { - "type": ["null", "string"] - }, - "cm_post_view_revenue": { - "type": ["null", "string"] - }, - "cookie_consented_floodlight_impressions": { - "type": ["null", "string"] - }, - 
"cookie_unconsented_floodlight_impressions": { - "type": ["null", "string"] - }, - "duplicate_floodlight_impressions": { - "type": ["null", "string"] - }, - "floodlight_impressions": { - "type": ["null", "string"] - }, - "post_click_conversions": { - "type": ["null", "string"] - }, - "post_view_conversions": { - "type": ["null", "string"] - }, - "total_conversions": { - "type": ["null", "string"] - } - } - }, - "supported_sync_modes": ["full_refresh", "incremental"] - }, - { - "name": "unique_reach_audience", - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "advertiser ": { - "type": ["null", "string"] - }, - "advertiser_id ": { - "type": ["null", "string"] - }, - "age ": { - "type": ["null", "string"] - }, - "country ": { - "type": ["null", "string"] - }, - "gender ": { - "type": ["null", "string"] - }, - "date ": { - "type": ["null", "string"] - }, - "insertion_order_id ": { - "type": ["null", "string"] - }, - "insertion_order_integration_code ": { - "type": ["null", "string"] - }, - "insertion_order ": { - "type": ["null", "string"] - }, - "insertion_order_status ": { - "type": ["null", "string"] - }, - "line_item_id ": { - "type": ["null", "string"] - }, - "line_item_integration_code ": { - "type": ["null", "string"] - }, - "line_item ": { - "type": ["null", "string"] - }, - "line_item_status ": { - "type": ["null", "string"] - }, - "line_item_type ": { - "type": ["null", "string"] - }, - "device_type ": { - "type": ["null", "string"] - }, - "creative ": { - "type": ["null", "string"] - }, - "creative_height ": { - "type": ["null", "string"] - }, - "creative_id ": { - "type": ["null", "string"] - }, - "creative_size ": { - "type": ["null", "string"] - }, - "creative_source ": { - "type": ["null", "string"] - }, - "creative_status ": { - "type": ["null", "string"] - }, - "creative_type ": { - "type": ["null", "string"] - }, - "creative_width ": { - "type": ["null", "string"] - }, - "partner_id ": { - "type": ["null", "string"] - }, - "partner ": { - "type": ["null", "string"] - }, - "month ": { - "type": ["null", "string"] - }, - "campaign_id ": { - "type": ["null", "string"] - }, - "campaign ": { - "type": ["null", "string"] - }, - "pct_composition_impressions ": { - "type": ["null", "string"] - }, - "pct_composition_reach ": { - "type": ["null", "string"] - }, - "pct_population_reach ": { - "type": ["null", "string"] - }, - "clicks ": { - "type": ["null", "string"] - }, - "impressions ": { - "type": ["null", "string"] - }, - "population ": { - "type": ["null", "string"] - }, - "target_rating_points ": { - "type": ["null", "string"] - }, - "unique_reach_average_impression_frequency ": { - "type": ["null", "string"] - }, - "unique_reach_click_reach ": { - "type": ["null", "string"] - }, - "unique_reach_impression_reach ": { - "type": ["null", "string"] - }, - "unique_reach_viewable_impression_reach ": { - "type": ["null", "string"] - }, - "viewable_target_rating_points ": { - "type": ["null", "string"] - }, - "viewable_impressions ": { - "type": ["null", "string"] - }, - "pct_viewable_composition_impressions ": { - "type": ["null", "string"] - }, - "pct_viewable_composition_reach ": { - "type": ["null", "string"] - }, - "pct_viewable_population_reach ": { - "type": ["null", "string"] - } - } - }, - "supported_sync_modes": ["full_refresh", "incremental"] - } - ] - } -} diff --git a/airbyte-integrations/connectors/source-dv-360/integration_tests/configured_catalog.json 
b/airbyte-integrations/connectors/source-dv-360/integration_tests/configured_catalog.json
deleted file mode 100644
index f36ba916f928..000000000000
--- a/airbyte-integrations/connectors/source-dv-360/integration_tests/configured_catalog.json
+++ /dev/null
@@ -1,100 +0,0 @@
-{
-  "streams": [
-    {
-      "stream": {
-        "name": "standard",
-        "json_schema": {
-          "$schema": "http://json-schema.org/draft-07/schema#",
-          "type": "object",
-          "properties": {
-            "date": {
-              "type": ["null", "string"]
-            },
-            "partner_id": {
-              "type": ["null", "string"]
-            },
-            "partner": {
-              "type": ["null", "string"]
-            },
-            "clicks": {
-              "type": ["null", "string"]
-            }
-          },
-          "supported_sync_modes": ["full_refresh", "incremental"],
-          "source_defined_cursor": false,
-          "default_cursor_field": ["date"]
-        }
-      },
-      "sync_mode": "incremental",
-      "destination_sync_mode": "overwrite",
-      "cursor_field": ["date"]
-    },
-    {
-      "stream": {
-        "name": "reach",
-        "json_schema": {
-          "$schema": "http://json-schema.org/draft-07/schema#",
-          "type": "object",
-          "properties": {
-            "date": {
-              "type": ["null", "string"]
-            },
-            "partner_id": {
-              "type": ["null", "string"]
-            },
-            "partner": {
-              "type": ["null", "string"]
-            },
-            "cookie_reach_impression_reach": {
-              "type": ["null", "string"]
-            }
-          }
-        },
-        "supported_sync_modes": ["full_refresh", "incremental"],
-        "source_defined_cursor": false,
-        "default_cursor_field": ["date"]
-      },
-      "sync_mode": "incremental",
-      "destination_sync_mode": "overwrite",
-      "cursor_field": ["date"]
-    },
-    {
-      "stream": {
-        "name": "unique_reach_audience",
-        "json_schema": {
-          "$schema": "http://json-schema.org/draft-07/schema#",
-          "type": "object",
-          "properties": {
-            "date": {
-              "type": ["null", "string"]
-            },
-            "partner_id": {
-              "type": ["null", "string"]
-            },
-            "partner": {
-              "type": ["null", "string"]
-            },
-            "country": {
-              "type": ["null", "string"]
-            },
-            "age": {
-              "type": ["null", "string"]
-            },
-            "gender": {
-              "type": ["null", "string"]
-            },
-            "viewable_impressions": {
-              "type": ["null", "string"]
-            }
-          },
-          "supported_sync_modes": ["full_refresh", "incremental"],
-          "source_defined_cursor": false,
-          "default_cursor_field": ["date"]
-        }
-      },
-      "sync_mode": "incremental",
-      "destination_sync_mode": "overwrite",
-      "cursor_field": ["date"]
-    }
-  ]
-}
diff --git a/airbyte-integrations/connectors/source-dv-360/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-dv-360/integration_tests/invalid_config.json
deleted file mode 100644
index cf2b62201433..000000000000
--- a/airbyte-integrations/connectors/source-dv-360/integration_tests/invalid_config.json
+++ /dev/null
@@ -1,12 +0,0 @@
-{
-  "credentials": {
-    "access_token": "access_token",
-    "refresh_token": "refresh_token",
-    "token_uri": "uri",
-    "client_id": "client_id",
-    "client_secret": "client_secret"
-  },
-  "start_date": "2022-03-01",
-  "end_date": "2022-03-08",
-  "partner_id": 123
-}
diff --git a/airbyte-integrations/connectors/source-dv-360/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-dv-360/integration_tests/sample_config.json
deleted file mode 100644
index cad699edff6a..000000000000
--- a/airbyte-integrations/connectors/source-dv-360/integration_tests/sample_config.json
+++ /dev/null
@@ -1,13 +0,0 @@
-{
-  "credentials": {
-    "access_token": "access_token",
-    "refresh_token": "refresh_token",
-    "token_uri": "uri",
-    "client_id": "client_id",
-    "client_secret": "client_secret"
-  },
-  "start_date": "2022-03-01",
-  "end_date": "2022-03-08",
-  "partner_id": 123,
-  "filters": []
-}
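The configured catalog deleted above is what drove which DV-360 streams were synced and on which cursor. As a quick illustration, here is a minimal sketch that loads a local copy of that catalog and prints each stream's sync mode and cursor field; the file path and the stdlib-only approach are assumptions for illustration, not part of the connector.

```python
import json

# A minimal sketch: load a saved copy of the (now deleted) configured catalog
# and show how each stream would be synced. "configured_catalog.json" is a
# hypothetical local path.
with open("configured_catalog.json") as f:
    catalog = json.load(f)

for configured_stream in catalog["streams"]:
    name = configured_stream["stream"]["name"]
    sync_mode = configured_stream["sync_mode"]          # e.g. "incremental"
    cursor = configured_stream.get("cursor_field", [])  # e.g. ["date"]
    print(f"{name}: sync_mode={sync_mode}, cursor_field={cursor}")
```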
diff --git a/airbyte-integrations/connectors/source-dv-360/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-dv-360/integration_tests/sample_state.json
deleted file mode 100644
index 4b3085983f1f..000000000000
--- a/airbyte-integrations/connectors/source-dv-360/integration_tests/sample_state.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-  "standard": {
-    "date": "2022-03-15"
-  }
-}
diff --git a/airbyte-integrations/connectors/source-dv-360/main.py b/airbyte-integrations/connectors/source-dv-360/main.py
deleted file mode 100644
index 4d7158d2a11d..000000000000
--- a/airbyte-integrations/connectors/source-dv-360/main.py
+++ /dev/null
@@ -1,8 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
-
-from source_dv_360.run import run
-
-if __name__ == "__main__":
-    run()
diff --git a/airbyte-integrations/connectors/source-dv-360/requirements.txt b/airbyte-integrations/connectors/source-dv-360/requirements.txt
deleted file mode 100644
index d6e1198b1ab1..000000000000
--- a/airbyte-integrations/connectors/source-dv-360/requirements.txt
+++ /dev/null
@@ -1 +0,0 @@
--e .
diff --git a/airbyte-integrations/connectors/source-dv-360/setup.py b/airbyte-integrations/connectors/source-dv-360/setup.py
deleted file mode 100644
index 850d55c1e665..000000000000
--- a/airbyte-integrations/connectors/source-dv-360/setup.py
+++ /dev/null
@@ -1,40 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
-
-
-from setuptools import find_packages, setup
-
-MAIN_REQUIREMENTS = ["airbyte-cdk~=0.1", "google-api-python-client"]
-
-TEST_REQUIREMENTS = ["requests-mock~=1.9.3", "pytest~=6.1", "pytest-mock"]
-
-setup(
-    entry_points={
-        "console_scripts": [
-            "source-dv-360=source_dv_360.run:run",
-        ],
-    },
-    name="source_dv_360",
-    description="Source implementation for Display & Video 360.",
-    author="Airbyte",
-    author_email="contact@airbyte.io",
-    packages=find_packages(),
-    install_requires=MAIN_REQUIREMENTS,
-    package_data={
-        "": [
-            # Include yaml files in the package (if any)
-            "*.yml",
-            "*.yaml",
-            # Include all json files in the package, up to 4 levels deep
-            "*.json",
-            "*/*.json",
-            "*/*/*.json",
-            "*/*/*/*.json",
-            "*/*/*/*/*.json",
-        ]
-    },
-    extras_require={
-        "tests": TEST_REQUIREMENTS,
-    },
-)
diff --git a/airbyte-integrations/connectors/source-dv-360/source_dv_360/__init__.py b/airbyte-integrations/connectors/source-dv-360/source_dv_360/__init__.py
deleted file mode 100644
index 0b229354f45a..000000000000
--- a/airbyte-integrations/connectors/source-dv-360/source_dv_360/__init__.py
+++ /dev/null
@@ -1,8 +0,0 @@
-#
-# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
-#
-
-
-from .source import SourceDV360
-
-__all__ = ["SourceDV360"]
diff --git a/airbyte-integrations/connectors/source-dv-360/source_dv_360/fields.py b/airbyte-integrations/connectors/source-dv-360/source_dv_360/fields.py
deleted file mode 100644
index b14bdc21486a..000000000000
--- a/airbyte-integrations/connectors/source-dv-360/source_dv_360/fields.py
+++ /dev/null
@@ -1,557 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
-
-
-import re
-
-
-def sanitize(s):
-    s = re.sub(r"[&]", "and", s)
-    s = re.sub(r"[%]", "pct", s)
-    s = re.sub(r"[\s/-]+", "_", s.strip())
-    # Remove punctuation, which is anything that is not either a word or a whitespace character
-    s = re.sub(r"[^\w\s]+", "", s)
-    return s.lower()
-
-
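For clarity, a small usage sketch of the `sanitize` helper deleted above: it normalizes human-readable report-builder column headers into the snake_case field names used throughout the schemas. The sample header strings below are hypothetical, but each result follows directly from the regexes in the function.

```python
import re


def sanitize(s):
    s = re.sub(r"[&]", "and", s)                # "&" -> "and"
    s = re.sub(r"[%]", "pct", s)                # "%" -> "pct"
    s = re.sub(r"[\s/-]+", "_", s.strip())      # whitespace, "/", "-" -> "_"
    s = re.sub(r"[^\w\s]+", "", s)              # drop remaining punctuation
    return s.lower()


# Hypothetical report-builder headers -> connector field names
assert sanitize("% Clicks Leading to Conversions") == "pct_clicks_leading_to_conversions"
assert sanitize("Media Cost (Advertiser Currency)") == "media_cost_advertiser_currency"
assert sanitize("Complete Views (Video)") == "complete_views_video"
```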
"country_id": "FILTER_COUNTRY_ID", - "insertion_order_id": "FILTER_INSERTION_ORDER", - "inventory_source": "FILTER_INVENTORY_SOURCE_NAME", - "active_view_custom_metric_id": "FILTER_ACTIVE_VIEW_CUSTOM_METRIC_ID", - "active_view_custom_metric_name": "FILTER_ACTIVE_VIEW_CUSTOM_METRIC_NAME", - "ad_position": "FILTER_AD_POSITION", - "ad_type": "FILTER_AD_TYPE", - "algorithm": "FILTER_ALGORITHM", - "algorithm_id": "FILTER_ALGORITHM_ID", - "amp_page_request": "FILTER_AMP_PAGE_REQUEST", - "attributed_userlist": "FILTER_ATTRIBUTED_USERLIST", - "attributed_userlist_cost": "FILTER_ATTRIBUTED_USERLIST_COST", - "attributed_userlist_id": "FILTER_TARGETED_USER_LIST", - "attributed_userlist_type": "FILTER_ATTRIBUTED_USERLIST_TYPE", - "attribution_model": "FILTER_ATTRIBUTION_MODEL", - "audience_list_cost": "FILTER_AUDIENCE_LIST_COST", - "audience_list_id": "FILTER_USER_LIST", - "audience_list_type": "FILTER_AUDIENCE_LIST_TYPE", - "audience_name": "FILTER_AUDIENCE_NAME", - "audience_type": "FILTER_AUDIENCE_TYPE", - "authorized_seller_state": "FILTER_AUTHORIZED_SELLER_STATE", - "billable_outcome": "FILTER_BILLABLE_OUTCOME", - "brand_lift_type": "FILTER_BRAND_LIFT_TYPE", - "browser_id": "FILTER_BROWSER", - "budget_segment_description": "FILTER_BUDGET_SEGMENT_DESCRIPTION", - "channel": "FILTER_CHANNEL_NAME", - "channel_id": "FILTER_CHANNEL_ID", - "channel_type": "FILTER_CHANNEL_TYPE", - "city": "FILTER_CITY_NAME", - "city_id": "FILTER_CITY", - "companion_creative": "FILTER_COMPANION_CREATIVE_NAME", - "companion_creative_id": "FILTER_COMPANION_CREATIVE_ID", - "companion_creative_size": "FILTER_COMPANION_CREATIVE_SIZE", - "data_provider": "FILTER_DATA_PROVIDER_NAME", - "data_provider_id": "FILTER_DATA_PROVIDER", - "detailed_demographics": "FILTER_DETAILED_DEMOGRAPHICS", - "detailed_demographics_id": "FILTER_DETAILED_DEMOGRAPHICS_ID", - "device": "FILTER_DEVICE", - "device_make": "FILTER_DEVICE_MAKE", - "device_model": "FILTER_DEVICE_MODEL", - "device_type": "FILTER_DEVICE_TYPE", - "digital_content_label": "FILTER_DIGITAL_CONTENT_LABEL", - "dma": "FILTER_DMA_NAME", - "dma_code": "FILTER_DMA", - "extension": "FILTER_EXTENSION", - "extension_status": "FILTER_EXTENSION_STATUS", - "extension_type": "FILTER_EXTENSION_TYPE", - "format": "FILTER_FORMAT", - "gmail_age": "FILTER_GMAIL_AGE", - "gmail_city": "FILTER_GMAIL_CITY", - "gmail_country": "FILTER_GMAIL_COUNTRY", - "gmail_device_type": "FILTER_GMAIL_DEVICE_TYPE", - "gmail_gender": "FILTER_GMAIL_GENDER", - "gmail_region": "FILTER_GMAIL_REGION", - "gmail_remarketing_list": "FILTER_GMAIL_REMARKETING_LIST", - "household_income": "FILTER_HOUSEHOLD_INCOME", - "impression_counting_method": "FILTER_IMPRESSION_COUNTING_METHOD", - "insertion_order_daily_frequency": "FILTER_CAMPAIGN_DAILY_FREQUENCY", - "interest": "FILTER_INTEREST", - "inventory_commitment_type": "FILTER_INVENTORY_COMMITMENT_TYPE", - "inventory_delivery_method": "FILTER_INVENTORY_DELIVERY_METHOD", - "inventory_rate_type": "FILTER_INVENTORY_RATE_TYPE", - "inventory_source_group": "FILTER_INVENTORY_SOURCE_GROUP", - "inventory_source_group_id": "FILTER_INVENTORY_SOURCE_GROUP_ID", - "inventory_source_id": "FILTER_INVENTORY_SOURCE_ID", - "inventory_source_id_external": "FILTER_INVENTORY_SOURCE_EXTERNAL_ID", - "inventory_source_type": "FILTER_INVENTORY_SOURCE_TYPE", - "isp_or_carrier": "FILTER_CARRIER_NAME", - "isp_or_carrier_id": "FILTER_CARRIER", - "keyword": "FILTER_KEYWORD", - "life_event": "FILTER_LIFE_EVENT", - "life_events": "FILTER_LIFE_EVENTS", - "line_item_daily_frequency": 
"FILTER_LINE_ITEM_DAILY_FREQUENCY", - "line_item_lifetime_frequency": "FILTER_LINE_ITEM_LIFETIME_FREQUENCY", - "max_video_duration": "FILTER_VIDEO_DURATION_SECONDS", - "measurement_source": "FILTER_MEASUREMENT_SOURCE", - "operating_system": "FILTER_OS", - "platform": "FILTER_PLATFORM", - "playback_method": "FILTER_PLAYBACK_METHOD", - "position_in_content": "FILTER_POSITION_IN_CONTENT", - "public_inventory": "FILTER_PUBLIC_INVENTORY", - "publisher_property": "FILTER_PUBLISHER_PROPERTY", - "publisher_property_id": "FILTER_PUBLISHER_PROPERTY_ID", - "publisher_property_section": "FILTER_PUBLISHER_PROPERTY_SECTION", - "publisher_property_section_id": "FILTER_PUBLISHER_PROPERTY_SECTION_ID", - "refund_reason": "FILTER_REFUND_REASON", - "region": "FILTER_REGION_NAME", - "region_id": "FILTER_REGION", - "rewarded": "FILTER_REWARDED", - "sensitive_category": "FILTER_SENSITIVE_CATEGORY", - "served_pixel_density": "FILTER_SERVED_PIXEL_DENSITY", - "time_of_day": "FILTER_TIME_OF_DAY", - "time_to_conversion": "FILTER_CONVERSION_DELAY", - "variant_id": "FILTER_VARIANT_ID", - "variant_name": "FILTER_VARIANT_NAME", - "variant_version": "FILTER_VARIANT_VERSION", - "verification_video_player_size": "FILTER_VERIFICATION_VIDEO_PLAYER_SIZE", - "verification_video_position": "FILTER_VERIFICATION_VIDEO_POSITION", - "video_continuous_play": "FILTER_VIDEO_CONTINUOUS_PLAY", - "video_player_size": "FILTER_VIDEO_PLAYER_SIZE", - "video_skippable_support": "FILTER_SKIPPABLE_SUPPORT", - "week": "FILTER_WEEK", - "zip_code": "FILTER_ZIP_POSTAL_CODE", - "zip_code_id": "FILTER_ZIP_CODE", - "age": "FILTER_AGE", - "gender": "FILTER_GENDER", - "potential_impressions": "METRIC_POTENTIAL_IMPRESSIONS", - "unique_cookies_with_impressions": "METRIC_UNIQUE_COOKIES_WITH_IMPRESSIONS", - "cm_post_click_revenue": "METRIC_CM_POST_CLICK_REVENUE", - "cm_post_view_revenue": "METRIC_CM_POST_VIEW_REVENUE", - "cookie_consented_floodlight_impressions": "METRIC_COOKIE_CONSENTED_FLOODLIGHT_IMPRESSIONS", - "cookie_unconsented_floodlight_impressions": "METRIC_COOKIE_UNCONSENTED_FLOODLIGHT_IMPRESSIONS", - "duplicate_floodlight_impressions": "METRIC_DUPLICATE_FLOODLIGHT_IMPRESSIONS", - "floodlight_impressions": "METRIC_FLOODLIGHT_IMPRESSIONS", - "post_click_conversions": "METRIC_LAST_CLICKS", - "post_view_conversions": "METRIC_LAST_IMPRESSIONS", - "total_conversions": "METRIC_TOTAL_CONVERSIONS", - "cookie_reach_average_impression_frequency": "METRIC_COOKIE_REACH_AVERAGE_IMPRESSION_FREQUENCY", - "cookie_reach_impression_reach": "METRIC_COOKIE_REACH_IMPRESSION_REACH", - "unique_reach_average_impression_frequency": "METRIC_UNIQUE_REACH_AVERAGE_IMPRESSION_FREQUENCY", - "unique_reach_click_reach": "METRIC_UNIQUE_REACH_CLICK_REACH", - "unique_reach_impression_reach": "METRIC_UNIQUE_REACH_IMPRESSION_REACH", - "unique_reach_total_reach": "METRIC_UNIQUE_REACH_TOTAL_REACH", - "pct_clicks_leading_to_conversions": "METRIC_CLICK_TO_POST_CLICK_CONVERSION_RATE", - "pct_impressions_leading_to_conversions": "METRIC_IMPRESSIONS_TO_CONVERSION_RATE", - "pct_impressions_with_positive_custom_value": "METRIC_PERCENT_IMPRESSIONS_WITH_POSITIVE_CUSTOM_VALUE", - "active_view_pct_audible_and_visible_at_completion": "METRIC_ACTIVE_VIEW_PERCENT_AUDIBLE_VISIBLE_ON_COMPLETE", - "active_view_pct_audible_and_visible_at_first_quartile": "METRIC_ACTIVE_VIEW_PERCENT_AUDIBLE_VISIBLE_FIRST_QUAR", - "active_view_pct_audible_and_visible_at_midpoint": "METRIC_ACTIVE_VIEW_PERCENT_AUDIBLE_VISIBLE_SECOND_QUAR", - "active_view_pct_audible_and_visible_at_start": 
"METRIC_ACTIVE_VIEW_PERCENT_AUDIBLE_VISIBLE_AT_START", - "active_view_pct_audible_and_visible_at_third_quartile": "METRIC_ACTIVE_VIEW_PERCENT_AUDIBLE_VISIBLE_THIRD_QUAR", - "active_view_pct_audible_impressions": "METRIC_ACTIVE_VIEW_PERCENT_AUDIBLE_IMPRESSIONS", - "active_view_pct_full_screen": "METRIC_ACTIVE_VIEW_PERCENT_FULL_SCREEN", - "active_view_pct_fully_on_screen_2_sec": "METRIC_ACTIVE_VIEW_PERCENT_FULLY_ON_SCREEN_2_SEC", - "active_view_pct_in_background": "METRIC_ACTIVE_VIEW_PERCENT_IN_BACKGROUND", - "active_view_pct_measurable_impressions": "METRIC_ACTIVE_VIEW_PCT_MEASURABLE_IMPRESSIONS", - "active_view_pct_of_ad_played": "METRIC_ACTIVE_VIEW_PERCENT_OF_AD_PLAYED", - "active_view_pct_of_completed_impressions_audible_and_visible": "METRIC_ACTIVE_VIEW_PERCENT_OF_COMPLETED_IMPRESSIONS_AUDIBLE_AND_VISIBLE", - "active_view_pct_of_completed_impressions_visible": "METRIC_ACTIVE_VIEW_PERCENT_OF_COMPLETED_IMPRESSIONS_VISIBLE", - "active_view_pct_of_first_quartile_impressions_audible_and_visible": "METRIC_ACTIVE_VIEW_PERCENT_OF_FIRST_QUARTILE_IMPRESSIONS_AUDIBLE_AND_VISIBLE", - "active_view_pct_of_first_quartile_impressions_visible": "METRIC_ACTIVE_VIEW_PERCENT_OF_FIRST_QUARTILE_IMPRESSIONS_VISIBLE", - "active_view_pct_of_midpoint_impressions_audible_and_visible": "METRIC_ACTIVE_VIEW_PERCENT_OF_MIDPOINT_IMPRESSIONS_AUDIBLE_AND_VISIBLE", - "active_view_pct_of_midpoint_impressions_visible": "METRIC_ACTIVE_VIEW_PERCENT_OF_MIDPOINT_IMPRESSIONS_VISIBLE", - "active_view_pct_of_third_quartile_impressions_audible_and_visible": "METRIC_ACTIVE_VIEW_PERCENT_OF_THIRD_QUARTILE_IMPRESSIONS_AUDIBLE_AND_VISIBLE", - "active_view_pct_of_third_quartile_impressions_visible": "METRIC_ACTIVE_VIEW_PERCENT_OF_THIRD_QUARTILE_IMPRESSIONS_VISIBLE", - "active_view_pct_play_time_audible": "METRIC_ACTIVE_VIEW_PERCENT_PLAY_TIME_AUDIBLE", - "active_view_pct_play_time_audible_and_visible": "METRIC_ACTIVE_VIEW_PERCENT_PLAY_TIME_AUDIBLE_AND_VISIBLE", - "active_view_pct_play_time_visible": "METRIC_ACTIVE_VIEW_PERCENT_PLAY_TIME_VISIBLE", - "active_view_pct_viewable_impressions": "METRIC_ACTIVE_VIEW_PCT_VIEWABLE_IMPRESSIONS", - "active_view_pct_visible_10_seconds": "METRIC_ACTIVE_VIEW_PERCENT_VIEWABLE_FOR_TIME_THRESHOLD", - "active_view_pct_visible_at_completion": "METRIC_ACTIVE_VIEW_PERCENT_VISIBLE_ON_COMPLETE", - "active_view_pct_visible_at_first_quartile": "METRIC_ACTIVE_VIEW_PERCENT_VISIBLE_FIRST_QUAR", - "active_view_pct_visible_at_midpoint": "METRIC_ACTIVE_VIEW_PERCENT_VISIBLE_SECOND_QUAR", - "active_view_pct_visible_at_start": "METRIC_ACTIVE_VIEW_PERCENT_VISIBLE_AT_START", - "active_view_pct_visible_at_third_quartile": "METRIC_ACTIVE_VIEW_PERCENT_VISIBLE_THIRD_QUAR", - "active_view_audible_and_fully_on_screen_for_half_of_duration_15_sec_cap_impressions": "METRIC_ACTIVE_VIEW_AUDIBLE_FULLY_ON_SCREEN_HALF_OF_DURATION_IMPRESSIONS", - "active_view_audible_and_fully_on_screen_for_half_of_duration_15_sec_cap_measurable_impressions": "METRIC_ACTIVE_VIEW_AUDIBLE_FULLY_ON_SCREEN_HALF_OF_DURATION_MEASURABLE_IMPRESSIONS", - "active_view_audible_and_fully_on_screen_for_half_of_duration_15_sec_cap_rate": "METRIC_ACTIVE_VIEW_AUDIBLE_FULLY_ON_SCREEN_HALF_OF_DURATION_RATE", - "active_view_audible_and_fully_on_screen_for_half_of_duration_trueview_impressions": "METRIC_ACTIVE_VIEW_AUDIBLE_FULLY_ON_SCREEN_HALF_OF_DURATION_TRUEVIEW_IMPRESSIONS", - "active_view_audible_and_fully_on_screen_for_half_of_duration_trueview_measurable_impressions": "METRIC_ACTIVE_VIEW_AUDIBLE_FULLY_ON_SCREEN_HALF_OF_DURATION_TRUEVIEW_MEASURABLE_IMPRESSIONS", - 
"active_view_audible_and_fully_on_screen_for_half_of_duration_trueview_rate": "METRIC_ACTIVE_VIEW_AUDIBLE_FULLY_ON_SCREEN_HALF_OF_DURATION_TRUEVIEW_RATE", - "active_view_average_viewable_time_seconds": "METRIC_ACTIVE_VIEW_AVERAGE_VIEWABLE_TIME", - "active_view_custom_metric_measurable_impressions": "METRIC_ACTIVE_VIEW_CUSTOM_METRIC_MEASURABLE_IMPRESSIONS", - "active_view_custom_metric_viewable_impressions": "METRIC_ACTIVE_VIEW_CUSTOM_METRIC_VIEWABLE_IMPRESSIONS", - "active_view_custom_metric_viewable_rate": "METRIC_ACTIVE_VIEW_CUSTOM_METRIC_VIEWABLE_RATE", - "active_view_eligible_impressions": "METRIC_ACTIVE_VIEW_ELIGIBLE_IMPRESSIONS", - "active_view_impression_distribution_not_measurable": "METRIC_ACTIVE_VIEW_DISTRIBUTION_UNMEASURABLE", - "active_view_impression_distribution_not_viewable": "METRIC_ACTIVE_VIEW_DISTRIBUTION_UNVIEWABLE", - "active_view_impression_distribution_viewable": "METRIC_ACTIVE_VIEW_DISTRIBUTION_VIEWABLE", - "active_view_impressions_audible_and_visible_at_completion": "METRIC_ACTIVE_VIEW_AUDIBLE_VISIBLE_ON_COMPLETE_IMPRESSIONS", - "active_view_impressions_visible_10_seconds": "METRIC_ACTIVE_VIEW_VIEWABLE_FOR_TIME_THRESHOLD", - "active_view_measurable_impressions": "METRIC_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS", - "active_view_not_measurable_impressions": "METRIC_ACTIVE_VIEW_UNMEASURABLE_IMPRESSIONS", - "active_view_not_viewable_impressions": "METRIC_ACTIVE_VIEW_UNVIEWABLE_IMPRESSIONS", - "active_view_viewable_impressions": "METRIC_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS", - "adlingo_fee_advertiser_currency": "METRIC_ADLINGO_FEE_ADVERTISER_CURRENCY", - "adloox_fee_advertiser_currency": "METRIC_FEE21_ADVERTISER", - "adloox_fee_partner_currency": "METRIC_FEE21_PARTNER", - "adloox_fee_usd": "METRIC_FEE21_USD", - "adloox_pre_bid_fee_advertiser_currency": "METRIC_FEE22_ADVERTISER", - "adloox_pre_bid_fee_partner_currency": "METRIC_FEE22_PARTNER", - "adloox_pre_bid_fee_usd": "METRIC_FEE22_USD", - "adsafe_fee_advertiser_currency": "METRIC_FEE4_ADVERTISER", - "adsafe_fee_partner_currency": "METRIC_FEE4_PARTNER", - "adsafe_fee_usd": "METRIC_FEE4_USD", - "adxpose_fee_advertiser_currency": "METRIC_FEE5_ADVERTISER", - "adxpose_fee_partner_currency": "METRIC_FEE5_PARTNER", - "adxpose_fee_usd": "METRIC_FEE5_USD", - "agency_trading_desk_fee_advertiser_currency": "METRIC_FEE10_ADVERTISER", - "agency_trading_desk_fee_partner_currency": "METRIC_FEE10_PARTNER", - "agency_trading_desk_fee_usd": "METRIC_FEE10_USD", - "aggregate_knowledge_fee_advertiser_currency": "METRIC_FEE7_ADVERTISER", - "aggregate_knowledge_fee_partner_currency": "METRIC_FEE7_PARTNER", - "aggregate_knowledge_fee_usd": "METRIC_FEE7_USD", - "audio_client_cost_ecpcl_advertiser_currency": "METRIC_AUDIO_CLIENT_COST_ECPCL_ADVERTISER_CURRENCY", - "audio_media_cost_ecpcl_advertiser_currency": "METRIC_AUDIO_MEDIA_COST_ECPCL_ADVERTISER_CURRENCY", - "audio_mutes_audio": "METRIC_AUDIO_MUTES_AUDIO", - "audio_mutes_video": "METRIC_RICH_MEDIA_VIDEO_MUTES", - "audio_revenue_ecpcl_advertiser_currency": "METRIC_AUDIO_REVENUE_ECPCL_ADVERTISER_CURRENCY", - "audio_unmutes_audio": "METRIC_AUDIO_UNMUTES_AUDIO", - "audio_unmutes_video": "METRIC_AUDIO_UNMUTES_VIDEO", - "average_display_time": "METRIC_AVERAGE_DISPLAY_TIME", - "average_interaction_time": "METRIC_AVERAGE_INTERACTION_TIME", - "begin_to_render_eligible_impressions": "METRIC_BEGIN_TO_RENDER_ELIGIBLE_IMPRESSIONS", - "begin_to_render_impressions": "METRIC_BEGIN_TO_RENDER_IMPRESSIONS", - "billable_cost_advertiser_currency": "METRIC_BILLABLE_COST_ADVERTISER", - "billable_cost_partner_currency": 
"METRIC_BILLABLE_COST_PARTNER", - "billable_cost_usd": "METRIC_BILLABLE_COST_USD", - "billable_impressions": "METRIC_BILLABLE_IMPRESSIONS", - "click_rate_ctr": "METRIC_CTR", - "clicks": "METRIC_CLICKS", - "client_cost_advertiser_currency": "METRIC_CLIENT_COST_ADVERTISER_CURRENCY", - "client_cost_ecpa_advertiser_currency": "METRIC_CLIENT_COST_ECPA_ADVERTISER_CURRENCY", - "client_cost_ecpa_pc_advertiser_currency": "METRIC_CLIENT_COST_ECPA_PC_ADVERTISER_CURRENCY", - "client_cost_ecpa_pv_advertiser_currency": "METRIC_CLIENT_COST_ECPA_PV_ADVERTISER_CURRENCY", - "client_cost_ecpc_advertiser_currency": "METRIC_CLIENT_COST_ECPC_ADVERTISER_CURRENCY", - "client_cost_ecpm_advertiser_currency": "METRIC_CLIENT_COST_ECPM_ADVERTISER_CURRENCY", - "client_cost_viewable_ecpm_advertiser_currency": "METRIC_CLIENT_COST_VIEWABLE_ECPM_ADVERTISER_CURRENCY", - "cm_post_click_revenue__cross_environment": "METRIC_CM_POST_CLICK_REVENUE_CROSS_ENVIRONMENT", - "cm_post_view_revenue__cross_environment": "METRIC_CM_POST_VIEW_REVENUE_CROSS_ENVIRONMENT", - "companion_clicks_audio": "METRIC_COMPANION_CLICKS_AUDIO", - "companion_clicks_video": "METRIC_VIDEO_COMPANION_CLICKS", - "companion_impressions_audio": "METRIC_COMPANION_IMPRESSIONS_AUDIO", - "companion_impressions_video": "METRIC_VIDEO_COMPANION_IMPRESSIONS", - "complete_listens_audio": "METRIC_COMPLETE_LISTENS_AUDIO", - "complete_views_video": "METRIC_RICH_MEDIA_VIDEO_COMPLETIONS", - "completion_rate_audio": "METRIC_COMPLETION_RATE_AUDIO", - "completion_rate_video": "METRIC_VIDEO_COMPLETION_RATE", - "comscore_vce_in_doubleclick_fee_advertiser_currency": "METRIC_FEE20_ADVERTISER", - "comscore_vce_in_doubleclick_fee_partner_currency": "METRIC_FEE20_PARTNER", - "comscore_vce_in_doubleclick_fee_usd": "METRIC_FEE20_USD", - "conversions_per_1000_impressions": "METRIC_CONVERSIONS_PER_MILLE", - "cookie_unconsented_clicks": "METRIC_TRACKING_UNCONSENTED_CLICKS", - "counters": "METRIC_COUNTERS", - "cpm_fee_1_advertiser_currency": "METRIC_CPM_FEE1_ADVERTISER", - "cpm_fee_1_partner_currency": "METRIC_CPM_FEE1_PARTNER", - "cpm_fee_1_usd": "METRIC_CPM_FEE1_USD", - "cpm_fee_2_advertiser_currency": "METRIC_CPM_FEE2_ADVERTISER", - "cpm_fee_2_partner_currency": "METRIC_CPM_FEE2_PARTNER", - "cpm_fee_2_usd": "METRIC_CPM_FEE2_USD", - "cpm_fee_3_advertiser_currency": "METRIC_CPM_FEE3_ADVERTISER", - "cpm_fee_3_partner_currency": "METRIC_CPM_FEE3_PARTNER", - "cpm_fee_3_usd": "METRIC_CPM_FEE3_USD", - "cpm_fee_4_advertiser_currency": "METRIC_CPM_FEE4_ADVERTISER", - "cpm_fee_4_partner_currency": "METRIC_CPM_FEE4_PARTNER", - "cpm_fee_4_usd": "METRIC_CPM_FEE4_USD", - "cpm_fee_5_advertiser_currency": "METRIC_CPM_FEE5_ADVERTISER", - "cpm_fee_5_partner_currency": "METRIC_CPM_FEE5_PARTNER", - "cpm_fee_5_usd": "METRIC_CPM_FEE5_USD", - "custom_fee_1_advertiser_currency": "METRIC_CUSTOM_FEE_1_ADVERTISER_CURRENCY", - "custom_fee_2_advertiser_currency": "METRIC_CUSTOM_FEE_2_ADVERTISER_CURRENCY", - "custom_fee_3_advertiser_currency": "METRIC_CUSTOM_FEE_3_ADVERTISER_CURRENCY", - "custom_fee_4_advertiser_currency": "METRIC_CUSTOM_FEE_4_ADVERTISER_CURRENCY", - "custom_fee_5_advertiser_currency": "METRIC_CUSTOM_FEE_5_ADVERTISER_CURRENCY", - "data_fees_advertiser_currency": "METRIC_DATA_COST_ADVERTISER", - "data_fees_partner_currency": "METRIC_DATA_COST_PARTNER", - "data_fees_usd": "METRIC_DATA_COST_USD", - "data_management_platform_fee_advertiser_currency": "METRIC_FEE11_ADVERTISER", - "data_management_platform_fee_partner_currency": "METRIC_FEE11_PARTNER", - "data_management_platform_fee_usd": 
"METRIC_FEE11_USD", - "doubleverify_fee_advertiser_currency": "METRIC_FEE3_ADVERTISER", - "doubleverify_fee_partner_currency": "METRIC_FEE3_PARTNER", - "doubleverify_fee_usd": "METRIC_FEE3_USD", - "doubleverify_pre_bid_fee_advertiser_currency": "METRIC_FEE13_ADVERTISER", - "doubleverify_pre_bid_fee_partner_currency": "METRIC_FEE13_PARTNER", - "doubleverify_pre_bid_fee_usd": "METRIC_FEE13_USD", - "engagement_rate": "METRIC_DBM_ENGAGEMENT_RATE", - "engagements": "METRIC_ENGAGEMENTS", - "estimated_cpm_for_impressions_with_custom_value_advertiser_currency": "METRIC_ESTIMATED_CPM_FOR_IMPRESSIONS_WITH_CUSTOM_VALUE_ADVERTISER_CURRENCY", - "estimated_total_cost_for_impressions_with_custom_value_advertiser_currency": "METRIC_ESTIMATED_TOTAL_COST_FOR_IMPRESSIONS_WITH_CUSTOM_VALUE_ADVERTISER_CURRENCY", - "evidon_fee_advertiser_currency": "METRIC_FEE9_ADVERTISER", - "evidon_fee_partner_currency": "METRIC_FEE9_PARTNER", - "evidon_fee_usd": "METRIC_FEE9_USD", - "exits": "METRIC_EXITS", - "expansions": "METRIC_EXPANSIONS", - "first_quartile_audio": "METRIC_FIRST_QUARTILE_AUDIO", - "first_quartile_views_video": "METRIC_RICH_MEDIA_VIDEO_FIRST_QUARTILE_COMPLETES", - "fullscreens_video": "METRIC_RICH_MEDIA_VIDEO_FULL_SCREENS", - "general_invalid_traffic_givt_active_view_eligible_impressions": "METRIC_GIVT_ACTIVE_VIEW_ELIGIBLE_IMPRESSIONS", - "general_invalid_traffic_givt_active_view_measurable_impressions": "METRIC_GIVT_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS", - "general_invalid_traffic_givt_active_view_viewable_impressions": "METRIC_GIVT_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS", - "general_invalid_traffic_givt_begin_to_render_impressions": "METRIC_GIVT_BEGIN_TO_RENDER_IMPRESSIONS", - "general_invalid_traffic_givt_clicks": "METRIC_GIVT_CLICKS", - "general_invalid_traffic_givt_impressions": "METRIC_GENERAL_INVALID_TRAFFIC_GIVT_IMPRESSIONS", - "general_invalid_traffic_givt_tracked_ads": "METRIC_GENERAL_INVALID_TRAFFIC_GIVT_TRACKED_ADS", - "gmail_conversions": "METRIC_GMAIL_CONVERSIONS", - "gmail_post_click_conversions": "METRIC_GMAIL_POST_CLICK_CONVERSIONS", - "gmail_post_view_conversions": "METRIC_GMAIL_POST_VIEW_CONVERSIONS", - "impression_custom_value_cost": "METRIC_IMPRESSION_CUSTOM_VALUE_COST", - "impressions": "METRIC_IMPRESSIONS", - "impressions_with_custom_value": "METRIC_IMPRESSIONS_WITH_CUSTOM_VALUE", - "impressions_with_positive_custom_value": "METRIC_IMPRESSIONS_WITH_POSITIVE_CUSTOM_VALUE", - "integral_ad_science_pre_bid_fee_advertiser_currency": "METRIC_FEE12_ADVERTISER", - "integral_ad_science_pre_bid_fee_partner_currency": "METRIC_FEE12_PARTNER", - "integral_ad_science_pre_bid_fee_usd": "METRIC_FEE12_USD", - "integral_ad_science_video_fee_advertiser_currency": "METRIC_FEE17_ADVERTISER", - "integral_ad_science_video_fee_partner_currency": "METRIC_FEE17_PARTNER", - "integral_ad_science_video_fee_usd": "METRIC_FEE17_USD", - "interactive_impressions": "METRIC_INTERACTIVE_IMPRESSIONS", - "invalid_active_view_eligible_impressions": "METRIC_INVALID_ACTIVE_VIEW_ELIGIBLE_IMPRESSIONS", - "invalid_active_view_measurable_impressions": "METRIC_INVALID_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS", - "invalid_active_view_viewable_impressions": "METRIC_INVALID_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS", - "invalid_begin_to_render_impressions": "METRIC_INVALID_BEGIN_TO_RENDER_IMPRESSIONS", - "invalid_clicks": "METRIC_INVALID_CLICKS", - "invalid_impressions": "METRIC_INVALID_IMPRESSIONS", - "invalid_tracked_ads": "METRIC_INVALID_TRACKED_ADS", - "media_cost_advertiser_currency": "METRIC_MEDIA_COST_ADVERTISER", - "media_cost_partner_currency": 
"METRIC_MEDIA_COST_PARTNER", - "media_cost_usd": "METRIC_MEDIA_COST_USD", - "media_cost_ecpa_advertiser_currency": "METRIC_MEDIA_COST_ECPA_ADVERTISER", - "media_cost_ecpa_partner_currency": "METRIC_MEDIA_COST_ECPA_PARTNER", - "media_cost_ecpa_pc_advertiser_currency": "METRIC_MEDIA_COST_ECPAPC_ADVERTISER", - "media_cost_ecpa_pv_advertiser_currency": "METRIC_MEDIA_COST_ECPAPV_ADVERTISER", - "media_cost_ecpa_usd": "METRIC_MEDIA_COST_ECPA_USD", - "media_cost_ecpc_advertiser_currency": "METRIC_MEDIA_COST_ECPC_ADVERTISER", - "media_cost_ecpc_partner_currency": "METRIC_MEDIA_COST_ECPC_PARTNER", - "media_cost_ecpc_pc_partner_currency": "METRIC_MEDIA_COST_ECPAPC_PARTNER", - "media_cost_ecpc_pc_usd": "METRIC_MEDIA_COST_ECPAPC_USD", - "media_cost_ecpc_pv_partner_currency": "METRIC_MEDIA_COST_ECPAPV_PARTNER", - "media_cost_ecpc_pv_usd": "METRIC_MEDIA_COST_ECPAPV_USD", - "media_cost_ecpc_usd": "METRIC_MEDIA_COST_ECPC_USD", - "media_cost_ecpm_advertiser_currency": "METRIC_MEDIA_COST_ECPM_ADVERTISER", - "media_cost_ecpm_partner_currency": "METRIC_MEDIA_COST_ECPM_PARTNER", - "media_cost_ecpm_usd": "METRIC_MEDIA_COST_ECPM_USD", - "media_cost_viewable_ecpm_advertiser_currency": "METRIC_MEDIA_COST_VIEWABLE_ECPM_ADVERTISER", - "media_cost_viewable_ecpm_partner_currency": "METRIC_MEDIA_COST_VIEWABLE_ECPM_PARTNER", - "media_cost_viewable_ecpm_usd": "METRIC_MEDIA_COST_VIEWABLE_ECPM_USD", - "media_fee_1_advertiser_currency": "METRIC_MEDIA_FEE1_ADVERTISER", - "media_fee_1_partner_currency": "METRIC_MEDIA_FEE1_PARTNER", - "media_fee_1_usd": "METRIC_MEDIA_FEE1_USD", - "media_fee_2_advertiser_currency": "METRIC_MEDIA_FEE2_ADVERTISER", - "media_fee_2_partner_currency": "METRIC_MEDIA_FEE2_PARTNER", - "media_fee_2_usd": "METRIC_MEDIA_FEE2_USD", - "media_fee_3_advertiser_currency": "METRIC_MEDIA_FEE3_ADVERTISER", - "media_fee_3_partner_currency": "METRIC_MEDIA_FEE3_PARTNER", - "media_fee_3_usd": "METRIC_MEDIA_FEE3_USD", - "media_fee_4_advertiser_currency": "METRIC_MEDIA_FEE4_ADVERTISER", - "media_fee_4_partner_currency": "METRIC_MEDIA_FEE4_PARTNER", - "media_fee_4_usd": "METRIC_MEDIA_FEE4_USD", - "media_fee_5_advertiser_currency": "METRIC_MEDIA_FEE5_ADVERTISER", - "media_fee_5_partner_currency": "METRIC_MEDIA_FEE5_PARTNER", - "media_fee_5_usd": "METRIC_MEDIA_FEE5_USD", - "mediacost_data_fee_advertiser_currency": "METRIC_FEE16_ADVERTISER", - "mediacost_data_fee_partner_currency": "METRIC_FEE16_PARTNER", - "mediacost_data_fee_usd": "METRIC_FEE16_USD", - "midpoint_audio": "METRIC_MIDPOINT_AUDIO", - "midpoint_views_video": "METRIC_RICH_MEDIA_VIDEO_MIDPOINTS", - "moat_video_fee_advertiser_currency": "METRIC_FEE18_ADVERTISER", - "moat_video_fee_partner_currency": "METRIC_FEE18_PARTNER", - "moat_video_fee_usd": "METRIC_FEE18_USD", - "nielsen_digital_ad_ratings_fee_advertiser_currency": "METRIC_FEE19_ADVERTISER", - "nielsen_digital_ad_ratings_fee_partner_currency": "METRIC_FEE19_PARTNER", - "nielsen_digital_ad_ratings_fee_usd": "METRIC_FEE19_USD", - "pauses_audio": "METRIC_PAUSES_AUDIO", - "pauses_video": "METRIC_RICH_MEDIA_VIDEO_PAUSES", - "platform_fee_advertiser_currency": "METRIC_PLATFORM_FEE_ADVERTISER", - "platform_fee_partner_currency": "METRIC_PLATFORM_FEE_PARTNER", - "platform_fee_usd": "METRIC_PLATFORM_FEE_USD", - "platform_fee_rate": "METRIC_PLATFORM_FEE_RATE", - "post_view_conversions__cross_environment": "METRIC_POST_VIEW_CONVERSIONS_CROSS_ENVIRONMENT", - "premium_fee_advertiser_currency": "METRIC_PREMIUM_FEE_ADVERTISER_CURRENCY", - "profit_advertiser_currency": "METRIC_PROFIT_ADVERTISER", - 
"profit_partner_currency": "METRIC_PROFIT_PARTNER", - "profit_usd": "METRIC_PROFIT_USD", - "profit_ecpm_advertiser_currency": "METRIC_PROFIT_ECPM_ADVERTISER", - "profit_ecpm_partner_currency": "METRIC_PROFIT_ECPM_PARTNER", - "profit_ecpm_usd": "METRIC_PROFIT_ECPM_USD", - "profit_margin": "METRIC_PROFIT_MARGIN", - "profit_viewable_ecpm_advertiser_currency": "METRIC_PROFIT_VIEWABLE_ECPM_ADVERTISER", - "profit_viewable_ecpm_partner_currency": "METRIC_PROFIT_VIEWABLE_ECPM_PARTNER", - "profit_viewable_ecpm_usd": "METRIC_PROFIT_VIEWABLE_ECPM_USD", - "programmatic_guaranteed_impressions_passed_due_to_frequency": "METRIC_PROGRAMMATIC_GUARANTEED_IMPRESSIONS_PASSED_DUE_TO_FREQUENCY", - "programmatic_guaranteed_savings_re_invested_due_to_frequency_advertiser_currency": "METRIC_PROGRAMMATIC_GUARANTEED_SAVINGS_RE_INVESTED_DUE_TO_FREQUENCY_ADVERTISER_CURRENCY", - "refund_billable_cost_advertiser_currency": "METRIC_REFUND_BILLABLE_COST_ADVERTISER_CURRENCY", - "refund_media_cost_advertiser_currency": "METRIC_REFUND_MEDIA_COST_ADVERTISER_CURRENCY", - "refund_platform_fee_advertiser_currency": "METRIC_REFUND_PLATFORM_FEE_ADVERTISER_CURRENCY", - "revenue_advertiser_currency": "METRIC_REVENUE_ADVERTISER", - "revenue_partner_currency": "METRIC_REVENUE_PARTNER", - "revenue_usd": "METRIC_REVENUE_USD", - "revenue_ecpa_advertiser_currency": "METRIC_REVENUE_ECPA_ADVERTISER", - "revenue_ecpa_partner_currency": "METRIC_REVENUE_ECPA_PARTNER", - "revenue_ecpa_pc_advertiser_currency": "METRIC_REVENUE_ECPAPC_ADVERTISER", - "revenue_ecpa_pc_partner_currency": "METRIC_REVENUE_ECPAPC_PARTNER", - "revenue_ecpa_pc_usd": "METRIC_REVENUE_ECPAPC_USD", - "revenue_ecpa_pv_advertiser_currency": "METRIC_REVENUE_ECPAPV_ADVERTISER", - "revenue_ecpa_pv_partner_currency": "METRIC_REVENUE_ECPAPV_PARTNER", - "revenue_ecpa_pv_usd": "METRIC_REVENUE_ECPAPV_USD", - "revenue_ecpa_usd": "METRIC_REVENUE_ECPA_USD", - "revenue_ecpc_advertiser_currency": "METRIC_REVENUE_ECPC_ADVERTISER", - "revenue_ecpc_partner_currency": "METRIC_REVENUE_ECPC_PARTNER", - "revenue_ecpc_usd": "METRIC_REVENUE_ECPC_USD", - "revenue_ecpe_advertiser_currency": "METRIC_TRUEVIEW_AVERAGE_CPE_ADVERTISER", - "revenue_ecpe_partner_currency": "METRIC_TRUEVIEW_AVERAGE_CPE_PARTNER", - "revenue_ecpe_usd": "METRIC_TRUEVIEW_AVERAGE_CPE_USD", - "revenue_ecpm_advertiser_currency": "METRIC_REVENUE_ECPM_ADVERTISER", - "revenue_ecpm_partner_currency": "METRIC_REVENUE_ECPM_PARTNER", - "revenue_ecpm_usd": "METRIC_REVENUE_ECPM_USD", - "revenue_ecpv_advertiser_currency": "METRIC_TRUEVIEW_CPV_ADVERTISER", - "revenue_ecpv_partner_currency": "METRIC_TRUEVIEW_CPV_PARTNER", - "revenue_ecpv_usd": "METRIC_TRUEVIEW_CPV_USD", - "revenue_viewable_ecpm_advertiser_currency": "METRIC_REVENUE_VIEWABLE_ECPM_ADVERTISER", - "revenue_viewable_ecpm_partner_currency": "METRIC_REVENUE_VIEWABLE_ECPM_PARTNER", - "revenue_viewable_ecpm_usd": "METRIC_REVENUE_VIEWABLE_ECPM_USD", - "rich_media_engagements": "METRIC_RICH_MEDIA_ENGAGEMENTS", - "scrolls": "METRIC_RICH_MEDIA_SCROLLS", - "shoplocal_fee_advertiser_currency": "METRIC_FEE14_ADVERTISER", - "shoplocal_fee_partner_currency": "METRIC_FEE14_PARTNER", - "shoplocal_fee_usd": "METRIC_FEE14_USD", - "skips_video": "METRIC_RICH_MEDIA_VIDEO_SKIPS", - "starts_audio": "METRIC_STARTS_AUDIO", - "starts_video": "METRIC_RICH_MEDIA_VIDEO_PLAYS", - "stops_audio": "METRIC_STOPS_AUDIO", - "teracent_fee_advertiser_currency": "METRIC_FEE8_ADVERTISER", - "teracent_fee_partner_currency": "METRIC_FEE8_PARTNER", - "teracent_fee_usd": "METRIC_FEE8_USD", - 
"third_party_ad_server_fee_advertiser_currency": "METRIC_FEE2_ADVERTISER", - "third_party_ad_server_fee_partner_currency": "METRIC_FEE2_PARTNER", - "third_party_ad_server_fee_usd": "METRIC_FEE2_USD", - "third_quartile_audio": "METRIC_THIRD_QUARTILE_AUDIO", - "third_quartile_views_video": "METRIC_RICH_MEDIA_VIDEO_THIRD_QUARTILE_COMPLETES", - "timers": "METRIC_TIMERS", - "total_conversions__cross_environment": "METRIC_TOTAL_CONVERSIONS_CROSS_ENVIRONMENT", - "total_display_time": "METRIC_TOTAL_DISPLAY_TIME", - "total_impression_custom_value": "METRIC_TOTAL_IMPRESSION_CUSTOM_VALUE", - "total_interaction_time": "METRIC_TOTAL_INTERACTION_TIME", - "total_media_cost_advertiser_currency": "METRIC_TOTAL_MEDIA_COST_ADVERTISER", - "total_media_cost_partner_currency": "METRIC_TOTAL_MEDIA_COST_PARTNER", - "total_media_cost_usd": "METRIC_TOTAL_MEDIA_COST_USD", - "total_media_cost_ecpa_advertiser_currency": "METRIC_TOTAL_MEDIA_COST_ECPA_ADVERTISER", - "total_media_cost_ecpa_partner_currency": "METRIC_TOTAL_MEDIA_COST_ECPA_PARTNER", - "total_media_cost_ecpa_pc_advertiser_currency": "METRIC_TOTAL_MEDIA_COST_ECPAPC_ADVERTISER", - "total_media_cost_ecpa_pc_partner_currency": "METRIC_TOTAL_MEDIA_COST_ECPAPC_PARTNER", - "total_media_cost_ecpa_pc_usd": "METRIC_TOTAL_MEDIA_COST_ECPAPC_USD", - "total_media_cost_ecpa_pv_advertiser_currency": "METRIC_TOTAL_MEDIA_COST_ECPAPV_ADVERTISER", - "total_media_cost_ecpa_pv_partner_currency": "METRIC_TOTAL_MEDIA_COST_ECPAPV_PARTNER", - "total_media_cost_ecpa_pv_usd": "METRIC_TOTAL_MEDIA_COST_ECPAPV_USD", - "total_media_cost_ecpa_usd": "METRIC_TOTAL_MEDIA_COST_ECPA_USD", - "total_media_cost_ecpc_advertiser_currency": "METRIC_TOTAL_MEDIA_COST_ECPC_ADVERTISER", - "total_media_cost_ecpc_partner_currency": "METRIC_TOTAL_MEDIA_COST_ECPC_PARTNER", - "total_media_cost_ecpc_usd": "METRIC_TOTAL_MEDIA_COST_ECPC_USD", - "total_media_cost_ecpm_advertiser_currency": "METRIC_TOTAL_MEDIA_COST_ECPM_ADVERTISER", - "total_media_cost_ecpm_partner_currency": "METRIC_TOTAL_MEDIA_COST_ECPM_PARTNER", - "total_media_cost_ecpm_usd": "METRIC_TOTAL_MEDIA_COST_ECPM_USD", - "total_media_cost_viewable_ecpm_advertiser_currency": "METRIC_TOTAL_MEDIA_COST_VIEWABLE_ECPM_ADVERTISER", - "total_media_cost_viewable_ecpm_partner_currency": "METRIC_TOTAL_MEDIA_COST_VIEWABLE_ECPM_PARTNER", - "total_media_cost_viewable_ecpm_usd": "METRIC_TOTAL_MEDIA_COST_VIEWABLE_ECPM_USD", - "total_video_media_cost_ecpcv_advertiser_currency": "METRIC_TOTAL_MEDIA_COST_ECPCV_ADVERTISER", - "total_video_media_cost_ecpcv_partner_currency": "METRIC_TOTAL_MEDIA_COST_ECPCV_PARTNER", - "total_video_media_cost_ecpcv_usd": "METRIC_TOTAL_MEDIA_COST_ECPCV_USD", - "tracked_ads": "METRIC_TRACKED_ADS", - "trueview_general_invalid_traffic_givt_views": "METRIC_TRUEVIEW_GENERAL_INVALID_TRAFFIC_GIVT_VIEWS", - "trueview_invalid_views": "METRIC_TRUEVIEW_INVALID_VIEWS", - "trustmetrics_fee_advertiser_currency": "METRIC_FEE15_ADVERTISER", - "trustmetrics_fee_partner_currency": "METRIC_FEE15_PARTNER", - "trustmetrics_fee_usd": "METRIC_FEE15_USD", - "verifiable_impressions": "METRIC_VERIFIABLE_IMPRESSIONS", - "video_client_cost_ecpcv_advertiser_currency": "METRIC_VIDEO_CLIENT_COST_ECPCV_ADVERTISER_CURRENCY", - "video_media_cost_ecpcv_advertiser_currency": "METRIC_MEDIA_COST_ECPCV_ADVERTISER", - "video_media_cost_ecpcv_partner_currency": "METRIC_MEDIA_COST_ECPCV_PARTNER", - "video_media_cost_ecpcv_usd": "METRIC_MEDIA_COST_ECPCV_USD", - "vizu_fee_advertiser_currency": "METRIC_FEE6_ADVERTISER", - "vizu_fee_partner_currency": "METRIC_FEE6_PARTNER", - 
"vizu_fee_usd": "METRIC_FEE6_USD", - "youtube_view_rate": "METRIC_TRUEVIEW_VIEW_RATE", - "youtube_views": "METRIC_TRUEVIEW_VIEWS", - "pct_composition_impressions": "METRIC_DEMO_COMPOSITION_IMPRESSION", - "pct_composition_reach": "METRIC_VIRTUAL_PEOPLE_IMPRESSION_REACH_SHARE_PERCENT", - "pct_population_reach": "METRIC_VIRTUAL_PEOPLE_IMPRESSION_REACH_PERCENT", - "population": "METRIC_DEMO_POPULATION", - "target_rating_points": "METRIC_TARGET_RATING_POINTS", - "unique_reach_viewable_impression_reach": "METRIC_VIRTUAL_PEOPLE_VIEWABLE_IMPRESSION_REACH_BY_DEMO", - "viewable_target_rating_points": "METRIC_VIEWABLE_GROSS_RATING_POINTS", - "viewable_impressions": "METRIC_GRP_CORRECTED_VIEWABLE_IMPRESSIONS", - "pct_viewable_composition_impressions": "METRIC_GRP_CORRECTED_VIEWABLE_IMPRESSIONS_SHARE_PERCENT", - "pct_viewable_composition_reach": "METRIC_VIRTUAL_PEOPLE_VIEWABLE_IMPRESSION_REACH_SHARE_PERCENT", - "pct_viewable_population_reach": "METRIC_VIRTUAL_PEOPLE_VIEWABLE_IMPRESSION_REACH_PERCENT", -} diff --git a/airbyte-integrations/connectors/source-dv-360/source_dv_360/queries/query_template.json b/airbyte-integrations/connectors/source-dv-360/source_dv_360/queries/query_template.json deleted file mode 100644 index 4268df6b9cab..000000000000 --- a/airbyte-integrations/connectors/source-dv-360/source_dv_360/queries/query_template.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "kind": "doubleclickbidmanager#query", - "queryId": "0", - "metadata": { - "title": "", - "dataRange": "CUSTOM_DATES", - "format": "CSV", - "running": false, - "googleCloudStoragePathForLatestReport": "", - "latestReportRunTimeMs": "0", - "sendNotification": false - }, - "params": { - "type": "", - "groupBys": [], - "filters": [ - { - "type": "FILTER_PARTNER", - "value": "" - } - ], - "metrics": [], - "options": { - "includeOnlyTargetedUserLists": false - } - }, - "schedule": { - "frequency": "ONE_TIME" - }, - "reportDataStartTimeMs": "", - "reportDataEndTimeMs": "", - "timezoneCode": "UTC" -} diff --git a/airbyte-integrations/connectors/source-dv-360/source_dv_360/run.py b/airbyte-integrations/connectors/source-dv-360/source_dv_360/run.py deleted file mode 100644 index a869331c4dbf..000000000000 --- a/airbyte-integrations/connectors/source-dv-360/source_dv_360/run.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
diff --git a/airbyte-integrations/connectors/source-dv-360/source_dv_360/run.py b/airbyte-integrations/connectors/source-dv-360/source_dv_360/run.py
deleted file mode 100644
index a869331c4dbf..000000000000
--- a/airbyte-integrations/connectors/source-dv-360/source_dv_360/run.py
+++ /dev/null
@@ -1,14 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
-
-
-import sys
-
-from airbyte_cdk.entrypoint import launch
-from source_dv_360 import SourceDV360
-
-
-def run():
-    source = SourceDV360()
-    launch(source, sys.argv[1:])
diff --git a/airbyte-integrations/connectors/source-dv-360/source_dv_360/schemas/audience_composition.json b/airbyte-integrations/connectors/source-dv-360/source_dv_360/schemas/audience_composition.json
deleted file mode 100644
index aea493ba027d..000000000000
--- a/airbyte-integrations/connectors/source-dv-360/source_dv_360/schemas/audience_composition.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
-  "$schema": "http://json-schema.org/draft-07/schema#",
-  "type": "object",
-  "properties": {
-    "audience_list": {
-      "type": ["null", "string"]
-    },
-    "date": {
-      "type": ["null", "string"]
-    },
-    "eligible_cookies_on_first_party_audience_list": {
-      "type": ["null", "string"]
-    },
-    "eligible_cookies_on_third_party_audience_list_and_interest": {
-      "type": ["null", "string"]
-    },
-    "first_party_audience_list": {
-      "type": ["null", "string"]
-    },
-    "first_party_audience_list_cost": {
-      "type": ["null", "string"]
-    },
-    "first_party_audience_list_id": {
-      "type": ["null", "string"]
-    },
-    "first_party_audience_list_type": {
-      "type": ["null", "string"]
-    },
-    "match_ratio": {
-      "type": ["null", "string"]
-    },
-    "third_party_audience_list": {
-      "type": ["null", "string"]
-    },
-    "third_party_audience_list_cost": {
-      "type": ["null", "string"]
-    },
-    "third_party_audience_list_id": {
-      "type": ["null", "string"]
-    },
-    "third_party_audience_list_type": {
-      "type": ["null", "string"]
-    },
-    "potential_impressions": {
-      "type": ["null", "string"]
-    },
-    "unique_cookies_with_impressions": {
-      "type": ["null", "string"]
-    }
-  }
-}
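Note that every property in these deleted report schemas, here and below, is a nullable string: DBM delivers reports as untyped CSV, so each cell arrives as text and empty cells map to null. A small illustration (not connector code, header normalization is an assumption) of how a CSV row lines up with such a schema:

import csv
import io


def rows_to_records(report_csv: str):
    """Yield dicts keyed like the schemas: headers lower-cased, '%' -> 'pct', spaces -> '_'."""
    reader = csv.DictReader(io.StringIO(report_csv))
    for row in reader:
        yield {
            key.strip().lower().replace("%", "pct").replace(" ", "_"): (value or None)
            for key, value in row.items()
        }


records = list(rows_to_records("Audience List,Match Ratio\nIn-Market,0.42\n"))
# [{'audience_list': 'In-Market', 'match_ratio': '0.42'}] -- values stay strings,
# which is why the schemas type everything as ["null", "string"].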
"type": ["null", "string"] - }, - "date": { - "type": ["null", "string"] - }, - "day_of_week": { - "type": ["null", "string"] - }, - "exchange": { - "type": ["null", "string"] - }, - "exchange_code": { - "type": ["null", "string"] - }, - "exchange_id": { - "type": ["null", "string"] - }, - "floodlight_activity": { - "type": ["null", "string"] - }, - "floodlight_activity_id": { - "type": ["null", "string"] - }, - "insertion_order": { - "type": ["null", "string"] - }, - "insertion_order_integration_code": { - "type": ["null", "string"] - }, - "insertion_order_status": { - "type": ["null", "string"] - }, - "line_item": { - "type": ["null", "string"] - }, - "line_item_id": { - "type": ["null", "string"] - }, - "line_item_integration_code": { - "type": ["null", "string"] - }, - "line_item_status": { - "type": ["null", "string"] - }, - "line_item_type": { - "type": ["null", "string"] - }, - "month": { - "type": ["null", "string"] - }, - "order_id": { - "type": ["null", "string"] - }, - "partner": { - "type": ["null", "string"] - }, - "partner_currency": { - "type": ["null", "string"] - }, - "partner_id": { - "type": ["null", "string"] - }, - "partner_status": { - "type": ["null", "string"] - }, - "targeted_data_providers": { - "type": ["null", "string"] - }, - "year": { - "type": ["null", "string"] - }, - "cm_post_click_revenue": { - "type": ["null", "string"] - }, - "cm_post_view_revenue": { - "type": ["null", "string"] - }, - "cookie_consented_floodlight_impressions": { - "type": ["null", "string"] - }, - "cookie_unconsented_floodlight_impressions": { - "type": ["null", "string"] - }, - "duplicate_floodlight_impressions": { - "type": ["null", "string"] - }, - "floodlight_impressions": { - "type": ["null", "string"] - }, - "post_click_conversions": { - "type": ["null", "string"] - }, - "post_view_conversions": { - "type": ["null", "string"] - }, - "total_conversions": { - "type": ["null", "string"] - } - } -} diff --git a/airbyte-integrations/connectors/source-dv-360/source_dv_360/schemas/reach.json b/airbyte-integrations/connectors/source-dv-360/source_dv_360/schemas/reach.json deleted file mode 100644 index a63c8efc86cf..000000000000 --- a/airbyte-integrations/connectors/source-dv-360/source_dv_360/schemas/reach.json +++ /dev/null @@ -1,114 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "advertiser": { - "type": ["null", "string"] - }, - "advertiser_id": { - "type": ["null", "string"] - }, - "advertiser_integration_code": { - "type": ["null", "string"] - }, - "advertiser_status": { - "type": ["null", "string"] - }, - "app_url": { - "type": ["null", "string"] - }, - "app_url_excluded": { - "type": ["null", "string"] - }, - "campaign": { - "type": ["null", "string"] - }, - "campaign_id": { - "type": ["null", "string"] - }, - "cm_placement_id": { - "type": ["null", "string"] - }, - "country": { - "type": ["null", "string"] - }, - "country_id": { - "type": ["null", "string"] - }, - "creative": { - "type": ["null", "string"] - }, - "creative_id": { - "type": ["null", "string"] - }, - "creative_integration_code": { - "type": ["null", "string"] - }, - "creative_source": { - "type": ["null", "string"] - }, - "creative_status": { - "type": ["null", "string"] - }, - "date": { - "type": ["null", "string"] - }, - "insertion_order": { - "type": ["null", "string"] - }, - "insertion_order_id": { - "type": ["null", "string"] - }, - "insertion_order_integration_code": { - "type": ["null", "string"] - }, - "insertion_order_status": { - "type": 
["null", "string"] - }, - "inventory_source": { - "type": ["null", "string"] - }, - "line_item": { - "type": ["null", "string"] - }, - "line_item_id": { - "type": ["null", "string"] - }, - "line_item_integration_code": { - "type": ["null", "string"] - }, - "line_item_status": { - "type": ["null", "string"] - }, - "partner": { - "type": ["null", "string"] - }, - "partner_id": { - "type": ["null", "string"] - }, - "partner_status": { - "type": ["null", "string"] - }, - "targeted_data_providers": { - "type": ["null", "string"] - }, - "cookie_reach_average_impression_frequency": { - "type": ["null", "string"] - }, - "cookie_reach_impression_reach": { - "type": ["null", "string"] - }, - "unique_reach_average_impression_frequency": { - "type": ["null", "string"] - }, - "unique_reach_click_reach": { - "type": ["null", "string"] - }, - "unique_reach_impression_reach": { - "type": ["null", "string"] - }, - "unique_reach_total_reach": { - "type": ["null", "string"] - } - } -} diff --git a/airbyte-integrations/connectors/source-dv-360/source_dv_360/schemas/standard.json b/airbyte-integrations/connectors/source-dv-360/source_dv_360/schemas/standard.json deleted file mode 100644 index 4c96e3c339eb..000000000000 --- a/airbyte-integrations/connectors/source-dv-360/source_dv_360/schemas/standard.json +++ /dev/null @@ -1,1506 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "active_view_custom_metric_id": { - "type": ["null", "string"] - }, - "active_view_custom_metric_name": { - "type": ["null", "string"] - }, - "ad_position": { - "type": ["null", "string"] - }, - "ad_type": { - "type": ["null", "string"] - }, - "advertiser": { - "type": ["null", "string"] - }, - "advertiser_currency": { - "type": ["null", "string"] - }, - "advertiser_id": { - "type": ["null", "string"] - }, - "advertiser_integration_code": { - "type": ["null", "string"] - }, - "advertiser_status": { - "type": ["null", "string"] - }, - "advertiser_time_zone": { - "type": ["null", "string"] - }, - "algorithm": { - "type": ["null", "string"] - }, - "algorithm_id": { - "type": ["null", "string"] - }, - "amp_page_request": { - "type": ["null", "string"] - }, - "app_url": { - "type": ["null", "string"] - }, - "app_url_excluded": { - "type": ["null", "string"] - }, - "app_url_id": { - "type": ["null", "string"] - }, - "attributed_userlist": { - "type": ["null", "string"] - }, - "attributed_userlist_cost": { - "type": ["null", "string"] - }, - "attributed_userlist_id": { - "type": ["null", "string"] - }, - "attributed_userlist_type": { - "type": ["null", "string"] - }, - "attribution_model": { - "type": ["null", "string"] - }, - "audience_list": { - "type": ["null", "string"] - }, - "audience_list_cost": { - "type": ["null", "string"] - }, - "audience_list_id": { - "type": ["null", "string"] - }, - "audience_list_type": { - "type": ["null", "string"] - }, - "audience_name": { - "type": ["null", "string"] - }, - "audience_type": { - "type": ["null", "string"] - }, - "authorized_seller_state": { - "type": ["null", "string"] - }, - "billable_outcome": { - "type": ["null", "string"] - }, - "brand_lift_type": { - "type": ["null", "string"] - }, - "browser_id": { - "type": ["null", "string"] - }, - "budget_segment_description": { - "type": ["null", "string"] - }, - "campaign": { - "type": ["null", "string"] - }, - "campaign_id": { - "type": ["null", "string"] - }, - "category": { - "type": ["null", "string"] - }, - "channel": { - "type": ["null", "string"] - }, - "channel_id": { - "type": 
["null", "string"] - }, - "channel_type": { - "type": ["null", "string"] - }, - "city": { - "type": ["null", "string"] - }, - "city_id": { - "type": ["null", "string"] - }, - "cm_placement_id": { - "type": ["null", "string"] - }, - "companion_creative": { - "type": ["null", "string"] - }, - "companion_creative_id": { - "type": ["null", "string"] - }, - "companion_creative_size": { - "type": ["null", "string"] - }, - "country": { - "type": ["null", "string"] - }, - "country_id": { - "type": ["null", "string"] - }, - "creative": { - "type": ["null", "string"] - }, - "creative_asset": { - "type": ["null", "string"] - }, - "creative_attributes": { - "type": ["null", "string"] - }, - "creative_height": { - "type": ["null", "string"] - }, - "creative_id": { - "type": ["null", "string"] - }, - "creative_integration_code": { - "type": ["null", "string"] - }, - "creative_rendered_in_amp": { - "type": ["null", "string"] - }, - "creative_size": { - "type": ["null", "string"] - }, - "creative_source": { - "type": ["null", "string"] - }, - "creative_status": { - "type": ["null", "string"] - }, - "creative_type": { - "type": ["null", "string"] - }, - "creative_width": { - "type": ["null", "string"] - }, - "data_provider": { - "type": ["null", "string"] - }, - "data_provider_id": { - "type": ["null", "string"] - }, - "date": { - "type": ["null", "string"] - }, - "day_of_week": { - "type": ["null", "string"] - }, - "detailed_demographics": { - "type": ["null", "string"] - }, - "detailed_demographics_id": { - "type": ["null", "string"] - }, - "device": { - "type": ["null", "string"] - }, - "device_make": { - "type": ["null", "string"] - }, - "device_model": { - "type": ["null", "string"] - }, - "device_type": { - "type": ["null", "string"] - }, - "digital_content_label": { - "type": ["null", "string"] - }, - "dma": { - "type": ["null", "string"] - }, - "dma_code": { - "type": ["null", "string"] - }, - "exchange": { - "type": ["null", "string"] - }, - "exchange_code": { - "type": ["null", "string"] - }, - "exchange_id": { - "type": ["null", "string"] - }, - "extension": { - "type": ["null", "string"] - }, - "extension_status": { - "type": ["null", "string"] - }, - "extension_type": { - "type": ["null", "string"] - }, - "floodlight_activity": { - "type": ["null", "string"] - }, - "floodlight_activity_id": { - "type": ["null", "string"] - }, - "format": { - "type": ["null", "string"] - }, - "gmail_age": { - "type": ["null", "string"] - }, - "gmail_city": { - "type": ["null", "string"] - }, - "gmail_country": { - "type": ["null", "string"] - }, - "gmail_device_type": { - "type": ["null", "string"] - }, - "gmail_gender": { - "type": ["null", "string"] - }, - "gmail_region": { - "type": ["null", "string"] - }, - "gmail_remarketing_list": { - "type": ["null", "string"] - }, - "household_income": { - "type": ["null", "string"] - }, - "impression_counting_method": { - "type": ["null", "string"] - }, - "insertion_order": { - "type": ["null", "string"] - }, - "insertion_order_daily_frequency": { - "type": ["null", "string"] - }, - "insertion_order_id": { - "type": ["null", "string"] - }, - "insertion_order_integration_code": { - "type": ["null", "string"] - }, - "insertion_order_status": { - "type": ["null", "string"] - }, - "interest": { - "type": ["null", "string"] - }, - "inventory_commitment_type": { - "type": ["null", "string"] - }, - "inventory_delivery_method": { - "type": ["null", "string"] - }, - "inventory_rate_type": { - "type": ["null", "string"] - }, - "inventory_source": { - "type": ["null", "string"] - 
}, - "inventory_source_group": { - "type": ["null", "string"] - }, - "inventory_source_group_id": { - "type": ["null", "string"] - }, - "inventory_source_id": { - "type": ["null", "string"] - }, - "inventory_source_id_external": { - "type": ["null", "string"] - }, - "inventory_source_type": { - "type": ["null", "string"] - }, - "isp_or_carrier": { - "type": ["null", "string"] - }, - "isp_or_carrier_id": { - "type": ["null", "string"] - }, - "keyword": { - "type": ["null", "string"] - }, - "life_event": { - "type": ["null", "string"] - }, - "life_events": { - "type": ["null", "string"] - }, - "line_item": { - "type": ["null", "string"] - }, - "line_item_daily_frequency": { - "type": ["null", "string"] - }, - "line_item_id": { - "type": ["null", "string"] - }, - "line_item_integration_code": { - "type": ["null", "string"] - }, - "line_item_lifetime_frequency": { - "type": ["null", "string"] - }, - "line_item_status": { - "type": ["null", "string"] - }, - "line_item_type": { - "type": ["null", "string"] - }, - "max_video_duration": { - "type": ["null", "string"] - }, - "measurement_source": { - "type": ["null", "string"] - }, - "month": { - "type": ["null", "string"] - }, - "operating_system": { - "type": ["null", "string"] - }, - "partner": { - "type": ["null", "string"] - }, - "partner_currency": { - "type": ["null", "string"] - }, - "partner_id": { - "type": ["null", "string"] - }, - "partner_status": { - "type": ["null", "string"] - }, - "platform": { - "type": ["null", "string"] - }, - "playback_method": { - "type": ["null", "string"] - }, - "position_in_content": { - "type": ["null", "string"] - }, - "public_inventory": { - "type": ["null", "string"] - }, - "publisher_property": { - "type": ["null", "string"] - }, - "publisher_property_id": { - "type": ["null", "string"] - }, - "publisher_property_section": { - "type": ["null", "string"] - }, - "publisher_property_section_id": { - "type": ["null", "string"] - }, - "refund_reason": { - "type": ["null", "string"] - }, - "region": { - "type": ["null", "string"] - }, - "region_id": { - "type": ["null", "string"] - }, - "rewarded": { - "type": ["null", "string"] - }, - "sensitive_category": { - "type": ["null", "string"] - }, - "served_pixel_density": { - "type": ["null", "string"] - }, - "targeted_data_providers": { - "type": ["null", "string"] - }, - "time_of_day": { - "type": ["null", "string"] - }, - "time_to_conversion": { - "type": ["null", "string"] - }, - "variant_id": { - "type": ["null", "string"] - }, - "variant_name": { - "type": ["null", "string"] - }, - "variant_version": { - "type": ["null", "string"] - }, - "verification_video_player_size": { - "type": ["null", "string"] - }, - "verification_video_position": { - "type": ["null", "string"] - }, - "video_continuous_play": { - "type": ["null", "string"] - }, - "video_player_size": { - "type": ["null", "string"] - }, - "video_skippable_support": { - "type": ["null", "string"] - }, - "week": { - "type": ["null", "string"] - }, - "year": { - "type": ["null", "string"] - }, - "zip_code": { - "type": ["null", "string"] - }, - "zip_code_id": { - "type": ["null", "string"] - }, - "pct_clicks_leading_to_conversions": { - "type": ["null", "string"] - }, - "pct_impressions_leading_to_conversions": { - "type": ["null", "string"] - }, - "pct_impressions_with_positive_custom_value": { - "type": ["null", "string"] - }, - "active_view_pct_audible_and_visible_at_completion": { - "type": ["null", "string"] - }, - "active_view_pct_audible_and_visible_at_first_quartile": { - "type": ["null", 
"string"] - }, - "active_view_pct_audible_and_visible_at_midpoint": { - "type": ["null", "string"] - }, - "active_view_pct_audible_and_visible_at_start": { - "type": ["null", "string"] - }, - "active_view_pct_audible_and_visible_at_third_quartile": { - "type": ["null", "string"] - }, - "active_view_pct_audible_impressions": { - "type": ["null", "string"] - }, - "active_view_pct_full_screen": { - "type": ["null", "string"] - }, - "active_view_pct_fully_on_screen_2_sec": { - "type": ["null", "string"] - }, - "active_view_pct_in_background": { - "type": ["null", "string"] - }, - "active_view_pct_measurable_impressions": { - "type": ["null", "string"] - }, - "active_view_pct_of_ad_played": { - "type": ["null", "string"] - }, - "active_view_pct_of_completed_impressions_audible_and_visible": { - "type": ["null", "string"] - }, - "active_view_pct_of_completed_impressions_visible": { - "type": ["null", "string"] - }, - "active_view_pct_of_first_quartile_impressions_audible_and_visible": { - "type": ["null", "string"] - }, - "active_view_pct_of_first_quartile_impressions_visible": { - "type": ["null", "string"] - }, - "active_view_pct_of_midpoint_impressions_audible_and_visible": { - "type": ["null", "string"] - }, - "active_view_pct_of_midpoint_impressions_visible": { - "type": ["null", "string"] - }, - "active_view_pct_of_third_quartile_impressions_audible_and_visible": { - "type": ["null", "string"] - }, - "active_view_pct_of_third_quartile_impressions_visible": { - "type": ["null", "string"] - }, - "active_view_pct_play_time_audible": { - "type": ["null", "string"] - }, - "active_view_pct_play_time_audible_and_visible": { - "type": ["null", "string"] - }, - "active_view_pct_play_time_visible": { - "type": ["null", "string"] - }, - "active_view_pct_viewable_impressions": { - "type": ["null", "string"] - }, - "active_view_pct_visible_10_seconds": { - "type": ["null", "string"] - }, - "active_view_pct_visible_at_completion": { - "type": ["null", "string"] - }, - "active_view_pct_visible_at_first_quartile": { - "type": ["null", "string"] - }, - "active_view_pct_visible_at_midpoint": { - "type": ["null", "string"] - }, - "active_view_pct_visible_at_start": { - "type": ["null", "string"] - }, - "active_view_pct_visible_at_third_quartile": { - "type": ["null", "string"] - }, - "active_view_audible_and_fully_on_screen_for_half_of_duration_15_sec_cap_impressions": { - "type": ["null", "string"] - }, - "active_view_audible_and_fully_on_screen_for_half_of_duration_15_sec_cap_measurable_impressions": { - "type": ["null", "string"] - }, - "active_view_audible_and_fully_on_screen_for_half_of_duration_15_sec_cap_rate": { - "type": ["null", "string"] - }, - "active_view_audible_and_fully_on_screen_for_half_of_duration_trueview_impressions": { - "type": ["null", "string"] - }, - "active_view_audible_and_fully_on_screen_for_half_of_duration_trueview_measurable_impressions": { - "type": ["null", "string"] - }, - "active_view_audible_and_fully_on_screen_for_half_of_duration_trueview_rate": { - "type": ["null", "string"] - }, - "active_view_average_viewable_time_seconds": { - "type": ["null", "string"] - }, - "active_view_custom_metric_measurable_impressions": { - "type": ["null", "string"] - }, - "active_view_custom_metric_viewable_impressions": { - "type": ["null", "string"] - }, - "active_view_custom_metric_viewable_rate": { - "type": ["null", "string"] - }, - "active_view_eligible_impressions": { - "type": ["null", "string"] - }, - "active_view_impression_distribution_not_measurable": { - "type": ["null", 
"string"] - }, - "active_view_impression_distribution_not_viewable": { - "type": ["null", "string"] - }, - "active_view_impression_distribution_viewable": { - "type": ["null", "string"] - }, - "active_view_impressions_audible_and_visible_at_completion": { - "type": ["null", "string"] - }, - "active_view_impressions_visible_10_seconds": { - "type": ["null", "string"] - }, - "active_view_measurable_impressions": { - "type": ["null", "string"] - }, - "active_view_not_measurable_impressions": { - "type": ["null", "string"] - }, - "active_view_not_viewable_impressions": { - "type": ["null", "string"] - }, - "active_view_viewable_impressions": { - "type": ["null", "string"] - }, - "adlingo_fee_advertiser_currency": { - "type": ["null", "string"] - }, - "adloox_fee_advertiser_currency": { - "type": ["null", "string"] - }, - "adloox_fee_partner_currency": { - "type": ["null", "string"] - }, - "adloox_fee_usd": { - "type": ["null", "string"] - }, - "adloox_pre_bid_fee_advertiser_currency": { - "type": ["null", "string"] - }, - "adloox_pre_bid_fee_partner_currency": { - "type": ["null", "string"] - }, - "adloox_pre_bid_fee_usd": { - "type": ["null", "string"] - }, - "adsafe_fee_advertiser_currency": { - "type": ["null", "string"] - }, - "adsafe_fee_partner_currency": { - "type": ["null", "string"] - }, - "adsafe_fee_usd": { - "type": ["null", "string"] - }, - "adxpose_fee_advertiser_currency": { - "type": ["null", "string"] - }, - "adxpose_fee_partner_currency": { - "type": ["null", "string"] - }, - "adxpose_fee_usd": { - "type": ["null", "string"] - }, - "agency_trading_desk_fee_advertiser_currency": { - "type": ["null", "string"] - }, - "agency_trading_desk_fee_partner_currency": { - "type": ["null", "string"] - }, - "agency_trading_desk_fee_usd": { - "type": ["null", "string"] - }, - "aggregate_knowledge_fee_advertiser_currency": { - "type": ["null", "string"] - }, - "aggregate_knowledge_fee_partner_currency": { - "type": ["null", "string"] - }, - "aggregate_knowledge_fee_usd": { - "type": ["null", "string"] - }, - "audio_client_cost_ecpcl_advertiser_currency": { - "type": ["null", "string"] - }, - "audio_media_cost_ecpcl_advertiser_currency": { - "type": ["null", "string"] - }, - "audio_mutes_audio": { - "type": ["null", "string"] - }, - "audio_mutes_video": { - "type": ["null", "string"] - }, - "audio_revenue_ecpcl_advertiser_currency": { - "type": ["null", "string"] - }, - "audio_unmutes_audio": { - "type": ["null", "string"] - }, - "audio_unmutes_video": { - "type": ["null", "string"] - }, - "average_display_time": { - "type": ["null", "string"] - }, - "average_interaction_time": { - "type": ["null", "string"] - }, - "begin_to_render_eligible_impressions": { - "type": ["null", "string"] - }, - "begin_to_render_impressions": { - "type": ["null", "string"] - }, - "billable_cost_advertiser_currency": { - "type": ["null", "string"] - }, - "billable_cost_partner_currency": { - "type": ["null", "string"] - }, - "billable_cost_usd": { - "type": ["null", "string"] - }, - "billable_impressions": { - "type": ["null", "string"] - }, - "click_rate_ctr": { - "type": ["null", "string"] - }, - "clicks": { - "type": ["null", "string"] - }, - "client_cost_advertiser_currency": { - "type": ["null", "string"] - }, - "client_cost_ecpa_advertiser_currency": { - "type": ["null", "string"] - }, - "client_cost_ecpa_pc_advertiser_currency": { - "type": ["null", "string"] - }, - "client_cost_ecpa_pv_advertiser_currency": { - "type": ["null", "string"] - }, - "client_cost_ecpc_advertiser_currency": { - "type": ["null", 
"string"] - }, - "client_cost_ecpm_advertiser_currency": { - "type": ["null", "string"] - }, - "client_cost_viewable_ecpm_advertiser_currency": { - "type": ["null", "string"] - }, - "cm_post_click_revenue": { - "type": ["null", "string"] - }, - "cm_post_click_revenue__cross_environment": { - "type": ["null", "string"] - }, - "cm_post_view_revenue": { - "type": ["null", "string"] - }, - "cm_post_view_revenue__cross_environment": { - "type": ["null", "string"] - }, - "companion_clicks_audio": { - "type": ["null", "string"] - }, - "companion_clicks_video": { - "type": ["null", "string"] - }, - "companion_impressions_audio": { - "type": ["null", "string"] - }, - "companion_impressions_video": { - "type": ["null", "string"] - }, - "complete_listens_audio": { - "type": ["null", "string"] - }, - "complete_views_video": { - "type": ["null", "string"] - }, - "completion_rate_audio": { - "type": ["null", "string"] - }, - "completion_rate_video": { - "type": ["null", "string"] - }, - "comscore_vce_in_doubleclick_fee_advertiser_currency": { - "type": ["null", "string"] - }, - "comscore_vce_in_doubleclick_fee_partner_currency": { - "type": ["null", "string"] - }, - "comscore_vce_in_doubleclick_fee_usd": { - "type": ["null", "string"] - }, - "conversions_per_1000_impressions": { - "type": ["null", "string"] - }, - "cookie_unconsented_clicks": { - "type": ["null", "string"] - }, - "counters": { - "type": ["null", "string"] - }, - "cpm_fee_1_advertiser_currency": { - "type": ["null", "string"] - }, - "cpm_fee_1_partner_currency": { - "type": ["null", "string"] - }, - "cpm_fee_1_usd": { - "type": ["null", "string"] - }, - "cpm_fee_2_advertiser_currency": { - "type": ["null", "string"] - }, - "cpm_fee_2_partner_currency": { - "type": ["null", "string"] - }, - "cpm_fee_2_usd": { - "type": ["null", "string"] - }, - "cpm_fee_3_advertiser_currency": { - "type": ["null", "string"] - }, - "cpm_fee_3_partner_currency": { - "type": ["null", "string"] - }, - "cpm_fee_3_usd": { - "type": ["null", "string"] - }, - "cpm_fee_4_advertiser_currency": { - "type": ["null", "string"] - }, - "cpm_fee_4_partner_currency": { - "type": ["null", "string"] - }, - "cpm_fee_4_usd": { - "type": ["null", "string"] - }, - "cpm_fee_5_advertiser_currency": { - "type": ["null", "string"] - }, - "cpm_fee_5_partner_currency": { - "type": ["null", "string"] - }, - "cpm_fee_5_usd": { - "type": ["null", "string"] - }, - "custom_fee_1_advertiser_currency": { - "type": ["null", "string"] - }, - "custom_fee_2_advertiser_currency": { - "type": ["null", "string"] - }, - "custom_fee_3_advertiser_currency": { - "type": ["null", "string"] - }, - "custom_fee_4_advertiser_currency": { - "type": ["null", "string"] - }, - "custom_fee_5_advertiser_currency": { - "type": ["null", "string"] - }, - "data_fees_advertiser_currency": { - "type": ["null", "string"] - }, - "data_fees_partner_currency": { - "type": ["null", "string"] - }, - "data_fees_usd": { - "type": ["null", "string"] - }, - "data_management_platform_fee_advertiser_currency": { - "type": ["null", "string"] - }, - "data_management_platform_fee_partner_currency": { - "type": ["null", "string"] - }, - "data_management_platform_fee_usd": { - "type": ["null", "string"] - }, - "doubleverify_fee_advertiser_currency": { - "type": ["null", "string"] - }, - "doubleverify_fee_partner_currency": { - "type": ["null", "string"] - }, - "doubleverify_fee_usd": { - "type": ["null", "string"] - }, - "doubleverify_pre_bid_fee_advertiser_currency": { - "type": ["null", "string"] - }, - 
"doubleverify_pre_bid_fee_partner_currency": { - "type": ["null", "string"] - }, - "doubleverify_pre_bid_fee_usd": { - "type": ["null", "string"] - }, - "engagement_rate": { - "type": ["null", "string"] - }, - "engagements": { - "type": ["null", "string"] - }, - "estimated_cpm_for_impressions_with_custom_value_advertiser_currency": { - "type": ["null", "string"] - }, - "estimated_total_cost_for_impressions_with_custom_value_advertiser_currency": { - "type": ["null", "string"] - }, - "evidon_fee_advertiser_currency": { - "type": ["null", "string"] - }, - "evidon_fee_partner_currency": { - "type": ["null", "string"] - }, - "evidon_fee_usd": { - "type": ["null", "string"] - }, - "exits": { - "type": ["null", "string"] - }, - "expansions": { - "type": ["null", "string"] - }, - "first_quartile_audio": { - "type": ["null", "string"] - }, - "first_quartile_views_video": { - "type": ["null", "string"] - }, - "fullscreens_video": { - "type": ["null", "string"] - }, - "general_invalid_traffic_givt_active_view_eligible_impressions": { - "type": ["null", "string"] - }, - "general_invalid_traffic_givt_active_view_measurable_impressions": { - "type": ["null", "string"] - }, - "general_invalid_traffic_givt_active_view_viewable_impressions": { - "type": ["null", "string"] - }, - "general_invalid_traffic_givt_begin_to_render_impressions": { - "type": ["null", "string"] - }, - "general_invalid_traffic_givt_clicks": { - "type": ["null", "string"] - }, - "general_invalid_traffic_givt_impressions": { - "type": ["null", "string"] - }, - "general_invalid_traffic_givt_tracked_ads": { - "type": ["null", "string"] - }, - "gmail_conversions": { - "type": ["null", "string"] - }, - "gmail_post_click_conversions": { - "type": ["null", "string"] - }, - "gmail_post_view_conversions": { - "type": ["null", "string"] - }, - "impression_custom_value_cost": { - "type": ["null", "string"] - }, - "impressions": { - "type": ["null", "string"] - }, - "impressions_with_custom_value": { - "type": ["null", "string"] - }, - "impressions_with_positive_custom_value": { - "type": ["null", "string"] - }, - "integral_ad_science_pre_bid_fee_advertiser_currency": { - "type": ["null", "string"] - }, - "integral_ad_science_pre_bid_fee_partner_currency": { - "type": ["null", "string"] - }, - "integral_ad_science_pre_bid_fee_usd": { - "type": ["null", "string"] - }, - "integral_ad_science_video_fee_advertiser_currency": { - "type": ["null", "string"] - }, - "integral_ad_science_video_fee_partner_currency": { - "type": ["null", "string"] - }, - "integral_ad_science_video_fee_usd": { - "type": ["null", "string"] - }, - "interactive_impressions": { - "type": ["null", "string"] - }, - "invalid_active_view_eligible_impressions": { - "type": ["null", "string"] - }, - "invalid_active_view_measurable_impressions": { - "type": ["null", "string"] - }, - "invalid_active_view_viewable_impressions": { - "type": ["null", "string"] - }, - "invalid_begin_to_render_impressions": { - "type": ["null", "string"] - }, - "invalid_clicks": { - "type": ["null", "string"] - }, - "invalid_impressions": { - "type": ["null", "string"] - }, - "invalid_tracked_ads": { - "type": ["null", "string"] - }, - "media_cost_advertiser_currency": { - "type": ["null", "string"] - }, - "media_cost_partner_currency": { - "type": ["null", "string"] - }, - "media_cost_usd": { - "type": ["null", "string"] - }, - "media_cost_ecpa_advertiser_currency": { - "type": ["null", "string"] - }, - "media_cost_ecpa_partner_currency": { - "type": ["null", "string"] - }, - 
"media_cost_ecpa_pc_advertiser_currency": { - "type": ["null", "string"] - }, - "media_cost_ecpa_pv_advertiser_currency": { - "type": ["null", "string"] - }, - "media_cost_ecpa_usd": { - "type": ["null", "string"] - }, - "media_cost_ecpc_advertiser_currency": { - "type": ["null", "string"] - }, - "media_cost_ecpc_partner_currency": { - "type": ["null", "string"] - }, - "media_cost_ecpc_pc_partner_currency": { - "type": ["null", "string"] - }, - "media_cost_ecpc_pc_usd": { - "type": ["null", "string"] - }, - "media_cost_ecpc_pv_partner_currency": { - "type": ["null", "string"] - }, - "media_cost_ecpc_pv_usd": { - "type": ["null", "string"] - }, - "media_cost_ecpc_usd": { - "type": ["null", "string"] - }, - "media_cost_ecpm_advertiser_currency": { - "type": ["null", "string"] - }, - "media_cost_ecpm_partner_currency": { - "type": ["null", "string"] - }, - "media_cost_ecpm_usd": { - "type": ["null", "string"] - }, - "media_cost_viewable_ecpm_advertiser_currency": { - "type": ["null", "string"] - }, - "media_cost_viewable_ecpm_partner_currency": { - "type": ["null", "string"] - }, - "media_cost_viewable_ecpm_usd": { - "type": ["null", "string"] - }, - "media_fee_1_advertiser_currency": { - "type": ["null", "string"] - }, - "media_fee_1_partner_currency": { - "type": ["null", "string"] - }, - "media_fee_1_usd": { - "type": ["null", "string"] - }, - "media_fee_2_advertiser_currency": { - "type": ["null", "string"] - }, - "media_fee_2_partner_currency": { - "type": ["null", "string"] - }, - "media_fee_2_usd": { - "type": ["null", "string"] - }, - "media_fee_3_advertiser_currency": { - "type": ["null", "string"] - }, - "media_fee_3_partner_currency": { - "type": ["null", "string"] - }, - "media_fee_3_usd": { - "type": ["null", "string"] - }, - "media_fee_4_advertiser_currency": { - "type": ["null", "string"] - }, - "media_fee_4_partner_currency": { - "type": ["null", "string"] - }, - "media_fee_4_usd": { - "type": ["null", "string"] - }, - "media_fee_5_advertiser_currency": { - "type": ["null", "string"] - }, - "media_fee_5_partner_currency": { - "type": ["null", "string"] - }, - "media_fee_5_usd": { - "type": ["null", "string"] - }, - "mediacost_data_fee_advertiser_currency": { - "type": ["null", "string"] - }, - "mediacost_data_fee_partner_currency": { - "type": ["null", "string"] - }, - "mediacost_data_fee_usd": { - "type": ["null", "string"] - }, - "midpoint_audio": { - "type": ["null", "string"] - }, - "midpoint_views_video": { - "type": ["null", "string"] - }, - "moat_video_fee_advertiser_currency": { - "type": ["null", "string"] - }, - "moat_video_fee_partner_currency": { - "type": ["null", "string"] - }, - "moat_video_fee_usd": { - "type": ["null", "string"] - }, - "nielsen_digital_ad_ratings_fee_advertiser_currency": { - "type": ["null", "string"] - }, - "nielsen_digital_ad_ratings_fee_partner_currency": { - "type": ["null", "string"] - }, - "nielsen_digital_ad_ratings_fee_usd": { - "type": ["null", "string"] - }, - "pauses_audio": { - "type": ["null", "string"] - }, - "pauses_video": { - "type": ["null", "string"] - }, - "platform_fee_advertiser_currency": { - "type": ["null", "string"] - }, - "platform_fee_partner_currency": { - "type": ["null", "string"] - }, - "platform_fee_usd": { - "type": ["null", "string"] - }, - "platform_fee_rate": { - "type": ["null", "string"] - }, - "post_click_conversions": { - "type": ["null", "string"] - }, - "post_view_conversions": { - "type": ["null", "string"] - }, - "post_view_conversions__cross_environment": { - "type": ["null", "string"] - }, - 
"premium_fee_advertiser_currency": { - "type": ["null", "string"] - }, - "profit_advertiser_currency": { - "type": ["null", "string"] - }, - "profit_partner_currency": { - "type": ["null", "string"] - }, - "profit_usd": { - "type": ["null", "string"] - }, - "profit_ecpm_advertiser_currency": { - "type": ["null", "string"] - }, - "profit_ecpm_partner_currency": { - "type": ["null", "string"] - }, - "profit_ecpm_usd": { - "type": ["null", "string"] - }, - "profit_margin": { - "type": ["null", "string"] - }, - "profit_viewable_ecpm_advertiser_currency": { - "type": ["null", "string"] - }, - "profit_viewable_ecpm_partner_currency": { - "type": ["null", "string"] - }, - "profit_viewable_ecpm_usd": { - "type": ["null", "string"] - }, - "programmatic_guaranteed_impressions_passed_due_to_frequency": { - "type": ["null", "string"] - }, - "programmatic_guaranteed_savings_re_invested_due_to_frequency_advertiser_currency": { - "type": ["null", "string"] - }, - "refund_billable_cost_advertiser_currency": { - "type": ["null", "string"] - }, - "refund_media_cost_advertiser_currency": { - "type": ["null", "string"] - }, - "refund_platform_fee_advertiser_currency": { - "type": ["null", "string"] - }, - "revenue_advertiser_currency": { - "type": ["null", "string"] - }, - "revenue_partner_currency": { - "type": ["null", "string"] - }, - "revenue_usd": { - "type": ["null", "string"] - }, - "revenue_ecpa_advertiser_currency": { - "type": ["null", "string"] - }, - "revenue_ecpa_partner_currency": { - "type": ["null", "string"] - }, - "revenue_ecpa_pc_advertiser_currency": { - "type": ["null", "string"] - }, - "revenue_ecpa_pc_partner_currency": { - "type": ["null", "string"] - }, - "revenue_ecpa_pc_usd": { - "type": ["null", "string"] - }, - "revenue_ecpa_pv_advertiser_currency": { - "type": ["null", "string"] - }, - "revenue_ecpa_pv_partner_currency": { - "type": ["null", "string"] - }, - "revenue_ecpa_pv_usd": { - "type": ["null", "string"] - }, - "revenue_ecpa_usd": { - "type": ["null", "string"] - }, - "revenue_ecpc_advertiser_currency": { - "type": ["null", "string"] - }, - "revenue_ecpc_partner_currency": { - "type": ["null", "string"] - }, - "revenue_ecpc_usd": { - "type": ["null", "string"] - }, - "revenue_ecpe_advertiser_currency": { - "type": ["null", "string"] - }, - "revenue_ecpe_partner_currency": { - "type": ["null", "string"] - }, - "revenue_ecpe_usd": { - "type": ["null", "string"] - }, - "revenue_ecpm_advertiser_currency": { - "type": ["null", "string"] - }, - "revenue_ecpm_partner_currency": { - "type": ["null", "string"] - }, - "revenue_ecpm_usd": { - "type": ["null", "string"] - }, - "revenue_ecpv_advertiser_currency": { - "type": ["null", "string"] - }, - "revenue_ecpv_partner_currency": { - "type": ["null", "string"] - }, - "revenue_ecpv_usd": { - "type": ["null", "string"] - }, - "revenue_viewable_ecpm_advertiser_currency": { - "type": ["null", "string"] - }, - "revenue_viewable_ecpm_partner_currency": { - "type": ["null", "string"] - }, - "revenue_viewable_ecpm_usd": { - "type": ["null", "string"] - }, - "rich_media_engagements": { - "type": ["null", "string"] - }, - "scrolls": { - "type": ["null", "string"] - }, - "shoplocal_fee_advertiser_currency": { - "type": ["null", "string"] - }, - "shoplocal_fee_partner_currency": { - "type": ["null", "string"] - }, - "shoplocal_fee_usd": { - "type": ["null", "string"] - }, - "skips_video": { - "type": ["null", "string"] - }, - "starts_audio": { - "type": ["null", "string"] - }, - "starts_video": { - "type": ["null", "string"] - }, - 
"stops_audio": { - "type": ["null", "string"] - }, - "teracent_fee_advertiser_currency": { - "type": ["null", "string"] - }, - "teracent_fee_partner_currency": { - "type": ["null", "string"] - }, - "teracent_fee_usd": { - "type": ["null", "string"] - }, - "third_party_ad_server_fee_advertiser_currency": { - "type": ["null", "string"] - }, - "third_party_ad_server_fee_partner_currency": { - "type": ["null", "string"] - }, - "third_party_ad_server_fee_usd": { - "type": ["null", "string"] - }, - "third_quartile_audio": { - "type": ["null", "string"] - }, - "third_quartile_views_video": { - "type": ["null", "string"] - }, - "timers": { - "type": ["null", "string"] - }, - "total_conversions": { - "type": ["null", "string"] - }, - "total_conversions__cross_environment": { - "type": ["null", "string"] - }, - "total_display_time": { - "type": ["null", "string"] - }, - "total_impression_custom_value": { - "type": ["null", "string"] - }, - "total_interaction_time": { - "type": ["null", "string"] - }, - "total_media_cost_advertiser_currency": { - "type": ["null", "string"] - }, - "total_media_cost_partner_currency": { - "type": ["null", "string"] - }, - "total_media_cost_usd": { - "type": ["null", "string"] - }, - "total_media_cost_ecpa_advertiser_currency": { - "type": ["null", "string"] - }, - "total_media_cost_ecpa_partner_currency": { - "type": ["null", "string"] - }, - "total_media_cost_ecpa_pc_advertiser_currency": { - "type": ["null", "string"] - }, - "total_media_cost_ecpa_pc_partner_currency": { - "type": ["null", "string"] - }, - "total_media_cost_ecpa_pc_usd": { - "type": ["null", "string"] - }, - "total_media_cost_ecpa_pv_advertiser_currency": { - "type": ["null", "string"] - }, - "total_media_cost_ecpa_pv_partner_currency": { - "type": ["null", "string"] - }, - "total_media_cost_ecpa_pv_usd": { - "type": ["null", "string"] - }, - "total_media_cost_ecpa_usd": { - "type": ["null", "string"] - }, - "total_media_cost_ecpc_advertiser_currency": { - "type": ["null", "string"] - }, - "total_media_cost_ecpc_partner_currency": { - "type": ["null", "string"] - }, - "total_media_cost_ecpc_usd": { - "type": ["null", "string"] - }, - "total_media_cost_ecpm_advertiser_currency": { - "type": ["null", "string"] - }, - "total_media_cost_ecpm_partner_currency": { - "type": ["null", "string"] - }, - "total_media_cost_ecpm_usd": { - "type": ["null", "string"] - }, - "total_media_cost_viewable_ecpm_advertiser_currency": { - "type": ["null", "string"] - }, - "total_media_cost_viewable_ecpm_partner_currency": { - "type": ["null", "string"] - }, - "total_media_cost_viewable_ecpm_usd": { - "type": ["null", "string"] - }, - "total_video_media_cost_ecpcv_advertiser_currency": { - "type": ["null", "string"] - }, - "total_video_media_cost_ecpcv_partner_currency": { - "type": ["null", "string"] - }, - "total_video_media_cost_ecpcv_usd": { - "type": ["null", "string"] - }, - "tracked_ads": { - "type": ["null", "string"] - }, - "trueview_general_invalid_traffic_givt_views": { - "type": ["null", "string"] - }, - "trueview_invalid_views": { - "type": ["null", "string"] - }, - "trustmetrics_fee_advertiser_currency": { - "type": ["null", "string"] - }, - "trustmetrics_fee_partner_currency": { - "type": ["null", "string"] - }, - "trustmetrics_fee_usd": { - "type": ["null", "string"] - }, - "verifiable_impressions": { - "type": ["null", "string"] - }, - "video_client_cost_ecpcv_advertiser_currency": { - "type": ["null", "string"] - }, - "video_media_cost_ecpcv_advertiser_currency": { - "type": ["null", "string"] - }, - 
"video_media_cost_ecpcv_partner_currency": { - "type": ["null", "string"] - }, - "video_media_cost_ecpcv_usd": { - "type": ["null", "string"] - }, - "vizu_fee_advertiser_currency": { - "type": ["null", "string"] - }, - "vizu_fee_partner_currency": { - "type": ["null", "string"] - }, - "vizu_fee_usd": { - "type": ["null", "string"] - }, - "youtube_view_rate": { - "type": ["null", "string"] - }, - "youtube_views": { - "type": ["null", "string"] - } - } -} diff --git a/airbyte-integrations/connectors/source-dv-360/source_dv_360/schemas/unique_reach_audience.json b/airbyte-integrations/connectors/source-dv-360/source_dv_360/schemas/unique_reach_audience.json deleted file mode 100644 index 0aefa4f72a59..000000000000 --- a/airbyte-integrations/connectors/source-dv-360/source_dv_360/schemas/unique_reach_audience.json +++ /dev/null @@ -1,144 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "advertiser": { - "type": ["null", "string"] - }, - "advertiser_id": { - "type": ["null", "string"] - }, - "age": { - "description": "required", - "type": ["null", "string"] - }, - "country": { - "description": "required", - "type": ["null", "string"] - }, - "gender": { - "description": "required", - "type": ["null", "string"] - }, - "date": { - "type": ["null", "string"] - }, - "insertion_order_id": { - "type": ["null", "string"] - }, - "insertion_order_integration_code": { - "type": ["null", "string"] - }, - "insertion_order": { - "type": ["null", "string"] - }, - "insertion_order_status": { - "type": ["null", "string"] - }, - "line_item_id": { - "type": ["null", "string"] - }, - "line_item_integration_code": { - "type": ["null", "string"] - }, - "line_item": { - "type": ["null", "string"] - }, - "line_item_status": { - "type": ["null", "string"] - }, - "line_item_type": { - "type": ["null", "string"] - }, - "device_type": { - "type": ["null", "string"] - }, - "creative": { - "type": ["null", "string"] - }, - "creative_height": { - "type": ["null", "string"] - }, - "creative_id": { - "type": ["null", "string"] - }, - "creative_size": { - "type": ["null", "string"] - }, - "creative_source": { - "type": ["null", "string"] - }, - "creative_status": { - "type": ["null", "string"] - }, - "creative_type": { - "type": ["null", "string"] - }, - "creative_width": { - "type": ["null", "string"] - }, - "partner_id": { - "type": ["null", "string"] - }, - "partner": { - "type": ["null", "string"] - }, - "month": { - "type": ["null", "string"] - }, - "campaign_id": { - "type": ["null", "string"] - }, - "campaign": { - "type": ["null", "string"] - }, - "pct_composition_impressions": { - "type": ["null", "string"] - }, - "pct_composition_reach": { - "type": ["null", "string"] - }, - "pct_population_reach": { - "type": ["null", "string"] - }, - "clicks": { - "type": ["null", "string"] - }, - "impressions": { - "type": ["null", "string"] - }, - "population": { - "type": ["null", "string"] - }, - "target_rating_points": { - "type": ["null", "string"] - }, - "unique_reach_average_impression_frequency": { - "type": ["null", "string"] - }, - "unique_reach_click_reach": { - "type": ["null", "string"] - }, - "unique_reach_impression_reach": { - "type": ["null", "string"] - }, - "unique_reach_viewable_impression_reach": { - "type": ["null", "string"] - }, - "viewable_target_rating_points": { - "type": ["null", "string"] - }, - "viewable_impressions": { - "type": ["null", "string"] - }, - "pct_viewable_composition_impressions": { - "type": ["null", "string"] - }, - 
"pct_viewable_composition_reach": { - "type": ["null", "string"] - }, - "pct_viewable_population_reach": { - "type": ["null", "string"] - } - } -} diff --git a/airbyte-integrations/connectors/source-dv-360/source_dv_360/source.py b/airbyte-integrations/connectors/source-dv-360/source_dv_360/source.py deleted file mode 100644 index 00af5a0f024a..000000000000 --- a/airbyte-integrations/connectors/source-dv-360/source_dv_360/source.py +++ /dev/null @@ -1,140 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import json -from datetime import datetime -from typing import Any, Generator, List, Mapping, MutableMapping, Tuple - -from airbyte_cdk.logger import AirbyteLogger -from airbyte_cdk.models import AirbyteMessage, AirbyteRecordMessage, AirbyteStateMessage, ConfiguredAirbyteCatalog, SyncMode, Type -from airbyte_cdk.sources import AbstractSource -from airbyte_cdk.sources.streams import Stream -from google.oauth2.credentials import Credentials -from googleapiclient.discovery import build - -from .streams import AudienceComposition, Floodlight, Reach, Standard, UniqueReachAudience - - -class SourceDV360(AbstractSource): - def get_credentials(self, config: json) -> Credentials: - """ - Get the credentials from the config file and returns them as a Credentials object - """ - cred_json = config.get("credentials") - creds = Credentials( - token=cred_json.get("access_token"), - refresh_token=cred_json.get("refresh_token"), - token_uri=cred_json.get("token_uri"), - client_id=cred_json.get("client_id"), - client_secret=cred_json.get("client_secret"), - ) - return creds - - def check_connection(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> Tuple[bool, any]: - """ - Tests if the input configuration can be used to successfully connect to the integration - e.g: if a provided Stripe API token can be used to connect to the Stripe API. - - :param logger: Logging object to display debug/info/error to the logs - (logs will not be accessible via airbyte UI if they are not passed to this logger) - :param config: Json object containing the configuration of this source, content of this json is as specified in - the properties of the spec.json file - - :return: AirbyteConnectionStatus indicating a Success or Failure - """ - try: - dbm_service = build("doubleclickbidmanager", "v1.1", credentials=self.get_credentials(config)) - request = dbm_service.queries().listqueries().execute() - if request: - return True, None - except Exception as err: - return False, f"Unable to connect to Google Ads API with the provided credentials - {repr(err)}" - - def streams(self, config: Mapping[str, Any]) -> List[Stream]: - """ - :param config: The user-provided configuration as specified by the source's spec. - Any stream construction related operation should happen here. - :return: A list of the streams in this source connector. - """ - args = dict( - credentials=self.get_credentials(config), - partner_id=config.get("partner_id"), - start_date=config.get("start_date"), - end_date=config.get("end_date"), - filters=config.get("filters"), - ) - - streams = [ - Reach(**args), - Standard(**args), - AudienceComposition(**args), - Floodlight(**args), - UniqueReachAudience(**args), - ] - return streams - - def read( - self, logger: AirbyteLogger, config: json, catalog: ConfiguredAirbyteCatalog, state: MutableMapping[str, Any] - ) -> Generator[AirbyteMessage, None, None]: - """ - Returns a generator of the AirbyteMessages generated by reading the source with the given configuration, - catalog, and state. 
- - :param logger: Logging object to display debug/info/error to the logs - (logs will not be accessible via airbyte UI if they are not passed to this logger) - :param config: Json object containing the configuration of this source, content of this json is as specified in - the properties of the spec.json file - :param catalog: The input catalog is a ConfiguredAirbyteCatalog which is almost the same as the AirbyteCatalog - returned by discover(), but in addition it has been configured in the UI. For each particular stream and field, - extra modifications may have been applied, such as: filtering streams and/or columns out, renaming some entities, etc. - :param state: When Airbyte reads data from a source, it might need to keep a checkpoint cursor to resume - replication in the future from that saved checkpoint. - This is the object that is provided with state from previous runs and avoids replicating the entire set of - data every time. - - :return: A generator that produces a stream of AirbyteRecordMessage contained in AirbyteMessage objects. - """ - stream_instances = {s.name: s for s in self.streams(config)} - for configured_stream in catalog.streams: - stream_name = configured_stream.stream.name - stream_instance = stream_instances.get(stream_name) - if not stream_instance: - raise KeyError( - f"The requested stream {stream_name} was not found in the source." f" Available streams: {stream_instances.keys()}" - ) - stream_state = state.get(stream_name, {}) - stream_instance.state = stream_state - logger.info(f"Syncing {stream_name} stream") - logger.info(f"Setting state of {stream_name} stream to {stream_state}") - yield AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(data=state)) - try: - config_catalog_fields = configured_stream.stream.json_schema.get("properties").keys() - slices = stream_instance.stream_slices( - cursor_field=configured_stream.cursor_field, - sync_mode=SyncMode.incremental, - stream_state=stream_state, - ) - for _slice in slices: - data = stream_instance.read_records( - sync_mode=SyncMode.incremental, - catalog_fields=config_catalog_fields, - stream_slice=_slice, - stream_state=stream_state, - cursor_field=configured_stream.cursor_field or None, - ) - - for row in data: - yield AirbyteMessage( - type=Type.RECORD, - record=AirbyteRecordMessage(stream=stream_name, data=row, emitted_at=int(datetime.now().timestamp()) * 1000), - ) - - yield self._checkpoint_state(stream_instance, stream_state, state) - - logger.info(f"Finished syncing {stream_name} stream") - except Exception as e: - logger.error("Failed to read the data: " + repr(e)) diff --git a/airbyte-integrations/connectors/source-dv-360/source_dv_360/spec.json b/airbyte-integrations/connectors/source-dv-360/source_dv_360/spec.json deleted file mode 100644 index 8b3e147b0883..000000000000 --- a/airbyte-integrations/connectors/source-dv-360/source_dv_360/spec.json +++ /dev/null @@ -1,74 +0,0 @@ -{ - "documentationUrl": "https://docsurl.com", - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Display & Video 360 Spec", - "type": "object", - "required": ["credentials", "partner_id", "start_date"], - "additionalProperties": true, - "properties": { - "credentials": { - "type": "object", - "description": "OAuth2 credentials", - "order": 0, - "required": [ - "access_token",
- "refresh_token", - "token_uri", - "client_id", - "client_secret" - ], - "properties": { - "access_token": { - "type": "string", - "description": "Access token", - "airbyte_secret": true - }, - "refresh_token": { - "type": "string", - "description": "Refresh token", - "airbyte_secret": true - }, - "token_uri": { - "type": "string", - "description": "Token URI", - "airbyte_secret": true - }, - "client_id": { - "type": "string", - "description": "Client ID", - "airbyte_secret": true - }, - "client_secret": { - "type": "string", - "description": "Client secret", - "airbyte_secret": true - } - } - }, - "partner_id": { - "type": "integer", - "description": "Partner ID", - "order": 1 - }, - "start_date": { - "type": "string", - "description": "UTC date and time in the format 2017-01-25. Any data before this date will not be replicated", - "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}$", - "order": 2 - }, - "end_date": { - "type": "string", - "description": "UTC date and time in the format 2017-01-25. Any data after this date will not be replicated.", - "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}$", - "order": 3 - }, - "filters": { - "type": "array", - "description": "filters for the dimensions. each filter object had 2 keys: 'type' for the name of the dimension to be used as. and 'value' for the value of the filter", - "default": [], - "order": 4 - } - } - } -} diff --git a/airbyte-integrations/connectors/source-dv-360/source_dv_360/streams.py b/airbyte-integrations/connectors/source-dv-360/source_dv_360/streams.py deleted file mode 100644 index 152889c930e2..000000000000 --- a/airbyte-integrations/connectors/source-dv-360/source_dv_360/streams.py +++ /dev/null @@ -1,398 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import csv -import io -import json -from abc import ABC -from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple - -import pendulum -import requests -from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources.streams import Stream -from google.oauth2.credentials import Credentials -from googleapiclient.discovery import build - -from .fields import API_REPORT_BUILDER_MAPPING, sanitize - -# Mapping between the schema names and the report types in the report builder -REPORT_TYPE_MAPPING = { - "audience_composition": "TYPE_AUDIENCE_COMPOSITION", - "reach": "TYPE_REACH_AND_FREQUENCY", - "floodlight": "FLOODLIGHT", - "standard": "TYPE_GENERAL", - "unique_reach_audience": "TYPE_REACH_AUDIENCE", -} - - -def chunk_date_range( - start_date: str, - field: str, - end_date: str = None, - range_days: int = None, -) -> Iterable[Mapping[str, any]]: - """ - Passing optional parameter end_date for testing - Returns a list of the beginning and ending timestamps of each `range_days` between the start date and now. 
- The return value is a list of dicts {'start_date': str, 'end_date': str} that can be used directly as stream slices. - """ - intervals = [] - end_date = pendulum.parse(end_date) if end_date else pendulum.yesterday() - start_date = pendulum.parse(start_date) - - # if the state is abnormal (start after end), clamp start_date so a valid range is still returned - if start_date > end_date: - start_date = end_date - - while start_date < end_date: - intervals.append( - { - "start_date": start_date.to_date_string(), - "end_date": end_date.to_date_string(), - } - ) - start_date = start_date.add(days=range_days) - return intervals - - -class DBM: - QUERY_TEMPLATE_PATH = "source_dv_360/queries/query_template.json" # Template for creating the query object - DBM_SCOPE = "doubleclickbidmanager" # Scope required to fetch data - - def __init__(self, credentials: Credentials, partner_id: str, scope: str = DBM_SCOPE, version: str = "v1.1"): - self.service = build(scope, version, credentials=credentials) # build a service with scope dbm - self.partner_id = partner_id - - @staticmethod - def get_date_params_ms(start_date: str, end_date: str = None) -> Tuple[str, str]: - """ - Returns `start_date` and `end_date` in milliseconds - """ - start_date = pendulum.parse(start_date) - # if end_date is null, take date until yesterday - end_date = pendulum.parse(end_date) if end_date else pendulum.yesterday() - - # check if start date is after end date - if start_date > end_date: - start_date = end_date - - start_date_ms = str(int(start_date.timestamp() * 1000)) - end_date_ms = str(int(end_date.timestamp() * 1000)) - - return start_date_ms, end_date_ms - - @staticmethod - def get_fields_from_schema(schema: Mapping[str, Any], catalog_fields: List[str]) -> List[str]: - """ - Get the list of fields in a given schema that are also selected in the configured catalog - :param schema: the stream's JSON schema - :param catalog_fields: the list of fields selected in the configured catalog - - :return: A list of fields - """ - schema_fields = schema.get("properties").keys() - fields = [field for field in schema_fields if field in catalog_fields] - return fields - - @staticmethod - def convert_fields(fields: List[str]) -> List[str]: - """ - Convert a list of fields into the API naming - :param fields: the list of fields to be converted - - :return: A list of converted fields - """ - return [API_REPORT_BUILDER_MAPPING[key] for key in fields] - - @staticmethod - def get_dimensions_from_fields(fields: List[str]) -> List[str]: - """ - Get a list of dimensions from a list of fields. Dimensions start with FILTER_ - :param fields: A list of fields from the stream - - :return: A list of dimensions in the naming form of the API - """ - conv_fields = DBM.convert_fields(fields) - dimensions = [field for field in conv_fields if field.startswith("FILTER")] - return dimensions - - @staticmethod - def get_metrics_from_fields(fields: List[str]) -> List[str]: - """ - Get a list of metrics from a list of fields.
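For clarity, here is a usage sketch of `chunk_date_range` as implemented above (it assumes the `source_dv_360` package and `pendulum` are importable; note that the `field` argument is accepted but unused, and that every slice keeps the overall `end_date` while only `start_date` advances by `range_days`):

```python
# Hedged usage sketch of chunk_date_range; the import path assumes the
# source_dv_360 package shown above is installed.
from source_dv_360.streams import chunk_date_range

slices = chunk_date_range(start_date="2022-03-01", field="date", end_date="2022-03-08", range_days=3)
print(slices)
# [{'start_date': '2022-03-01', 'end_date': '2022-03-08'},
#  {'start_date': '2022-03-04', 'end_date': '2022-03-08'},
#  {'start_date': '2022-03-07', 'end_date': '2022-03-08'}]
```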
Metrics start with METRIC_ - :param fields: A list of fields from the stream - - :return: A list of metrics in the naming form of the API - """ - conv_fields = DBM.convert_fields(fields) - metrics = [field for field in conv_fields if field.startswith("METRIC")] - return metrics - - @staticmethod - def set_partner_filter(query: Mapping[str, Any], partner_id: str): - """ - set the partner id filter to the partner id in the config - :param query: the query object where the filter is to be set - """ - filters = query.get("params").get("filters") - if filters: - partner_filter_index = next( - (index for (index, filter) in enumerate(filters) if filter["type"] == "FILTER_PARTNER"), None - ) # get the index of the partner filter - if partner_filter_index is not None: - query["params"]["filters"][partner_filter_index]["value"] = partner_id # set filter to the partner id in the config - - @staticmethod - def create_query_object( - report_name: str, - dimensions: List[str], - metrics: List[str], - partner_id: str, - start_date: str, - end_date: str, - filters: List[dict] = [], - ) -> Mapping[str, Any]: - """ - Create a query object using the query template and a list of parameter for the query - :param report_name: Name of the report - :param dimensions: List of dimensions - :param metrics: list of metrics - :param start_date: Start date of the report, in the same form of the date in the config, as specified in the spec - :param end_date: End date of the report, in the same form of the date in the config, as specified in the spec - :param filters: additional filters to be set - - :return the query object created according to the template - """ - with open(DBM.QUERY_TEMPLATE_PATH, "r") as template: - query_body = json.loads(template.read()) - - # get dates in ms - start_date_ms, end_date_ms = DBM.get_date_params_ms(start_date, end_date) - - DBM.set_partner_filter(query_body, partner_id) # Set partner Id in the filter - query_body["metadata"]["title"] = report_name - query_body["params"]["type"] = REPORT_TYPE_MAPPING[report_name] # get the report type from the mapping - query_body["params"]["groupBys"] = dimensions # dimensions are put in the groupBy section of the query - query_body["params"]["filters"].extend(filters) # Add additional filters if needed - query_body["params"]["metrics"] = metrics - query_body["reportDataStartTimeMs"] = start_date_ms - query_body["reportDataEndTimeMs"] = end_date_ms - return query_body - - def convert_schema_into_query( - self, - schema: Mapping[str, Any], - report_name: str, - catalog_fields: List[str], - partner_id: str, - filters: List[dict], - start_date: str, - end_date: str, - ) -> str: - """ - Create and run a query from the given schema - :param report_name: Name of the report - :param catalog_fields: List of fields which names are sanitized - :param start_date: Start date of the report, in the same form of the date in the config, as specified in the spec - :param end_date: End date of the report, in the same form of the date in the config, as specified in the spec - :param filters: additional filters to be set - - :return the query object created according to the template - """ - fields = self.get_fields_from_schema(schema, catalog_fields) - query = self.create_query_object( - report_name=report_name, - dimensions=self.get_dimensions_from_fields(fields), - metrics=self.get_metrics_from_fields(fields), - start_date=start_date, - end_date=end_date, - partner_id=partner_id, - filters=filters or [], - ) - create_query = 
self.service.queries().createquery(body=query).execute() # Create query - get_query = ( - self.service.queries().getquery(queryId=create_query.get("queryId")).execute() - ) # get the query, which will include the report URL - return get_query - - -class DBMStream(Stream, ABC): - """ - Base stream class - """ - - primary_key = None - - def __init__(self, credentials: Credentials, partner_id: str, filters: List[dict], start_date: str, end_date: str = None): - self.dbm = DBM(credentials=credentials, partner_id=partner_id) - self._start_date = start_date - self._end_date = end_date - self._partner_id = partner_id - self._filters = filters - - def get_query(self, catalog_fields: List[str], stream_slice: Mapping[str, Any]) -> Iterable[Mapping]: - """ - Create and run a query from the datastream schema and parameters, and a list of fields provided in the configured catalog - :param catalog_fields: A list of fields provided in the configured catalog - - :return the created query - """ - query = self.dbm.convert_schema_into_query( - schema=self.get_json_schema(), - catalog_fields=catalog_fields, - filters=self._filters, - report_name=self.name, - start_date=self._start_date, - end_date=self._end_date, - partner_id=self._partner_id, - ) - return query - - def read_records(self, catalog_fields: List[str], stream_slice: Mapping[str, Any] = None, sync_mode=None): - """ - Get the report from the URL specified in the created query. The report is in CSV form, with - additional metadata below the data that needs to be removed. - :param catalog_fields: A list of fields provided in the configured catalog to create the query - - :return a generator of dict rows from the file - """ - query = self.get_query(catalog_fields=catalog_fields, stream_slice=stream_slice) # create and run the query - report_url = query["metadata"]["googleCloudStoragePathForLatestReport"] # take the URL of the generated report - with io.StringIO(requests.get(report_url).text) as csv_response: - header = csv_response.readline().split(",") # get the header of the file - header = [sanitize(field) for field in header] # sanitize the field names - data = self.buffer_reader(csv_response) # remove the unnecessary rows that do not have data - reader = csv.DictReader(data, fieldnames=header) # convert csv data into dict rows to be yielded by the generator - report_type = query["params"]["type"] - list_reader = list(reader) - nb_rows = len(list_reader) - for index, row in enumerate(list_reader): - # In the case of the standard report, we get an additional summary row, so we need to exclude it.
- if not (report_type == "TYPE_GENERAL" and index > nb_rows - 2): - yield row - - def buffer_reader(self, buffer: io.StringIO): - """ - Yield all lines from a file text buffer until the empty line is reached - - :return a generator of dict rows from the file - """ - for line in buffer.readlines(): - if line != "\n": # NB: the last non empty line contains the sum of the metrics in the data - yield line - else: - break - - -class DBMIncrementalStream(DBMStream, ABC): - cursor_field = "date" - primary_key = None - range_days = 30 # range of stream slice - - def __init__(self, credentials: Credentials, partner_id: str, filters: List[dict], start_date: str, end_date: str = None): - super().__init__(credentials, partner_id, filters, start_date, end_date) - - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: - """ - Update stream state from latest record - """ - current_stream_state = current_stream_state or {} - record_value = latest_record[self.cursor_field] - state_value = current_stream_state.get(self.cursor_field) or record_value - max_cursor = max(pendulum.parse(state_value), pendulum.parse(record_value)) - toreturn = { - self.cursor_field: max_cursor.to_date_string(), - } - return toreturn - - def stream_slices(self, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[Mapping[str, any]]]: - """ - Slice the stream by date periods. - """ - stream_state = stream_state or {} - start_date = stream_state.get(self.cursor_field) or self._start_date - date_chunks = chunk_date_range( - start_date=start_date, - end_date=self._end_date, - field=self.cursor_field, - range_days=self.range_days, - ) - for chunk in date_chunks: - yield chunk - - def read_records( - self, - sync_mode: SyncMode, - catalog_fields: List[str], - cursor_field: List[str] = None, - stream_slice: Mapping[str, Any] = None, - stream_state: Mapping[str, Any] = None, - ) -> Iterable[Mapping[str, Any]]: - """ - This method is overridden to update `start_date` key in the `stream_slice` with the latest read record's cursor value. 
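To make the incremental bookkeeping above concrete, here is a minimal standalone sketch of the same max-of-cursors rule that `get_updated_state` applies (assuming only `pendulum`; `updated_state` is an illustrative name, not the connector's API):

```python
import pendulum

def updated_state(current_state: dict, latest_record: dict, cursor_field: str = "date") -> dict:
    # Keep whichever is later: the stored cursor or the latest record's date.
    record_value = latest_record[cursor_field]
    state_value = current_state.get(cursor_field) or record_value
    max_cursor = max(pendulum.parse(state_value), pendulum.parse(record_value))
    return {cursor_field: max_cursor.to_date_string()}

print(updated_state({}, {"date": "2022-03-05"}))                      # {'date': '2022-03-05'}
print(updated_state({"date": "2022-03-07"}, {"date": "2022-03-05"}))  # {'date': '2022-03-07'}
```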
- """ - records = super().read_records(catalog_fields=catalog_fields, sync_mode=sync_mode, stream_slice=stream_slice) - for record in records: - self.state = self.get_updated_state(self.state, record) - yield record - - def get_query(self, catalog_fields: List[str], stream_slice: Mapping[str, Any]) -> Iterable[Mapping]: - """ - Create and run a query from the datastream schema and parameters, and a list of fields provided in the configured catalog - :param catalog_fields: A list of fields provided in the configured catalog - - :return the created query - """ - query = self.dbm.convert_schema_into_query( - schema=self.get_json_schema(), - catalog_fields=catalog_fields, - filters=self._filters, - report_name=self.name, - start_date=stream_slice.get("start_date"), - end_date=stream_slice.get("end_date"), - partner_id=self._partner_id, - ) - return query - - -class AudienceComposition(DBMIncrementalStream): - """ - Audience Composition stream - """ - - primary_key = None - - -class Floodlight(DBMIncrementalStream): - """ - Floodlight stream - """ - - primary_key = None - - -class Standard(DBMIncrementalStream): - """ - Standard stream - """ - - primary_key = None - - -class UniqueReachAudience(DBMIncrementalStream): - """ - Unique Reach Audience stream - """ - - primary_key = None - - -class Reach(DBMIncrementalStream): - """ - Reach stream - """ - - primary_key = None diff --git a/airbyte-integrations/connectors/source-dv-360/unit_tests/conftest.py b/airbyte-integrations/connectors/source-dv-360/unit_tests/conftest.py deleted file mode 100644 index 044e962b5bc4..000000000000 --- a/airbyte-integrations/connectors/source-dv-360/unit_tests/conftest.py +++ /dev/null @@ -1,13 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import json - -import pytest - - -@pytest.fixture(scope="session", name="config") -def config_fixture(): - with open("secrets/config.json", "r") as config_file: - return json.load(config_file) diff --git a/airbyte-integrations/connectors/source-dv-360/unit_tests/test_fields.py b/airbyte-integrations/connectors/source-dv-360/unit_tests/test_fields.py deleted file mode 100644 index edc674642348..000000000000 --- a/airbyte-integrations/connectors/source-dv-360/unit_tests/test_fields.py +++ /dev/null @@ -1,53 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - -from source_dv_360.fields import sanitize - - -def test_sanitize_with_pct(): - string = "% tesT string:" - sanitized_string = sanitize(string) - expected_result = "pct_test_string" - - assert sanitized_string == expected_result - - -def test_sanitize_trailing_space(): - string = "% tesT string: " - sanitized_string = sanitize(string) - expected_result = "pct_test_string" - - assert sanitized_string == expected_result - - -def test_sanitize_leading_space(): - string = " % tesT string:" - sanitized_string = sanitize(string) - expected_result = "pct_test_string" - - assert sanitized_string == expected_result - - -def test_sanitize_punctuation(): - string = "% tesT string:,;()#$" - sanitized_string = sanitize(string) - expected_result = "pct_test_string" - - assert sanitized_string == expected_result - - -def test_sanitize_slash(): - string = "% tesT string:/test" - sanitized_string = sanitize(string) - expected_result = "pct_test_string_test" - - assert sanitized_string == expected_result - - -def test_sanitize_and(): - string = "% tesT string & test" - sanitized_string = sanitize(string) - expected_result = "pct_test_string_and_test" - - assert sanitized_string == expected_result diff --git a/airbyte-integrations/connectors/source-dv-360/unit_tests/test_source.py b/airbyte-integrations/connectors/source-dv-360/unit_tests/test_source.py deleted file mode 100644 index c5844a6fc92e..000000000000 --- a/airbyte-integrations/connectors/source-dv-360/unit_tests/test_source.py +++ /dev/null @@ -1,39 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from source_dv_360.source import SourceDV360 - -SAMPLE_CONFIG = { - "credentials": { - "access_token": "access_token", - "refresh_token": "refresh_token", - "token_uri": "uri", - "client_id": "client_id", - "client_secret": "client_secret", - }, - "start_date": "2022-03-01", - "end_date": "2022-03-08", - "partner_id": 123, - "filters": [], -} - - -EXPECTED_CRED = { - "access_token": "access_token", - "refresh_token": "refresh_token", - "token_uri": "uri", - "client_id": "client_id", - "client_secret": "client_secret", -} - - -def test_get_credentials(): - client = SourceDV360() - credentials = client.get_credentials(SAMPLE_CONFIG) - - assert credentials.token == "access_token" - assert credentials.refresh_token == "refresh_token" - assert credentials.token_uri == "uri" - assert credentials.client_id == "client_id" - assert credentials.client_secret == "client_secret" diff --git a/airbyte-integrations/connectors/source-dv-360/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-dv-360/unit_tests/test_streams.py deleted file mode 100644 index 52210ffa6f6c..000000000000 --- a/airbyte-integrations/connectors/source-dv-360/unit_tests/test_streams.py +++ /dev/null @@ -1,234 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
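The tests above pin down `sanitize` without showing its body: lowercase and strip the input, spell `%` as `pct` and `&` as `and`, treat slashes as separators, drop other punctuation, and join words with underscores. A hedged reimplementation that satisfies these tests (`sanitize_sketch` is illustrative; it is not necessarily the connector's exact code in `fields.py`):

```python
import re

def sanitize_sketch(field: str) -> str:
    # Normalize case and whitespace, then spell out the symbols the tests expect.
    s = field.strip().lower().replace("%", "pct").replace("&", "and").replace("/", " ")
    s = re.sub(r"[^a-z0-9 ]", "", s)  # drop punctuation such as :,;()#$
    return re.sub(r"\s+", "_", s.strip())

assert sanitize_sketch("% tesT string:") == "pct_test_string"
assert sanitize_sketch("% tesT string:/test") == "pct_test_string_test"
assert sanitize_sketch("% tesT string & test") == "pct_test_string_and_test"
```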
-# - -import json - -from source_dv_360.streams import DBM - - -def test_convert_fields(): - fields = ["app_url_id", "cm_placement_id", "pct_clicks_leading_to_conversions", "region_id", "date"] - converted_fields = DBM.convert_fields(fields) - expected_fields = [ - "FILTER_SITE_ID", - "FILTER_CM_PLACEMENT_ID", - "METRIC_CLICK_TO_POST_CLICK_CONVERSION_RATE", - "FILTER_REGION", - "FILTER_DATE", - ] - - assert converted_fields == expected_fields - - -with open("source_dv_360/schemas/reach.json") as FILE: - SCHEMA = json.loads(FILE.read()) - -CATALOG_FIELDS = [ - "advertiser", - "advertiser_id", - "advertiser_integration_code", - "advertiser_status", - "app_url", - "campaign", - "campaign_id", - "creative", - "creative_id", - "creative_source", - "date", - "insertion_order", - "insertion_order_id", - "insertion_order_integration_code", - "insertion_order_status", - "inventory_source", - "line_item", - "line_item_id", - "line_item_status", - "partner", - "partner_id", - "partner_status", - "targeted_data_providers", - "cookie_reach_average_impression_frequency", - "cookie_reach_impression_reach", - "unique_reach_average_impression_frequency", - "unique_reach_click_reach", - "unique_reach_impression_reach", -] - - -def test_get_fields_from_schema(): - fields = DBM.get_fields_from_schema(SCHEMA, CATALOG_FIELDS) - expected_fields = [ - "advertiser", - "advertiser_id", - "advertiser_integration_code", - "advertiser_status", - "app_url", - "campaign", - "campaign_id", - "creative", - "creative_id", - "creative_source", - "date", - "insertion_order", - "insertion_order_id", - "insertion_order_integration_code", - "insertion_order_status", - "inventory_source", - "line_item", - "line_item_id", - "line_item_status", - "partner", - "partner_id", - "partner_status", - "targeted_data_providers", - "cookie_reach_average_impression_frequency", - "cookie_reach_impression_reach", - "unique_reach_average_impression_frequency", - "unique_reach_click_reach", - "unique_reach_impression_reach", - ] - assert expected_fields == fields - - -def test_get_dimensions_from_fields(): - fields = DBM.get_fields_from_schema(SCHEMA, CATALOG_FIELDS) - diemsions = DBM.get_dimensions_from_fields(fields) - expected_diemsions = [ - "FILTER_ADVERTISER_NAME", - "FILTER_ADVERTISER", - "FILTER_ADVERTISER_INTEGRATION_CODE", - "FILTER_ADVERTISER_INTEGRATION_STATUS", - "FILTER_APP_URL", - "FILTER_MEDIA_PLAN_NAME", - "FILTER_MEDIA_PLAN", - "FILTER_CREATIVE", - "FILTER_CREATIVE_ID", - "FILTER_CREATIVE_SOURCE", - "FILTER_DATE", - "FILTER_INSERTION_ORDER_NAME", - "FILTER_INSERTION_ORDER", - "FILTER_INSERTION_ORDER_INTEGRATION_CODE", - "FILTER_INSERTION_ORDER_STATUS", - "FILTER_INVENTORY_SOURCE_NAME", - "FILTER_LINE_ITEM_NAME", - "FILTER_LINE_ITEM", - "FILTER_LINE_ITEM_STATUS", - "FILTER_PARTNER_NAME", - "FILTER_PARTNER", - "FILTER_PARTNER_STATUS", - "FILTER_TARGETED_DATA_PROVIDERS", - ] - assert expected_diemsions == diemsions - - -def test_get_metrics_from_fields(): - fields = DBM.get_fields_from_schema(SCHEMA, CATALOG_FIELDS) - metrics = DBM.get_metrics_from_fields(fields) - expected_metrics = [ - "METRIC_COOKIE_REACH_AVERAGE_IMPRESSION_FREQUENCY", - "METRIC_COOKIE_REACH_IMPRESSION_REACH", - "METRIC_UNIQUE_REACH_AVERAGE_IMPRESSION_FREQUENCY", - "METRIC_UNIQUE_REACH_CLICK_REACH", - "METRIC_UNIQUE_REACH_IMPRESSION_REACH", - ] - assert expected_metrics == metrics - - -EXPECTED_QUERY = { - "kind": "doubleclickbidmanager#query", - "queryId": "0", - "metadata": { - "title": "reach", - "dataRange": "CUSTOM_DATES", - "format": "CSV", - "running": 
False, - "googleCloudStoragePathForLatestReport": "", - "latestReportRunTimeMs": "0", - "sendNotification": False, - }, - "params": { - "type": "TYPE_REACH_AND_FREQUENCY", - "groupBys": [ - "FILTER_ADVERTISER_NAME", - "FILTER_ADVERTISER", - "FILTER_ADVERTISER_INTEGRATION_CODE", - "FILTER_ADVERTISER_INTEGRATION_STATUS", - "FILTER_APP_URL", - "FILTER_MEDIA_PLAN_NAME", - "FILTER_MEDIA_PLAN", - "FILTER_CREATIVE", - "FILTER_CREATIVE_ID", - "FILTER_CREATIVE_SOURCE", - "FILTER_DATE", - "FILTER_INSERTION_ORDER_NAME", - "FILTER_INSERTION_ORDER", - "FILTER_INSERTION_ORDER_INTEGRATION_CODE", - "FILTER_INSERTION_ORDER_STATUS", - "FILTER_INVENTORY_SOURCE_NAME", - "FILTER_LINE_ITEM_NAME", - "FILTER_LINE_ITEM", - "FILTER_LINE_ITEM_STATUS", - "FILTER_PARTNER_NAME", - "FILTER_PARTNER", - "FILTER_PARTNER_STATUS", - "FILTER_TARGETED_DATA_PROVIDERS", - ], - "filters": [{"type": "FILTER_PARTNER", "value": "123"}, {"type": "FILTER_LINE_ITEM", "value": 55}], - "metrics": [ - "METRIC_COOKIE_REACH_AVERAGE_IMPRESSION_FREQUENCY", - "METRIC_COOKIE_REACH_IMPRESSION_REACH", - "METRIC_UNIQUE_REACH_AVERAGE_IMPRESSION_FREQUENCY", - "METRIC_UNIQUE_REACH_CLICK_REACH", - "METRIC_UNIQUE_REACH_IMPRESSION_REACH", - ], - "options": {"includeOnlyTargetedUserLists": False}, - }, - "schedule": {"frequency": "ONE_TIME"}, - "reportDataStartTimeMs": "1646092800000", - "reportDataEndTimeMs": "1646697600000", - "timezoneCode": "UTC", -} - - -def test_create_query_object(): - query = DBM.create_query_object( - report_name="reach", - dimensions=[ - "FILTER_ADVERTISER_NAME", - "FILTER_ADVERTISER", - "FILTER_ADVERTISER_INTEGRATION_CODE", - "FILTER_ADVERTISER_INTEGRATION_STATUS", - "FILTER_APP_URL", - "FILTER_MEDIA_PLAN_NAME", - "FILTER_MEDIA_PLAN", - "FILTER_CREATIVE", - "FILTER_CREATIVE_ID", - "FILTER_CREATIVE_SOURCE", - "FILTER_DATE", - "FILTER_INSERTION_ORDER_NAME", - "FILTER_INSERTION_ORDER", - "FILTER_INSERTION_ORDER_INTEGRATION_CODE", - "FILTER_INSERTION_ORDER_STATUS", - "FILTER_INVENTORY_SOURCE_NAME", - "FILTER_LINE_ITEM_NAME", - "FILTER_LINE_ITEM", - "FILTER_LINE_ITEM_STATUS", - "FILTER_PARTNER_NAME", - "FILTER_PARTNER", - "FILTER_PARTNER_STATUS", - "FILTER_TARGETED_DATA_PROVIDERS", - ], - metrics=[ - "METRIC_COOKIE_REACH_AVERAGE_IMPRESSION_FREQUENCY", - "METRIC_COOKIE_REACH_IMPRESSION_REACH", - "METRIC_UNIQUE_REACH_AVERAGE_IMPRESSION_FREQUENCY", - "METRIC_UNIQUE_REACH_CLICK_REACH", - "METRIC_UNIQUE_REACH_IMPRESSION_REACH", - ], - start_date="2022-03-01", - end_date="2022-03-08", - partner_id="123", - filters=[{"type": "FILTER_LINE_ITEM", "value": 55}], - ) - assert query == EXPECTED_QUERY diff --git a/airbyte-integrations/connectors/source-dv-360/unit_tests/unit_test.py b/airbyte-integrations/connectors/source-dv-360/unit_tests/unit_test.py deleted file mode 100644 index 219ae0142c72..000000000000 --- a/airbyte-integrations/connectors/source-dv-360/unit_tests/unit_test.py +++ /dev/null @@ -1,7 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
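As a sanity check on the millisecond bounds hard-coded in `EXPECTED_QUERY` above, the same conversion that `get_date_params_ms` performs can be reproduced directly (assuming `pendulum`, which parses bare dates as UTC midnight):

```python
import pendulum

# The config dates from test_create_query_object: 2022-03-01 and 2022-03-08 (UTC midnight).
start_ms = str(int(pendulum.parse("2022-03-01").timestamp() * 1000))
end_ms = str(int(pendulum.parse("2022-03-08").timestamp() * 1000))
assert start_ms == "1646092800000"
assert end_ms == "1646697600000"
```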
-# - - -def test_example_method(): - assert True diff --git a/airbyte-integrations/connectors/source-kustomer-singer/.dockerignore b/airbyte-integrations/connectors/source-kustomer-singer/.dockerignore deleted file mode 100644 index 86f906d3b809..000000000000 --- a/airbyte-integrations/connectors/source-kustomer-singer/.dockerignore +++ /dev/null @@ -1,7 +0,0 @@ -* -!Dockerfile -!Dockerfile.test -!main.py -!source_kustomer_singer -!setup.py -!secrets diff --git a/airbyte-integrations/connectors/source-kustomer-singer/.gitignore b/airbyte-integrations/connectors/source-kustomer-singer/.gitignore deleted file mode 100644 index 29fffc6a50cc..000000000000 --- a/airbyte-integrations/connectors/source-kustomer-singer/.gitignore +++ /dev/null @@ -1 +0,0 @@ -NEW_SOURCE_CHECKLIST.md diff --git a/airbyte-integrations/connectors/source-kustomer-singer/Dockerfile b/airbyte-integrations/connectors/source-kustomer-singer/Dockerfile deleted file mode 100644 index 788587c5482f..000000000000 --- a/airbyte-integrations/connectors/source-kustomer-singer/Dockerfile +++ /dev/null @@ -1,40 +0,0 @@ -FROM python:3.9.11-alpine3.15 as base - -# build and load all requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -# upgrade pip to the latest version -RUN apk --no-cache upgrade \ - && pip install --upgrade pip \ - && apk --no-cache add tzdata \ - && apk --no-cache add git \ - && apk --no-cache add build-base - - -COPY setup.py ./ -# install necessary packages to a temporary folder -RUN pip install --prefix=/install . - -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# bash is installed for more convenient debugging. -RUN apk --no-cache add bash - -# copy payload code only -COPY main.py ./ -COPY source_kustomer_singer ./source_kustomer_singer - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=0.1.2 -LABEL io.airbyte.name=airbyte/source-kustomer-singer diff --git a/airbyte-integrations/connectors/source-kustomer-singer/README.md b/airbyte-integrations/connectors/source-kustomer-singer/README.md deleted file mode 100644 index b0d3d66e7960..000000000000 --- a/airbyte-integrations/connectors/source-kustomer-singer/README.md +++ /dev/null @@ -1,99 +0,0 @@ -# Source Kustomer Singer - -This is the repository for the Kustomer source connector, based on a Singer tap. -For information about how to use this connector within Airbyte, see [the User Documentation](https://docs.airbyte.io/integrations/sources/kustomer). - -## Local development - -### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** - -#### Minimum Python version required `= 3.7.0` - -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` - -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt -``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. 
- -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. - -#### Create credentials -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/kustomer) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_kustomer_singer/spec.json` file. -Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. - -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source kustomer test creds` -and place them into `secrets/config.json`. - -### Locally running the connector -``` -python main.py spec -python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json -``` - -### Locally running the connector docker image - - -#### Build -**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** -```bash -airbyte-ci connectors --name=source-kustomer-singer build -``` - -An image will be built with the tag `airbyte/source-kustomer-singer:dev`. - -**Via `docker build`:** -```bash -docker build -t airbyte/source-kustomer-singer:dev . -``` - -#### Run -Then run any of the connector commands as follows: -``` -docker run --rm airbyte/source-kustomer-singer:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-kustomer-singer:dev check --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-kustomer-singer:dev discover --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-kustomer-singer:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` - -## Testing -You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): -```bash -airbyte-ci connectors --name=source-kustomer-singer test -``` - -### Customizing acceptance Tests -Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. -If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. - -## Dependency Management -All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. -We split dependencies between two groups, dependencies that are: -* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. 
-* required for the testing need to go to `TEST_REQUIREMENTS` list - -### Publishing a new version of the connector -You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? -1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-kustomer-singer test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). -3. Make sure the `metadata.yaml` content is up to date. -4. Make the connector documentation and its changelog is up to date (`docs/integrations/sources/kustomer-singer.md`). -5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). -6. Pat yourself on the back for being an awesome contributor. -7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. - diff --git a/airbyte-integrations/connectors/source-kustomer-singer/acceptance-test-config.yml b/airbyte-integrations/connectors/source-kustomer-singer/acceptance-test-config.yml deleted file mode 100644 index 8da27742f89a..000000000000 --- a/airbyte-integrations/connectors/source-kustomer-singer/acceptance-test-config.yml +++ /dev/null @@ -1,24 +0,0 @@ -# See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) -# for more information about how to configure these tests -connector_image: airbyte/source-kustomer-singer:dev -tests: - spec: - - spec_path: "source_kustomer_singer/spec.json" - connection: - - config_path: "secrets/config.json" - status: "succeed" - - config_path: "integration_tests/invalid_config.json" - status: "failed" - discovery: - - config_path: "secrets/config.json" - basic_read: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" - validate_output_from_all_streams: yes - incremental: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" - future_state_path: "integration_tests/abnormal_state.json" - full_refresh: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-kustomer-singer/integration_tests/__init__.py b/airbyte-integrations/connectors/source-kustomer-singer/integration_tests/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/airbyte-integrations/connectors/source-kustomer-singer/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-kustomer-singer/integration_tests/abnormal_state.json deleted file mode 100644 index 50bb4a8d5f73..000000000000 --- a/airbyte-integrations/connectors/source-kustomer-singer/integration_tests/abnormal_state.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "bookmarks": { - "conversations": "2050-12-26T18:35:41.583Z", - "customers": "2050-12-26T18:39:38.186Z", - "kobjects": "2050-12-26T18:24:18.283Z", - "messages": "2050-12-26T18:39:45.018Z", - "notes": "2050-12-26T18:39:45.018Z", - "shortcuts": "2050-12-25T18:00:00Z", - "tags": "2050-12-25T18:00:00Z", - "teams": "2050-12-25T18:00:00Z", - "users": "2050-12-25T18:00:00Z" - } -} diff --git 
a/airbyte-integrations/connectors/source-kustomer-singer/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-kustomer-singer/integration_tests/acceptance.py deleted file mode 100644 index d49b55882333..000000000000 --- a/airbyte-integrations/connectors/source-kustomer-singer/integration_tests/acceptance.py +++ /dev/null @@ -1,13 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import pytest - -pytest_plugins = ("connector_acceptance_test.plugin",) - - -@pytest.fixture(scope="session", autouse=True) -def connector_setup(): - yield diff --git a/airbyte-integrations/connectors/source-kustomer-singer/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-kustomer-singer/integration_tests/configured_catalog.json deleted file mode 100644 index 4e475d0a14c2..000000000000 --- a/airbyte-integrations/connectors/source-kustomer-singer/integration_tests/configured_catalog.json +++ /dev/null @@ -1,1590 +0,0 @@ -{ - "streams": [ - { - "stream": { - "name": "customers", - "json_schema": { - "properties": { - "active_users": { "type": ["null", "string"] }, - "avatar_url": { "type": ["null", "string"] }, - "birthday_at": { "type": ["null", "string"] }, - "conversation_counts": { - "properties": { - "all": { "type": ["null", "integer"] }, - "done": { "type": ["null", "integer"] }, - "open": { "type": ["null", "integer"] }, - "snoozed": { "type": ["null", "integer"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "created_at": { "type": ["null", "string"] }, - "created_by": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "custom": { - "properties": { - "accepts_marketing_bool": { "type": ["null", "boolean"] }, - "activation_date_str": { "type": ["null", "string"] }, - "app_version_str": { "type": ["null", "string"] }, - "august_id_str": { "type": ["null", "string"] }, - "august_serial_number_str": { "type": ["null", "string"] }, - "bridge_date_installed_at": { "type": ["null", "string"] }, - "bridge_serial_number_str": { "type": ["null", "string"] }, - "connect_firmware_version_str": { "type": ["null", "string"] }, - "connect_serial_number_str": { "type": ["null", "string"] }, - "doorbell_date_installed_at": { "type": ["null", "string"] }, - "doorbell_firmware_version_str": { "type": ["null", "string"] }, - "doorbell_serial_str": { "type": ["null", "string"] }, - "firmware_version_str": { "type": ["null", "string"] }, - "keypad_firmware_version_str": { "type": ["null", "string"] }, - "keypad_installed_at": { "type": ["null", "string"] }, - "keypad_serial_str": { "type": ["null", "string"] }, - "last_autoresponse_at": { "type": ["null", "string"] }, - "last_order_id_num": { "type": ["integer", "null"] }, - "last_order_name_str": { "type": ["null", "string"] }, - "latest_csat_num": { "type": ["integer", "null"] }, - "latest_nps_num": { "type": ["integer", "null"] }, - "lock_date_installed_at": { "type": ["null", "string"] }, - "lock_firmware_version_str": { "type": ["null", "string"] }, - "lock_serial_number_str": { "type": ["null", "string"] }, - "orders_count_num": { "type": ["integer", "null"] }, - "os00_str": { "type": ["null", "string"] }, - "owner_guest_str": { "type": ["null", "string"] }, - "products_owned_str": { "type": ["null", "string"] }, - "proof_of_purchase_str": { "type": ["null", "string"] }, - "purchase_date_str": { "type": ["null", "string"] }, - 
"sales_force_id_str": { "type": ["null", "string"] }, - "total_spent_num": { "type": ["integer", "null"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "deleted": { "type": ["null", "string"] }, - "display_color": { "type": ["null", "string"] }, - "display_icon": { "type": ["null", "string"] }, - "display_name": { "type": ["null", "string"] }, - "emails": { - "anyOf": [ - { - "type": "array", - "items": { - "type": ["null", "object"], - "additionalProperties": false, - "properties": { - "email": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] }, - "verified": { "type": ["null", "boolean"] } - } - } - }, - { "type": "null" } - ] - }, - "external_id": { "type": ["null", "string"] }, - "external_ids": { - "anyOf": [ - { - "type": "array", - "items": { - "type": ["null", "object"], - "additionalProperties": false, - "properties": { - "external_id": { "type": ["null", "string"] }, - "verified": { "type": ["null", "boolean"] } - } - } - }, - { "type": "null" } - ] - }, - "facebook_ids": { "type": ["null", "string"] }, - "first_name": { "type": ["null", "string"] }, - "gender": { "type": ["null", "string"] }, - "id": { "type": ["null", "string"] }, - "imported_at": { - "format": "date-time", - "type": ["null", "string"] - }, - "instagram_ids": { "type": ["null", "string"] }, - "last_activity_at": { - "format": "date-time", - "type": ["null", "string"] - }, - "last_conversation": { - "properties": { - "channels": { - "items": { "type": ["null", "string"] }, - "type": "array" - }, - "id": { "type": ["null", "string"] }, - "sentiment": { - "properties": { - "polarity": { "type": ["null", "integer"] }, - "confidence": { "type": ["null", "number"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "tags": { - "anyOf": [ - { - "type": "array", - "items": { "type": ["null", "string"] } - }, - { "type": "null" } - ] - } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "last_customer_activity_at": { "type": ["null", "string"] }, - "last_message_at": { "type": ["null", "string"] }, - "last_message_in": { - "properties": { - "channel": { "type": ["null", "string"] }, - "sent_at": { "type": ["null", "string"] }, - "sentiment": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "last_message_out": { - "properties": { "sent_at": { "type": ["null", "string"] } }, - "type": ["null", "object"], - "additionalProperties": false - }, - "last_message_unresponded_to": { - "properties": { - "channel": { "type": ["null", "string"] }, - "sent_at": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "last_name": { "type": ["null", "string"] }, - "last_seen_at": { "type": ["null", "string"] }, - "links": { - "properties": { "self": { "type": ["null", "string"] } }, - "type": ["null", "object"], - "additionalProperties": false - }, - "locale": { "type": ["null", "string"] }, - "locations": { "type": ["null", "string"] }, - "modified_at": { "type": ["null", "string"] }, - "modified_by": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "name": { "type": ["null", "string"] }, - "org": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "phones": { - "anyOf": [ - { - "type": 
"array", - "items": { - "type": ["null", "object"], - "additionalProperties": false, - "properties": { - "phone": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] }, - "verified": { "type": ["null", "boolean"] } - } - } - }, - { "type": "null" } - ] - }, - "preview": { - "properties": { - "channel": { "type": ["null", "string"] }, - "preview_at": { "type": ["null", "string"] }, - "subject": { "type": ["null", "string"] }, - "text": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "progressive_status": { "type": ["null", "string"] }, - "recent_items": { - "anyOf": [ - { - "type": "array", - "items": { - "type": ["null", "object"], - "additionalProperties": false, - "properties": { - "id": { "type": ["null", "string"] }, - "meta": { - "type": ["null", "object"], - "additionalProperties": false, - "properties": { - "klass_name": { "type": ["null", "string"] } - } - }, - "type": { "type": ["null", "string"] }, - "updated_at": { "type": ["null", "string"] } - } - } - }, - { "type": "null" } - ] - }, - "recent_location": { - "properties": { "updated_at": { "type": ["null", "string"] } }, - "type": ["null", "object"], - "additionalProperties": false - }, - "rev": { "type": ["null", "integer"] }, - "role_group_versions": { "type": ["null", "string"] }, - "satisfaction_level": { - "properties": { - "first_satisfaction": { - "properties": { - "sent_by_teams": { - "anyOf": [ - { - "type": "array", - "items": { "type": ["null", "string"] } - }, - { "type": "null" } - ] - } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "last_satisfaction": { - "properties": { - "sent_by_teams": { - "anyOf": [ - { - "type": "array", - "items": { "type": ["null", "string"] } - }, - { "type": "null" } - ] - } - }, - "type": ["null", "object"], - "additionalProperties": false - } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "sentiment": { - "properties": { - "confidence": { "type": "number" }, - "polarity": { "type": ["null", "integer"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "shared_emails": { "type": ["null", "string"] }, - "shared_external_ids": { "type": ["null", "string"] }, - "shared_phones": { "type": ["null", "string"] }, - "shared_socials": { "type": ["null", "string"] }, - "signed_up_at": { "type": ["null", "string"] }, - "smooch_ids": { "type": ["null", "string"] }, - "socials": { "type": ["null", "string"] }, - "tags": { "type": ["null", "string"] }, - "time_zone": { "type": ["null", "string"] }, - "updated_at": { "type": ["null", "string"] }, - "urls": { "type": ["null", "string"] }, - "username": { "type": ["null", "string"] }, - "verified": { "type": ["null", "boolean"] }, - "watchers": { "type": ["null", "string"] }, - "whatsapps": { "type": ["null", "string"] } - }, - "type": "object", - "additionalProperties": false - }, - "supported_sync_modes": ["incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updated_at"] - }, - "sync_mode": "incremental", - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "kobjects", - "json_schema": { - "properties": { - "created_at": { "type": ["null", "string"] }, - "custom": { - "properties": { - "billing_status_str": { "type": ["null", "string"] }, - "comment_txt": { "type": ["null", "string"] }, - "delighted_link_str": { "type": ["null", "string"] }, - "order_created_at": { "type": ["null", "string"] }, - "order_number_str": 
{ "type": ["null", "string"] }, - "order_updated_at": { "type": ["null", "string"] }, - "purchase_amount_num": { "type": "integer" }, - "score_num": { "type": "integer" }, - "shipping_address_str": { "type": ["null", "string"] }, - "shipping_country_str": { "type": ["null", "string"] }, - "shipping_state_str": { "type": ["null", "string"] }, - "shipping_zip_str": { "type": ["null", "string"] }, - "skus_txt": { "type": ["null", "string"] }, - "special_instructions_str": { "type": ["null", "string"] }, - "tags_str": { "type": ["null", "string"] }, - "total_price_num": { "type": "number" }, - "total_price_str": { "type": ["null", "string"] }, - "updated_at": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "customer": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "description": { "type": ["null", "string"] }, - "external_id": { "type": ["null", "string"] }, - "icon": { "type": ["null", "string"] }, - "id": { "type": ["null", "string"] }, - "klass": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "links": { - "properties": { "self": { "type": ["null", "string"] } }, - "type": ["null", "object"], - "additionalProperties": false - }, - "org": { - "properties": { "type": { "type": ["null", "string"] } }, - "type": ["null", "object"], - "additionalProperties": false - }, - "title": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] }, - "updated_at": { "type": ["null", "string"] } - }, - "type": "object", - "additionalProperties": false - }, - "supported_sync_modes": ["incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updated_at"] - }, - "sync_mode": "incremental", - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "conversations", - "json_schema": { - "properties": { - "assigned_teams": { - "anyOf": [ - { "items": { "type": ["null", "string"] }, "type": "array" }, - { "type": "null" } - ] - }, - "assigned_users": { - "anyOf": [ - { "type": "array", "items": { "type": ["null", "string"] } }, - { "type": "null" } - ] - }, - "channels": { - "anyOf": [ - { "type": "array", "items": { "type": ["null", "string"] } }, - { "type": "null" } - ] - }, - "created_at": { "type": ["null", "string"] }, - "created_by": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "custom": { - "properties": { - "auto_response_at": { "type": ["null", "string"] }, - "brand_str": { "type": ["null", "string"] }, - "case_id_str": { "type": ["null", "string"] }, - "close_out_tree": { "type": ["null", "string"] }, - "contact_reason_tree": { "type": ["null", "string"] }, - "desk_ticket_url": { "type": ["null", "string"] }, - "new_primary_menu_str": { "type": ["null", "string"] }, - "refund_status_str": { "type": ["null", "string"] }, - "return_requested_str": { "type": ["null", "string"] }, - "return_str": { "type": ["null", "string"] }, - "rma_id_str": { "type": ["null", "string"] }, - "serial_number_str": { "type": ["null", "string"] }, - "yale_call_reason_tree": { "type": ["null", "string"] }, - "yale_connectivity_str": { "type": ["null", "string"] }, - "yale_finish_str": { "type": ["null", "string"] }, - "yale_model_str": { "type": 
["null", "string"] }, - "yale_notes_txt": { "type": ["null", "string"] }, - "yale_resolution_str": { "type": ["null", "string"] }, - "yale_serial_number_str": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "customer": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "direction": { "type": ["null", "string"] }, - "done_count": { "type": ["null", "integer"] }, - "ended": { "type": ["boolean", "null"] }, - "ended_at": { "type": ["null", "string"] }, - "ended_by": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "ended_by_type": { "type": ["null", "string"] }, - "ended_reason": { "type": ["null", "string"] }, - "external_id": { "type": ["null", "string"] }, - "first_done": { - "properties": { - "assigned_teams": { - "anyOf": [ - { - "type": "array", - "items": { "type": ["null", "string"] } - }, - { "type": "null" } - ] - }, - "assigned_users": { - "anyOf": [ - { - "type": "array", - "items": { "type": ["null", "string"] } - }, - { "type": "null" } - ] - }, - "business_time": { "type": ["null", "integer"] }, - "created_at": { "type": ["null", "string"] }, - "created_by": { "type": ["null", "string"] }, - "created_by_teams": { - "anyOf": [ - { - "items": { "type": ["null", "string"] }, - "type": "array" - }, - { "type": "null" } - ] - }, - "last_message_direction": { "type": ["null", "string"] }, - "last_message_direction_type": { "type": ["null", "string"] }, - "message_count": { "type": ["null", "integer"] }, - "message_count_by_channel": { - "properties": { - "email": { "type": ["null", "integer"] }, - "sms": { "type": ["null", "integer"] }, - "voice": { "type": ["null", "integer"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "note_count": { "type": ["null", "integer"] }, - "outbound_message_count": { "type": ["null", "integer"] }, - "outbound_message_count_by_channel": { - "properties": { - "email": { "type": ["null", "integer"] }, - "sms": { "type": ["null", "integer"] }, - "voice": { "type": ["null", "integer"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "time": { "type": ["null", "integer"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "first_message_in": { - "properties": { - "channel": { "type": ["null", "string"] }, - "created_at": { "type": ["null", "string"] }, - "direction_type": { "type": ["null", "string"] }, - "id": { "type": ["null", "string"] }, - "sent_at": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "first_message_out": { - "properties": { - "channel": { "type": ["null", "string"] }, - "created_at": { "type": ["null", "string"] }, - "created_by": { "type": ["null", "string"] }, - "created_by_teams": { - "anyOf": [ - { - "items": { "type": ["null", "string"] }, - "type": "array" - }, - { "type": "null" } - ] - }, - "direction_type": { "type": ["null", "string"] }, - "id": { "type": ["null", "string"] }, - "sent_at": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "first_response": { - "properties": { - "assigned_teams": { - "anyOf": [ - { - "items": { "type": ["null", "string"] }, - "type": "array" - }, - { "type": "null" } - ] - }, - "assigned_users": { - "anyOf": 
[ - { - "items": { "type": ["null", "string"] }, - "type": "array" - }, - { "type": "null" } - ] - }, - "business_time": { "type": ["null", "integer"] }, - "created_at": { "type": ["null", "string"] }, - "created_by": { "type": ["null", "string"] }, - "created_by_teams": { - "anyOf": [ - { - "items": { "type": ["null", "string"] }, - "type": "array" - }, - { "type": "null" } - ] - }, - "id": { "type": ["null", "string"] }, - "response_time": { "type": ["null", "integer"] }, - "sent_at": { "type": ["null", "string"] }, - "time": { "type": ["null", "integer"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "first_response_since_last_done": { - "properties": { - "assigned_teams": { - "anyOf": [ - { - "items": { "type": ["null", "string"] }, - "type": "array" - }, - { "type": "null" } - ] - }, - "assigned_users": { - "anyOf": [ - { - "items": { "type": ["null", "string"] }, - "type": "array" - }, - { "type": "null" } - ] - }, - "business_time": { "type": ["null", "integer"] }, - "created_at": { "type": ["null", "string"] }, - "created_by": { "type": ["null", "string"] }, - "created_by_teams": { - "anyOf": [ - { - "items": { "type": ["null", "string"] }, - "type": "array" - }, - { "type": "null" } - ] - }, - "id": { "type": ["null", "string"] }, - "response_time": { "type": ["null", "integer"] }, - "sent_at": { "type": ["null", "string"] }, - "time": { "type": ["null", "integer"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "id": { "type": ["null", "string"] }, - "imported_at": { "type": ["null", "string"] }, - "last_activity_at": { "type": ["null", "string"] }, - "last_done": { - "properties": { - "assigned_teams": { - "anyOf": [ - { - "items": { "type": ["null", "string"] }, - "type": "array" - }, - { "type": "null" } - ] - }, - "assigned_users": { - "anyOf": [ - { - "items": { "type": ["null", "string"] }, - "type": "array" - }, - { "type": "null" } - ] - }, - "business_time": { "type": ["null", "integer"] }, - "created_at": { "type": ["null", "string"] }, - "created_by": { "type": ["null", "string"] }, - "created_by_teams": { - "anyOf": [ - { - "items": { "type": ["null", "string"] }, - "type": "array" - }, - { "type": "null" } - ] - }, - "last_message_direction": { "type": ["null", "string"] }, - "last_message_direction_type": { "type": ["null", "string"] }, - "message_count": { "type": ["null", "integer"] }, - "message_count_by_channel": { - "properties": { - "email": { "type": ["null", "integer"] }, - "sms": { "type": ["null", "integer"] }, - "voice": { "type": ["null", "integer"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "note_count": { "type": ["null", "integer"] }, - "outbound_message_count": { "type": ["null", "integer"] }, - "outbound_message_count_by_channel": { - "properties": { - "email": { "type": ["null", "integer"] }, - "sms": { "type": ["null", "integer"] }, - "voice": { "type": ["null", "integer"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "time": { "type": ["null", "integer"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "last_message_at": { "type": ["null", "string"] }, - "last_message_direction": { "type": ["null", "string"] }, - "last_message_in": { - "properties": { - "created_at": { "type": ["null", "string"] }, - "id": { "type": ["null", "string"] }, - "sent_at": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "last_message_out": { - "properties": { - 
"created_at": { "type": ["null", "string"] }, - "id": { "type": ["null", "string"] }, - "sent_at": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "last_message_unresponded_to": { - "properties": { - "created_at": { "type": ["null", "string"] }, - "id": { "type": ["null", "string"] }, - "sent_at": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "last_message_unresponded_to_since_last_done": { - "properties": { - "created_at": { "type": ["null", "string"] }, - "id": { "type": ["null", "string"] }, - "sent_at": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "last_received_at": { "type": ["null", "string"] }, - "last_response": { - "properties": { - "assigned_teams": { - "anyOf": [ - { - "items": { "type": ["null", "string"] }, - "type": "array" - }, - { "type": "null" } - ] - }, - "assigned_users": { - "anyOf": [ - { - "items": { "type": ["null", "string"] }, - "type": "array" - }, - { "type": "null" } - ] - }, - "business_time": { "type": ["null", "integer"] }, - "created_at": { "type": ["null", "string"] }, - "created_by": { "type": ["null", "string"] }, - "created_by_teams": { - "anyOf": [ - { - "items": { "type": ["null", "string"] }, - "type": "array" - }, - { "type": "null" } - ] - }, - "id": { "type": ["null", "string"] }, - "time": { "type": ["null", "integer"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "links": { - "properties": { "self": { "type": ["null", "string"] } }, - "type": ["null", "object"], - "additionalProperties": false - }, - "merged_target": { "type": ["null", "boolean"] }, - "message_count": { "type": ["null", "integer"] }, - "modified_at": { "type": ["null", "string"] }, - "modified_by": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "name": { "type": ["null", "string"] }, - "note_count": { "type": ["null", "integer"] }, - "org": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "outbound_message_count": { "type": ["null", "integer"] }, - "preview": { "type": ["null", "string"] }, - "priority": { "type": ["null", "integer"] }, - "queue": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "reopen_count": { "type": ["null", "integer"] }, - "rev": { "type": ["null", "integer"] }, - "role_group_versions": { "type": ["null", "string"] }, - "satisfaction": { "type": ["null", "string"] }, - "satisfaction_level": { - "properties": { - "sent_by_teams": { - "anyOf": [ - { - "items": { "type": ["null", "string"] }, - "type": "array" - }, - { "type": "null" } - ] - } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "sla": { - "properties": { - "breach": { - "properties": { - "at": { "type": ["null", "string"] }, - "metric": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "breached": { "type": ["null", "boolean"] }, - "matched_at": { "type": ["null", "string"] }, - "metrics": { - "properties": { - "first_response": { - "properties": { - "breach_at": { "type": ["null", "string"] }, - "satisfied_at": { "type": ["null", "string"] 
} - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "longest_unresponded_message": { - "properties": { - "breach_at": { "type": ["null", "string"] }, - "satisfied_at": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "status": { "type": ["null", "string"] }, - "summary": { - "properties": { - "first_breach_at": { "type": ["null", "string"] }, - "satisfied_at": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "version": { "type": ["null", "integer"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "sla_data": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "sla_version": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "snooze": { - "properties": { - "status": { "type": ["null", "string"] }, - "status_at": { "type": ["null", "string"] }, - "time": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "snooze_count": { "type": ["null", "integer"] }, - "status": { "type": ["null", "string"] }, - "suggested_shortcuts": { "type": ["null", "string"] }, - "suggested_tags": { "type": ["null", "string"] }, - "tags": { - "anyOf": [ - { "type": "array", "items": { "type": ["null", "string"] } }, - { "type": "null" } - ] - }, - "total_done": { - "properties": { - "business_time": { "type": ["null", "integer"] }, - "time": { "type": ["null", "integer"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "total_open": { - "properties": { - "business_time": { "type": ["null", "integer"] }, - "business_time_since_last_done": { - "type": ["null", "integer"] - }, - "time": { "type": ["null", "integer"] }, - "time_since_last_done": { "type": ["null", "integer"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "total_snooze": { - "properties": { - "business_time": { "type": ["null", "integer"] }, - "time": { "type": ["null", "integer"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "type": { "type": ["null", "string"] }, - "updated_at": { "type": ["null", "string"] } - }, - "type": "object", - "additionalProperties": false - }, - "supported_sync_modes": ["incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updated_at"] - }, - "sync_mode": "incremental", - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "messages", - "json_schema": { - "properties": { - "created_at": { "type": ["null", "string"] }, - "custom": { - "properties": { - "billing_status_str": { "type": ["null", "string"] }, - "comment_txt": { "type": ["null", "string"] }, - "delighted_link_str": { "type": ["null", "string"] }, - "order_created_at": { "type": ["null", "string"] }, - "order_number_str": { "type": ["null", "string"] }, - "order_updated_at": { "type": ["null", "string"] }, - "purchase_amount_num": { "type": "integer" }, - "score_num": { "type": "integer" }, - "shipping_address_str": { "type": ["null", "string"] }, - "shipping_country_str": { "type": ["null", "string"] }, - "shipping_state_str": { "type": ["null", "string"] }, - "shipping_zip_str": { "type": ["null", "string"] }, - 
"skus_txt": { "type": ["null", "string"] }, - "special_instructions_str": { "type": ["null", "string"] }, - "tags_str": { "type": ["null", "string"] }, - "total_price_num": { "type": "number" }, - "total_price_str": { "type": ["null", "string"] }, - "updated_at": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "customer": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "description": { "type": ["null", "string"] }, - "external_id": { "type": ["null", "string"] }, - "icon": { "type": ["null", "string"] }, - "id": { "type": ["null", "string"] }, - "images": { "type": "array" }, - "klass": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "links": { - "properties": { "self": { "type": ["null", "string"] } }, - "type": ["null", "object"], - "additionalProperties": false - }, - "org": { - "properties": { "type": { "type": ["null", "string"] } }, - "type": ["null", "object"], - "additionalProperties": false - }, - "tags": { "type": "array" }, - "title": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] }, - "updated_at": { "type": ["null", "string"] } - }, - "type": "object", - "additionalProperties": false - }, - "supported_sync_modes": ["incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updated_at"] - }, - "sync_mode": "incremental", - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "users", - "json_schema": { - "properties": { - "avatar_url": { "type": ["null", "string"] }, - "created_at": { "format": "date-time", "type": ["null", "string"] }, - "created_by": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "deleted_at": { "type": ["null", "string"] }, - "display_name": { "type": ["null", "string"] }, - "email": { "type": ["null", "string"] }, - "email_signature": { "type": ["null", "string"] }, - "email_verified_at": { "type": ["null", "string"] }, - "first_email_verified_at": { "type": ["null", "string"] }, - "id": { "type": ["null", "string"] }, - "links": { - "properties": { "self": { "type": ["null", "string"] } }, - "type": ["null", "object"], - "additionalProperties": false - }, - "mobile": { "type": ["null", "string"] }, - "modified_at": { "type": ["null", "string"] }, - "modified_by": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "name": { "type": ["null", "string"] }, - "org": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "password": { - "properties": { - "allow_new": { "type": "boolean" }, - "force_new": { "type": "boolean" }, - "updated_at": { - "format": "date-time", - "type": ["null", "string"] - } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "role_groups": { - "anyOf": [ - { "items": { "type": ["null", "string"] }, "type": "array" }, - { "type": "null" } - ] - }, - "roles": { - "anyOf": [ - { "items": { "type": ["null", "string"] }, "type": "array" }, - { "type": "null" } - ] - }, - "type": { "type": 
["null", "string"] }, - "updated_at": { "format": "date-time", "type": ["null", "string"] }, - "user_type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "supported_sync_modes": ["incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updated_at"] - }, - "sync_mode": "incremental", - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "teams", - "json_schema": { - "properties": { - "created_at": { "format": "date-time", "type": ["null", "string"] }, - "created_by": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "deleted": { "type": ["null", "boolean"] }, - "deleted_at": { "type": ["null", "string"] }, - "deleted_by": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "display_name": { "type": ["null", "string"] }, - "icon": { "type": ["null", "string"] }, - "id": { "type": ["null", "string"] }, - "links": { - "properties": { "self": { "type": ["null", "string"] } }, - "type": ["null", "object"], - "additionalProperties": false - }, - "members": { - "anyOf": [ - { "items": { "type": ["null", "string"] }, "type": "array" }, - { "type": "null" } - ] - }, - "modified_at": { "type": ["null", "string"] }, - "modified_by": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "name": { "type": ["null", "string"] }, - "org": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "role_groups": { - "anyOf": [ - { "type": "array", "items": { "type": ["null", "string"] } }, - { "type": "null" } - ] - }, - "type": { "type": ["null", "string"] }, - "updated_at": { "format": "date-time", "type": ["null", "string"] } - }, - "type": "object", - "additionalProperties": false - }, - "supported_sync_modes": ["incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updated_at"] - }, - "sync_mode": "incremental", - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "tags", - "json_schema": { - "properties": { - "color": { "type": ["null", "string"] }, - "created_at": { "format": "date-time", "type": ["null", "string"] }, - "created_by": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "deleted": { "type": ["null", "boolean"] }, - "id": { "type": ["null", "string"] }, - "links": { - "properties": { "self": { "type": ["null", "string"] } }, - "type": ["null", "object"], - "additionalProperties": false - }, - "modified_at": { "type": ["null", "string"] }, - "modified_by": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "name": { "type": ["null", "string"] }, - "org": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "type": { "type": ["null", "string"] }, - "updated_at": { "format": "date-time", "type": ["null", "string"] } - }, - 
"type": "object", - "additionalProperties": false - }, - "supported_sync_modes": ["incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updated_at"] - }, - "sync_mode": "incremental", - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "shortcuts", - "json_schema": { - "properties": { - "conversation": { - "properties": { - "name": { - "properties": { - "operator": { "type": ["null", "string"] }, - "value": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "tags": { - "properties": { - "operator": { "type": ["null", "string"] }, - "value": { - "items": { "type": ["null", "string"] }, - "type": "array" - } - }, - "type": ["null", "object"], - "additionalProperties": false - } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "created_at": { "format": "date-time", "type": ["null", "string"] }, - "created_by": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "deleted": { "type": ["null", "boolean"] }, - "draft": { - "properties": { "text": { "type": ["null", "string"] } }, - "type": ["null", "object"], - "additionalProperties": false - }, - "id": { "type": ["null", "string"] }, - "is_private": { "type": ["null", "boolean"] }, - "links": { - "properties": { "self": { "type": ["null", "string"] } }, - "type": ["null", "object"], - "additionalProperties": false - }, - "modified_at": { "type": ["null", "string"] }, - "modified_by": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "name": { "type": ["null", "string"] }, - "org": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "payload": { - "properties": { - "text": { - "properties": { - "blocks": { - "anyOf": [ - { - "items": { - "additionalProperties": false, - "properties": { - "data": { "type": ["null", "object"] }, - "depth": { "type": ["null", "integer"] }, - "entity_ranges": { - "anyOf": [ - { - "type": "array", - "items": { - "additionalProperties": false, - "properties": { - "key": { "type": ["null", "integer"] }, - "length": { - "type": ["null", "integer"] - }, - "offset": { - "type": ["null", "integer"] - } - }, - "type": ["null", "object"] - } - }, - { "type": "null" } - ] - }, - "inline_style_ranges": { - "anyOf": [ - { - "items": { - "additionalProperties": false, - "properties": { - "length": { - "type": ["null", "integer"] - }, - "offset": { - "type": ["null", "integer"] - }, - "style": { "type": ["null", "string"] } - }, - "type": ["null", "object"] - }, - "type": "array" - }, - { "type": "null" } - ] - }, - "key": { "type": ["null", "string"] }, - "text": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"] - }, - "type": "array" - }, - { "type": "null" } - ] - }, - "entity_map": { - "properties": { - "0": { - "properties": { - "data": { - "properties": { - "fallback_text": { "type": ["null", "string"] }, - "key": { "type": ["null", "string"] }, - "url": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "mutability": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - 
"additionalProperties": false - }, - "1": { - "properties": { - "data": { - "properties": { - "fallback_text": { "type": ["null", "string"] }, - "key": { "type": ["null", "string"] }, - "url": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "mutability": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "2": { - "properties": { - "data": { - "properties": { - "url": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "mutability": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "3": { - "properties": { - "data": { - "properties": { - "url": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "mutability": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - } - }, - "type": ["null", "object"], - "additionalProperties": false - } - }, - "type": ["null", "object"], - "additionalProperties": false - } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "type": { "type": ["null", "string"] }, - "updated_at": { "format": "date-time", "type": ["null", "string"] } - }, - "type": "object", - "additionalProperties": false - }, - "supported_sync_modes": ["incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updated_at"] - }, - "sync_mode": "incremental", - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "notes", - "json_schema": { - "properties": { - "body": { "type": ["null", "string"] }, - "conversation": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "created_at": { "format": "date-time", "type": ["null", "string"] }, - "created_by": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "customer": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "deleted": { "type": ["null", "boolean"] }, - "deleted_at": { "format": "date-time", "type": ["null", "string"] }, - "deleted_by": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "external_id": { "type": ["null", "string"] }, - "id": { "type": ["null", "string"] }, - "imported_at": { - "format": "date-time", - "type": ["null", "string"] - }, - "links": { - "properties": { "self": { "type": ["null", "string"] } }, - "type": ["null", "object"], - "additionalProperties": false - }, - "modified_at": { - "format": "date-time", - "type": ["null", "string"] - }, - "modified_by": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "org": { - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - }, - "type": ["null", "object"], - "additionalProperties": false - }, - "type": { "type": ["null", "string"] 
}, - "updated_at": { "format": "date-time", "type": ["null", "string"] }, - "user_mentions": { - "anyOf": [ - { - "type": "array", - "items": { - "type": ["null", "object"], - "additionalProperties": false, - "properties": { - "id": { "type": ["null", "string"] }, - "type": { "type": ["null", "string"] } - } - } - }, - { "type": "null" } - ] - } - }, - "type": "object", - "additionalProperties": false - }, - "supported_sync_modes": ["incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updated_at"] - }, - "sync_mode": "incremental", - "destination_sync_mode": "append" - } - ] -} diff --git a/airbyte-integrations/connectors/source-kustomer-singer/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-kustomer-singer/integration_tests/invalid_config.json deleted file mode 100644 index 83c81895ddfa..000000000000 --- a/airbyte-integrations/connectors/source-kustomer-singer/integration_tests/invalid_config.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "api_token": "invalid_api_token", - "start_date": "0000-01-01T00:00:00Z" -} diff --git a/airbyte-integrations/connectors/source-kustomer-singer/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-kustomer-singer/integration_tests/sample_state.json deleted file mode 100644 index 89e5dd5d1c44..000000000000 --- a/airbyte-integrations/connectors/source-kustomer-singer/integration_tests/sample_state.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "bookmarks": { - "conversations": "2021-12-25T18:00:00Z", - "customers": "2021-12-25T18:00:00Z", - "kobjects": "2021-12-25T18:00:00Z", - "messages": "2021-12-25T18:00:00Z", - "notes": "2021-12-25T18:00:00Z", - "shortcuts": "2021-12-25T18:00:00Z", - "tags": "2021-12-25T18:00:00Z", - "teams": "2021-12-25T18:00:00Z", - "users": "2021-12-25T18:00:00Z" - } -} diff --git a/airbyte-integrations/connectors/source-kustomer-singer/main.py b/airbyte-integrations/connectors/source-kustomer-singer/main.py deleted file mode 100644 index 00920dc58a36..000000000000 --- a/airbyte-integrations/connectors/source-kustomer-singer/main.py +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from source_kustomer_singer.run import run - -if __name__ == "__main__": - run() diff --git a/airbyte-integrations/connectors/source-kustomer-singer/requirements.txt b/airbyte-integrations/connectors/source-kustomer-singer/requirements.txt deleted file mode 100644 index 16f70161a28f..000000000000 --- a/airbyte-integrations/connectors/source-kustomer-singer/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -# This file is autogenerated -- only edit if you know what you are doing. Use setup.py for declaring dependencies. -# -e . diff --git a/airbyte-integrations/connectors/source-kustomer-singer/setup.py b/airbyte-integrations/connectors/source-kustomer-singer/setup.py deleted file mode 100644 index bc8c57120807..000000000000 --- a/airbyte-integrations/connectors/source-kustomer-singer/setup.py +++ /dev/null @@ -1,88 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - -import os -import shutil -from pathlib import Path -from subprocess import check_call - -from setuptools import find_packages, setup -from setuptools.command.develop import develop -from setuptools.command.egg_info import egg_info -from setuptools.command.install import install - - -def check_singer(): - tmp_dir = "/tmp/singer-python" - if not os.path.exists(tmp_dir): - check_call(f"git clone -b v5.8.1 https://github.com/singer-io/singer-python.git {tmp_dir}".split()) - setup_py = Path(tmp_dir) / "setup.py" - setup_py.write_text(setup_py.read_text().replace("jsonschema==", "jsonschema>=")) - setup_py.write_text(setup_py.read_text().replace("backoff==", "backoff>=")) - setup_py.write_text(setup_py.read_text().replace("requests==", "requests>=")) - check_call(f"pip install -U {tmp_dir}".split()) - - -class CustomInstallCommand(install): - def run(self): - check_singer() - install.run(self) - if os.path.exists("/tmp/singer-python"): - shutil.rmtree("/tmp/singer-python") - - -class CustomDevelopCommand(develop): - def run(self): - check_singer() - develop.run(self) - if os.path.exists("/tmp/singer-python"): - shutil.rmtree("/tmp/singer-python") - - -class CustomEggInfoCommand(egg_info): - def run(self): - check_singer() - egg_info.run(self) - if os.path.exists("/tmp/singer-python"): - shutil.rmtree("/tmp/singer-python") - - -MAIN_REQUIREMENTS = ["airbyte-cdk", "tap-kustomer==1.0.2"] - -TEST_REQUIREMENTS = ["requests-mock~=1.9.3", "pytest-mock~=3.6.1", "pytest~=6.1"] - -setup( - entry_points={ - "console_scripts": [ - "source-kustomer-singer=source_kustomer_singer.run:run", - ], - }, - name="source_kustomer_singer", - description="Source implementation for Kustomer, built on the Singer tap implementation.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - cmdclass={ - "install": CustomInstallCommand, - "develop": CustomDevelopCommand, - "egg_info": CustomEggInfoCommand, - }, - package_data={ - "": [ - # Include yaml files in the package (if any) - "*.yml", - "*.yaml", - # Include all json files in the package, up to 4 levels deep - "*.json", - "*/*.json", - "*/*/*.json", - "*/*/*/*.json", - "*/*/*/*/*.json", - ] - }, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/source-kustomer-singer/source_kustomer_singer/__init__.py b/airbyte-integrations/connectors/source-kustomer-singer/source_kustomer_singer/__init__.py deleted file mode 100644 index f93e610e1f75..000000000000 --- a/airbyte-integrations/connectors/source-kustomer-singer/source_kustomer_singer/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -# MIT License -# -# Copyright (c) 2020 Airbyte -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - - -from .source import SourceKustomerSinger - -__all__ = ["SourceKustomerSinger"] diff --git a/airbyte-integrations/connectors/source-kustomer-singer/source_kustomer_singer/run.py b/airbyte-integrations/connectors/source-kustomer-singer/source_kustomer_singer/run.py deleted file mode 100644 index fba603c749e8..000000000000 --- a/airbyte-integrations/connectors/source-kustomer-singer/source_kustomer_singer/run.py +++ /dev/null @@ -1,13 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import sys - -from airbyte_cdk.entrypoint import launch -from source_kustomer_singer import SourceKustomerSinger - - -def run(): - source = SourceKustomerSinger() - launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-kustomer-singer/source_kustomer_singer/source.py b/airbyte-integrations/connectors/source-kustomer-singer/source_kustomer_singer/source.py deleted file mode 100644 index cc49e81bfa97..000000000000 --- a/airbyte-integrations/connectors/source-kustomer-singer/source_kustomer_singer/source.py +++ /dev/null @@ -1,53 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import json - -import requests -from airbyte_cdk.logger import AirbyteLogger -from airbyte_cdk.models import AirbyteConnectionStatus, Status -from airbyte_cdk.sources.singer import SingerSource - - -class SourceKustomerSinger(SingerSource): - TAP_CMD = "tap-kustomer" - - def check_config(self, logger: AirbyteLogger, config_path: str, config: json) -> AirbyteConnectionStatus: - """ - Tests if the input configuration can be used to successfully connect to the integration - e.g., if the provided Kustomer API token can be used to connect to the Kustomer API. 
- - :param logger: Logging object to display debug/info/error to the logs - (logs will not be accessible via airbyte UI if they are not passed to this logger) - :param config_path: Path to the file containing the configuration JSON - :param config: JSON object containing the configuration of this source; its content is as specified in - the properties of the spec.json file - - :return: AirbyteConnectionStatus indicating a Success or Failure - """ - url = "https://api.kustomerapp.com/v1/customers" - headers = {"Authorization": f"Bearer {config['api_token']}"} - - try: - response = requests.get(url, headers=headers) - response.raise_for_status() - return AirbyteConnectionStatus(status=Status.SUCCEEDED) - except requests.exceptions.RequestException as e: - return AirbyteConnectionStatus(status=Status.FAILED, message=f"An exception occurred: {repr(e)}") - - def discover_cmd(self, logger: AirbyteLogger, config_path: str) -> str: - """ - Return the string command to invoke the tap with the --discover flag and the right configuration options - """ - return f"{self.TAP_CMD} -c {config_path} --discover" - - def read_cmd(self, logger: AirbyteLogger, config_path: str, catalog_path: str, state_path: str = None) -> str: - """ - Return the string command to invoke the tap with the right configuration options to read data from the source - """ - config_option = f"--config {config_path}" - properties_option = f"--catalog {catalog_path}" - state_option = f"--state {state_path}" if state_path else "" - return f"{self.TAP_CMD} {config_option} {properties_option} {state_option}" diff --git a/airbyte-integrations/connectors/source-kustomer-singer/source_kustomer_singer/spec.json b/airbyte-integrations/connectors/source-kustomer-singer/source_kustomer_singer/spec.json deleted file mode 100644 index dd88b02571ad..000000000000 --- a/airbyte-integrations/connectors/source-kustomer-singer/source_kustomer_singer/spec.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/sources/kustomer", - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Source Kustomer Singer Spec", - "type": "object", - "required": ["api_token", "start_date"], - "additionalProperties": true, - "properties": { - "api_token": { - "title": "API Token", - "type": "string", - "description": "Kustomer API Token. See the docs on how to obtain this", - "airbyte_secret": true - }, - "start_date": { - "title": "Start Date", - "type": "string", - "description": "The date from which you'd like to replicate the data", - "examples": ["2019-01-01T00:00:00Z"] - } - } - } -} diff --git a/airbyte-integrations/connectors/source-kustomer-singer/unit_tests/unit_test.py b/airbyte-integrations/connectors/source-kustomer-singer/unit_tests/unit_test.py deleted file mode 100644 index 219ae0142c72..000000000000 --- a/airbyte-integrations/connectors/source-kustomer-singer/unit_tests/unit_test.py +++ /dev/null @@ -1,7 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - - -def test_example_method(): - assert True diff --git a/airbyte-integrations/connectors/source-recurly/.dockerignore b/airbyte-integrations/connectors/source-recurly/.dockerignore deleted file mode 100644 index 7ac167d6e945..000000000000 --- a/airbyte-integrations/connectors/source-recurly/.dockerignore +++ /dev/null @@ -1,6 +0,0 @@ -* -!Dockerfile -!main.py -!source_recurly -!setup.py -!secrets diff --git a/airbyte-integrations/connectors/source-recurly/README.md b/airbyte-integrations/connectors/source-recurly/README.md deleted file mode 100644 index 936201b1a143..000000000000 --- a/airbyte-integrations/connectors/source-recurly/README.md +++ /dev/null @@ -1,104 +0,0 @@ -# Recurly source connector - -This is the repository for the Recurly source connector, written in Python. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/recurly). - -## Local development - -### Prerequisites - -* Python (~=3.9) -* Poetry (~=1.7) - installation instructions [here](https://python-poetry.org/docs/#installation) - -### Installing the connector - -From this connector directory, run: - -```bash -poetry install --with dev -``` - -### Creating credentials - -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/recurly) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_recurly/spec.json` file. -Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. - -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source recurly test creds` -and place them into `secrets/config.json`. - -### Locally running the connector - -```bash -poetry run source-recurly spec -poetry run source-recurly check --config secrets/config.json -poetry run source-recurly discover --config secrets/config.json -poetry run source-recurly read --config secrets/config.json --catalog sample_files/configured_catalog.json -``` - -### Running unit tests - -To run unit tests locally, from the connector directory run: - -```bash -poetry run pytest unit_tests -``` - -### Building the docker image - -1. Install [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) -2. Run the following command to build the docker image: - -```bash -airbyte-ci connectors --name=source-recurly build -``` - -An image will be available on your host with the tag `airbyte/source-recurly:dev`. 
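To sanity-check that the build landed in your local Docker registry before running it, you can list the images for that repository (plain Docker CLI, nothing connector-specific; the tag should match the one above):

```bash
docker images airbyte/source-recurly
```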
- -### Running the docker container - -Then run any of the connector commands as follows: - -```bash -docker run --rm airbyte/source-recurly:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-recurly:dev check --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-recurly:dev discover --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-recurly:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` - -### Running our CI test suite - -You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): - -```bash -airbyte-ci connectors --name=source-recurly test -``` - -### Customizing acceptance tests - -Customize the `acceptance-test-config.yml` file to configure acceptance tests. See our [Connector Acceptance Tests reference](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. -If your connector requires you to create or destroy resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`. - -### Dependency Management - -All of your dependencies should be managed via Poetry. To add a new dependency, run: - -```bash -poetry add <package-name> -``` - -Please commit the changes to the `pyproject.toml` and `poetry.lock` files. - -## Publishing a new version of the connector - -You've checked out the repo, implemented a million-dollar feature, and you're ready to share your changes with the world. Now what? - -1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-recurly test` -2. Bump the connector version listed as `dockerImageTag` in `metadata.yaml`. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). -3. Make sure the `metadata.yaml` content is up to date. -4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/sources/recurly.md`). -5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). -6. Pat yourself on the back for being an awesome contributor. -7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. -8. Once your PR is merged, the new version of the connector will be automatically published to Docker Hub and our connector registry. 
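As a reference for the `secrets/config.json` the README above asks you to create: judging by the connector spec and the `integration_tests/sample_config.json` removed later in this diff, a minimal config needs only the Recurly API key. A sketch, with a placeholder value rather than a real key:

```bash
# Create the gitignored secrets directory and a minimal config file.
mkdir -p secrets
cat > secrets/config.json <<'EOF'
{
  "api_key": "<your-recurly-api-key>"
}
EOF
```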
diff --git a/airbyte-integrations/connectors/source-recurly/acceptance-test-config.yml b/airbyte-integrations/connectors/source-recurly/acceptance-test-config.yml deleted file mode 100644 index c1e9dfe6ee73..000000000000 --- a/airbyte-integrations/connectors/source-recurly/acceptance-test-config.yml +++ /dev/null @@ -1,40 +0,0 @@ -# See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) -# for more information about how to configure these tests -connector_image: airbyte/source-recurly:dev -acceptance_tests: - spec: - tests: - - spec_path: "source_recurly/spec.json" - connection: - tests: - - config_path: "secrets/config.json" - status: "succeed" - - config_path: "integration_tests/invalid_config.json" - status: "failed" - discovery: - tests: - - config_path: "secrets/config.json" - basic_read: - tests: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" - empty_streams: - - name: "add_ons" - bypass_reason: "Cannot seed this stream with free sandbox account" - - name: "billing_infos" - bypass_reason: "Cannot seed this stream with free sandbox account" - - name: "credit_payments" - bypass_reason: "Cannot seed this stream with free sandbox account" - - name: "shipping_methods" - bypass_reason: "Cannot seed this stream with free sandbox account" - full_refresh: - tests: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" - incremental: - tests: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" - future_state: - future_state_path: "integration_tests/future_state.json" - skip_comprehensive_incremental_tests: true diff --git a/airbyte-integrations/connectors/source-recurly/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-recurly/integration_tests/acceptance.py deleted file mode 100644 index 82823254d266..000000000000 --- a/airbyte-integrations/connectors/source-recurly/integration_tests/acceptance.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - - -import pytest - -pytest_plugins = ("connector_acceptance_test.plugin",) - - -@pytest.fixture(scope="session", autouse=True) -def connector_setup(): - """This fixture is a placeholder for external resources that acceptance test might require.""" - yield diff --git a/airbyte-integrations/connectors/source-recurly/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-recurly/integration_tests/configured_catalog.json deleted file mode 100644 index 75bcfeaf58ad..000000000000 --- a/airbyte-integrations/connectors/source-recurly/integration_tests/configured_catalog.json +++ /dev/null @@ -1,205 +0,0 @@ -{ - "streams": [ - { - "stream": { - "name": "accounts", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updated_at"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "account_coupon_redemptions", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updated_at"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "account_notes", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["created_at"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "add_ons", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updated_at"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "billing_infos", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updated_at"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "coupons", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updated_at"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "credit_payments", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updated_at"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "export_dates", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "invoices", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updated_at"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "line_items", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - 
"default_cursor_field": ["updated_at"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "measured_units", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updated_at"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "plans", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updated_at"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "shipping_addresses", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updated_at"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "shipping_methods", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updated_at"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "subscriptions", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updated_at"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "transactions", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updated_at"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "unique_coupons", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updated_at"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "destination_sync_mode": "overwrite" - } - ] -} diff --git a/airbyte-integrations/connectors/source-recurly/integration_tests/future_state.json b/airbyte-integrations/connectors/source-recurly/integration_tests/future_state.json deleted file mode 100644 index 1c9442f7c6d3..000000000000 --- a/airbyte-integrations/connectors/source-recurly/integration_tests/future_state.json +++ /dev/null @@ -1,114 +0,0 @@ -[ - { - "type": "STREAM", - "stream": { - "stream_state": { "updated_at": "2036-07-19T22:21:37Z" }, - "stream_descriptor": { "name": "accounts" } - } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { "updated_at": "2036-07-19T22:21:37Z" }, - "stream_descriptor": { "name": "account_coupon_redemptions" } - } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { "created_at": "2036-07-19T22:21:37Z" }, - "stream_descriptor": { "name": "account_notes" } - } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { "updated_at": "2036-07-19T22:21:37Z" }, - "stream_descriptor": { "name": "add_ons" } - } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { "updated_at": "2036-07-19T22:21:37Z" }, - "stream_descriptor": { "name": "billing_infos" } - 
} - }, - { - "type": "STREAM", - "stream": { - "stream_state": { "updated_at": "2036-07-19T22:21:37Z" }, - "stream_descriptor": { "name": "coupons" } - } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { "updated_at": "2036-07-19T22:21:37Z" }, - "stream_descriptor": { "name": "credit_payments" } - } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { "updated_at": "2036-07-19T22:21:37Z" }, - "stream_descriptor": { "name": "invoices" } - } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { "updated_at": "2036-07-19T22:21:37Z" }, - "stream_descriptor": { "name": "line_items" } - } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { "updated_at": "2036-07-19T22:21:37Z" }, - "stream_descriptor": { "name": "measured_units" } - } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { "updated_at": "2036-07-19T22:21:37Z" }, - "stream_descriptor": { "name": "plans" } - } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { "updated_at": "2036-07-19T22:21:37Z" }, - "stream_descriptor": { "name": "shipping_addresses" } - } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { "updated_at": "2036-07-19T22:21:37Z" }, - "stream_descriptor": { "name": "shipping_methods" } - } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { "updated_at": "2036-07-19T22:21:37Z" }, - "stream_descriptor": { "name": "subscriptions" } - } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { "updated_at": "2036-07-19T22:21:37Z" }, - "stream_descriptor": { "name": "transactions" } - } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { "updated_at": "2036-07-19T22:21:37Z" }, - "stream_descriptor": { "name": "unique_coupons" } - } - } -] diff --git a/airbyte-integrations/connectors/source-recurly/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-recurly/integration_tests/invalid_config.json deleted file mode 100644 index 6016942564e8..000000000000 --- a/airbyte-integrations/connectors/source-recurly/integration_tests/invalid_config.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "api_key": "wrong-api-key" -} diff --git a/airbyte-integrations/connectors/source-recurly/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-recurly/integration_tests/sample_config.json deleted file mode 100644 index f0f3959f86a3..000000000000 --- a/airbyte-integrations/connectors/source-recurly/integration_tests/sample_config.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "api_key": "api-key" -} diff --git a/airbyte-integrations/connectors/source-recurly/main.py b/airbyte-integrations/connectors/source-recurly/main.py deleted file mode 100644 index ba5c26176fde..000000000000 --- a/airbyte-integrations/connectors/source-recurly/main.py +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from source_recurly.run import run - -if __name__ == "__main__": - run() diff --git a/airbyte-integrations/connectors/source-recurly/poetry.lock b/airbyte-integrations/connectors/source-recurly/poetry.lock deleted file mode 100644 index 144b7f527169..000000000000 --- a/airbyte-integrations/connectors/source-recurly/poetry.lock +++ /dev/null @@ -1,1045 +0,0 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. - -[[package]] -name = "airbyte-cdk" -version = "0.67.0" -description = "A framework for writing Airbyte Connectors." 
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "airbyte-cdk-0.67.0.tar.gz", hash = "sha256:cbbff1b3895c89313764a721870bb293a396c74bad8dd6e5c36a0c3b0a2f6a10"},
-    {file = "airbyte_cdk-0.67.0-py3-none-any.whl", hash = "sha256:2082c859536a2450c03b89dba1bbdab21bad314fbf5ef6d2e86fefc4ba935373"},
-]
-
-[package.dependencies]
-airbyte-protocol-models = "0.5.1"
-backoff = "*"
-cachetools = "*"
-Deprecated = ">=1.2,<2.0"
-dpath = ">=2.0.1,<2.1.0"
-genson = "1.2.2"
-isodate = ">=0.6.1,<0.7.0"
-Jinja2 = ">=3.1.2,<3.2.0"
-jsonref = ">=0.2,<1.0"
-jsonschema = ">=3.2.0,<3.3.0"
-pendulum = "<3.0.0"
-pydantic = ">=1.10.8,<2.0.0"
-pyrate-limiter = ">=3.1.0,<3.2.0"
-python-dateutil = "*"
-PyYAML = ">=6.0.1"
-requests = "*"
-requests-cache = "*"
-wcmatch = "8.4"
-
-[package.extras]
-dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"]
-file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"]
-sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"]
-vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"]
-
-[[package]]
-name = "airbyte-protocol-models"
-version = "0.5.1"
-description = "Declares the Airbyte Protocol."
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"},
-    {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"},
-]
-
-[package.dependencies]
-pydantic = ">=1.9.2,<2.0.0"
-
-[[package]]
-name = "atomicwrites"
-version = "1.4.1"
-description = "Atomic file writes."
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-files = [
-    {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"},
-]
-
-[[package]]
-name = "attrs"
-version = "23.2.0"
-description = "Classes Without Boilerplate"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"},
-    {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"},
-]
-
-[package.extras]
-cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
-dev = ["attrs[tests]", "pre-commit"]
-docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
-tests = ["attrs[tests-no-zope]", "zope-interface"]
-tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"]
-tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"]
-
-[[package]]
-name = "backoff"
-version = "2.2.1"
-description = "Function decoration for backoff and retry"
-optional = false
-python-versions = ">=3.7,<4.0"
-files = [
-    {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"},
-    {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"},
-]
-
-[[package]]
-name = "bracex"
-version = "2.4"
-description = "Bash style brace expander."
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"},
-    {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"},
-]
-
-[[package]]
-name = "cachetools"
-version = "5.3.2"
-description = "Extensible memoizing collections and decorators"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"},
-    {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"},
-]
-
-[[package]]
-name = "cattrs"
-version = "23.2.3"
-description = "Composable complex class support for attrs and dataclasses."
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"},
-    {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"},
-]
-
-[package.dependencies]
-attrs = ">=23.1.0"
-exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""}
-typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""}
-
-[package.extras]
-bson = ["pymongo (>=4.4.0)"]
-cbor2 = ["cbor2 (>=5.4.6)"]
-msgpack = ["msgpack (>=1.0.5)"]
-orjson = ["orjson (>=3.9.2)"]
-pyyaml = ["pyyaml (>=6.0)"]
-tomlkit = ["tomlkit (>=0.11.8)"]
-ujson = ["ujson (>=5.7.0)"]
-
-[[package]]
-name = "certifi"
-version = "2024.2.2"
-description = "Python package for providing Mozilla's CA Bundle."
-optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.3.2" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash 
= "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = 
"charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, -] - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "deprecated" -version = "1.2.14" -description = "Python @deprecated decorator to deprecate old python classes, functions or methods." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, - {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, -] - -[package.dependencies] -wrapt = ">=1.10,<2" - -[package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] - -[[package]] -name = "dpath" -version = "2.0.8" -description = "Filesystem-like pathing and searching for dictionaries" -optional = false -python-versions = ">=3.7" -files = [ - {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, - {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, -] - -[[package]] -name = "exceptiongroup" -version = "1.2.0" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, -] - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "genson" -version = "1.2.2" -description = "GenSON is a powerful, user-friendly JSON Schema generator." 
-optional = false -python-versions = "*" -files = [ - {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, -] - -[[package]] -name = "idna" -version = "3.6" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, -] - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "isodate" -version = "0.6.1" -description = "An ISO 8601 date/time/duration parser and formatter" -optional = false -python-versions = "*" -files = [ - {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, - {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, -] - -[package.dependencies] -six = "*" - -[[package]] -name = "jinja2" -version = "3.1.3" -description = "A very fast and expressive template engine." -optional = false -python-versions = ">=3.7" -files = [ - {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, - {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "jsonref" -version = "0.3.0" -description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." -optional = false -python-versions = ">=3.3,<4.0" -files = [ - {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, - {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, -] - -[[package]] -name = "jsonschema" -version = "3.2.0" -description = "An implementation of JSON Schema validation for Python" -optional = false -python-versions = "*" -files = [ - {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, - {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, -] - -[package.dependencies] -attrs = ">=17.4.0" -pyrsistent = ">=0.14.0" -setuptools = "*" -six = ">=1.11.0" - -[package.extras] -format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] -format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] - -[[package]] -name = "markupsafe" -version = "2.1.5" -description = "Safely add untrusted strings to HTML/XML markup." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = 
"MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, -] - -[[package]] -name = "packaging" -version = "23.2" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.7" -files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, -] - -[[package]] -name = "pendulum" -version = "2.1.2" -description = "Python datetimes made easy" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, - {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, - {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, - {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = 
"sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, - {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, - {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, - {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, - {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, - {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, - {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, - {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, - {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, - {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, - {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, - {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, - {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, - {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, - {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, - {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, - {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, - {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, -] - -[package.dependencies] -python-dateutil = ">=2.6,<3.0" -pytzdata = ">=2020.1" - -[[package]] -name = "platformdirs" -version = "4.2.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, - {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, -] - -[package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] - -[[package]] -name = "pluggy" -version = "1.4.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] - -[[package]] -name = "pydantic" -version = "1.10.14" -description = "Data validation and settings management using python type hints" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, - {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, - {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, - {file = 
"pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, - {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, - {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, - {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, - {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, - {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = 
"sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, - {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, - {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, -] - -[package.dependencies] -typing-extensions = ">=4.2.0" - -[package.extras] -dotenv = ["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] - -[[package]] -name = "pyrate-limiter" -version = "3.1.1" -description = "Python Rate-Limiter using Leaky-Bucket Algorithm" -optional = false -python-versions = ">=3.8,<4.0" -files = [ - {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, - {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, -] - -[package.extras] -all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] -docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] - -[[package]] -name = "pyrsistent" -version = "0.20.0" -description = "Persistent/Functional/Immutable data structures" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, - {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, - {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, - {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, - {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, - {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, - {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, - {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, - {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, - {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, - {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, - {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, - {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, - {file = 
"pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, - {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, - {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, - {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, - {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, - {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, - {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, - {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, - {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, - {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, - {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, - {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, - {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, - {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, - {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, - {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, - {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, - {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, - {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, -] - -[[package]] -name = "pytest" -version = "6.2.5" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, - {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, -] - -[package.dependencies] -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} -attrs = ">=19.2.0" -colorama 
= {version = "*", markers = "sys_platform == \"win32\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -py = ">=1.8.2" -toml = "*" - -[package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] - -[[package]] -name = "pytest-mock" -version = "3.12.0" -description = "Thin-wrapper around the mock package for easier use with pytest" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, -] - -[package.dependencies] -pytest = ">=5.0" - -[package.extras] -dev = ["pre-commit", "pytest-asyncio", "tox"] - -[[package]] -name = "python-dateutil" -version = "2.8.2" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "pytzdata" -version = "2020.1" -description = "The Olson timezone database for Python." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, - {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, -] - -[[package]] -name = "pyyaml" -version = "6.0.1" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = 
"PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, -] - -[[package]] -name = "recurly" -version = "4.10.0" -description = "Recurly v4" -optional = false -python-versions = "*" -files = [ - {file = "recurly-4.10.0-py3-none-any.whl", hash = "sha256:b8e3b1ec58f7b1e1b91286f2db864f6ba4053837ad920d0c2868508020442aaf"}, - {file = "recurly-4.10.0.tar.gz", hash = "sha256:a8dddab76bb38f76a715644448f45499227bfd00529ef33f7945b3bcc5a8f3a2"}, -] - -[[package]] -name = "requests" -version = "2.31.0" -description = "Python HTTP for Humans." 
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
-    {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
-]
-
-[package.dependencies]
-certifi = ">=2017.4.17"
-charset-normalizer = ">=2,<4"
-idna = ">=2.5,<4"
-urllib3 = ">=1.21.1,<3"
-
-[package.extras]
-socks = ["PySocks (>=1.5.6,!=1.5.7)"]
-use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
-
-[[package]]
-name = "requests-cache"
-version = "1.2.0"
-description = "A persistent cache for python requests"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"},
-    {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"},
-]
-
-[package.dependencies]
-attrs = ">=21.2"
-cattrs = ">=22.2"
-platformdirs = ">=2.5"
-requests = ">=2.22"
-url-normalize = ">=1.4"
-urllib3 = ">=1.25.5"
-
-[package.extras]
-all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"]
-bson = ["bson (>=0.5)"]
-docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"]
-dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"]
-json = ["ujson (>=5.4)"]
-mongodb = ["pymongo (>=3)"]
-redis = ["redis (>=3)"]
-security = ["itsdangerous (>=2.0)"]
-yaml = ["pyyaml (>=6.0.1)"]
-
-[[package]]
-name = "requests-mock"
-version = "1.11.0"
-description = "Mock out responses from the requests package"
-optional = false
-python-versions = "*"
-files = [
-    {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"},
-    {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"},
-]
-
-[package.dependencies]
-requests = ">=2.3,<3"
-six = "*"
-
-[package.extras]
-fixture = ["fixtures"]
-test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"]
-
-[[package]]
-name = "setuptools"
-version = "69.1.0"
-description = "Easily download, build, install, upgrade, and uninstall Python packages"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"},
-    {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"},
-]
-
-[package.extras]
-docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
-testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
-testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
-
-[[package]]
-name = "six"
-version = "1.16.0"
-description = "Python 2 and 3 compatibility utilities"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
-files = [
-    {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
-    {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
-]
-
-[[package]]
-name = "toml"
-version = "0.10.2"
-description = "Python Library for Tom's Obvious, Minimal Language"
-optional = false
-python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
-files = [
-    {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
-    {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
-]
-
-[[package]]
-name = "typing-extensions"
-version = "4.9.0"
-description = "Backported and Experimental Type Hints for Python 3.8+"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"},
-    {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"},
-]
-
-[[package]]
-name = "url-normalize"
-version = "1.4.3"
-description = "URL normalization for Python"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
-files = [
-    {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"},
-    {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"},
-]
-
-[package.dependencies]
-six = "*"
-
-[[package]]
-name = "urllib3"
-version = "2.2.1"
-description = "HTTP library with thread-safe connection pooling, file post, and more."
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"},
-    {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"},
-]
-
-[package.extras]
-brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
-h2 = ["h2 (>=4,<5)"]
-socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
-zstd = ["zstandard (>=0.18.0)"]
-
-[[package]]
-name = "wcmatch"
-version = "8.4"
-description = "Wildcard/glob file name matcher."
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"},
-    {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"},
-]
-
-[package.dependencies]
-bracex = ">=2.1.1"
-
-[[package]]
-name = "wrapt"
-version = "1.16.0"
-description = "Module for decorators, wrappers and monkey patching."
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"},
-    {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"},
-    {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"},
-    {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"},
-    {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"},
-    {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"},
-    {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"},
-    {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"},
-    {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"},
-    {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"},
-    {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"},
-    {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"},
-    {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"},
-    {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"},
-    {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"},
-    {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"},
-    {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"},
-    {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"},
-    {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"},
-    {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"},
-    {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"},
-    {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"},
-    {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"},
-    {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"},
-    {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"},
-    {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"},
-    {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"},
-    {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"},
-    {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"},
-    {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"},
-    {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"},
-    {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"},
-    {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"},
-    {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"},
-    {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"},
-    {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"},
-    {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"},
-    {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"},
-    {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"},
-    {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"},
-    {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"},
-    {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"},
-    {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"},
-    {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"},
-    {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"},
-    {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"},
-    {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"},
-    {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"},
-    {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"},
-    {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"},
-    {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"},
-    {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"},
-    {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"},
-    {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"},
-    {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"},
-    {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"},
-    {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"},
-    {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"},
-    {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"},
-    {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"},
-    {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"},
-    {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"},
-    {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"},
-    {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"},
-    {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"},
-    {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"},
-    {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"},
-    {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"},
-    {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"},
-    {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"},
-]
-
-[metadata]
-lock-version = "2.0"
-python-versions = "^3.9,<3.12"
-content-hash = "d85022c0f25e080bc6664f5dd4f866b1ef1dbe94bf07b5ad77a8eb4d45987798"
diff --git a/airbyte-integrations/connectors/source-recurly/pyproject.toml b/airbyte-integrations/connectors/source-recurly/pyproject.toml
deleted file mode 100644
index 7e1569523ee2..000000000000
--- a/airbyte-integrations/connectors/source-recurly/pyproject.toml
+++ /dev/null
@@ -1,29 +0,0 @@
-[build-system]
-requires = [ "poetry-core>=1.0.0",]
-build-backend = "poetry.core.masonry.api"
-
-[tool.poetry]
-version = "0.5.0"
-name = "source-recurly"
-description = "Source implementation for Recurly."
-authors = [ "Airbyte ",]
-license = "MIT"
-readme = "README.md"
-documentation = "https://docs.airbyte.com/integrations/sources/recurly"
-homepage = "https://airbyte.com"
-repository = "https://github.com/airbytehq/airbyte"
-[[tool.poetry.packages]]
-include = "source_recurly"
-
-[tool.poetry.dependencies]
-python = "^3.9,<3.12"
-airbyte-cdk = "^0.67.0"
-recurly = "==4.10.0"
-
-[tool.poetry.scripts]
-source-recurly = "source_recurly.run:run"
-
-[tool.poetry.group.dev.dependencies]
-requests-mock = "^1.9.3"
-pytest-mock = "^3.6.1"
-pytest = "^6.1"
diff --git a/airbyte-integrations/connectors/source-recurly/sample_files/configured_catalog.json b/airbyte-integrations/connectors/source-recurly/sample_files/configured_catalog.json
deleted file mode 100644
index dc424c32f070..000000000000
--- a/airbyte-integrations/connectors/source-recurly/sample_files/configured_catalog.json
+++ /dev/null
@@ -1,16 +0,0 @@
-{
-  "streams": [
-    {
-      "stream": {
-        "name": "unique_coupons",
-        "supported_sync_modes": ["full_refresh", "incremental"],
-        "source_defined_cursor": true,
-        "default_cursor_field": ["updated_at"],
-        "source_defined_primary_key": [["id"]],
-        "json_schema": {}
-      },
-      "sync_mode": "incremental",
-      "destination_sync_mode": "overwrite"
-    }
-  ]
-}
diff --git a/airbyte-integrations/connectors/source-recurly/sample_files/sample_catalog.json b/airbyte-integrations/connectors/source-recurly/sample_files/sample_catalog.json
deleted file mode 100644
index 5ee733426146..000000000000
--- a/airbyte-integrations/connectors/source-recurly/sample_files/sample_catalog.json
+++ /dev/null
@@ -1,102 +0,0 @@
-{
-  "streams": [
-    {
-      "name": "accounts",
-      "supported_sync_modes": ["full_refresh"],
-      "source_defined_cursor": false,
-      "json_schema": {
-        "$schema": "http://json-schema.org/draft-07/schema#",
-        "type": "object",
-        "properties": {
-          "id": {
-            "type": "string"
-          },
-          "object": {
-            "type": "string"
-          },
-          "code": {
-            "type": "string"
-          },
-          "parent_account_id": {
-            "type": "string"
-          },
-          "bill_to": {
-            "type": "string"
-          },
-          "state": {
-            "type": "string"
-          },
-          "username": {
-            "type": "string"
-          },
-          "email": {
-            "type": "string"
-          },
-          "cc_emails": {
-            "type": "string"
-          },
-          "preferred_locale": {
-            "type": "string"
-          },
-          "first_name": {
-            "type": "string"
-          },
-          "last_name": {
-            "type": "string"
-          },
-          "company": {
-            "type": "string"
-          },
-          "vat_number": {
-            "type": "string"
-          },
-          "tax_exempt": {
-            "type": "boolean"
-          },
-          "exemption_certificate": {
-            "type": "string"
-          },
-          "address": {
-            "type": "object"
-          },
-          "billing_info": {
-            "type": "object"
-          },
-          "shipping_addresses": {
-            "type": "array"
-          },
-          "custom_fields": {
-            "type": "array"
-          },
-          "has_live_subscription": {
-            "type": "boolean"
-          },
-          "has_active_subscription": {
-            "type": "boolean"
-          },
-          "has_future_subscription": {
-            "type": "boolean"
-          },
-          "has_canceled_subscription": {
-            "type": "boolean"
-          },
-          "has_paused_subscription": {
-            "type": "boolean"
-          },
-          "has_past_due_invoice": {
-            "type": "boolean"
-          },
-          "created_at": {
-            "type": "string"
-          },
-          "updated_at": {
-            "type": "string"
-          },
-          "deleted_at": {
-            "type": "string"
-          }
-        }
-      }
-    }
-  ]
-}
diff --git a/airbyte-integrations/connectors/source-recurly/sample_files/sample_config.json b/airbyte-integrations/connectors/source-recurly/sample_files/sample_config.json
deleted file mode 100644
index c411c99a0bc4..000000000000
--- a/airbyte-integrations/connectors/source-recurly/sample_files/sample_config.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "api_key": ""
-}
diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/__init__.py b/airbyte-integrations/connectors/source-recurly/source_recurly/__init__.py
deleted file mode 100644
index 48116a0807b8..000000000000
--- a/airbyte-integrations/connectors/source-recurly/source_recurly/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from .source import SourceRecurly
-
-__all__ = ["SourceRecurly"]
diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/run.py b/airbyte-integrations/connectors/source-recurly/source_recurly/run.py
deleted file mode 100644
index 746b6556605c..000000000000
--- a/airbyte-integrations/connectors/source-recurly/source_recurly/run.py
+++ /dev/null
@@ -1,14 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
-
-
-import sys
-
-from airbyte_cdk.entrypoint import launch
-from source_recurly import SourceRecurly
-
-
-def run():
-    source = SourceRecurly()
-    launch(source, sys.argv[1:])
diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/account_coupon_redemptions.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/account_coupon_redemptions.json
deleted file mode 100644
index d9e894d584d3..000000000000
--- a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/account_coupon_redemptions.json
+++ /dev/null
@@ -1,46 +0,0 @@
-{
-  "$schema": "http://json-schema.org/draft-07/schema#",
-  "type": "object",
-  "properties": {
-    "id": {
-      "type": ["null", "string"],
-      "maxLength": 13
-    },
-    "object": {
-      "type": ["null", "string"]
-    },
-    "account": {
-      "$ref": "account_details.json"
-    },
-    "subscription_id": {
-      "type": ["null", "string"],
-      "maxLength": 13
-    },
-    "coupon": {
-      "$ref": "coupons.json"
-    },
-    "state": {
-      "type": ["null", "string"],
-      "maxLength": 256
-    },
-    "currency": {
-      "type": ["null", "string"],
-      "maxLength": 3
-    },
-    "discounted": {
-      "type": ["null", "number"]
-    },
-    "created_at": {
-      "type": ["null", "string"],
-      "format": "date-time"
-    },
-    "updated_at": {
-      "type": ["null", "string"],
-      "format": "date-time"
-    },
-    "removed_at": {
-      "type": ["null", "string"],
-      "format": "date-time"
-    }
-  }
-}
diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/account_notes.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/account_notes.json
deleted file mode 100644
index ee68e82c8973..000000000000
--- a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/account_notes.json
+++ /dev/null
@@ -1,30 +0,0 @@
-{
-  "$schema": "http://json-schema.org/draft-07/schema#",
-  "type": "object",
-  "properties": {
-    "id": {
-      "type": "string",
-      "maxLength": 13,
-      "readOnly": true
-    },
-    "object": {
-      "type": ["null", "string"]
-    },
-    "account_id": {
-      "type": "string",
-      "maxLength": 13
-    },
-    "user": {
-      "$ref": "users.json"
-    },
-    "message": {
-      "type": ["null", "string"],
-      "maxLength": 2048
-    },
-    "created_at": {
-      "type": "string",
-      "format": "date-time",
-      "readOnly": true
-    }
-  }
-}
diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/accounts.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/accounts.json
deleted file mode 100644
index c9f1c5b84953..000000000000
--- a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/accounts.json
+++ /dev/null
@@ -1,182 +0,0 @@
-{
-  "$schema": "http://json-schema.org/draft-07/schema#",
-  "type": "object",
-  "properties": {
-    "id": {
-      "type": ["null", "string"],
-      "maxLength": 13
-    },
-    "object": {
-      "type": ["null", "string"]
-    },
-    "hosted_login_token": {
-      "type": ["null", "string"]
-    },
-    "code": {
-      "type": ["null", "string"],
-      "maxLength": 256
-    },
-    "parent_account_id": {
-      "type": ["null", "string"],
-      "maxLength": 13
-    },
-    "bill_to": {
-      "type": ["null", "string"],
-      "maxLength": 6
-    },
-    "state": {
-      "type": ["null", "string"],
-      "maxLength": 256
-    },
-    "username": {
-      "type": ["null", "string"],
-      "maxLength": 256
-    },
-    "email": {
-      "type": ["null", "string"],
-      "maxLength": 256
-    },
-    "cc_emails": {
-      "type": ["null", "string"],
-      "maxLength": 256
-    },
-    "preferred_locale": {
-      "type": ["null", "string"],
-      "maxLength": 12
-    },
-    "first_name": {
-      "type": ["null", "string"],
-      "maxLength": 256
-    },
-    "last_name": {
-      "type": ["null", "string"],
-      "maxLength": 256
-    },
-    "company": {
-      "type": ["null", "string"],
-      "maxLength": 50
-    },
-    "vat_number": {
-      "type": ["null", "string"],
-      "maxLength": 20
-    },
-    "tax_exempt": {
-      "type": ["null", "boolean"]
-    },
-    "exemption_certificate": {
-      "type": ["null", "string"],
-      "maxLength": 30
-    },
-    "address": {
-      "type": "object",
-      "properties": {
-        "phone": {
-          "type": "string",
-          "title": "Phone number",
-          "maxLength": 256
-        },
-        "street1": {
-          "type": "string",
-          "title": "Street 1",
-          "maxLength": 256
-        },
-        "street2": {
-          "type": "string",
-          "title": "Street 2",
-          "maxLength": 256
-        },
-        "city": {
-          "type": "string",
-          "title": "City",
-          "maxLength": 256
-        },
-        "region": {
-          "type": "string",
-          "title": "State/Province",
-          "description": "State or province.",
-          "maxLength": 256
-        },
-        "postal_code": {
-          "type": "string",
-          "title": "Zip/Postal code",
-          "description": "Zip or postal code.",
-          "maxLength": 256
-        },
-        "country": {
-          "type": "string",
-          "title": "Country",
-          "description": "Country, 2-letter ISO 3166-1 alpha-2 code.",
-          "maxLength": 2
-        },
-        "geo_code": {
-          "type": ["null", "string"]
-        }
-      }
-    },
-    "custom_fields": {
-      "type": ["null", "array"],
-      "items": {
-        "type": ["null", "object"],
-        "additionalProperties": true
-      }
-    },
-    "has_live_subscription": {
-      "type": ["null", "boolean"]
-    },
-    "has_active_subscription": {
-      "type": ["null", "boolean"]
-    },
-    "has_future_subscription": {
-      "type": ["null", "boolean"]
-    },
-    "has_canceled_subscription": {
-      "type": ["null", "boolean"]
-    },
-    "has_paused_subscription": {
-      "type": ["null", "boolean"]
-    },
-    "has_past_due_invoice": {
-      "type": ["null", "boolean"]
-    },
-    "dunning_campaign_id": {
-      "type": ["null", "string"],
-      "maxLength": 256
-    },
-    "created_at": {
-      "type": ["null", "string"],
-      "format": "date-time"
-    },
-    "updated_at": {
-      "type": ["null", "string"],
-      "format": "date-time"
-    },
-    "deleted_at": {
-      "type": ["null", "string"],
-      "format": "date-time"
-    },
-    "billing_info": {
-      "$ref": "billing_infos.json"
-    },
-    "external_accounts": {
-      "type": ["null", "array"],
-      "items": {
-        "$ref": "external_accounts.json"
-      }
-    },
-    "invoice_template_id": {
-      "type": ["null", "string"]
-    },
-    "override_business_entity_id": {
-      "type": ["null", "string"]
-    },
-    "preferred_time_zone": {
-      "type": ["null", "string"]
-    },
-    "shipping_addresses": {
-      "type": ["null", "array"],
-      "items": {
-        "$ref": "shipping_addresses.json"
-      }
    }
-  }
-}
diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/add_ons.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/add_ons.json
deleted file mode 100644
index ffeea5d2f1be..000000000000
--- a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/add_ons.json
+++ /dev/null
@@ -1,151 +0,0 @@
-{
-  "$schema": "http://json-schema.org/schema#",
-  "type": "object",
-  "properties": {
-    "id": {
-      "type": "string",
-      "title": "Add-on ID",
-      "maxLength": 13,
-      "readOnly": true
-    },
-    "plan_id": {
-      "type": "string",
-      "title": "Plan ID",
-      "maxLength": 13,
-      "readOnly": true
-    },
-    "code": {
-      "type": "string",
-      "title": "Add-on code",
-      "description": "The unique identifier for the add-on within its plan.",
-      "maxLength": 50
-    },
-    "state": {
-      "title": "State",
-      "description": "Add-ons can be either active or inactive.",
-      "readOnly": true,
-      "type": "string",
-      "maxLength": 256
-    },
-    "name": {
-      "type": "string",
-      "title": "Name",
-      "description": "Describes your add-on and will appear in subscribers' invoices.",
-      "maxLength": 255
-    },
-    "add_on_type": {
-      "type": ["null", "string"],
-      "title": "Add-on Type",
-      "description": "Whether the add-on type is fixed, or usage-based.",
-      "maxLength": 256
-    },
-    "usage_type": {
-      "type": "string",
-      "title": "Usage Type",
-      "description": "Type of usage, returns usage type if `add_on_type` is `usage`.",
-      "maxLength": 256
-    },
-    "usage_percentage": {
-      "type": ["null", "number"],
-      "format": "float",
-      "title": "Usage Percentage",
-      "description": "The percentage taken of the monetary amount of usage tracked. This can be up to 4 decimal places. A value between 0.0 and 100.0."
-    },
-    "measured_unit_id": {
-      "type": ["null", "string"],
-      "title": "Measured Unit ID",
-      "description": "System-generated unique identifier for an measured unit associated with the add-on.",
-      "maxLength": 13
-    },
-    "accounting_code": {
-      "type": ["null", "string"],
-      "title": "Accounting code",
-      "description": "Accounting code for invoice line items for this add-on. If no value is provided, it defaults to add-on's code.",
-      "maxLength": 256
-    },
-    "revenue_schedule_type": {
-      "title": "Revenue schedule type",
-      "description": "When this add-on is invoiced, the line item will use this revenue schedule. If `item_code`/`item_id` is part of the request then `revenue_schedule_type` must be absent in the request as the value will be set from the item.",
-      "type": "string",
-      "maxLength": 256
-    },
-    "avalara_transaction_type": {
-      "type": ["string", "integer"],
-      "title": "Avalara Transaction Type",
-      "description": "Used by Avalara for Communications taxes. The transaction type in combination with the service type describe how the add-on is taxed. Refer to [the documentation](https://help.avalara.com/AvaTax_for_Communications/Tax_Calculation/AvaTax_for_Communications_Tax_Engine/Mapping_Resources/TM_00115_AFC_Modules_Corresponding_Transaction_Types) for more available t/s types.",
-      "minimum": 0
-    },
-    "avalara_service_type": {
-      "type": ["string", "integer"],
-      "title": "Avalara Service Type",
-      "description": "Used by Avalara for Communications taxes. The transaction type in combination with the service type describe how the add-on is taxed. Refer to [the documentation](https://help.avalara.com/AvaTax_for_Communications/Tax_Calculation/AvaTax_for_Communications_Tax_Engine/Mapping_Resources/TM_00115_AFC_Modules_Corresponding_Transaction_Types) for more available t/s types.",
-      "minimum": 0
-    },
-    "tax_code": {
-      "type": ["null", "string"],
-      "title": "Tax code",
-      "description": "Used by Avalara, Vertex, and Recurly\u2019s EU VAT tax feature. The tax code values are specific to each tax system. If you are using Recurly\u2019s EU VAT feature you can use `unknown`, `physical`, or `digital`.",
-      "maxLength": 50
-    },
-    "display_quantity": {
-      "type": ["null", "boolean"],
-      "title": "Display quantity?",
-      "description": "Determines if the quantity field is displayed on the hosted pages for the add-on."
-    },
-    "default_quantity": {
-      "type": ["null", "integer"],
-      "title": "Default quantity",
-      "description": "Default quantity for the hosted pages."
-    },
-    "optional": {
-      "type": ["null", "boolean"],
-      "title": "Optional",
-      "description": "Whether the add-on is optional for the customer to include in their purchase on the hosted payment page. If false, the add-on will be included when a subscription is created through the Recurly UI. However, the add-on will not be included when a subscription is created through the API."
    },
-    "currencies": {
-      "type": "array",
-      "description": "This is only present when `type=fixed`.",
-      "items": {
-        "type": ["null", "object"],
-        "properties": {
-          "currency": {
-            "type": "string",
-            "title": "Currency",
-            "description": "3-letter ISO 4217 currency code.",
-            "maxLength": 3
-          },
-          "unit_amount": {
-            "type": "number",
-            "format": "float",
-            "title": "Discount Amount",
-            "description": "Value of the fixed discount that this coupon applies."
          }
-        }
-      }
-    },
-    "tier_type": {
-      "type": ["null", "string"],
-      "title": "Tier type",
-      "description": "The pricing model for the add-on. For more information,\n[click here](https://docs.recurly.com/docs/billing-models#section-quantity-based). See our\n[Guide](https://developers.recurly.com/guides/item-addon-guide.html) for an overview of how\nto configure quantity-based pricing models.\n",
-      "maxLength": 256
-    },
-    "created_at": {
-      "type": "string",
-      "format": "date-time",
-      "title": "Created at",
-      "readOnly": true
-    },
-    "updated_at": {
-      "type": "string",
-      "format": "date-time",
-      "title": "Last updated at",
-      "readOnly": true
-    },
-    "deleted_at": {
-      "type": "string",
-      "format": "date-time",
-      "title": "Deleted at",
-      "readOnly": true
    }
-  }
-}
diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/billing_infos.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/billing_infos.json
deleted file mode 100644
index 5dd179d84f39..000000000000
--- a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/billing_infos.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "$ref": "billing_infos.json"
-}
diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/coupons.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/coupons.json
deleted file mode 100644
index 9c262d6773c3..000000000000
--- a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/coupons.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "$ref": "coupons.json"
-}
diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/credit_payments.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/credit_payments.json
deleted file mode 100644
index 98d827bb6074..000000000000
--- a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/credit_payments.json
+++ /dev/null
@@ -1,123 +0,0 @@
-{
-  "$schema": "http://json-schema.org/schema#",
-  "type": "object",
-  "properties": {
-    "id": {
-      "type": "string",
-      "title": "Credit Payment ID",
-      "maxLength": 13
-    },
-    "uuid": {
-      "type": "string",
-      "title": "Recurly UUID",
-      "description": "The UUID is useful for matching data with the CSV exports and building URLs into Recurly's UI.",
-      "maxLength": 32
-    },
-    "action": {
-      "title": "Action",
-      "description": "The action for which the credit was created.",
-      "type": "string",
-      "maxLength": 256
-    },
-    "account": {
-      "type": "object",
-      "title": "Account mini details",
-      "properties": {
-        "id": {
-          "type": "string",
-          "maxLength": 13,
-          "readOnly": true
-        },
-        "code": {
-          "type": "string",
-          "description": "The unique identifier of the account.",
-          "maxLength": 50
        }
-      }
-    },
-    "applied_to_invoice": {
-      "type": ["null", "object"],
-      "title": "Invoice mini details",
-      "properties": {
-        "id": {
-          "type": "string",
-          "title": "Invoice ID",
-          "maxLength": 13
-        },
-        "number": {
-          "type": "string",
-          "title": "Invoice number",
-          "maxLength": 256
        }
-      }
-    },
-    "original_invoice": {
-      "type": ["null", "object"],
-      "title": "Invoice mini details",
-      "properties": {
-        "id": {
-          "type": "string",
-          "title": "Invoice ID",
-          "maxLength": 13
-        },
-        "number": {
-          "type": "string",
-          "title": "Invoice number",
-          "maxLength": 256
        }
-      }
-    },
-    "currency": {
-      "type": "string",
-      "title": "Currency",
-      "description": "3-letter ISO 4217 currency code.",
-      "maxLength": 3
-    },
-    "amount": {
-      "type": "number",
-      "format": "float",
-      "title": "Amount",
-      "description": "Total credit payment amount applied to the charge invoice."
- }, - "original_credit_payment_id": { - "type": ["null", "string"], - "title": "Original Credit Payment ID", - "description": "For credit payments with action `refund`, this is the credit payment that was refunded.", - "maxLength": 13 - }, - "refund_transaction": { - "type": ["null", "object"], - "properties": { - "id": { - "type": "string", - "title": "Transaction ID", - "maxLength": 13 - }, - "uuid": { - "type": "string", - "title": "Recurly UUID", - "description": "The UUID is useful for matching data with the CSV exports and building URLs into Recurly's UI.", - "maxLength": 32 - } - } - }, - "created_at": { - "type": "string", - "title": "Created at", - "format": "date-time", - "readOnly": true - }, - "updated_at": { - "type": "string", - "title": "Last updated at", - "format": "date-time", - "readOnly": true - }, - "voided_at": { - "type": ["null", "string"], - "title": "Voided at", - "format": "date-time", - "readOnly": true - } - } -} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/export_dates.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/export_dates.json deleted file mode 100644 index f63e37989dea..000000000000 --- a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/export_dates.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "dates": { - "type": ["null", "array"], - "items": { - "type": ["null", "string"], - "maxLength": 256 - } - } - } -} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/invoices.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/invoices.json deleted file mode 100644 index 6e5f4732e079..000000000000 --- a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/invoices.json +++ /dev/null @@ -1,377 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "id": { - "type": ["null", "string"], - "title": "Invoice ID", - "readOnly": true, - "maxLength": 13 - }, - "uuid": { - "type": ["null", "string"] - }, - "object": { - "type": ["null", "string"] - }, - "type": { - "title": "Invoice type", - "description": "Invoices are either charge, credit, or legacy invoices.", - "type": ["null", "string"], - "maxLength": 256 - }, - "origin": { - "type": ["null", "string"], - "title": "Origin", - "description": "The event that created the invoice.", - "maxLength": 256 - }, - "state": { - "title": "Invoice state", - "type": ["null", "string"], - "maxLength": 256 - }, - "account": { - "$ref": "account_details.json" - }, - "billing_info_id": { - "type": ["null", "string"], - "title": "Billing info ID", - "description": "The `billing_info_id` is the value that represents a specific billing info for an end customer. When `billing_info_id` is used to assign billing info to the subscription, all future billing events for the subscription will bill to the specified billing info. 
`billing_info_id` can ONLY be used for sites utilizing the Wallet feature.", - "maxLength": 256 - }, - "subscription_ids": { - "type": ["null", "array"], - "title": "Subscription IDs", - "description": "If the invoice is charging or refunding for one or more subscriptions, these are their IDs.", - "items": { - "type": ["null", "string"], - "title": "Subscription ID", - "maxLength": 13 - } - }, - "previous_invoice_id": { - "type": ["null", "string"], - "title": "Previous invoice ID", - "description": "On refund invoices, this value will exist and show the invoice ID of the purchase invoice the refund was created from.", - "maxLength": 13 - }, - "number": { - "type": ["null", "string"], - "title": "Invoice number", - "description": "If VAT taxation and the Country Invoice Sequencing feature are enabled, invoices will have country-specific invoice numbers for invoices billed to EU countries (ex: FR1001). Non-EU invoices will continue to use the site-level invoice number sequence.", - "maxLength": 256 - }, - "collection_method": { - "type": ["null", "string"], - "title": "Collection method", - "description": "An automatic invoice means a corresponding transaction is run using the account's billing information at the same time the invoice is created. Manual invoices are created without a corresponding transaction. The merchant must enter a manual payment transaction or have the customer pay the invoice with an automatic method, like credit card, PayPal, Amazon, or ACH bank payment.", - "maxLength": 256 - }, - "po_number": { - "type": ["null", "string"], - "title": "Purchase order number", - "description": "For manual invoicing, this identifies the PO number associated with the subscription.", - "maxLength": 50 - }, - "net_terms": { - "type": ["null", "integer"], - "title": "Net terms", - "description": "Integer representing the number of days after an invoice's creation that the invoice will become past due. If an invoice's net terms are set to '0', it is due 'On Receipt' and will become past due 24 hours after it\u2019s created. 
If an invoice is due net 30, it will become past due at 31 days exactly.", - "minimum": 0, - "default": 0 - }, - "address": { - "type": ["null", "object"], - "properties": { - "name_on_account": { - "type": ["null", "string"], - "title": "Name on account", - "maxLength": 256 - }, - "company": { - "type": ["null", "string"], - "title": "Company", - "maxLength": 256 - }, - "phone": { - "type": ["null", "string"], - "title": "Phone number", - "maxLength": 256 - }, - "street1": { - "type": ["null", "string"], - "title": "Street 1", - "maxLength": 256 - }, - "street2": { - "type": ["null", "string"], - "title": "Street 2", - "maxLength": 256 - }, - "city": { - "type": ["null", "string"], - "title": "City", - "maxLength": 256 - }, - "region": { - "type": ["null", "string"], - "title": "State/Province", - "description": "State or province.", - "maxLength": 256 - }, - "postal_code": { - "type": ["null", "string"], - "title": "Zip/Postal code", - "description": "Zip or postal code.", - "maxLength": 256 - }, - "country": { - "type": ["null", "string"], - "title": "Country", - "description": "Country, 2-letter ISO 3166-1 alpha-2 code.", - "maxLength": 2 - }, - "first_name": { - "type": ["null", "string"], - "maxLength": 256 - }, - "last_name": { - "type": ["null", "string"], - "maxLength": 256 - } - } - }, - "shipping_address": { - "type": ["null", "object"], - "properties": { - "id": { - "type": ["null", "string"], - "title": "Shipping Address ID", - "maxLength": 13, - "readOnly": true - } - } - }, - "currency": { - "type": ["null", "string"], - "title": "Currency", - "description": "3-letter ISO 4217 currency code.", - "maxLength": 3 - }, - "discount": { - "type": ["null", "number"], - "format": "float", - "title": "Discount", - "description": "Total discounts applied to this invoice." - }, - "subtotal": { - "type": ["null", "number"], - "format": "float", - "title": "Subtotal", - "description": "The summation of charges and credits, before discounts and taxes." - }, - "tax": { - "type": ["null", "number"], - "format": "float", - "title": "Tax", - "description": "The total tax on this invoice." - }, - "total": { - "type": ["null", "number"], - "format": "float", - "title": "Total", - "description": "The final total on this invoice. The summation of invoice charges, discounts, credits, and tax." - }, - "refundable_amount": { - "type": ["null", "number"], - "format": "float", - "title": "Refundable amount", - "description": "The refundable amount on a charge invoice. It will be null for all other invoices." - }, - "paid": { - "type": ["null", "number"], - "format": "float", - "title": "Paid", - "description": "The total amount of successful payments transaction on this invoice." - }, - "balance": { - "type": ["null", "number"], - "format": "float", - "title": "Balance", - "description": "The outstanding balance remaining on this invoice." - }, - "tax_info": { - "type": ["null", "object"], - "title": "Tax info", - "properties": { - "type": { - "type": ["null", "string"], - "title": "Type", - "description": "Provides the tax type as \"vat\" for EU VAT, \"usst\" for U.S. Sales Tax, or the 2 letter country code for country level tax types like Canada, Australia, New Zealand, Israel, and all non-EU European countries.", - "maxLength": 256 - }, - "region": { - "type": ["null", "string"], - "title": "Region", - "description": "Provides the tax region applied on an invoice. For U.S. Sales Tax, this will be the 2 letter state code. For EU VAT this will be the 2 letter country code. 
For all country level tax types, this will display the regional tax, like VAT, GST, or PST." - }, - "rate": { - "type": ["null", "number"], - "format": "float", - "title": "Rate" - }, - "tax_details": { - "type": "array", - "description": "Provides additional tax details for Canadian Sales Tax when there is tax applied at both the country and province levels. This will only be populated for the Invoice response when fetching a single invoice and not for the InvoiceList or LineItem.", - "items": { - "type": "object", - "title": "Tax detail", - "properties": { - "type": { - "type": ["null", "string"], - "title": "Type", - "description": "Provides the tax type for the region. For Canadian Sales Tax, this will be GST, HST, QST or PST.", - "maxLength": 256 - }, - "region": { - "type": ["null", "string"], - "title": "Region", - "description": "Provides the tax region applied on an invoice. For Canadian Sales Tax, this will be either the 2 letter province code or country code.", - "maxLength": 256 - }, - "rate": { - "type": ["null", "number"], - "format": "float", - "title": "Rate", - "description": "Provides the tax rate for the region." - }, - "tax": { - "type": ["null", "number"], - "format": "float", - "title": "Tax", - "description": "The total tax applied for this tax type." - } - } - } - } - } - }, - "used_tax_service": { - "type": ["null", "boolean"] - }, - "vat_number": { - "type": ["null", "string"], - "title": "VAT number", - "description": "VAT registration number for the customer on this invoice. This will come from the VAT Number field in the Billing Info or the Account Info depending on your tax settings and the invoice collection method.", - "maxLength": 20 - }, - "vat_reverse_charge_notes": { - "type": ["null", "string"], - "title": "VAT reverse charge notes", - "description": "VAT Reverse Charge Notes only appear if you have EU VAT enabled or are using your own Avalara AvaTax account and the customer is in the EU, has a VAT number, and is in a different country than your own. This will default to the VAT Reverse Charge Notes text specified on the Tax Settings page in your Recurly admin, unless custom notes were created with the original subscription.", - "maxLength": 1024 - }, - "terms_and_conditions": { - "type": ["null", "string"], - "title": "Terms and conditions", - "description": "This will default to the Terms and Conditions text specified on the Invoice Settings page in your Recurly admin. Specify custom notes to add or override Terms and Conditions.", - "maxLength": 16384 - }, - "customer_notes": { - "type": ["null", "string"], - "title": "Customer notes", - "description": "This will default to the Customer Notes text specified on the Invoice Settings. 
Specify custom notes to add or override Customer Notes.", - "maxLength": 2048 - }, - "line_items": { - "type": ["null", "array"], - "title": "Line Items", - "items": { - "$ref": "line_items.json" - } - }, - "has_more_line_items": { - "type": ["null", "boolean"] - }, - "transactions": { - "type": ["null", "array"], - "title": "Transactions", - "items": { - "type": ["null", "object"], - "properties": { - "id": { - "type": "string", - "title": "Transaction ID", - "maxLength": 13 - }, - "uuid": { - "type": "string", - "title": "Recurly UUID", - "description": "The UUID is useful for matching data with the CSV exports and building URLs into Recurly's UI.", - "maxLength": 32 - } - } - } - }, - "credit_payments": { - "type": ["null", "array"], - "title": "Credit payments", - "items": { - "type": ["null", "object"], - "properties": { - "id": { - "type": "string", - "title": "Credit Payment ID", - "maxLength": 13 - }, - "uuid": { - "type": "string", - "title": "Recurly UUID", - "description": "The UUID is useful for matching data with the CSV exports and building URLs into Recurly's UI.", - "maxLength": 32 - } - } - } - }, - "created_at": { - "type": ["null", "string"], - "format": "date-time", - "title": "Created at", - "readOnly": true - }, - "updated_at": { - "type": ["null", "string"], - "format": "date-time", - "title": "Last updated at", - "readOnly": true - }, - "due_at": { - "type": ["null", "string"], - "format": "date-time", - "title": "Due at", - "description": "Date invoice is due. This is the date the net terms are reached." - }, - "closed_at": { - "type": ["null", "string"], - "format": "date-time", - "title": "Closed at", - "description": "Date invoice was marked paid or failed." - }, - "dunning_campaign_id": { - "type": ["null", "string"], - "title": "Dunning Campaign ID", - "description": "Unique ID to identify the dunning campaign used when dunning the invoice. Available when the Dunning Campaigns feature is enabled. 
For sites without multiple dunning campaigns enabled, this will always be the default dunning campaign.", - "maxLength": 256 - }, - "dunning_events_sent": { - "type": ["null", "integer"] - }, - "final_dunning_event": { - "type": ["null", "boolean"] - }, - "business_entity_id": { - "type": ["null", "string"] - } - } -} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/line_items.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/line_items.json deleted file mode 100644 index 85370cc75850..000000000000 --- a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/line_items.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "$ref": "line_items.json" -} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/measured_units.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/measured_units.json deleted file mode 100644 index 7865d44d3079..000000000000 --- a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/measured_units.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "id": { - "type": ["null", "string"], - "maxLength": 13 - }, - "object": { - "type": ["null", "string"] - }, - "name": { - "type": ["null", "string"], - "maxLength": 256 - }, - "display_name": { - "type": ["null", "string"], - "maxLength": 255 - }, - "state": { - "type": ["null", "string"], - "maxLength": 255 - }, - "description": { - "type": ["null", "string"], - "maxLength": 1024 - }, - "created_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "updated_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "deleted_at": { - "type": ["null", "string"], - "format": "date-time" - } - } -} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/plans.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/plans.json deleted file mode 100644 index aabac321be53..000000000000 --- a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/plans.json +++ /dev/null @@ -1,191 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "id": { - "type": ["null", "string"], - "maxLength": 13 - }, - "object": { - "type": ["null", "string"] - }, - "code": { - "type": ["null", "string"], - "maxLength": 256 - }, - "state": { - "type": ["null", "string"], - "maxLength": 256 - }, - "name": { - "type": ["null", "string"], - "maxLength": 256 - }, - "description": { - "type": ["null", "string"], - "maxLength": 1024 - }, - "interval_unit": { - "type": ["null", "string"], - "maxLength": 256 - }, - "interval_length": { - "type": ["null", "number"] - }, - "trial_unit": { - "type": ["null", "string"], - "maxLength": 256 - }, - "trial_length": { - "type": ["null", "number"] - }, - "trial_requires_billing_info": { - "type": ["null", "boolean"] - }, - "total_billing_cycles": { - "type": ["null", "number"] - }, - "auto_renew": { - "type": ["null", "boolean"] - }, - "pricing_model": { - "type": ["null", "string"] - }, - "ramp_intervals": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "starting_billing_cycle": { - "type": ["null", "integer"] - }, - "currencies": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "currency": { - "type": ["null", "string"] - }, - "unit_amount": { - "type": ["null", "number"] - } - } - } 
- } - } - } - }, - "custom_fields": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "name": { - "type": ["null", "string"] - }, - "value": { - "type": ["null", "string"] - } - } - } - }, - "accounting_code": { - "type": ["null", "string"], - "maxLength": 256 - }, - "revenue_schedule_type": { - "type": ["null", "string"], - "maxLength": 256 - }, - "setup_fee_revenue_schedule_type": { - "type": ["null", "string"], - "maxLength": 256 - }, - "setup_fee_accounting_code": { - "type": ["null", "string"], - "maxLength": 256 - }, - "avalara_transaction_type": { - "type": ["null", "number"] - }, - "avalara_service_type": { - "type": ["null", "number"] - }, - "tax_code": { - "type": ["null", "string"], - "maxLength": 256 - }, - "tax_exempt": { - "type": ["null", "boolean"] - }, - "currencies": { - "type": "array", - "title": "Pricing", - "items": { - "type": "object", - "properties": { - "currency": { - "type": "string", - "title": "Currency", - "description": "3-letter ISO 4217 currency code.", - "maxLength": 3 - }, - "setup_fee": { - "type": "number", - "format": "float", - "title": "Setup fee", - "description": "Amount of one-time setup fee automatically charged at the beginning of a subscription billing cycle. For subscription plans with a trial, the setup fee will be charged at the time of signup. Setup fees do not increase with the quantity of a subscription plan.", - "minimum": 0, - "maximum": 1000000 - }, - "unit_amount": { - "type": "number", - "format": "float", - "title": "Unit price", - "minimum": 0, - "maximum": 1000000 - } - } - } - }, - "hosted_pages": { - "type": "object", - "properties": { - "success_url": { - "type": ["null", "string"], - "maxLength": 2048 - }, - "cancel_url": { - "type": ["null", "string"], - "maxLength": 2048 - }, - "bypass_confirmation": { - "type": ["null", "boolean"] - }, - "display_quantity": { - "type": ["null", "boolean"] - } - } - }, - "allow_any_item_on_subscriptions": { - "type": ["null", "boolean"] - }, - "dunning_campaign_id": { - "type": ["null", "string"], - "maxLength": 256 - }, - "created_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "updated_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "deleted_at": { - "type": ["null", "string"], - "format": "date-time" - } - } -} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/account_details.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/account_details.json deleted file mode 100644 index 9d3dc5d71945..000000000000 --- a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/account_details.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "type": ["null", "object"], - "properties": { - "id": { - "type": "string" - }, - "object": { - "type": ["null", "string"] - }, - "code": { - "type": ["null", "string"] - }, - "email": { - "type": ["null", "string"] - }, - "first_name": { - "type": ["null", "string"] - }, - "last_name": { - "type": ["null", "string"] - }, - "company": { - "type": ["null", "string"] - }, - "parent_account_id": { - "type": ["null", "string"] - }, - "bill_to": { - "type": ["null", "string"] - }, - "dunning_campaign_id": { - "type": ["null", "string"] - } - } -} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/billing_infos.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/billing_infos.json deleted file mode 100644 index 
dbf207f589b4..000000000000 --- a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/billing_infos.json +++ /dev/null @@ -1,213 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "id": { - "type": "string", - "maxLength": 13, - "readOnly": true - }, - "object": { - "type": ["null", "string"] - }, - "account_id": { - "type": "string", - "maxLength": 13, - "readOnly": true - }, - "first_name": { - "type": ["null", "string"], - "maxLength": 50 - }, - "last_name": { - "type": ["null", "string"], - "maxLength": 50 - }, - "company": { - "type": ["null", "string"], - "maxLength": 100 - }, - "address": { - "type": "object", - "properties": { - "phone": { - "type": ["null", "string"], - "title": "Phone number", - "maxLength": 256 - }, - "street1": { - "type": ["null", "string"], - "title": "Street 1", - "maxLength": 256 - }, - "street2": { - "type": ["null", "string"], - "title": "Street 2", - "maxLength": 256 - }, - "city": { - "type": ["null", "string"], - "title": "City", - "maxLength": 256 - }, - "region": { - "type": ["null", "string"], - "title": "State/Province", - "description": "State or province.", - "maxLength": 256 - }, - "postal_code": { - "type": ["null", "string"], - "title": "Zip/Postal code", - "description": "Zip or postal code.", - "maxLength": 256 - }, - "country": { - "type": ["null", "string"], - "title": "Country", - "description": "Country, 2-letter ISO 3166-1 alpha-2 code.", - "maxLength": 2 - } - } - }, - "vat_number": { - "type": ["null", "string"], - "description": "Customer's VAT number (to avoid having the VAT applied). This is only used for automatically collected invoices.", - "maxLength": 20 - }, - "valid": { - "type": "boolean", - "readOnly": true - }, - "payment_method": { - "type": "object", - "properties": { - "card_type": { - "description": "Visa, MasterCard, American Express, Discover, JCB, etc.", - "type": ["null", "string"], - "maxLength": 256 - }, - "object": { - "type": ["null", "string"] - }, - "first_six": { - "type": ["null", "string"], - "description": "Credit card number's first six digits.", - "maxLength": 6 - }, - "last_four": { - "type": ["null", "string"], - "description": "Credit card number's last four digits. Will refer to bank account if payment method is ACH.", - "maxLength": 4 - }, - "last_two": { - "type": ["null", "string"], - "description": "The IBAN bank account's last two digits.", - "maxLength": 2 - }, - "exp_month": { - "type": ["null", "integer"], - "description": "Expiration month.", - "maxLength": 2 - }, - "exp_year": { - "type": ["null", "integer"], - "description": "Expiration year.", - "maxLength": 4 - }, - "gateway_token": { - "type": ["null", "string"], - "description": "A token used in place of a credit card in order to perform transactions.", - "maxLength": 50 - }, - "cc_bin_country": { - "type": ["null", "string"], - "description": "The 2-letter ISO 3166-1 alpha-2 country code associated with the credit card BIN, if known by Recurly. Available on the BillingInfo object only. Available when the BIN country lookup feature is enabled.", - "maxLength": 256 - }, - "gateway_code": { - "type": ["null", "string"], - "description": "An identifier for a specific payment gateway.", - "maxLength": 13 - }, - "billing_agreement_id": { - "type": ["null", "string"], - "description": "Billing Agreement identifier. 
Only present for Amazon or Paypal payment methods.", - "maxLength": 256 - }, - "name_on_account": { - "type": ["null", "string"], - "description": "The name associated with the bank account.", - "maxLength": 256 - }, - "account_type": { - "description": "The bank account type. Only present for ACH payment methods.", - "type": ["null", "string"], - "maxLength": 256 - }, - "routing_number": { - "type": ["null", "string"], - "description": "The bank account's routing number. Only present for ACH payment methods.", - "maxLength": 256 - }, - "routing_number_bank": { - "type": ["null", "string"], - "description": "The bank name of this routing number.", - "maxLength": 256 - } - } - }, - "fraud": { - "type": ["null", "object"], - "title": "Fraud information", - "description": "Most recent fraud result.", - "readOnly": true, - "properties": { - "score": { - "type": ["null", "integer"], - "title": "Kount score" - }, - "decision": { - "title": "Kount decision", - "maxLength": 10, - "type": ["null", "string"] - }, - "risk_rules_triggered": { - "type": "object", - "title": "Kount rules" - } - } - }, - "primary_payment_method": { - "type": "boolean", - "description": "The `primary_payment_method` field is used to indicate the primary billing info on the account. The first billing info created on an account will always become primary. This payment method will be used" - }, - "backup_payment_method": { - "type": "boolean", - "description": "The `backup_payment_method` field is used to indicate a billing info as a backup on the account that will be tried if the initial billing info used for an invoice is declined." - }, - "created_at": { - "type": "string", - "format": "date-time", - "description": "When the billing information was created.", - "readOnly": true - }, - "updated_at": { - "type": "string", - "format": "date-time", - "description": "When the billing information was last changed.", - "readOnly": true - }, - "updated_by": { - "type": ["null", "object"], - "properties": { - "ip": { - "type": ["null", "string"] - }, - "country": { - "type": ["null", "string"] - } - } - } - } -} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/coupon_redemptions.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/coupon_redemptions.json deleted file mode 100644 index e74f00e93f57..000000000000 --- a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/coupon_redemptions.json +++ /dev/null @@ -1,85 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": ["null", "object"], - "properties": { - "id": { - "type": ["null", "string"] - }, - "object": { - "type": ["null", "string"] - }, - "coupon": { - "type": ["null", "object"], - "properties": { - "id": { - "type": ["null", "string"] - }, - "object": { - "type": ["null", "string"] - }, - "code": { - "type": ["null", "string"] - }, - "name": { - "type": ["null", "string"] - }, - "state": { - "type": ["null", "string"] - }, - "discount": { - "type": ["null", "object"], - "properties": { - "type": { - "type": ["null", "string"] - }, - "percent": { - "type": ["null", "integer"] - }, - "currencies": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "currency": { - "type": ["null", "string"] - }, - "amount": { - "type": ["null", "number"] - } - } - } - }, - "trial": { - "type": ["null", "object"], - "properties": { - "unit": { - "type": ["null", "string"] - }, - "length": { - "type": ["null", "integer"] - } - } - } 
- } - }, - "coupon_type": { - "type": ["null", "string"] - }, - "expired_at": { - "type": ["null", "string"], - "format": "date-time" - } - } - }, - "state": { - "type": ["null", "string"] - }, - "discounted": { - "type": ["null", "number"] - }, - "created_at": { - "type": ["null", "string"], - "format": "date-time" - } - } -} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/coupons.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/coupons.json deleted file mode 100644 index 50b91db59b4f..000000000000 --- a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/coupons.json +++ /dev/null @@ -1,194 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "id": { - "type": ["null", "string"], - "maxLength": 13 - }, - "object": { - "type": ["null", "string"] - }, - "code": { - "type": ["null", "string"], - "maxLength": 256 - }, - "name": { - "type": ["null", "string"], - "maxLength": 256 - }, - "state": { - "type": ["null", "string"], - "maxLength": 256 - }, - "max_redemptions": { - "type": ["null", "number"] - }, - "max_redemptions_per_account": { - "type": ["null", "number"] - }, - "unique_coupon_codes_count": { - "type": ["null", "number"] - }, - "unique_code_template": { - "type": ["null", "string"], - "maxLength": 256 - }, - "unique_coupon_code": { - "$ref": "unique_coupons.json" - }, - "duration": { - "type": ["null", "string"], - "maxLength": 256 - }, - "temporal_amount": { - "type": ["null", "number"] - }, - "temporal_unit": { - "type": ["null", "string"], - "maxLength": 256 - }, - "free_trial_unit": { - "type": ["null", "string"], - "maxLength": 256 - }, - "free_trial_amount": { - "type": ["null", "number"] - }, - "applies_to_all_plans": { - "type": ["null", "boolean"] - }, - "applies_to_all_items": { - "type": ["null", "boolean"] - }, - "applies_to_non_plan_charges": { - "type": ["null", "boolean"] - }, - "plans": { - "type": ["null", "array"], - "title": "Plans", - "description": "A list of plans for which this coupon applies. This will be `null` if `applies_to_all_plans=true`.", - "items": { - "type": "object", - "title": "Plan mini details", - "description": "Just the important parts.", - "properties": { - "id": { - "type": "string", - "title": "Plan ID", - "maxLength": 13, - "readOnly": true - }, - "code": { - "type": "string", - "title": "Plan code", - "description": "Unique code to identify the plan. This is used in Hosted Payment Page URLs and in the invoice exports.", - "maxLength": 13 - } - } - } - }, - "items": { - "type": ["null", "array"], - "title": "Items", - "description": "A list of items for which this coupon applies. This will be\n`null` if `applies_to_all_items=true`.\n", - "items": { - "type": ["null", "object"], - "title": "Item mini details", - "description": "Just the important parts.", - "properties": { - "id": { - "type": "string", - "title": "Item ID", - "maxLength": 13, - "readOnly": true - } - } - } - }, - "redemption_resource": { - "type": ["null", "string"], - "maxLength": 256 - }, - "discount": { - "type": ["null", "object"], - "description": "Details of the discount a coupon applies. 
Will contain a `type`\nproperty and one of the following properties: `percent`, `fixed`, `trial`.\n", - "properties": { - "type": { - "type": "string", - "maxLength": 256 - }, - "percent": { - "description": "This is only present when `type=percent`.", - "type": "integer" - }, - "currencies": { - "type": "array", - "description": "This is only present when `type=fixed`.", - "items": { - "type": ["null", "object"], - "properties": { - "currency": { - "type": "string", - "title": "Currency", - "description": "3-letter ISO 4217 currency code.", - "maxLength": 3 - }, - "amount": { - "type": "number", - "format": "float", - "title": "Discount Amount", - "description": "Value of the fixed discount that this coupon applies." - } - } - } - }, - "trial": { - "type": "object", - "description": "This is only present when `type=free_trial`.", - "properties": { - "unit": { - "title": "Trial unit", - "description": "Temporal unit of the free trial", - "type": "string", - "maxLength": 256 - }, - "length": { - "type": "integer", - "title": "Trial length", - "description": "Trial length measured in the units specified by the sibling `unit` property" - } - } - } - } - }, - "coupon_type": { - "type": ["null", "string"], - "maxLength": 256 - }, - "hosted_page_description": { - "type": ["null", "string"], - "maxLength": 1024 - }, - "invoice_description": { - "type": ["null", "string"], - "maxLength": 1024 - }, - "redeem_by": { - "type": ["null", "string"], - "maxLength": 256 - }, - "created_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "updated_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "expired_at": { - "type": ["null", "string"], - "format": "date-time" - } - } -} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/external_accounts.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/external_accounts.json deleted file mode 100644 index 29e6292263b7..000000000000 --- a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/external_accounts.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "object": { - "type": ["null", "string"] - }, - "id": { - "type": ["null", "string"] - }, - "external_account_code": { - "type": ["null", "string"] - }, - "external_connection_type": { - "type": ["null", "string"] - }, - "created_at": { - "type": ["null", "string"] - }, - "updated_at": { - "type": ["null", "string"] - } - } -} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/line_items.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/line_items.json deleted file mode 100644 index 33e1fb8809d8..000000000000 --- a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/line_items.json +++ /dev/null @@ -1,293 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": ["null", "object"], - "title": "Line item", - "properties": { - "id": { - "type": "string", - "title": "Line item ID", - "maxLength": 13 - }, - "object": { - "type": ["null", "string"] - }, - "uuid": { - "type": "string", - "title": "UUID", - "description": "The UUID is useful for matching data with the CSV exports and building URLs into Recurly's UI.", - "maxLength": 32 - }, - "type": { - "type": "string", - "title": "Line item type", - "description": "Charges are positive line items that debit the account. 
Credits are negative line items that credit the account.", - "maxLength": 256 - }, - "item_code": { - "type": ["null", "string"], - "title": "Item Code", - "description": "Unique code to identify an item. Available when the Credit Invoices and Subscription Billing Terms features are enabled.", - "maxLength": 50 - }, - "item_id": { - "type": ["null", "string"], - "title": "Item ID", - "description": "System-generated unique identifier for an item. Available when the Credit Invoices and Subscription Billing Terms features are enabled.", - "maxLength": 13 - }, - "external_sku": { - "type": ["null", "string"], - "title": "External SKU", - "description": "Optional Stock Keeping Unit assigned to an item. Available when the Credit Invoices and Subscription Billing Terms features are enabled.", - "maxLength": 50 - }, - "revenue_schedule_type": { - "type": ["null", "string"], - "title": "Revenue schedule type", - "maxLength": 256 - }, - "state": { - "type": "string", - "title": "Current state of the line item", - "description": "Pending line items are charges or credits on an account that have not been applied to an invoice yet. Invoiced line items will always have an `invoice_id` value.", - "maxLength": 256 - }, - "legacy_category": { - "type": ["null", "string"], - "title": "Legacy category", - "description": "Category to describe the role of a line item on a legacy invoice:\n- \"charges\" refers to charges being billed for on this invoice.\n- \"credits\" refers to refund or proration credits. This portion of the invoice can be considered a credit memo.\n- \"applied_credits\" refers to previous credits applied to this invoice. See their original_line_item_id to determine where the credit first originated.\n- \"carryforwards\" can be ignored. They exist to consume any remaining credit balance. A new credit with the same amount will be created and placed back on the account.\n" - }, - "account": { - "$ref": "account_details.json" - }, - "bill_for_account_id": { - "type": "string", - "title": "Bill For Account ID", - "maxLength": 13, - "description": "The UUID of the account responsible for originating the line item." - }, - "subscription_id": { - "type": ["null", "string"], - "title": "Subscription ID", - "description": "If the line item is a charge or credit for a subscription, this is its ID.", - "maxLength": 13 - }, - "plan_id": { - "type": ["null", "string"], - "title": "Plan ID", - "description": "If the line item is a charge or credit for a plan or add-on, this is the plan's ID.", - "maxLength": 13 - }, - "plan_code": { - "type": ["null", "string"], - "title": "Plan code", - "description": "If the line item is a charge or credit for a plan or add-on, this is the plan's code.", - "maxLength": 50 - }, - "add_on_id": { - "type": ["null", "string"], - "title": "Add-on ID", - "description": "If the line item is a charge or credit for an add-on this is its ID.", - "maxLength": 13 - }, - "add_on_code": { - "type": ["null", "string"], - "title": "Add-on code", - "description": "If the line item is a charge or credit for an add-on, this is its code.", - "maxLength": 50 - }, - "invoice_id": { - "type": ["null", "string"], - "title": "Invoice ID", - "description": "Once the line item has been invoiced this will be the invoice's ID.", - "maxLength": 13 - }, - "invoice_number": { - "type": ["null", "string"], - "title": "Invoice number", - "description": "Once the line item has been invoiced this will be the invoice's number. 
If VAT taxation and the Country Invoice Sequencing feature are enabled, invoices will have country-specific invoice numbers for invoices billed to EU countries (ex: FR1001). Non-EU invoices will continue to use the site-level invoice number sequence.", - "maxLength": 256 - }, - "previous_line_item_id": { - "type": ["null", "string"], - "title": "Previous line item ID", - "description": "Will only have a value if the line item is a credit created from a previous credit, or if the credit was created from a charge refund.", - "maxLength": 13 - }, - "original_line_item_invoice_id": { - "type": ["null", "string"], - "title": "Original line item's invoice ID", - "description": "The invoice where the credit originated. Will only have a value if the line item is a credit created from a previous credit, or if the credit was created from a charge refund.", - "maxLength": 13 - }, - "origin": { - "type": "string", - "title": "Origin of line item", - "description": "A credit created from an original charge will have the value of the charge's origin.", - "maxLength": 256 - }, - "accounting_code": { - "type": "string", - "title": "Accounting code", - "description": "Internal accounting code to help you reconcile your revenue to the correct ledger. Line items created as part of a subscription invoice will use the plan or add-on's accounting code, otherwise the value will only be present if you define an accounting code when creating the line item.", - "maxLength": 20 - }, - "product_code": { - "type": "string", - "title": "Product code", - "description": "For plan-related line items this will be the plan's code, for add-on related line items it will be the add-on's code. For item-related line items it will be the item's `external_sku`.", - "maxLength": 50 - }, - "credit_reason_code": { - "type": ["null", "string"], - "title": "Credit reason code", - "description": "The reason the credit was given when line item is `type=credit`.", - "default": "general", - "maxLength": 256 - }, - "currency": { - "type": "string", - "title": "Currency", - "description": "3-letter ISO 4217 currency code.", - "maxLength": 3 - }, - "amount": { - "type": "number", - "format": "float", - "title": "Total after discounts and taxes", - "description": "`(quantity * unit_amount) - (discount + tax)`" - }, - "description": { - "type": "string", - "title": "Description", - "description": "Description that appears on the invoice. For subscription related items this will be filled in automatically.", - "maxLength": 255 - }, - "quantity": { - "type": "integer", - "title": "Quantity", - "description": "This number will be multiplied by the unit amount to compute the subtotal before any discounts or taxes.", - "default": 1 - }, - "unit_amount": { - "type": "number", - "format": "float", - "title": "Unit amount", - "description": "Positive amount for a charge, negative amount for a credit." - }, - "unit_amount_decimal": { - "type": ["null", "string"], - "title": "Unit amount decimal", - "description": "Positive amount for a charge, negative amount for a credit." - }, - "subtotal": { - "type": "number", - "format": "float", - "title": "Total before discounts and taxes", - "description": "`quantity * unit_amount`" - }, - "discount": { - "type": ["null", "number"], - "format": "float", - "title": "Discount", - "description": "The discount applied to the line item." - }, - "tax": { - "type": ["null", "number"], - "format": "float", - "title": "Tax", - "description": "The tax amount for the line item." 
- }, - "taxable": { - "type": "boolean", - "title": "Taxable?", - "description": "`true` if the line item is taxable, `false` if it is not." - }, - "tax_exempt": { - "type": "boolean", - "title": "Tax exempt?", - "description": "`true` exempts tax on charges, `false` applies tax on charges. If not defined, then defaults to the Plan and Site settings. This attribute does not work for credits (negative line items). Credits are always applied post-tax. Pre-tax discounts should use the Coupons feature." - }, - "tax_code": { - "type": ["null", "string"], - "title": "Tax code", - "description": "Used by Avalara, Vertex, and Recurly\u2019s EU VAT tax feature. The tax code values are specific to each tax system. If you are using Recurly\u2019s EU VAT feature you can use `unknown`, `physical`, or `digital`.", - "maxLength": 50 - }, - "tax_info": { - "$ref": "tax_info.json" - }, - "proration_rate": { - "type": ["null", "number"], - "format": "float", - "title": "Proration rate", - "description": "When a line item has been prorated, this is the rate of the proration. Proration rates were made available for line items created after March 30, 2017. For line items created prior to that date, the proration rate will be `null`, even if the line item was prorated.", - "minimum": 0, - "maximum": 1 - }, - "refund": { - "type": "boolean", - "title": "Refund?" - }, - "refunded_quantity": { - "type": ["null", "integer"], - "title": "Refunded Quantity", - "description": "For refund charges, the quantity being refunded. For non-refund charges, the total quantity refunded (possibly over multiple refunds)." - }, - "credit_applied": { - "type": ["null", "number"], - "format": "float", - "title": "Credit Applied", - "description": "The amount of credit from this line item that was applied to the invoice." - }, - "shipping_address": { - "type": ["null", "object"], - "properties": { - "id": { - "type": "string", - "title": "Shipping Address ID", - "maxLength": 13, - "readOnly": true - } - } - }, - "start_date": { - "type": ["null", "string"], - "format": "date-time", - "title": "Start date", - "description": "If an end date is present, this is value indicates the beginning of a billing time range. If no end date is present it indicates billing for a specific date." - }, - "end_date": { - "type": ["null", "string"], - "format": "date-time", - "title": "End date", - "description": "If this date is provided, it indicates the end of a time range." - }, - "custom_fields": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "name": { - "type": ["null", "string"] - }, - "value": { - "type": ["null", "string"] - } - } - } - }, - "created_at": { - "type": "string", - "format": "date-time", - "title": "Created at", - "description": "When the line item was created." - }, - "updated_at": { - "type": "string", - "format": "date-time", - "title": "Last updated at", - "description": "When the line item was last changed." 
- } - } -} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/shipping_addresses.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/shipping_addresses.json deleted file mode 100644 index e0f8091cdbe6..000000000000 --- a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/shipping_addresses.json +++ /dev/null @@ -1,91 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": ["null", "object"], - "properties": { - "id": { - "type": "string", - "title": "Shipping Address ID", - "maxLength": 13, - "readOnly": true - }, - "object": { - "type": ["null", "string"] - }, - "account_id": { - "type": "string", - "title": "Account ID", - "maxLength": 13, - "readOnly": true - }, - "nickname": { - "type": "string", - "maxLength": 255 - }, - "first_name": { - "type": "string", - "maxLength": 255 - }, - "last_name": { - "type": "string", - "maxLength": 255 - }, - "company": { - "type": "string", - "maxLength": 255 - }, - "email": { - "type": "string", - "maxLength": 255 - }, - "vat_number": { - "type": "string", - "maxLength": 20 - }, - "phone": { - "type": "string", - "maxLength": 30 - }, - "street1": { - "type": "string", - "maxLength": 255 - }, - "street2": { - "type": "string", - "maxLength": 255 - }, - "city": { - "type": "string", - "maxLength": 255 - }, - "region": { - "type": "string", - "maxLength": 255, - "description": "State or province." - }, - "postal_code": { - "type": "string", - "maxLength": 20, - "description": "Zip or postal code." - }, - "country": { - "type": "string", - "maxLength": 50, - "description": "Country, 2-letter ISO 3166-1 alpha-2 code." - }, - "geo_code": { - "type": ["null", "string"] - }, - "created_at": { - "type": "string", - "title": "Created at", - "format": "date-time", - "readOnly": true - }, - "updated_at": { - "type": "string", - "title": "Updated at", - "format": "date-time", - "readOnly": true - } - } -} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/tax_info.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/tax_info.json deleted file mode 100644 index 13502eb46241..000000000000 --- a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/tax_info.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": ["null", "object"], - "properties": { - "type": { - "type": ["null", "string"] - }, - "region": { - "type": ["null", "string"] - }, - "rate": { - "type": ["null", "number"] - }, - "tax_details": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "type": { - "type": ["null", "string"] - }, - "region": { - "type": ["null", "string"] - }, - "rate": { - "type": ["null", "number"] - }, - "tax": { - "type": ["null", "number"] - }, - "name": { - "type": ["null", "string"] - }, - "level": { - "type": ["null", "string"] - }, - "billable": { - "type": ["null", "boolean"] - } - } - } - } - } -} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/unique_coupons.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/unique_coupons.json deleted file mode 100644 index 1d2a0a3a117c..000000000000 --- a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/unique_coupons.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": ["null", 
"object"], - "description": "A unique coupon code for a bulk coupon.", - "properties": { - "id": { - "type": "string", - "title": "Unique Coupon Code ID", - "readOnly": true, - "maxLength": 13 - }, - "object": { - "type": "string" - }, - "code": { - "type": "string", - "title": "Coupon code", - "description": "The code the customer enters to redeem the coupon.", - "maxLength": 256 - }, - "state": { - "type": ["null", "string"], - "title": "State", - "description": "Indicates if the unique coupon code is redeemable or why not.", - "maxLength": 256 - }, - "bulk_coupon_id": { - "type": ["null", "string"], - "title": "Bulk Coupon ID", - "description": "The Coupon ID of the parent Bulk Coupon", - "readOnly": true, - "maxLength": 13 - }, - "bulk_coupon_code": { - "type": ["null", "string"], - "title": "Bulk Coupon code", - "description": "The Coupon code of the parent Bulk Coupon", - "maxLength": 256 - }, - "created_at": { - "type": "string", - "title": "Created at", - "format": "date-time", - "readOnly": true - }, - "updated_at": { - "type": "string", - "title": "Updated at", - "format": "date-time", - "readOnly": true - }, - "redeemed_at": { - "type": ["null", "string"], - "title": "Redeemed at", - "description": "The date and time the unique coupon code was redeemed.", - "format": "date-time", - "readOnly": true - }, - "expired_at": { - "type": ["null", "string"], - "title": "Expired at", - "description": "The date and time the coupon was expired early or reached its `max_redemptions`.", - "format": "date-time" - } - } -} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/users.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/users.json deleted file mode 100644 index 70b7c49ac122..000000000000 --- a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/users.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "type": ["null", "object"], - "properties": { - "id": { - "type": "string" - }, - "object": { - "type": ["null", "string"] - }, - "email": { - "type": ["null", "string"] - }, - "first_name": { - "type": ["null", "string"] - }, - "last_name": { - "type": ["null", "string"] - }, - "time_zone": { - "type": ["null", "string"] - }, - "created_at": { - "type": ["null", "string"], - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - }, - "deleted_at": { - "type": ["null", "string"], - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - } - } -} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shipping_addresses.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shipping_addresses.json deleted file mode 100644 index bcbb555d47a4..000000000000 --- a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shipping_addresses.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "$ref": "shipping_addresses.json" -} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shipping_methods.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shipping_methods.json deleted file mode 100644 index 23ba22deb2a2..000000000000 --- a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shipping_methods.json +++ /dev/null @@ -1,54 +0,0 @@ -{ - "$schema": "http://json-schema.org/schema#", - "type": "object", - "properties": { - "id": { - "type": "string", - "title": "Shipping Method ID", - "readOnly": true, - "maxLength": 13 - }, - "code": { - "type": "string", 
- "title": "Code", - "description": "The internal name used identify the shipping method.", - "maxLength": 50 - }, - "name": { - "type": "string", - "title": "Name", - "description": "The name of the shipping method displayed to customers.", - "maxLength": 100 - }, - "accounting_code": { - "type": "string", - "title": "Accounting Code", - "description": "Accounting code for shipping method.", - "maxLength": 20 - }, - "tax_code": { - "type": "string", - "title": "Tax code", - "description": "Used by Avalara, Vertex, and Recurly\u2019s built-in tax feature. The tax\ncode values are specific to each tax system. If you are using Recurly\u2019s\nbuilt-in taxes the values are:\n\n- `FR` \u2013 Common Carrier FOB Destination\n- `FR022000` \u2013 Common Carrier FOB Origin\n- `FR020400` \u2013 Non Common Carrier FOB Destination\n- `FR020500` \u2013 Non Common Carrier FOB Origin\n- `FR010100` \u2013 Delivery by Company Vehicle Before Passage of Title\n- `FR010200` \u2013 Delivery by Company Vehicle After Passage of Title\n- `NT` \u2013 Non-Taxable\n", - "maxLength": 50 - }, - "created_at": { - "type": "string", - "format": "date-time", - "title": "Created at", - "readOnly": true - }, - "updated_at": { - "type": "string", - "format": "date-time", - "title": "Last updated at", - "readOnly": true - }, - "deleted_at": { - "type": "string", - "format": "date-time", - "title": "Deleted at", - "readOnly": true - } - } -} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/subscriptions.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/subscriptions.json deleted file mode 100644 index 27c3b0ad4ea1..000000000000 --- a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/subscriptions.json +++ /dev/null @@ -1,368 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "id": { - "type": ["null", "string"], - "maxLength": 13 - }, - "object": { - "type": ["null", "string"] - }, - "uuid": { - "type": ["null", "string"], - "maxLength": 32 - }, - "account": { - "type": ["null", "object"], - "properties": { - "id": { - "type": ["null", "string"], - "maxLength": 13 - }, - "object": { - "type": ["null", "string"] - }, - "code": { - "type": ["null", "string"], - "maxLength": 256 - }, - "email": { - "type": ["null", "string"], - "maxLength": 256 - }, - "first_name": { - "type": ["null", "string"] - }, - "last_name": { - "type": ["null", "string"] - }, - "company": { - "type": ["null", "string"] - }, - "parent_account_id": { - "type": ["null", "string"] - }, - "bill_to": { - "type": ["null", "string"] - }, - "dunning_campaign_id": { - "type": ["null", "string"] - } - } - }, - "plan": { - "type": "object", - "properties": { - "id": { - "type": ["null", "string"], - "maxLength": 13 - }, - "object": { - "type": ["null", "string"] - }, - "code": { - "type": ["null", "string"], - "maxLength": 256 - }, - "name": { - "type": ["null", "string"] - } - } - }, - "state": { - "type": ["null", "string"], - "maxLength": 256 - }, - "shipping": { - "type": ["null", "object"], - "properties": { - "object": { - "type": ["null", "string"] - }, - "address": { - "$ref": "shipping_addresses.json" - }, - "method": { - "type": ["null", "object"], - "properties": { - "id": { - "type": "string", - "title": "Shipping Method ID", - "readOnly": true, - "maxLength": 13 - }, - "object": { - "type": ["null", "string"] - }, - "code": { - "type": ["null", "string"] - }, - "name": { - "type": ["null", "string"] - } - } - }, - 
"amount": { - "type": ["null", "number"] - } - } - }, - "coupon_redemptions": { "$ref": "coupon_redemptions.json" }, - "pending_change": { - "type": ["null", "object"], - "title": "Subscription Change", - "properties": { - "id": { - "type": "string", - "title": "Subscription Change ID", - "description": "The ID of the Subscription Change.", - "maxLength": 13 - }, - "subscription_id": { - "type": "string", - "title": "Subscription ID", - "description": "The ID of the subscription that is going to be changed.", - "maxLength": 13 - }, - "activate_at": { - "type": "string", - "format": "date-time", - "title": "Activated at", - "readOnly": true - }, - "activated": { - "type": "boolean", - "title": "Activated?", - "description": "Returns `true` if the subscription change is activated." - }, - "created_at": { - "type": "string", - "format": "date-time", - "title": "Created at", - "readOnly": true - }, - "updated_at": { - "type": "string", - "format": "date-time", - "title": "Updated at", - "readOnly": true - }, - "deleted_at": { - "type": "string", - "format": "date-time", - "title": "Deleted at", - "readOnly": true - } - } - }, - "current_period_started_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "current_period_ends_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "current_term_started_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "current_term_ends_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "trial_started_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "trial_ends_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "remaining_billing_cycles": { - "type": ["null", "number"] - }, - "total_billing_cycles": { - "type": ["null", "number"] - }, - "renewal_billing_cycles": { - "type": ["null", "number"] - }, - "auto_renew": { - "type": ["null", "boolean"] - }, - "ramp_intervals": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "starting_billing_cycle": { - "type": ["null", "integer"] - }, - "remaining_billing_cycles": { - "type": ["null", "integer"] - }, - "starting_on": { - "type": ["null", "string"], - "format": "date-time" - }, - "ending_on": { - "type": ["null", "string"], - "format": "date-time" - }, - "unit_amount": { - "type": ["null", "number"] - } - } - } - }, - "paused_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "remaining_pause_cycles": { - "type": ["null", "number"] - }, - "currency": { - "type": ["null", "string"], - "maxLength": 3 - }, - "revenue_schedule_type": { - "type": ["null", "string"], - "maxLength": 256 - }, - "unit_amount": { - "type": ["null", "number"] - }, - "tax_inclusive": { - "type": ["null", "boolean"] - }, - "quantity": { - "type": ["null", "number"] - }, - "add_ons": { - "type": ["null", "array"], - "title": "Add-ons", - "items": { - "type": ["null", "object"], - "title": "Subscription Add-on", - "description": "This links an Add-on to a specific Subscription.", - "properties": { - "id": { - "type": "string", - "title": "Subscription Add-on ID", - "maxLength": 13 - }, - "code": { - "type": "string", - "title": "Add-on code", - "description": "The unique identifier for the add-on within its plan.", - "maxLength": 50 - } - } - } - }, - "add_ons_total": { - "type": ["null", "number"] - }, - "subtotal": { - "type": ["null", "number"] - }, - "tax": { - "type": ["null", "number"] - }, - "tax_info": { - "$ref": "tax_info.json" - }, - "total": { - "type": ["null", "number"] - }, - 
"collection_method": { - "type": ["null", "string"], - "maxLength": 256 - }, - "po_number": { - "type": ["null", "string"], - "maxLength": 256 - }, - "net_terms": { - "type": ["null", "number"] - }, - "net_terms_type": { - "type": ["null", "string"] - }, - "terms_and_conditions": { - "type": ["null", "string"], - "maxLength": 16384 - }, - "customer_notes": { - "type": ["null", "string"], - "maxLength": 1024 - }, - "expiration_reason": { - "type": ["null", "string"], - "maxLength": 1024 - }, - "custom_fields": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "name": { - "type": ["null", "string"] - }, - "value": { - "type": ["null", "string"] - } - } - } - }, - "created_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "updated_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "activated_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "canceled_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "expires_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "bank_account_authorized_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "gateway_code": { - "type": ["null", "string"], - "maxLength": 256 - }, - "billing_info_id": { - "type": ["null", "string"], - "maxLength": 13 - }, - "active_invoice_id": { - "type": ["null", "string"] - }, - "started_with_gift": { - "type": ["null", "boolean"] - }, - "converted_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "action_result": { - "type": ["null", "object"], - "additionalProperties": true - } - } -} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/transactions.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/transactions.json deleted file mode 100644 index 7b06e0d76cb5..000000000000 --- a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/transactions.json +++ /dev/null @@ -1,345 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "additionalProperties": true, - "properties": { - "id": { - "type": ["null", "string"], - "maxLength": 13 - }, - "object": { - "type": ["null", "string"] - }, - "uuid": { - "type": ["null", "string"], - "maxLength": 32 - }, - "original_transaction_id": { - "type": ["null", "string"], - "maxLength": 13 - }, - "account": { - "$ref": "account_details.json" - }, - "invoice": { - "type": ["null", "object"], - "properties": { - "id": { - "type": ["null", "string"], - "maxLength": 13 - }, - "number": { - "type": ["null", "string"], - "maxLength": 256 - }, - "business_entity_id": { - "type": ["null", "string"] - }, - "type": { - "type": ["null", "string"] - }, - "state": { - "type": ["null", "string"] - } - } - }, - "voided_by_invoice": { - "type": ["null", "object"], - "properties": { - "id": { - "type": ["null", "string"], - "maxLength": 13 - }, - "object": { - "type": ["null", "string"] - }, - "number": { - "type": ["null", "string"], - "maxLength": 256 - }, - "business_entity_id": { - "type": ["null", "string"] - }, - "type": { - "type": ["null", "string"] - }, - "state": { - "type": ["null", "string"] - } - } - }, - "subscription_ids": { - "type": "array", - "items": { - "type": ["null", "string"], - "maxLength": 13 - } - }, - "type": { - "type": ["null", "string"], - "maxLength": 256 - }, - "origin": { - "type": ["null", "string"], - "maxLength": 256 - }, - "currency": { - "type": ["null", "string"], - "maxLength": 3 - }, - "amount": { - 
"type": ["null", "number"] - }, - "status": { - "type": ["null", "string"], - "maxLength": 256 - }, - "success": { - "type": ["null", "boolean"] - }, - "backup_payment_method_used": { - "type": ["null", "boolean"] - }, - "refunded": { - "type": ["null", "boolean"] - }, - "billing_address": { - "type": "object", - "properties": { - "first_name": { - "type": ["null", "string"], - "maxLength": 256 - }, - "last_name": { - "type": ["null", "string"], - "maxLength": 256 - }, - "phone": { - "type": ["null", "string"], - "maxLength": 256 - }, - "street1": { - "type": ["null", "string"], - "maxLength": 256 - }, - "street2": { - "type": ["null", "string"], - "maxLength": 256 - }, - "city": { - "type": ["null", "string"], - "maxLength": 256 - }, - "region": { - "type": ["null", "string"], - "maxLength": 256 - }, - "postal_code": { - "type": ["null", "string"], - "maxLength": 256 - }, - "country": { - "type": ["null", "string"], - "maxLength": 256 - }, - "geo_code": { - "type": ["null", "string"] - } - } - }, - "collection_method": { - "type": ["null", "string"], - "maxLength": 256 - }, - "payment_method": { - "type": "object", - "properties": { - "object": { - "type": ["null", "string"] - }, - "card_type": { - "type": ["null", "string"], - "maxLength": 256 - }, - "first_six": { - "type": ["null", "string"], - "maxLength": 6 - }, - "last_four": { - "type": ["null", "string"], - "maxLength": 4 - }, - "last_two": { - "type": ["null", "string"], - "maxLength": 2 - }, - "exp_month": { - "type": ["null", "number"] - }, - "exp_year": { - "type": ["null", "number"] - }, - "gateway_token": { - "type": ["null", "string"], - "maxLength": 256 - }, - "cc_bin_country": { - "type": ["null", "string"] - }, - "gateway_code": { - "type": ["null", "string"], - "maxLength": 256 - }, - "billing_agreement_id": { - "type": ["null", "string"], - "maxLength": 256 - }, - "name_on_account": { - "type": ["null", "string"], - "maxLength": 256 - }, - "account_type": { - "type": ["null", "string"], - "maxLength": 256 - }, - "routing_number": { - "type": ["null", "string"], - "maxLength": 256 - }, - "routing_number_bank": { - "type": ["null", "string"], - "maxLength": 256 - }, - "username": { - "type": ["null", "string"] - } - } - }, - "ip_address_v4": { - "type": ["null", "string"], - "maxLength": 256 - }, - "ip_address_country": { - "type": ["null", "string"], - "maxLength": 256 - }, - "status_code": { - "type": ["null", "string"], - "maxLength": 256 - }, - "status_message": { - "type": ["null", "string"], - "maxLength": 1024 - }, - "customer_message": { - "type": ["null", "string"], - "maxLength": 1024 - }, - "customer_message_locale": { - "type": ["null", "string"], - "maxLength": 12 - }, - "payment_gateway": { - "type": "object", - "properties": { - "id": { - "type": ["null", "string"], - "maxLength": 13 - }, - "object": { - "type": ["null", "string"] - }, - "type": { - "type": ["null", "string"] - }, - "name": { - "type": ["null", "string"] - } - } - }, - "gateway_message": { - "type": ["null", "string"], - "maxLength": 256 - }, - "gateway_reference": { - "type": ["null", "string"], - "maxLength": 256 - }, - "gateway_approval_code": { - "type": ["null", "string"], - "maxLength": 256 - }, - "gateway_response_code": { - "type": ["null", "string"], - "maxLength": 256 - }, - "gateway_response_time": { - "type": ["null", "number"] - }, - "gateway_response_values": { - "type": "object" - }, - "cvv_check": { - "type": ["null", "string"], - "maxLength": 256 - }, - "avs_check": { - "type": ["null", "string"], - "maxLength": 256 - }, - 
"created_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "updated_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "voided_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "collected_at": { - "type": ["null", "string"], - "format": "date-time" - }, - "action_result": { - "type": ["null", "object"], - "additionalProperties": true - }, - "vat_number": { - "type": ["null", "string"] - }, - "fraud_info": { - "type": ["null", "object"], - "properties": { - "object": { - "type": ["null", "string"] - }, - "score": { - "type": ["null", "integer"] - }, - "decision": { - "type": ["null", "string"] - }, - "reference": { - "type": ["null", "string"] - }, - "risk_rules_triggered": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "code": { - "type": ["null", "string"] - }, - "message": { - "type": ["null", "string"] - } - } - } - } - } - } - } -} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/unique_coupons.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/unique_coupons.json deleted file mode 100644 index 0458768570ff..000000000000 --- a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/unique_coupons.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "$ref": "unique_coupons.json" -} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/source.py b/airbyte-integrations/connectors/source-recurly/source_recurly/source.py deleted file mode 100644 index e7bd2e9a7e17..000000000000 --- a/airbyte-integrations/connectors/source-recurly/source_recurly/source.py +++ /dev/null @@ -1,80 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from typing import Any, List, Mapping, Optional, Tuple - -from airbyte_cdk.logger import AirbyteLogger -from airbyte_cdk.sources import AbstractSource -from airbyte_cdk.sources.streams import Stream -from recurly import ApiError, Client - -from .streams import ( - AccountCouponRedemptions, - AccountNotes, - Accounts, - AddOns, - BillingInfos, - Coupons, - CreditPayments, - ExportDates, - Invoices, - LineItems, - MeasuredUnits, - Plans, - ShippingAddresses, - ShippingMethods, - Subscriptions, - Transactions, - UniqueCoupons, -) - - -class SourceRecurly(AbstractSource): - """ - Recurly API Reference: https://developers.recurly.com/api/v2021-02-25/ - """ - - def __init__(self): - super(SourceRecurly, self).__init__() - - self.__client = None - - def check_connection(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> Tuple[bool, Optional[Any]]: - try: - # Checking the API key by trying a test API call to get the first account - self._client(config["api_key"]).list_accounts().first() - return True, None - except ApiError as err: - return False, err.args[0] - - def streams(self, config: Mapping[str, Any]) -> List[Stream]: - client = self._client(api_key=config["api_key"]) - - args = {"client": client, "begin_time": config.get("begin_time"), "end_time": config.get("end_time")} - - return [ - Accounts(**args), - AccountCouponRedemptions(**args), - AccountNotes(**args), - AddOns(**args), - BillingInfos(**args), - Coupons(**args), - CreditPayments(**args), - ExportDates(**args), - Invoices(**args), - LineItems(**args), - MeasuredUnits(**args), - Plans(**args), - ShippingAddresses(**args), - ShippingMethods(**args), - Subscriptions(**args), - Transactions(**args), - UniqueCoupons(**args), - ] - - def _client(self, api_key: str) -> Client: - if not self.__client: - 
self.__client = Client(api_key=api_key) - - return self.__client diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/spec.json b/airbyte-integrations/connectors/source-recurly/source_recurly/spec.json deleted file mode 100644 index d2135eb06551..000000000000 --- a/airbyte-integrations/connectors/source-recurly/source_recurly/spec.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/sources/recurly", - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Recurly Source Spec", - "type": "object", - "required": ["api_key"], - "additionalProperties": true, - "properties": { - "api_key": { - "type": "string", - "title": "API Key", - "airbyte_secret": true, - "description": "Recurly API Key. See the docs for more information on how to generate this key.", - "order": 1 - }, - "begin_time": { - "type": "string", - "description": "ISO8601 timestamp from which the replication from the Recurly API will start.", - "examples": ["2021-12-01T00:00:00"], - "pattern": "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$", - "order": 2 - }, - "end_time": { - "type": "string", - "description": "ISO8601 timestamp at which the replication from the Recurly API will stop. Records after that date won't be imported.", - "examples": ["2021-12-01T00:00:00"], - "pattern": "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$", - "order": 3 - } - } - } -} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/streams.py b/airbyte-integrations/connectors/source-recurly/source_recurly/streams.py deleted file mode 100644 index f7526476f4ae..000000000000 --- a/airbyte-integrations/connectors/source-recurly/source_recurly/streams.py +++ /dev/null @@ -1,337 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import re -from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Union - -from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources.streams import Stream -from recurly import Client -from recurly.errors import MissingFeatureError, NotFoundError, ValidationError - -DEFAULT_PRIMARY_KEY = "id" -DEFAULT_CURSOR = "updated_at" -DEFAULT_SORT_KEY = "updated_at" -DEFAULT_LIMIT = 200 - -BEGIN_TIME_PARAM = "begin_time" -END_TIME_PARAM = "end_time" - -CAMEL_CASE_PATTERN = re.compile(r"(?<!^)(?=[A-Z])") - - -class BaseStream(Stream): - def __init__(self, client: Client, begin_time: str = None, end_time: str = None): - super().__init__() - - self._client = client - self.begin_time = begin_time - self.end_time = end_time - - @property - def name(self) -> str: - return CAMEL_CASE_PATTERN.sub("_", self.__class__.__name__).lower() - - @property - def client_method_name(self) -> str: - """ - Returns the Recurly client method to call to retrieve the resource data. - - :return: The Recurly client method to call for the Recurly resource. For example `list_accounts` for the - Recurly `accounts` resource - :rtype: str - """ - return f"list_{self.name}" - - @property - def primary_key(self) -> Optional[Union[str, List[str], List[List[str]]]]: - """ - The Recurly resource primary key. Most of the Recurly resources have `id` as the primary key. Recurly - resources that have a different primary key or a composite key can override this method. - - :return: The Recurly resource primary key(s) - :rtype: Either `str`, list(str) or list(list(str)) - """ - return DEFAULT_PRIMARY_KEY - - @property - def sort_key(self) -> str: - """ - Returns the sort key used when calling the Recurly API. Most of the Recurly API resources accept a `params` dictionary - with a `sort` key.
For more details: - https://developers.recurly.com/api/v2021-02-25/#section/Getting-Started/Pagination#query-parameters - - :return: The Recurly resource sort key - :rtype: `str` - """ - return DEFAULT_SORT_KEY - - @property - def limit(self) -> int: - """ - Returns the maximum number of records returned per page - """ - return DEFAULT_LIMIT - - @property - def cursor_field(self) -> Union[str, List[str]]: - """ - Returns the cursor field to be used in the `incremental` sync mode. - - By default the `incremental` sync mode is enabled for all resources using the `begin_time` field. Any - Recurly resource that does not support the `incremental` sync mode (such as `export_dates`) or that uses - another cursor can override this method. Note that `begin_time` is not a field in any of the resources; - it is just a query parameter sent in the API request and can be considered an alias to the `updated_at` - field. That's why, when calling the Recurly API, the cursor field is renamed to `begin_time` by default in - :func:`read_records`. For more details: - https://developers.recurly.com/api/v2021-02-25/#section/Getting-Started/Pagination#query-parameters - - :return: The cursor field(s) to be used in the `incremental` sync mode. - :rtype: Union[str, List[str]] - """ - return DEFAULT_CURSOR - - @property - def default_params(self) -> dict: - """ - Returns the parameters to be sent together with the API call to Recurly - """ - return {"order": "asc", "sort": self.sort_key, "limit": self.limit} - - def read_records( - self, - sync_mode: SyncMode, - cursor_field: List[str] = None, - stream_slice: Mapping[str, any] = None, - stream_state: Mapping[str, Any] = None, - ) -> Iterable[Mapping[str, Any]]: - """ - The method to be called to retrieve records from the Recurly API. It uses the Recurly Python client. - Resources with different logic (such as `export_dates`) can override this method. - - :return: Iterable of dictionaries representing the Recurly resource - :rtype: Iterable - """ - params = self.default_params - - self.begin_time = (stream_state and stream_state[self.cursor_field]) or self.begin_time - - if self.begin_time: - params.update({BEGIN_TIME_PARAM: self.begin_time}) - - if self.end_time: - params.update({END_TIME_PARAM: self.end_time}) - - items = getattr(self._client, self.client_method_name)(params=params).items() - - # Call the Recurly client methods - for item in items: - yield self._item_to_dict(item) - - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]): - """ - Compares the current stream state cursor with the latest record's cursor value and returns whichever of - the two is the maximum.
- - :return: The new value of the cursor - :rtype: dict - """ - current_updated_at = (current_stream_state or {}).get(self.cursor_field, "") - latest_record_updated_at = latest_record[self.cursor_field].isoformat() - - return {self.cursor_field: max(latest_record_updated_at, current_updated_at)} - - def _item_to_dict(self, resource): - """ - Recursively converts the Recurly resource object to `dict` - """ - if isinstance(resource, dict): - return dict((key, self._item_to_dict(value)) for key, value in resource.items()) - elif hasattr(resource, "__iter__") and not isinstance(resource, str): - return [self._item_to_dict(value) for value in resource] - elif hasattr(resource, "__dict__"): - return dict([(key, self._item_to_dict(value)) for key, value in resource.__dict__.items()]) - else: - return resource - - -class BaseAccountResourceStream(BaseStream): - @property - def account_params(self) -> dict: - """ - Returns the account API call params - """ - return self.default_params - - def read_records( - self, - sync_mode: SyncMode, - cursor_field: List[str] = None, - stream_slice: Mapping[str, any] = None, - stream_state: Mapping[str, Any] = None, - ) -> Iterable[Mapping[str, Any]]: - """ - The method to be called to retrieve account sub-resources, such as account coupon redemptions, shipping addresses, etc., - from Recurly. To retrieve an account's sub-resources, a separate call must first be made to list all the accounts so that - the `account_id` can be passed to the sub-resource API call. - - :return: Iterable of dictionaries representing the Recurly resource - :rtype: Iterable - """ - account_params = self.account_params - params = self.default_params - - self.begin_time = (stream_state and stream_state.get(self.cursor_field)) or self.begin_time - - if self.begin_time: - account_params.update({BEGIN_TIME_PARAM: self.begin_time}) - params.update({BEGIN_TIME_PARAM: self.begin_time}) - - if self.end_time: - account_params.update({END_TIME_PARAM: self.end_time}) - params.update({END_TIME_PARAM: self.end_time}) - - # Call the Recurly client methods - accounts = self._client.list_accounts(params=account_params).items() - - # If the API call raises the Recurly client's `MissingFeatureError`, skip loading the resources from Recurly - # and log a warning - try: - for account in accounts: - items = getattr(self._client, self.client_method_name)(params=params, account_id=account.id).items() - for item in items: - yield self._item_to_dict(item) - except MissingFeatureError as error: - super().logger.warning(f"Missing feature error {error}") - - -class Accounts(BaseStream): - pass - - -class AccountCouponRedemptions(BaseAccountResourceStream): - pass - - -class AccountNotes(BaseAccountResourceStream): - @property - def sort_key(self) -> str: - return "created_at" - - @property - def cursor_field(self) -> Union[str, List[str]]: - return "created_at" - - @property - def account_params(self) -> dict: - return {"order": "asc", "sort": DEFAULT_SORT_KEY, "limit": self.limit} - - -class AddOns(BaseStream): - pass - - -class BillingInfos(BaseAccountResourceStream): - pass - - -class Coupons(BaseStream): - pass - - -class CreditPayments(BaseStream): - pass - - -class ExportDates(BaseStream): - cursor_field = [] # Disable `incremental` sync for the `export_dates` Recurly API call - primary_key = None # There are no primary keys for automated exports - - def read_records( - self, - sync_mode: SyncMode, - cursor_field: List[str] = None, - stream_slice: Mapping[str, any] = None, -
stream_state: Mapping[str, Any] = None, - ) -> Iterable[Mapping[str, Any]]: - """ - Reads the `export_dates` response from Recurly. This is a special API call different from the other Recurly - resources and is hence treated differently - """ - yield {"dates": self._client.get_export_dates().dates or [""]} - - -class Invoices(BaseStream): - pass - - -class LineItems(BaseStream): - pass - - -class MeasuredUnits(BaseStream): - client_method_name = "list_measured_unit" - - -class Plans(BaseStream): - pass - - -class ShippingAddresses(BaseAccountResourceStream): - pass - - -class ShippingMethods(BaseStream): - pass - - -class Subscriptions(BaseStream): - pass - - -class Transactions(BaseStream): - pass - - -class UniqueCoupons(BaseStream): - def read_records( - self, - sync_mode: SyncMode, - cursor_field: List[str] = None, - stream_slice: Mapping[str, any] = None, - stream_state: Mapping[str, Any] = None, - ) -> Iterable[Mapping[str, Any]]: - """ - The method to be called to retrieve the unique coupons from Recurly. To retrieve the unique coupons, a separate call - must first be made to list all the coupons so that each `coupon_id` can be passed to the unique coupon codes API call. - - :return: Iterable of dictionaries representing the Recurly resource - :rtype: Iterable - """ - params = self.default_params - - self.begin_time = (stream_state and stream_state[self.cursor_field]) or self.begin_time - - if self.begin_time: - params.update({BEGIN_TIME_PARAM: self.begin_time}) - - if self.end_time: - params.update({END_TIME_PARAM: self.end_time}) - - # List all coupons - coupons = self._client.list_coupons(params=params).items() - - for coupon in coupons: - try: - items = self._client.list_unique_coupon_codes(params=params, coupon_id=coupon.id).items() - for item in items: - yield self._item_to_dict(item) - except (NotFoundError, ValidationError): - pass diff --git a/airbyte-integrations/connectors/source-recurly/unit_tests/__init__.py b/airbyte-integrations/connectors/source-recurly/unit_tests/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/airbyte-integrations/connectors/source-recurly/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-recurly/unit_tests/test_streams.py deleted file mode 100644 index 739d7eb46b18..000000000000 --- a/airbyte-integrations/connectors/source-recurly/unit_tests/test_streams.py +++ /dev/null @@ -1,203 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-# - -import unittest -from datetime import datetime, timedelta -from unittest.mock import Mock - -from source_recurly.streams import ( - BEGIN_TIME_PARAM, - DEFAULT_CURSOR, - DEFAULT_LIMIT, - END_TIME_PARAM, - AccountCouponRedemptions, - AccountNotes, - Accounts, - AddOns, - BaseStream, - BillingInfos, - Coupons, - CreditPayments, - ExportDates, - Invoices, - LineItems, - MeasuredUnits, - Plans, - ShippingAddresses, - ShippingMethods, - Subscriptions, - Transactions, - UniqueCoupons, -) - -METHOD_NAME = "list_resource" - - -class TestStream(BaseStream): - name = "test" - client_method_name = METHOD_NAME - - -class TestStreams(unittest.TestCase): - def setUp(self) -> None: - self.client_mock = Mock() - getattr(self.client_mock, METHOD_NAME).return_value.items.return_value = iter([None]) - - self.sync_mode_mock = Mock() - - self.params = {"order": "asc", "sort": DEFAULT_CURSOR, "limit": DEFAULT_LIMIT} - - def test_read_records(self): - stream = TestStream(client=self.client_mock) - - next(iter(stream.read_records(self.sync_mode_mock))) - - getattr(self.client_mock, METHOD_NAME).assert_called_once_with(params=self.params) - - getattr(self.client_mock, METHOD_NAME).return_value.items.assert_called_once() - - def test_read_records_with_begin_time(self): - begin_time_mock = Mock() - stream = TestStream(client=self.client_mock, begin_time=begin_time_mock) - - next(iter(stream.read_records(self.sync_mode_mock))) - - params = {**self.params, BEGIN_TIME_PARAM: begin_time_mock} - - getattr(self.client_mock, METHOD_NAME).assert_called_once_with(params=params) - - def test_read_records_with_end_time(self): - end_time_mock = Mock() - stream = TestStream(client=self.client_mock, end_time=end_time_mock) - - next(iter(stream.read_records(self.sync_mode_mock))) - - params = {**self.params, END_TIME_PARAM: end_time_mock} - - getattr(self.client_mock, METHOD_NAME).assert_called_once_with(params=params) - - def test_get_updated_state(self): - stream = TestStream(client=self.client_mock) - - cursor_field = stream.cursor_field - - now = datetime.now() - yesterday = now - timedelta(days=1) - - current_state = {cursor_field: yesterday.isoformat()} - latest_record = {cursor_field: now} - - expected_date = {cursor_field: now.isoformat()} - - assert stream.get_updated_state(current_state, latest_record) == expected_date - - def test_accounts_methods_client_method_name(self): - stream = Accounts(client=self.client_mock) - - assert stream.client_method_name == "list_accounts" - - def test_account_coupon_redemptions_read_records(self): - stream = AccountCouponRedemptions(client=self.client_mock) - account_id_mock = Mock() - account_mock = Mock(id=account_id_mock) - self.client_mock.list_accounts.return_value.items.return_value = iter([account_mock]) - self.client_mock.list_account_coupon_redemptions.return_value.items.return_value = iter([None]) - - next(iter(stream.read_records(self.sync_mode_mock))) - - self.client_mock.list_accounts.assert_called_once() - self.client_mock.list_account_coupon_redemptions.assert_called_once_with(account_id=account_id_mock, params=self.params) - - def test_account_notes_read_records(self): - stream = AccountNotes(client=self.client_mock) - account_id_mock = Mock() - account_mock = Mock(id=account_id_mock) - self.client_mock.list_accounts.return_value.items.return_value = iter([account_mock]) - self.client_mock.list_account_notes.return_value.items.return_value = iter([None]) - - params = {"order": "asc", "sort": "created_at", "limit": DEFAULT_LIMIT} - - 
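# AccountNotes sorts by "created_at" rather than the default "updated_at" (see its sort_key/cursor_field overrides in streams.py), so the expected params here differ from the shared self.params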
next(iter(stream.read_records(self.sync_mode_mock))) - - self.client_mock.list_accounts.assert_called_once() - self.client_mock.list_account_notes.assert_called_once_with(account_id=account_id_mock, params=params) - - def test_add_ons_client_method_name(self): - stream = AddOns(client=self.client_mock) - - assert stream.client_method_name == "list_add_ons" - - def test_billing_infos_client_method_name(self): - stream = BillingInfos(client=self.client_mock) - - assert stream.client_method_name == "list_billing_infos" - - def test_coupons_methods_client_method_name(self): - stream = Coupons(client=self.client_mock) - - assert stream.client_method_name == "list_coupons" - - def test_credit_payments_client_method_name(self): - stream = CreditPayments(client=self.client_mock) - - assert stream.client_method_name == "list_credit_payments" - - def test_export_dates_read_records(self): - stream = ExportDates(client=self.client_mock) - - next(iter(stream.read_records(self.sync_mode_mock))) - - self.client_mock.get_export_dates.assert_called_once() - - def test_invoices_methods_client_method_name(self): - stream = Invoices(client=self.client_mock) - - assert stream.client_method_name == "list_invoices" - - def test_line_items_methods_client_method_name(self): - stream = LineItems(client=self.client_mock) - - assert stream.client_method_name == "list_line_items" - - def test_measured_unit_client_method_name(self): - stream = MeasuredUnits(client=self.client_mock) - - assert stream.client_method_name == "list_measured_unit" - - def test_plans_client_method_name(self): - stream = Plans(client=self.client_mock) - - assert stream.client_method_name == "list_plans" - - def test_shipping_addresses_client_method_name(self): - stream = ShippingAddresses(client=self.client_mock) - - assert stream.client_method_name == "list_shipping_addresses" - - def test_shipping_methods_client_method_name(self): - stream = ShippingMethods(client=self.client_mock) - - assert stream.client_method_name == "list_shipping_methods" - - def test_subscriptions_client_method_name(self): - stream = Subscriptions(client=self.client_mock) - - assert stream.client_method_name == "list_subscriptions" - - def test_transactions_client_method_name(self): - stream = Transactions(client=self.client_mock) - - assert stream.client_method_name == "list_transactions" - - def test_unique_coupons_read_records(self): - stream = UniqueCoupons(client=self.client_mock) - coupon_id_mock = Mock() - coupon_mock = Mock(id=coupon_id_mock) - self.client_mock.list_coupons.return_value.items.return_value = iter([coupon_mock]) - self.client_mock.list_unique_coupon_codes.return_value.items.return_value = iter([None]) - - next(iter(stream.read_records(self.sync_mode_mock))) - - self.client_mock.list_coupons.assert_called_once() - self.client_mock.list_unique_coupon_codes.assert_called_once_with(coupon_id=coupon_id_mock, params=self.params) diff --git a/airbyte-integrations/connectors/source-search-metrics/.dockerignore b/airbyte-integrations/connectors/source-search-metrics/.dockerignore deleted file mode 100644 index 4b799fef5dae..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/.dockerignore +++ /dev/null @@ -1,7 +0,0 @@ -* -!Dockerfile -!Dockerfile.test -!main.py -!source_search_metrics -!setup.py -!secrets diff --git a/airbyte-integrations/connectors/source-search-metrics/Dockerfile b/airbyte-integrations/connectors/source-search-metrics/Dockerfile deleted file mode 100644 index f2a89cd5501c..000000000000 ---
a/airbyte-integrations/connectors/source-search-metrics/Dockerfile +++ /dev/null @@ -1,38 +0,0 @@ -FROM python:3.9.11-alpine3.15 as base - -# build and load all requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -# upgrade pip to the latest version -RUN apk --no-cache upgrade \ - && pip install --upgrade pip \ - && apk --no-cache add tzdata build-base - - -COPY setup.py ./ -# install necessary packages to a temporary folder -RUN pip install --prefix=/install . - -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# bash is installed for more convenient debugging. -RUN apk --no-cache add bash - -# copy payload code only -COPY main.py ./ -COPY source_search_metrics ./source_search_metrics - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=0.1.1 -LABEL io.airbyte.name=airbyte/source-search-metrics diff --git a/airbyte-integrations/connectors/source-search-metrics/README.md b/airbyte-integrations/connectors/source-search-metrics/README.md deleted file mode 100644 index e2403678f007..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/README.md +++ /dev/null @@ -1,100 +0,0 @@ -# Search Metrics Source - -This is the repository for the Search Metrics source connector, written in Python. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/search-metrics). - -## Local development - -### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** - -#### Minimum Python version required `= 3.7.0` - -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` - -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt -pip install '.[tests]' -``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. - -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. - -#### Create credentials -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/search-metrics) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_search_metrics/spec.json` file. -Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. 
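For reference, a filled-in `secrets/config.json` might look like the following minimal sketch; the field names mirror `integration_tests/sample_config.json` further below, and every value here is a placeholder rather than a working credential:
```
{
  "api_key": "<your-searchmetrics-api-key>",
  "client_secret": "<your-searchmetrics-client-secret>",
  "country_code": "US",
  "start_date": "20210807",
  "window_in_days": 30
}
```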
- -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source search-metrics test creds` -and place them into `secrets/config.json`. - -### Locally running the connector -``` -python main.py spec -python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json -``` - -### Locally running the connector docker image - - -#### Build -**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** -```bash -airbyte-ci connectors --name=source-search-metrics build -``` - -An image will be built with the tag `airbyte/source-search-metrics:dev`. - -**Via `docker build`:** -```bash -docker build -t airbyte/source-search-metrics:dev . -``` - -#### Run -Then run any of the connector commands as follows: -``` -docker run --rm airbyte/source-search-metrics:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-search-metrics:dev check --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-search-metrics:dev discover --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-search-metrics:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` - -## Testing -You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): -```bash -airbyte-ci connectors --name=source-search-metrics test -``` - -### Customizing Acceptance Tests -Customize the `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. -If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`. - -## Dependency Management -All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. -We split dependencies between two groups: -* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list. -* dependencies required for testing go in the `TEST_REQUIREMENTS` list. - -### Publishing a new version of the connector -You've checked out the repo, implemented a million-dollar feature, and you're ready to share your changes with the world. Now what? -1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-search-metrics test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). -3. Make sure the `metadata.yaml` content is up to date. -4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/sources/search-metrics.md`). -5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). -6. Pat yourself on the back for being an awesome contributor. -7.
Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. - diff --git a/airbyte-integrations/connectors/source-search-metrics/acceptance-test-config.yml b/airbyte-integrations/connectors/source-search-metrics/acceptance-test-config.yml deleted file mode 100644 index feccb404059a..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/acceptance-test-config.yml +++ /dev/null @@ -1,34 +0,0 @@ -# See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) -# for more information about how to configure these tests -connector_image: airbyte/source-search-metrics:dev -tests: - spec: - - spec_path: "source_search_metrics/spec.json" - connection: - - config_path: "secrets/config.json" - status: "succeed" - - config_path: "integration_tests/invalid_config.json" - status: "failed" - discovery: - - config_path: "secrets/config.json" - basic_read: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" - empty_streams: - [ - "list_market_share_s7", - "list_rankings_domain", - "list_competitors", - "distribution_keywords_s7", - "list_position_spread_historic_s7", - "list_seo_visibility_historic_s7", - "count_domain_keyword", - ] - # Incremental tests are commented out because the incremental streams have no records - # incremental: - # - config_path: "secrets/config.json" - # configured_catalog_path: "integration_tests/configured_catalog.json" - # future_state_path: "integration_tests/abnormal_state.json" - full_refresh: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-search-metrics/integration_tests/__init__.py b/airbyte-integrations/connectors/source-search-metrics/integration_tests/__init__.py deleted file mode 100644 index 46b7376756ec..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/integration_tests/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# -# Copyright (c) 2021 Airbyte, Inc., all rights reserved. -# diff --git a/airbyte-integrations/connectors/source-search-metrics/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-search-metrics/integration_tests/abnormal_state.json deleted file mode 100644 index a04b24252aed..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/integration_tests/abnormal_state.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "list_market_share_s7": { - "date": 30210807 - }, - "list_position_spread_historic_s7": { - "date": 30210807 - }, - "list_seo_visibility_historic_s7": { - "date": 30210807 - } -} diff --git a/airbyte-integrations/connectors/source-search-metrics/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-search-metrics/integration_tests/acceptance.py deleted file mode 100644 index d49b55882333..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/integration_tests/acceptance.py +++ /dev/null @@ -1,13 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-# - - -import pytest - -pytest_plugins = ("connector_acceptance_test.plugin",) - - -@pytest.fixture(scope="session", autouse=True) -def connector_setup(): - yield diff --git a/airbyte-integrations/connectors/source-search-metrics/integration_tests/catalog.json b/airbyte-integrations/connectors/source-search-metrics/integration_tests/catalog.json deleted file mode 100644 index 0147263f4c92..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/integration_tests/catalog.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "streams": [ - { - "stream": { - "name": "projects", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "list_market_share_s7", - "json_schema": {}, - "supported_sync_modes": ["incremental"], - "source_defined_cursor": true, - "default_cursor_field": [] - }, - "sync_mode": "incremental", - "destination_sync_mode": "append" - } - ] -} diff --git a/airbyte-integrations/connectors/source-search-metrics/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-search-metrics/integration_tests/configured_catalog.json deleted file mode 100644 index 746c184b525e..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/integration_tests/configured_catalog.json +++ /dev/null @@ -1,208 +0,0 @@ -{ - "streams": [ - { - "stream": { - "name": "projects", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "count_domain_keyword", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "list_market_share_s7", - "json_schema": {}, - "supported_sync_modes": ["incremental"], - "source_defined_cursor": true, - "default_cursor_field": [] - }, - "sync_mode": "incremental", - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "list_position_spread_historic_s7", - "json_schema": {}, - "supported_sync_modes": ["incremental"], - "source_defined_cursor": true, - "default_cursor_field": [] - }, - "sync_mode": "incremental", - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "list_seo_visibility_historic_s7", - "json_schema": {}, - "supported_sync_modes": ["incremental"], - "source_defined_cursor": true, - "default_cursor_field": [] - }, - "sync_mode": "incremental", - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "tags", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "benchmark_rankings_s7", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "competitor_rankings_s7", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "distribution_keywords_s7", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "keyword_potentials_s7", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - }, - { - 
"stream": { - "name": "tag_potentials_s7", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "url_rankings_s7", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "seo_visibility_value_s7", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "serp_spread_value_s7", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "list_competitors", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "list_competitors_relevancy", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "list_rankings_domain", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "list_seo_visibility_country", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "list_serp_spread_s7", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "list_rankings_historic_s7", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "list_winners_s7", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "list_losers_s7", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - } - ] -} diff --git a/airbyte-integrations/connectors/source-search-metrics/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-search-metrics/integration_tests/invalid_config.json deleted file mode 100644 index 9a2e4444acf9..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/integration_tests/invalid_config.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "api_key": "invalid_api_key", - "client_secret": "invalid_client_secret", - "country_code": "CA", - "start_date": "20330807", - "window_in_days": 30 -} diff --git a/airbyte-integrations/connectors/source-search-metrics/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-search-metrics/integration_tests/sample_config.json deleted file mode 100644 index 355a9a0e04bf..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/integration_tests/sample_config.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "api_key": "api_key", - "client_secret": "client_secret", - "country_code": "CA", - "start_date": "20210807", - "window_in_days": 30 -} diff --git a/airbyte-integrations/connectors/source-search-metrics/integration_tests/sample_state.json 
b/airbyte-integrations/connectors/source-search-metrics/integration_tests/sample_state.json deleted file mode 100644 index 81cf082cea41..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/integration_tests/sample_state.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "list_market_share_s7": { - "date": 20210807 - }, - "list_position_spread_historic_s7": { - "date": 20210807 - }, - "list_seo_visibility_historic_s7": { - "date": 20210807 - } -} diff --git a/airbyte-integrations/connectors/source-search-metrics/main.py b/airbyte-integrations/connectors/source-search-metrics/main.py deleted file mode 100644 index 29e5e8a133a6..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/main.py +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from source_search_metrics.run import run - -if __name__ == "__main__": - run() diff --git a/airbyte-integrations/connectors/source-search-metrics/requirements.txt b/airbyte-integrations/connectors/source-search-metrics/requirements.txt deleted file mode 100644 index d6e1198b1ab1..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/requirements.txt +++ /dev/null @@ -1 +0,0 @@ --e . diff --git a/airbyte-integrations/connectors/source-search-metrics/setup.py b/airbyte-integrations/connectors/source-search-metrics/setup.py deleted file mode 100644 index 2e3f6e81f310..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/setup.py +++ /dev/null @@ -1,46 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk", -] - -TEST_REQUIREMENTS = [ - "requests-mock~=1.9.3", - "pytest~=6.1", - "pytest-mock~=3.6.1", -] - -setup( - entry_points={ - "console_scripts": [ - "source-search-metrics=source_search_metrics.run:run", - ], - }, - name="source_search_metrics", - description="Source implementation for Search Metrics.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={ - "": [ - # Include yaml files in the package (if any) - "*.yml", - "*.yaml", - # Include all json files in the package, up to 4 levels deep - "*.json", - "*/*.json", - "*/*/*.json", - "*/*/*/*.json", - "*/*/*/*/*.json", - ] - }, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/__init__.py b/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/__init__.py deleted file mode 100644 index 9b0b409b9906..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright (c) 2021 Airbyte, Inc., all rights reserved. -# - - -from .source import SourceSearchMetrics - -__all__ = ["SourceSearchMetrics"] diff --git a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/run.py b/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/run.py deleted file mode 100644 index 29a0ebc75add..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/run.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - - -import sys - -from airbyte_cdk.entrypoint import launch -from source_search_metrics import SourceSearchMetrics - - -def run(): - source = SourceSearchMetrics() - launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/TODO.md b/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/TODO.md deleted file mode 100644 index cf1efadb3c9c..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/TODO.md +++ /dev/null @@ -1,25 +0,0 @@ -# TODO: Define your stream schemas -Your connector must describe the schema of each stream it can output using [JSONSchema](https://json-schema.org). - -The simplest way to do this is to describe the schema of your streams using one `.json` file per stream. You can also dynamically generate the schema of your stream in code, or you can combine both approaches: start with a `.json` file and dynamically add properties to it. - -The schema of a stream is the return value of `Stream.get_json_schema`. - -## Static schemas -By default, `Stream.get_json_schema` reads a `.json` file in the `schemas/` directory whose name is equal to the value of the `Stream.name` property. In turn `Stream.name` by default returns the name of the class in snake case. Therefore, if you have a class `class EmployeeBenefits(HttpStream)` the default behavior will look for a file called `schemas/employee_benefits.json`. You can override any of these behaviors as you need. - -Important note: any objects referenced via `$ref` should be placed in the `shared/` directory in their own `.json` files. - -## Dynamic schemas -If you'd rather define your schema in code, override `Stream.get_json_schema` in your stream class to return a `dict` describing the schema using [JSONSchema](https://json-schema.org). - -## Dynamically modifying static schemas -Override `Stream.get_json_schema` to run the default behavior, edit the returned value, then return the edited value: -``` -def get_json_schema(self): - schema = super().get_json_schema() - schema['dynamically_determined_property'] = "property" - return schema -``` - -Delete this file once you're done. Or don't. 
Up to you :) diff --git a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/benchmark_rankings_s7.json b/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/benchmark_rankings_s7.json deleted file mode 100644 index 6bea16765247..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/benchmark_rankings_s7.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "visibility": { - "type": ["null", "integer"] - }, - "domain": { - "type": ["null", "string"] - }, - "avg_position": { - "type": ["null", "number"] - }, - "keyword_coverage": { - "type": ["null", "number"] - }, - "url_type": { - "type": ["null", "string"] - } - } -} diff --git a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/competitor_rankings_s7.json b/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/competitor_rankings_s7.json deleted file mode 100644 index 7486d9e93a4b..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/competitor_rankings_s7.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "visibility": { - "type": ["null", "integer"] - }, - "avg_position": { - "type": ["null", "number"] - }, - "keyword_count": { - "type": ["null", "integer"] - }, - "domain": { - "type": ["null", "string"] - }, - "url_type": { - "type": ["null", "string"] - } - } -} diff --git a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/count_domain_keyword.json b/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/count_domain_keyword.json deleted file mode 100644 index 645a8b56c4a5..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/count_domain_keyword.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "count": { - "type": ["null", "integer"] - }, - "balance": { - "type": ["null", "integer"] - } - } -} diff --git a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/distribution_keywords_s7.json b/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/distribution_keywords_s7.json deleted file mode 100644 index 8f0b4b2cef49..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/distribution_keywords_s7.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "desktop": { - "type": ["null", "string"] - }, - "mobile": { - "type": ["null", "string"] - }, - "smartphone": { - "type": ["null", "string"] - }, - "tablet": { - "type": ["null", "string"] - }, - "keyword": { - "type": ["null", "string"] - } - } -} diff --git a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/keyword_potentials_s7.json b/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/keyword_potentials_s7.json deleted file mode 100644 index 9037fba170d1..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/keyword_potentials_s7.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "$schema": 
"http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "traffic": { - "type": ["null", "integer"] - }, - "traffic_max": { - "type": ["null", "integer"] - }, - "traffic_volume": { - "type": ["null", "string"] - }, - "max_potential": { - "type": ["null", "integer"] - }, - "se_position": { - "type": ["null", "string"] - }, - "kd": { - "type": ["null", "string"] - }, - "keyword": { - "type": ["null", "string"] - } - } -} diff --git a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_competitors.json b/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_competitors.json deleted file mode 100644 index 8f0274bbf64a..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_competitors.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "domain": { - "type": ["null", "string"] - }, - "avg_position_1": { - "type": ["null", "number"] - }, - "avg_position_2": { - "type": ["null", "number"] - }, - "common_keywords": { - "type": ["null", "string"] - }, - "traffic_1": { - "type": ["null", "integer"] - }, - "traffic_2": { - "type": ["null", "integer"] - }, - "costs": { - "type": ["null", "number"] - }, - "total_kw_count": { - "type": ["null", "string"] - }, - "diff_keywords": { - "type": ["null", "string"] - } - } -} diff --git a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_competitors_relevancy.json b/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_competitors_relevancy.json deleted file mode 100644 index 197e9a43172f..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_competitors_relevancy.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "domain_id": { - "type": ["null", "integer"] - }, - "host_id": { - "type": ["null", "integer"] - }, - "own_domain": { - "type": ["null", "boolean"] - }, - "domain": { - "type": ["null", "string"] - }, - "own_avg_position": { - "type": ["null", "integer"] - }, - "competitor_avg_position": { - "type": ["null", "integer"] - }, - "own_traffic_index": { - "type": ["null", "integer"] - }, - "competitor_traffic_index": { - "type": ["null", "integer"] - }, - "own_traffic_index_value": { - "type": ["null", "number"] - }, - "competitor_traffic_index_value": { - "type": ["null", "number"] - }, - "traffic_index_difference": { - "type": ["null", "integer"] - }, - "avg_traffic_value": { - "type": ["null", "number"] - }, - "traffic_index_potential": { - "type": ["null", "integer"] - }, - "traffic_index_value_potential": { - "type": ["null", "number"] - }, - "keyword_count": { - "type": ["null", "integer"] - }, - "own_keyword_count": { - "type": ["null", "integer"] - }, - "shared_keyword_count": { - "type": ["null", "integer"] - }, - "keyword_overlap": { - "type": ["null", "number"] - }, - "competitor_keyword_count": { - "type": ["null", "integer"] - }, - "keyword_relevance": { - "type": ["null", "integer"] - } - } -} diff --git a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_losers_s7.json b/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_losers_s7.json deleted file mode 100644 index ea12aee4247c..000000000000 --- 
a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_losers_s7.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "url_last": { - "type": ["null", "string"] - }, - "se_position": { - "type": ["null", "string"] - }, - "url": { - "type": ["null", "string"] - }, - "traffic_volume": { - "type": ["null", "string"] - }, - "cpc": { - "type": ["null", "number"] - }, - "original_position": { - "type": ["null", "string"] - }, - "delta_traffic": { - "type": ["null", "integer"] - }, - "trend": { - "type": ["null", "object"], - "properties": { - "trend": { - "type": ["null", "integer"] - }, - "abs": { - "type": ["null", "integer"] - }, - "per": { - "type": ["null", "number"] - } - } - }, - "keyword": { - "type": ["null", "string"] - } - } -} diff --git a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_market_share_s7.json b/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_market_share_s7.json deleted file mode 100644 index f051b24524a5..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_market_share_s7.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "date": { - "type": ["null", "integer"] - }, - "data": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "domain": { - "type": ["null", "string"] - }, - "host": { - "type": ["null", "string"] - }, - "url": { - "type": ["null", "string"] - }, - "marketshare": { - "type": ["null", "number"] - } - } - } - } - } -} diff --git a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_position_spread_historic_s7.json b/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_position_spread_historic_s7.json deleted file mode 100644 index 247266bb4d31..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_position_spread_historic_s7.json +++ /dev/null @@ -1,135 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "date": { - "type": ["null", "string"] - }, - "data": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "domain": { - "type": ["null", "string"] - }, - "data": { - "type": ["null", "object"], - "properties": { - "position1": { - "type": ["null", "object"], - "properties": { - "keyword_count": { - "type": ["null", "integer"] - }, - "keyword_per": { - "type": ["null", "number"] - } - } - }, - "position2": { - "type": ["null", "object"], - "properties": { - "keyword_count": { - "type": ["null", "integer"] - }, - "keyword_per": { - "type": ["null", "number"] - } - } - }, - "position3": { - "type": ["null", "object"], - "properties": { - "keyword_count": { - "type": ["null", "integer"] - }, - "keyword_per": { - "type": ["null", "number"] - } - } - }, - "position4": { - "type": ["null", "object"], - "properties": { - "keyword_count": { - "type": ["null", "integer"] - }, - "keyword_per": { - "type": ["null", "number"] - } - } - }, - "position5": { - "type": ["null", "object"], - "properties": { - "keyword_count": { - "type": ["null", "integer"] - }, - "keyword_per": { - "type": ["null", "number"] - } - } - }, - "position6": { - "type": ["null", "object"], - 
"properties": { - "keyword_count": { - "type": ["null", "integer"] - }, - "keyword_per": { - "type": ["null", "number"] - } - } - }, - "position7": { - "type": ["null", "object"], - "properties": { - "keyword_count": { - "type": ["null", "integer"] - }, - "keyword_per": { - "type": ["null", "number"] - } - } - }, - "position8": { - "type": ["null", "object"], - "properties": { - "keyword_count": { - "type": ["null", "integer"] - }, - "keyword_per": { - "type": ["null", "number"] - } - } - }, - "position9": { - "type": ["null", "object"], - "properties": { - "keyword_count": { - "type": ["null", "integer"] - }, - "keyword_per": { - "type": ["null", "number"] - } - } - }, - "position10": { - "type": ["null", "object"], - "properties": { - "keyword_count": { - "type": ["null", "integer"] - }, - "keyword_per": { - "type": ["null", "number"] - } - } - } - } - } - } - } - } - } -} diff --git a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_rankings_analysis_s7.json b/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_rankings_analysis_s7.json deleted file mode 100644 index b472da1ffa9a..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_rankings_analysis_s7.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "url": { - "type": ["null", "string"] - }, - "position": { - "type": ["null", "integer"] - }, - "page": { - "type": ["null", "integer"] - }, - "type_id": { - "type": ["null", "integer"] - }, - "title": { - "type": ["null", "string"] - }, - "url_type": { - "type": ["null", "string"] - }, - "trend": { - "type": ["null", "object"], - "properties": { - "trend": { - "type": ["null", "integer"] - }, - "abs": { - "type": ["null", "string"] - }, - "per": { - "type": ["null", "number"] - } - } - } - } -} diff --git a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_rankings_domain.json b/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_rankings_domain.json deleted file mode 100644 index 8d7692a51789..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_rankings_domain.json +++ /dev/null @@ -1,88 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "pos": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "position": { - "type": ["null", "integer"] - }, - "page": { - "type": ["null", "string"] - }, - "directory": { - "type": ["null", "string"] - }, - "url": { - "type": ["null", "string"] - }, - "visibility": { - "type": ["null", "string"] - }, - "traffic": { - "type": ["null", "integer"] - }, - "ad_budget": { - "type": ["null", "string"] - }, - "title": { - "type": ["null", "string"] - }, - "traffic_volume": { - "type": ["null", "string"] - }, - "cpc": { - "type": ["null", "number"] - }, - "delta_traffic": { - "type": ["null", "string"] - }, - "trend": { - "type": ["null", "object"], - "properties": { - "trend": { - "type": ["null", "integer"] - }, - "abs": { - "type": ["null", "string"] - }, - "per": { - "type": ["null", "integer"] - } - } - }, - "tags": { - "type": ["null", "array"], - "items": { - "type": ["null", "string"] - } - } - } - } - }, - "keyword": { - "type": ["null", "string"] - }, - "tags": { - "type": ["null", "array"], - "items": { - "type": 
["null", "string"] - } - }, - "delete_date": { - "type": ["null", "string"], - "format": "date" - }, - "date": { - "type": ["null", "string"], - "format": "date" - }, - "trend_date": { - "type": ["null", "string"], - "format": "date" - } - } -} diff --git a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_rankings_historic_s7.json b/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_rankings_historic_s7.json deleted file mode 100644 index a352b644db5e..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_rankings_historic_s7.json +++ /dev/null @@ -1,87 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "date": { - "type": ["null", "integer"], - "format": "date" - }, - "group1": { - "type": ["null", "object"], - "properties": { - "keyword_count": { - "type": ["null", "integer"] - }, - "keyword_per": { - "type": ["null", "number"] - } - } - }, - "group2": { - "type": ["null", "object"], - "properties": { - "keyword_count": { - "type": ["null", "integer"] - }, - "keyword_per": { - "type": ["null", "number"] - } - } - }, - "group3": { - "type": ["null", "object"], - "properties": { - "keyword_count": { - "type": ["null", "integer"] - }, - "keyword_per": { - "type": ["null", "number"] - } - } - }, - "group4": { - "type": ["null", "object"], - "properties": { - "keyword_count": { - "type": ["null", "integer"] - }, - "keyword_per": { - "type": ["null", "number"] - } - } - }, - "group5": { - "type": ["null", "object"], - "properties": { - "keyword_count": { - "type": ["null", "integer"] - }, - "keyword_per": { - "type": ["null", "number"] - } - } - }, - "group6": { - "type": ["null", "object"], - "properties": { - "keyword_count": { - "type": ["null", "integer"] - }, - "keyword_per": { - "type": ["null", "number"] - } - } - }, - "group7": { - "type": ["null", "object"], - "properties": { - "keyword_count": { - "type": ["null", "integer"] - }, - "keyword_per": { - "type": ["null", "number"] - } - } - } - } -} diff --git a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_seo_visibility_country.json b/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_seo_visibility_country.json deleted file mode 100644 index 4597cddbd6ab..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_seo_visibility_country.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "country_code": { - "type": ["null", "string"] - }, - "visibility": { - "type": ["null", "integer"] - } - } -} diff --git a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_seo_visibility_historic_s7.json b/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_seo_visibility_historic_s7.json deleted file mode 100644 index 6bf8bfc9967f..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_seo_visibility_historic_s7.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "date": { - "type": ["null", "integer"], - "format": "date" - }, - "data": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "domain": { - 
"type": ["null", "string"] - }, - "sum_visibility": { - "type": ["null", "integer"] - }, - "count_keywords": { - "type": ["null", "string"] - } - } - } - } - } -} diff --git a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_serp_spread_s7.json b/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_serp_spread_s7.json deleted file mode 100644 index a352b644db5e..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_serp_spread_s7.json +++ /dev/null @@ -1,87 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "date": { - "type": ["null", "integer"], - "format": "date" - }, - "group1": { - "type": ["null", "object"], - "properties": { - "keyword_count": { - "type": ["null", "integer"] - }, - "keyword_per": { - "type": ["null", "number"] - } - } - }, - "group2": { - "type": ["null", "object"], - "properties": { - "keyword_count": { - "type": ["null", "integer"] - }, - "keyword_per": { - "type": ["null", "number"] - } - } - }, - "group3": { - "type": ["null", "object"], - "properties": { - "keyword_count": { - "type": ["null", "integer"] - }, - "keyword_per": { - "type": ["null", "number"] - } - } - }, - "group4": { - "type": ["null", "object"], - "properties": { - "keyword_count": { - "type": ["null", "integer"] - }, - "keyword_per": { - "type": ["null", "number"] - } - } - }, - "group5": { - "type": ["null", "object"], - "properties": { - "keyword_count": { - "type": ["null", "integer"] - }, - "keyword_per": { - "type": ["null", "number"] - } - } - }, - "group6": { - "type": ["null", "object"], - "properties": { - "keyword_count": { - "type": ["null", "integer"] - }, - "keyword_per": { - "type": ["null", "number"] - } - } - }, - "group7": { - "type": ["null", "object"], - "properties": { - "keyword_count": { - "type": ["null", "integer"] - }, - "keyword_per": { - "type": ["null", "number"] - } - } - } - } -} diff --git a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_winners_s7.json b/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_winners_s7.json deleted file mode 100644 index ea12aee4247c..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/list_winners_s7.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "url_last": { - "type": ["null", "string"] - }, - "se_position": { - "type": ["null", "string"] - }, - "url": { - "type": ["null", "string"] - }, - "traffic_volume": { - "type": ["null", "string"] - }, - "cpc": { - "type": ["null", "number"] - }, - "original_position": { - "type": ["null", "string"] - }, - "delta_traffic": { - "type": ["null", "integer"] - }, - "trend": { - "type": ["null", "object"], - "properties": { - "trend": { - "type": ["null", "integer"] - }, - "abs": { - "type": ["null", "integer"] - }, - "per": { - "type": ["null", "number"] - } - } - }, - "keyword": { - "type": ["null", "string"] - } - } -} diff --git a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/marketshare_value_s7.json b/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/marketshare_value_s7.json deleted file mode 100644 index 85c4bd986b09..000000000000 --- 
a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/marketshare_value_s7.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "date": { - "type": ["null", "string"], - "format": "date" - }, - "data": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "domain": { - "type": ["null", "string"] - }, - "marketshare": { - "type": ["null", "integer"] - }, - "trend": { - "type": ["null", "object"], - "properties": { - "trend": { - "type": ["null", "integer"] - }, - "abs": { - "type": ["null", "string"] - }, - "per": { - "type": ["null", "integer"] - } - } - } - } - } - } - } -} diff --git a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/projects.json b/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/projects.json deleted file mode 100644 index cb0884960dec..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/projects.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "project_id": { - "type": ["null", "integer"] - }, - "project_name": { - "type": ["null", "string"] - }, - "project_url": { - "type": ["null", "string"] - }, - "keyword_count": { - "type": ["null", "integer"] - }, - "engine_count": { - "type": ["null", "integer"] - }, - "engines": { - "type": ["null", "array"], - "items": { - "type": ["null", "string"] - } - } - } -} diff --git a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/seo_visibility_value_s7.json b/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/seo_visibility_value_s7.json deleted file mode 100644 index 2468104da80d..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/seo_visibility_value_s7.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "trend": { - "type": ["null", "object"], - "properties": { - "trend": { - "type": ["null", "integer"] - }, - "abs": { - "type": ["null", "integer"] - }, - "per": { - "type": ["null", "number"] - } - } - }, - "domain": { - "type": ["null", "string"] - }, - "date": { - "type": ["null", "integer"], - "format": "date" - }, - "sum_visibility": { - "type": ["null", "integer"] - }, - "count_keywords": { - "type": ["null", "string"] - } - } -} diff --git a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/serp_spread_value_s7.json b/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/serp_spread_value_s7.json deleted file mode 100644 index ea9579aa2f1f..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/serp_spread_value_s7.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "data": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "unranked": { - "type": ["null", "integer"] - }, - "pos_1_10": { - "type": ["null", "integer"] - }, - "pos_11_20": { - "type": ["null", "integer"] - }, - "pos_20+": { - "type": ["null", "integer"] - } - } - } - }, - "total": { - "type": ["null", "integer"] - } - } -} diff --git 
a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/tag_potentials_s7.json b/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/tag_potentials_s7.json deleted file mode 100644 index 5c1773dd9030..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/tag_potentials_s7.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "keywords": { - "type": ["null", "integer"] - }, - "rankings": { - "type": ["null", "integer"] - }, - "traffic": { - "type": ["null", "integer"] - }, - "traffic_max": { - "type": ["null", "integer"] - }, - "traffic_volume": { - "type": ["null", "integer"] - }, - "max_potential": { - "type": ["null", "integer"] - }, - "date": { - "type": ["null", "integer"] - }, - "winner": { - "type": ["null", "integer"] - }, - "loser": { - "type": ["null", "integer"] - }, - "tag": { - "type": ["null", "string"] - } - } -} diff --git a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/tags.json b/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/tags.json deleted file mode 100644 index 9dc4cbf9e101..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/tags.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "project_id": { - "type": ["null", "integer"] - }, - "id": { - "type": ["null", "string"] - }, - "parent_id": { - "type": ["null", "string"] - }, - "type": { - "type": ["null", "string"] - }, - "name": { - "type": ["null", "string"] - }, - "full_name": { - "type": ["null", "string"] - }, - "tag_name_path": { - "type": ["null", "array"], - "items": { - "type": ["null", "string"] - } - }, - "level": { - "type": ["null", "integer"] - } - } -} diff --git a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/url_rankings_s7.json b/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/url_rankings_s7.json deleted file mode 100644 index 22567a891a03..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/schemas/url_rankings_s7.json +++ /dev/null @@ -1,87 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "pos": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "position": { - "type": ["null", "integer", "string"] - }, - "page": { - "type": ["null", "integer", "string"] - }, - "directory": { - "type": ["null", "string"] - }, - "url": { - "type": ["null", "string"] - }, - "visibility": { - "type": ["null", "integer", "string"] - }, - "traffic": { - "type": ["null", "integer", "string"] - }, - "ad_budget": { - "type": ["null", "number", "string"] - }, - "title": { - "type": ["null", "string"] - }, - "traffic_volume": { - "type": ["null", "integer"] - }, - "cpc": { - "type": ["null", "number"] - }, - "delta_traffic": { - "type": ["null", "integer", "string"] - }, - "trend": { - "type": ["null", "object"], - "properties": { - "trend": { - "type": ["null", "integer", "string"] - }, - "abs": { - "type": ["null", "integer", "string"] - }, - "per": { - "type": ["null", "number", "string"] - } - } - }, - "tags": { - "type": ["null", "array"], - "items": { - "type": ["null", "string"] - } - } - } - } - }, - 
"keyword": { - "type": ["null", "string"] - }, - "tags": { - "type": ["null", "array"], - "items": { - "type": ["null", "string"] - } - }, - "delete_date": { - "type": ["null", "string"] - }, - "date": { - "type": ["null", "integer"], - "format": "date" - }, - "trend_date": { - "type": ["null", "integer"], - "format": "date" - } - } -} diff --git a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/source.py b/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/source.py deleted file mode 100644 index 4ba8ba798c07..000000000000 --- a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/source.py +++ /dev/null @@ -1,360 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import base64 -from abc import ABC -from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple - -import pendulum -import requests -from airbyte_cdk.sources import AbstractSource -from airbyte_cdk.sources.streams import Stream -from airbyte_cdk.sources.streams.http import HttpStream -from airbyte_cdk.sources.streams.http.auth import Oauth2Authenticator - -from .utils import to_datetime_str - - -class SearchMetricsStream(HttpStream, ABC): - primary_key = None - page_size = 250 - url_base = "https://api.searchmetrics.com/v4/" - - def __init__(self, config: Mapping[str, Any]): - super().__init__(authenticator=config["authenticator"]) - self.config = config - self.start_date = config["start_date"] - self.window_in_days = config.get("window_in_days", 30) - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - return None - - def request_params( - self, - stream_state: Mapping[str, Any], - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None, - ) -> MutableMapping[str, Any]: - return { - "project_id": stream_slice["project_id"], - "se_id": stream_slice["engine"], - "urls": stream_slice["project_url"], - "url": stream_slice["project_url"], - "domain": stream_slice["project_url"], - "countrycode": self.config["country_code"], - "limit": self.page_size, - } - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - data = response.json().get("response", []) - - if isinstance(data, list): - data = data - elif isinstance(data, dict): - data = [data] - - for record in data: - yield record - - def should_retry(self, response: requests.Response) -> bool: - rankings_not_yet_calculated = response.status_code == 400 and "Rankings not yet calculated" in response.json()["error_message"] - insufficient_credits_to_make_this_service_request = ( - response.status_code == 403 and "Insufficient credits to make this service request" in response.json()["error_message"] - ) - - if rankings_not_yet_calculated or insufficient_credits_to_make_this_service_request: - self.logger.error(f"{response.json()['error_message']}") - self.raise_on_http_errors = False - - return super().should_retry(response) - - def raise_on_http_errors(self) -> bool: - return True - - -class ChildStreamMixin: - parent_stream_class: Optional[SearchMetricsStream] = None - - def stream_slices(self, **kwargs) -> Iterable[Optional[Mapping[str, any]]]: - for item in self.parent_stream_class(config=self.config).read_records(sync_mode=None): - for engine in item["engines"]: - yield {"project_id": item["project_id"], "engine": engine, "project_url": item["project_url"]} - - yield from [] - - -class Projects(SearchMetricsStream): - primary_key = "project_id" - - def 
request_params( - self, - stream_state: Mapping[str, Any], - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None, - ) -> MutableMapping[str, Any]: - return {} - - def path(self, **kwargs) -> str: - return "AdminStatusGetListProjects.json" - - -class ProjectsChildStream(ChildStreamMixin): - parent_stream_class = Projects - - -class IncrementalSearchMetricsStream(ProjectsChildStream, SearchMetricsStream): - cursor_field = "date" - - def request_params( - self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None - ) -> MutableMapping[str, Any]: - params = super().request_params(stream_state, stream_slice, next_page_token) - params["date_from"] = stream_slice["date_from"] - params["date_to"] = stream_slice["date_to"] - return params - - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: - return { - self.cursor_field: max( - str(latest_record.get(self.cursor_field, self.start_date)), - str(current_stream_state.get(self.cursor_field, self.start_date)), - ) - } - - def stream_slices(self, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[Mapping[str, Any]]]: - """ - Override default stream_slices CDK method to provide date_slices as page chunks for data fetch. - Returns list of dict, example: [{ - "date_from": "20200101", - "date_to": "20210102" - }, - { - "date_from": "20200103", - "date_to": "20210104" - }, - ...] - """ - - for stream_slice in super().stream_slices(**kwargs): - start_date = pendulum.parse(self.start_date).date() - end_date = pendulum.now().date() - - # Determine stream_state, if no stream_state we use start_date - if stream_state: - start_date = pendulum.parse(stream_state.get(self.cursor_field)).date() - - # use the lowest date between start_date and self.end_date, otherwise API fails if start_date is in future - start_date = min(start_date, end_date) - date_slices = [] - - while start_date <= end_date: - end_date_slice = start_date.add(days=self.window_in_days) - stream_slice.update({"date_from": to_datetime_str(start_date), "date_to": to_datetime_str(min(end_date_slice, end_date))}) - date_slices.append(stream_slice) - # add 1 day for start next slice from next day and not duplicate data from previous slice end date. 
- start_date = end_date_slice.add(days=1) - - return date_slices - - -class Tags(ProjectsChildStream, SearchMetricsStream): - primary_key = "id" - - def path(self, **kwargs) -> str: - return "AdminStatusGetListProjectTags.json" - - -class BenchmarkRankingsS7(ProjectsChildStream, SearchMetricsStream): - def path(self, **kwargs) -> str: - return "ProjectOrganicGetListBenchmarkRankingsS7.json" - - -class CountDomainKeyword(ProjectsChildStream, SearchMetricsStream): - def path(self, **kwargs) -> str: - return "ResearchOrganicGetCountDomainKeyword.json" - - -class CompetitorRankingsS7(ProjectsChildStream, SearchMetricsStream): - def path(self, **kwargs) -> str: - return "ProjectOrganicGetListCompetitorRankingsS7.json" - - -class DistributionKeywordsS7(ProjectsChildStream, SearchMetricsStream): - def path(self, **kwargs) -> str: - return "ProjectOrganicGetListDistributionKeywordsS7.json" - - -class KeywordPotentialsS7(ProjectsChildStream, SearchMetricsStream): - def path(self, **kwargs) -> str: - return "ProjectOrganicGetListKeywordPotentialsS7.json" - - -class TagPotentialsS7(ProjectsChildStream, SearchMetricsStream): - def path(self, **kwargs) -> str: - return "ProjectOrganicGetListTagPotentialsS7.json" - - -class UrlRankingsS7(ProjectsChildStream, SearchMetricsStream): - def path(self, **kwargs) -> str: - return "ProjectOrganicGetListUrlRankingsS7.json" - - -class MarketshareValueS7(ProjectsChildStream, SearchMetricsStream): - def path(self, **kwargs) -> str: - return "ProjectOrganicGetValueMarketshareS7.json" - - -class SeoVisibilityValueS7(ProjectsChildStream, SearchMetricsStream): - def path(self, **kwargs) -> str: - return "ProjectOrganicGetValueSeoVisibilityS7.json" - - -class SerpSpreadValueS7(ProjectsChildStream, SearchMetricsStream): - def path(self, **kwargs) -> str: - return "ProjectOrganicGetValueSerpSpreadS7.json" - - -class ListCompetitors(ProjectsChildStream, SearchMetricsStream): - def path(self, **kwargs) -> str: - return "ResearchOrganicGetListCompetitors.json" - - -class ListCompetitorsRelevancy(ProjectsChildStream, SearchMetricsStream): - def path(self, **kwargs) -> str: - return "ResearchOrganicGetListCompetitorsRelevancy.json" - - -class ListRankingsDomain(ProjectsChildStream, SearchMetricsStream): - def path(self, **kwargs) -> str: - return "ResearchOrganicGetListRankingsDomain.json" - - -class ListSeoVisibilityCountry(ProjectsChildStream, SearchMetricsStream): - def path(self, **kwargs) -> str: - return "ResearchOrganicGetListSeoVisibilityCountry.json" - - -class ListLosersS7(ProjectsChildStream, SearchMetricsStream): - def path(self, **kwargs) -> str: - return "ProjectOrganicGetListLosersS7.json" - - -class ListMarketShareS7(IncrementalSearchMetricsStream): - def path(self, **kwargs) -> str: - return "ProjectOrganicGetListMarketShareS7.json" - - -class ListPositionSpreadHistoricS7(IncrementalSearchMetricsStream): - def path(self, **kwargs) -> str: - return "ProjectOrganicGetListPositionSpreadHistoricS7.json" - - -class ListSeoVisibilityHistoricS7(IncrementalSearchMetricsStream): - def path(self, **kwargs) -> str: - return "ProjectOrganicGetListSeoVisibilityHistoricS7.json" - - -class ListRankingsAnalysisS7(ProjectsChildStream, SearchMetricsStream): - def path(self, **kwargs) -> str: - return "ProjectOrganicGetListRankingsAnalysisS7.json" - - -class ListWinnersS7(ProjectsChildStream, SearchMetricsStream): - def path(self, **kwargs) -> str: - return "ProjectOrganicGetListWinnersS7.json" - - -class ListRankingsHistoricS7(ProjectsChildStream, SearchMetricsStream): - def 
path(self, **kwargs) -> str:
-        return "ProjectOrganicGetListRankingsHistoricS7.json"
-
-
-class ListSerpSpreadS7(ProjectsChildStream, SearchMetricsStream):
-    def path(self, **kwargs) -> str:
-        return "ProjectOrganicGetListSerpSpreadS7.json"
-
-
-class SearchMetricsAuthenticator(Oauth2Authenticator):
-    def __init__(self, config):
-        super().__init__(
-            token_refresh_endpoint="https://api.searchmetrics.com/v4/token",
-            client_id=config["api_key"],
-            client_secret=config["client_secret"],
-            refresh_token=None,
-        )
-
-    def get_refresh_request_body(self) -> Mapping[str, Any]:
-        payload: MutableMapping[str, Any] = {"grant_type": "client_credentials"}
-
-        return payload
-
-    def get_refresh_request_headers(self) -> Mapping[str, Any]:
-        encoded_credentials = base64.b64encode(f"{self.client_id}:{self.client_secret}".encode("ascii"))
-        headers: MutableMapping[str, Any] = {"Accept": "application/json", "Authorization": f"Basic {encoded_credentials.decode('utf-8')}"}
-
-        return headers
-
-    def refresh_access_token(self) -> Tuple[str, int]:
-        """
-        Returns a tuple of (access_token, token_lifespan_in_seconds)
-        """
-        try:
-            response = requests.request(
-                method="POST",
-                url=self.token_refresh_endpoint,
-                headers=self.get_refresh_request_headers(),
-                data=self.get_refresh_request_body(),
-            )
-            response.raise_for_status()
-            response_json = response.json()
-            return response_json["access_token"], response_json["expires_in"]
-        except Exception as e:
-            raise Exception(f"Error while refreshing access token: {e}") from e
-
-
-# Source
-class SourceSearchMetrics(AbstractSource):
-    def check_connection(self, logger, config) -> Tuple[bool, Any]:
-        """
-        Tests connection availability for the connector by validating the credentials.
-        """
-        authenticator = SearchMetricsAuthenticator(config)
-
-        try:
-            url = "https://api.searchmetrics.com/v4/AdminStatusGetListProjects.json"
-
-            auth_headers = {"Accept": "application/json", **authenticator.get_auth_header()}
-            response = requests.get(url, headers=auth_headers)
-            response.raise_for_status()
-
-            return True, None
-        except requests.exceptions.RequestException as e:
-            return False, e
-
-    def streams(self, config: Mapping[str, Any]) -> List[Stream]:
-        config["authenticator"] = SearchMetricsAuthenticator(config)
-        return [
-            BenchmarkRankingsS7(config),
-            CompetitorRankingsS7(config),
-            CountDomainKeyword(config),
-            DistributionKeywordsS7(config),
-            KeywordPotentialsS7(config),
-            ListCompetitors(config),
-            ListCompetitorsRelevancy(config),
-            ListLosersS7(config),
-            ListMarketShareS7(config),
-            ListPositionSpreadHistoricS7(config),
-            ListRankingsDomain(config),
-            ListRankingsHistoricS7(config),
-            ListSeoVisibilityCountry(config),
-            ListSeoVisibilityHistoricS7(config),
-            ListSerpSpreadS7(config),
-            ListWinnersS7(config),
-            Projects(config),
-            SeoVisibilityValueS7(config),
-            SerpSpreadValueS7(config),
-            TagPotentialsS7(config),
-            Tags(config),
-            UrlRankingsS7(config),
-        ]
diff --git a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/spec.json b/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/spec.json
deleted file mode 100644
index b7e56d69da53..000000000000
--- a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/spec.json
+++ /dev/null
@@ -1,71 +0,0 @@
-{
-  "documentationUrl": "https://docs.airbyte.com/integrations/sources/seacrh-metrics",
-  "connectionSpecification": {
-    "$schema": "http://json-schema.org/draft-07/schema#",
-    "title": "Source Search Metrics Spec",
-    "type": "object",
-    "required": ["api_key", "client_secret", "country_code", "start_date"],
-    "additionalProperties": true,
-    "properties": {
-      "api_key": {
-        "title": "API Key",
-        "type": "string",
-        "description": "The API key of your SearchMetrics account.",
-        "airbyte_secret": true
-      },
-      "client_secret": {
-        "title": "Client Secret",
-        "type": "string",
-        "description": "The client secret of your SearchMetrics account.",
-        "airbyte_secret": true
-      },
-      "country_code": {
-        "title": "Country Code",
-        "type": "string",
-        "default": "",
-        "description": "The country code used when requesting data from the SearchMetrics API. Leave empty to use the account default.",
-        "enum": [
-          "",
-          "AR",
-          "AU",
-          "AT",
-          "BE",
-          "BR",
-          "CA",
-          "CN",
-          "CO",
-          "DK",
-          "FI",
-          "FR",
-          "DE",
-          "HK",
-          "IN",
-          "IE",
-          "IT",
-          "JP",
-          "MX",
-          "NL",
-          "NO",
-          "PL",
-          "RU",
-          "SG",
-          "ZA",
-          "ES",
-          "SE",
-          "CH",
-          "TR",
-          "US",
-          "GB"
-        ],
-        "order": 2
-      },
-      "start_date": {
-        "title": "Start Date",
-        "type": "string",
-        "description": "Data generated in SearchMetrics after this date will be replicated. This date must be specified in the format YYYYMMDD.",
-        "examples": ["20200925"],
-        "pattern": "^[0-9]{4}[0-9]{2}[0-9]{2}$"
-      }
-    }
-  }
-}
diff --git a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/utils.py b/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/utils.py
deleted file mode 100644
index c5fd187fba73..000000000000
--- a/airbyte-integrations/connectors/source-search-metrics/source_search_metrics/utils.py
+++ /dev/null
@@ -1,14 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
-
-
-from datetime import datetime
-
-
-def to_datetime_str(date: datetime) -> str:
-    """
-    Returns the formatted date string.
-    Output example: '20210715' (format: "%Y%m%d")
-    """
-    return date.strftime("%Y%m%d")
diff --git a/airbyte-integrations/connectors/source-search-metrics/unit_tests/unit_test.py b/airbyte-integrations/connectors/source-search-metrics/unit_tests/unit_test.py
deleted file mode 100644
index 5bda1c50d4ad..000000000000
--- a/airbyte-integrations/connectors/source-search-metrics/unit_tests/unit_test.py
+++ /dev/null
@@ -1,8 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
-
-
-def test_example():
-    """Example of a unit test"""
-    pass
diff --git a/airbyte-integrations/connectors/source-talkdesk-explore/.dockerignore b/airbyte-integrations/connectors/source-talkdesk-explore/.dockerignore
deleted file mode 100644
index 43a6f7f639b6..000000000000
--- a/airbyte-integrations/connectors/source-talkdesk-explore/.dockerignore
+++ /dev/null
@@ -1,6 +0,0 @@
-*
-!Dockerfile
-!main.py
-!source_talkdesk_explore
-!setup.py
-!secrets
diff --git a/airbyte-integrations/connectors/source-talkdesk-explore/Dockerfile b/airbyte-integrations/connectors/source-talkdesk-explore/Dockerfile
deleted file mode 100644
index 8ebe39cc3a5d..000000000000
--- a/airbyte-integrations/connectors/source-talkdesk-explore/Dockerfile
+++ /dev/null
@@ -1,38 +0,0 @@
-FROM python:3.7.11-alpine3.14 as base
-
-# build and load all requirements
-FROM base as builder
-WORKDIR /airbyte/integration_code
-
-# upgrade pip to the latest version
-RUN apk --no-cache upgrade \
-    && pip install --upgrade pip \
-    && apk --no-cache add tzdata build-base
-
-
-COPY setup.py ./
-# install necessary packages to a temporary folder
-RUN pip install --prefix=/install .
- -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# bash is installed for more convenient debugging. -RUN apk --no-cache add bash - -# copy payload code only -COPY main.py ./ -COPY source_talkdesk_explore ./source_talkdesk_explore - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=0.1.0 -LABEL io.airbyte.name=airbyte/source-talkdesk-explore diff --git a/airbyte-integrations/connectors/source-talkdesk-explore/README.md b/airbyte-integrations/connectors/source-talkdesk-explore/README.md deleted file mode 100644 index 8f8fa650ed71..000000000000 --- a/airbyte-integrations/connectors/source-talkdesk-explore/README.md +++ /dev/null @@ -1,99 +0,0 @@ -# Talkdesk-Explore Source - -This is the repository for the Talkdesk source connector, written in Python. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/talkdesk-explore). - -## Local development - -### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** - -#### Minimum Python version required `= 3.7.0` - -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` - -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt -``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. - -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. - -#### Create credentials -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/talkdesk-explore) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_talkdesk_explore/spec.json` file. -Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. - -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source talkdesk-explore test creds` -and place them into `secrets/config.json`. 
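For reference, a `secrets/config.json` for this connector follows the shape sketched below. The field names come from `source_talkdesk_explore/spec.json`; every value shown is a placeholder, not a working credential:

```python
# Illustrative helper: writes a placeholder secrets/config.json whose fields
# mirror source_talkdesk_explore/spec.json. All values are placeholders.
import json
import pathlib

config = {
    "start_date": "2022-01-01T00:00:00",  # format YYYY-MM-DDTHH:MM:SS, per the spec pattern
    "timezone": "UTC",  # any IANA timezone name
    "auth_url": "https://example.talkdeskid.com/oauth/token?grant_type=client_credentials",  # placeholder account URL
    "api_key": "<your-api-key>",
}

pathlib.Path("secrets").mkdir(exist_ok=True)
pathlib.Path("secrets/config.json").write_text(json.dumps(config, indent=2))
```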
-
-### Locally running the connector
-```
-python main.py spec
-python main.py check --config secrets/config.json
-python main.py discover --config secrets/config.json
-python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json
-```
-
-### Locally running the connector docker image
-
-
-#### Build
-**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):**
-```bash
-airbyte-ci connectors --name=source-talkdesk-explore build
-```
-
-An image will be built with the tag `airbyte/source-talkdesk-explore:dev`.
-
-**Via `docker build`:**
-```bash
-docker build -t airbyte/source-talkdesk-explore:dev .
-```
-
-#### Run
-Then run any of the connector commands as follows:
-```
-docker run --rm airbyte/source-talkdesk-explore:dev spec
-docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-talkdesk-explore:dev check --config /secrets/config.json
-docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-talkdesk-explore:dev discover --config /secrets/config.json
-docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-talkdesk-explore:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
-```
-
-## Testing
-You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md):
-```bash
-airbyte-ci connectors --name=source-talkdesk-explore test
-```
-
-### Customizing Acceptance Tests
-Customize the `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information.
-If your connector requires creating or destroying resources during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py` (a sketch of such a fixture follows this README).
-
-## Dependency Management
-All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
-We split dependencies between two groups, dependencies that are:
-* required for your connector to work need to go to the `MAIN_REQUIREMENTS` list.
-* required for testing need to go to the `TEST_REQUIREMENTS` list.
-
-### Publishing a new version of the connector
-You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what?
-1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-talkdesk-explore test`
-2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors).
-3. Make sure the `metadata.yaml` content is up to date.
-4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/sources/talkdesk-explore.md`).
-5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention).
-6. Pat yourself on the back for being an awesome contributor.
-7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
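Where the acceptance-test section above mentions fixtures, a minimal sketch might look like the following. Only the `connector_setup` fixture shape mirrors the shipped `integration_tests/acceptance.py`; the resource being provisioned here is hypothetical:

```python
# Hypothetical example of an acceptance-test fixture that provisions a test
# resource before the suite runs and tears it down afterwards.
import pytest

pytest_plugins = ("connector_acceptance_test.plugin",)


@pytest.fixture(scope="session", autouse=True)
def connector_setup():
    # Set up a (hypothetical) resource the tests depend on.
    resource = {"name": "acceptance-test-resource"}
    yield
    # Tear it down once the suite has finished.
    resource.clear()
```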
-
diff --git a/airbyte-integrations/connectors/source-talkdesk-explore/acceptance-test-config.yml b/airbyte-integrations/connectors/source-talkdesk-explore/acceptance-test-config.yml
deleted file mode 100644
index 59db1e673e25..000000000000
--- a/airbyte-integrations/connectors/source-talkdesk-explore/acceptance-test-config.yml
+++ /dev/null
@@ -1,21 +0,0 @@
-# See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference)
-# for more information about how to configure these tests
-connector_image: airbyte-local/source-talkdesk:tests
-tests:
-  spec:
-    - spec_path: "source_talkdesk_explore/spec.json"
-  connection:
-    - config_path: "secrets/config.json"
-      status: "succeed"
-    - config_path: "integration_tests/invalid_config.json"
-      status: "failed"
-  discovery:
-    - config_path: "secrets/config.json"
-  basic_read:
-    - config_path: "secrets/config.json"
-      configured_catalog_path: "integration_tests/configured_catalog.json"
-      empty_streams: []
-  incremental: # TODO if your connector does not implement incremental sync, remove this block
-    - config_path: "secrets/config.json"
-      configured_catalog_path: "integration_tests/configured_catalog.json"
-      future_state_path: "integration_tests/abnormal_state.json"
diff --git a/airbyte-integrations/connectors/source-talkdesk-explore/bootstrap.md b/airbyte-integrations/connectors/source-talkdesk-explore/bootstrap.md
deleted file mode 100644
index 36307d302a52..000000000000
--- a/airbyte-integrations/connectors/source-talkdesk-explore/bootstrap.md
+++ /dev/null
@@ -1,23 +0,0 @@
-## Streams
-
-The Talkdesk Explore API is focused on delivering data reports, and this connector implements five streams:
-
-* Calls Report
-* User Status Report
-* Studio Flow Execution Report
-* Contacts Report
-* Ring Attempts Report
-
-Please refer to the official documentation for a list of all available reports: https://docs.talkdesk.com/docs/available-report
-
-To request data from one of the endpoints, you first need to generate a report. This is done with a POST request whose payload is the report specification. The response contains a report ID, which you then use in a GET request to obtain the report's data.
-
-This process is further explained here: [Executing a Report](https://docs.talkdesk.com/docs/executing-report)
-
-## Pagination
-
-Neither report generation nor report consumption is paginated.
-
-## Authentication
-
-The only authentication method implemented so far is `Client Credentials`. You can read [here](https://docs.talkdesk.com/docs/authentication) about all the supported authentication methods.
diff --git a/airbyte-integrations/connectors/source-talkdesk-explore/integration_tests/__init__.py b/airbyte-integrations/connectors/source-talkdesk-explore/integration_tests/__init__.py
deleted file mode 100644
index 46b7376756ec..000000000000
--- a/airbyte-integrations/connectors/source-talkdesk-explore/integration_tests/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-#
-# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
-#
diff --git a/airbyte-integrations/connectors/source-talkdesk-explore/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-talkdesk-explore/integration_tests/acceptance.py
deleted file mode 100644
index 43ce950d77ca..000000000000
--- a/airbyte-integrations/connectors/source-talkdesk-explore/integration_tests/acceptance.py
+++ /dev/null
@@ -1,16 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-# - - -import pytest - -pytest_plugins = ("connector_acceptance_test.plugin",) - - -@pytest.fixture(scope="session", autouse=True) -def connector_setup(): - """This fixture is a placeholder for external resources that acceptance test might require.""" - # TODO: setup test dependencies - yield - # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-talkdesk-explore/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-talkdesk-explore/integration_tests/configured_catalog.json deleted file mode 100644 index 81b32913dd16..000000000000 --- a/airbyte-integrations/connectors/source-talkdesk-explore/integration_tests/configured_catalog.json +++ /dev/null @@ -1,76 +0,0 @@ -{ - "streams": [ - { - "stream": { - "name": "calls", - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": {} - }, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["end_at"], - "source_defined_primary_key": [["call_id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["end_at"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "user_status", - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": {} - }, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["status_end_at"], - "source_defined_primary_key": [["user_id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["status_end_at"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "studio_flow_execution", - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": {} - }, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": [ - "studio_flow_executions_aggregated.flow_execution_finished_time" - ], - "source_defined_primary_key": [["flow_id"]] - }, - "sync_mode": "incremental", - "cursor_field": [ - "studio_flow_executions_aggregated.flow_execution_finished_time" - ], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "contacts", - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": {} - }, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["finished_at"], - "source_defined_primary_key": [["contact_id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["finished_at"], - "destination_sync_mode": "append" - } - ] -} diff --git a/airbyte-integrations/connectors/source-talkdesk-explore/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-talkdesk-explore/integration_tests/invalid_config.json deleted file mode 100644 index 3578f03b09e1..000000000000 --- a/airbyte-integrations/connectors/source-talkdesk-explore/integration_tests/invalid_config.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "START_DATE": "2022-02-04T11:00:00", - "TIMEZONE": "Europe/London", - "AUTH_URL": "invalid url", - "API_KEY": "invalid api key" -} diff --git a/airbyte-integrations/connectors/source-talkdesk-explore/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-talkdesk-explore/integration_tests/sample_config.json deleted file mode 100644 index ecc4913b84c7..000000000000 --- 
a/airbyte-integrations/connectors/source-talkdesk-explore/integration_tests/sample_config.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "fix-me": "TODO" -} diff --git a/airbyte-integrations/connectors/source-talkdesk-explore/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-talkdesk-explore/integration_tests/sample_state.json deleted file mode 100644 index 2489355851a2..000000000000 --- a/airbyte-integrations/connectors/source-talkdesk-explore/integration_tests/sample_state.json +++ /dev/null @@ -1,57 +0,0 @@ -[ - { - "type": "STREAM", - "stream": { - "stream_state": { - "end_at": "2099-01-01T00:00:00" - }, - "stream_descriptor": { - "name": "calls" - } - } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { - "status_end_at": "2099-01-01T00:00:00" - }, - "stream_descriptor": { - "name": "user_status" - } - } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { - "studio_flow_executions_aggregated.flow_execution_finished_time": "2099-01-01T00:00:00" - }, - "stream_descriptor": { - "name": "studio_flow_execution" - } - } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { - "finished_at": "2099-01-01T00:00:00" - }, - "stream_descriptor": { - "name": "contacts" - } - } - }, - { - "type": "STREAM", - "stream": { - "stream_state": { - "ring_finished_at_time": "2099-01-01T00:00:00" - }, - "stream_descriptor": { - "name": "ring_attempts" - } - } - } -] diff --git a/airbyte-integrations/connectors/source-talkdesk-explore/main.py b/airbyte-integrations/connectors/source-talkdesk-explore/main.py deleted file mode 100644 index 745a3f67e001..000000000000 --- a/airbyte-integrations/connectors/source-talkdesk-explore/main.py +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from source_talkdesk_explore.run import run - -if __name__ == "__main__": - run() diff --git a/airbyte-integrations/connectors/source-talkdesk-explore/requirements.txt b/airbyte-integrations/connectors/source-talkdesk-explore/requirements.txt deleted file mode 100644 index 7b9114ed5867..000000000000 --- a/airbyte-integrations/connectors/source-talkdesk-explore/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -# This file is autogenerated -- only edit if you know what you are doing. Use setup.py for declaring dependencies. --e . 
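Returning to the report-execution flow described in `bootstrap.md` above, a minimal sketch of the POST-then-GET sequence might look like the following. The endpoint paths, payload fields, and status values are assumptions based on that description, not verified against the Talkdesk API:

```python
# Sketch of Talkdesk Explore's two-step report flow: POST a report
# specification, then GET with the returned report id until the data is
# ready. All endpoint paths and field names below are assumptions.
import time

import requests

BASE_URL = "https://api.talkdeskapp.com"  # assumed base URL
HEADERS = {"Authorization": "Bearer <access-token>"}  # token from the client-credentials flow

# Step 1: POST the report specification; the response carries a report id.
spec = {"format": "json", "timespan": {"from": "2022-01-01T00:00:00", "to": "2022-01-02T00:00:00"}}
created = requests.post(f"{BASE_URL}/data/reports/calls/jobs", json=spec, headers=HEADERS)
created.raise_for_status()
report_id = created.json()["job"]["id"]  # field names are assumptions

# Step 2: GET the report by id, waiting until generation has finished.
while True:
    report = requests.get(f"{BASE_URL}/data/reports/calls/jobs/{report_id}", headers=HEADERS)
    report.raise_for_status()
    body = report.json()
    if body.get("status") != "processing":  # status values are assumptions
        break
    time.sleep(5)

records = body.get("entries", [])  # "entries" is an assumption
```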
diff --git a/airbyte-integrations/connectors/source-talkdesk-explore/sample_files/configured_catalog.json b/airbyte-integrations/connectors/source-talkdesk-explore/sample_files/configured_catalog.json deleted file mode 100644 index cb866003837b..000000000000 --- a/airbyte-integrations/connectors/source-talkdesk-explore/sample_files/configured_catalog.json +++ /dev/null @@ -1,93 +0,0 @@ -{ - "streams": [ - { - "stream": { - "name": "calls", - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": {} - }, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["end_at"], - "source_defined_primary_key": [["call_id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["end_at"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "user_status", - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": {} - }, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["status_end_at"], - "source_defined_primary_key": [["user_id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["status_end_at"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "studio_flow_execution", - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": {} - }, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": [ - "studio_flow_executions_aggregated.flow_execution_finished_time" - ], - "source_defined_primary_key": [["flow_id"]] - }, - "sync_mode": "incremental", - "cursor_field": [ - "studio_flow_executions_aggregated.flow_execution_finished_time" - ], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "contacts", - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": {} - }, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["finished_at"], - "source_defined_primary_key": [["contact_id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["finished_at"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "ring_attempts", - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": {} - }, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["ring_finished_at_time"], - "source_defined_primary_key": [["ring_attempt_id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["ring_finished_at_time"], - "destination_sync_mode": "append" - } - ] -} diff --git a/airbyte-integrations/connectors/source-talkdesk-explore/setup.py b/airbyte-integrations/connectors/source-talkdesk-explore/setup.py deleted file mode 100644 index 1ec623cfb4d5..000000000000 --- a/airbyte-integrations/connectors/source-talkdesk-explore/setup.py +++ /dev/null @@ -1,46 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk~=0.1", -] - -TEST_REQUIREMENTS = [ - "requests-mock~=1.9.3", - "pytest-mock~=3.6.1", - "pytest~=6.1", -] - -setup( - entry_points={ - "console_scripts": [ - "source-talkdesk-explore=source_talkdesk_explore.run:run", - ], - }, - name="source_talkdesk_explore", - description="Source implementation for Talkdesk Explore API.", - author="Airbyte", - author_email="alexandre.martins@saltpay.co", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={ - "": [ - # Include yaml files in the package (if any) - "*.yml", - "*.yaml", - # Include all json files in the package, up to 4 levels deep - "*.json", - "*/*.json", - "*/*/*.json", - "*/*/*/*.json", - "*/*/*/*/*.json", - ] - }, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/__init__.py b/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/__init__.py deleted file mode 100644 index 6390c93aeed9..000000000000 --- a/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright (c) 2021 Airbyte, Inc., all rights reserved. -# - - -from .source import SourceTalkdeskExplore - -__all__ = ["SourceTalkdeskExplore"] diff --git a/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/run.py b/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/run.py deleted file mode 100644 index 442b84e5a278..000000000000 --- a/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/run.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import sys - -from airbyte_cdk.entrypoint import launch -from source_talkdesk_explore import SourceTalkdeskExplore - - -def run(): - source = SourceTalkdeskExplore() - launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/schemas/calls.json b/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/schemas/calls.json deleted file mode 100644 index c923ef4ce622..000000000000 --- a/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/schemas/calls.json +++ /dev/null @@ -1,162 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "title": "Calls report", - "description": "Includes all calls that have been made / received during a period of time, as well as detailed information about each one: time, duration, type, agent, call disposition, description, recording, waiting times, etc.", - "properties": { - "call_id": { - "type": "string", - "description": "The call's unique identification number." - }, - "callsid": { - "type": "string", - "description": "The call's unique identification number." - }, - "type": { - "type": "string", - "description": "The call type (i.e., outbound, outbound_missed, inbound, missed, abandoned, short_abandoned or voicemail)." 
- }, - "start_at": { - "type": "string", - "format": "date-time", - "examples": ["2016-01-29T10:22:02Z"] - }, - "end_at": { - "type": "string", - "format": "date-time", - "examples": ["2016-01-29T10:22:02Z"] - }, - "talkdesk_phone_number": { - "type": "string", - "examples": ["+184432634343"] - }, - "talkdesk_phone_display_name": { - "type": ["string", "null"], - "examples": ["support"] - }, - "contact_phone_number": { - "type": "string", - "examples": ["+3434345345"] - }, - "user_id": { - "type": ["string", "null"], - "examples": ["asdf1492daf3xa32423"] - }, - "user_name": { - "type": ["string", "null"], - "examples": ["Alice Eve"] - }, - "user_email": { - "type": ["string", "null"], - "examples": ["alice@talkdesk.com"] - }, - "total_time": { - "type": "integer", - "examples": [12] - }, - "talk_time": { - "type": "integer", - "examples": [10] - }, - "wait_time": { - "type": "integer", - "examples": [2] - }, - "hold_time": { - "type": "integer", - "examples": [0] - }, - "abandon_time": { - "type": "integer", - "examples": [0] - }, - "total_ringing_time": { - "type": "integer", - "examples": [2] - }, - "disposition_code": { - "type": ["string", "null"], - "examples": ["Not Interested"] - }, - "notes": { - "type": ["string", "null"], - "examples": ["They were not interested"] - }, - "user_voice_rating": { - "type": ["integer", "null"], - "examples": [2] - }, - "ring_groups": { - "type": "string", - "examples": ["Product Support"] - }, - "ivr_options": { - "type": "string", - "examples": ["1,2,1"] - }, - "is_in_business_hours": { - "type": "boolean", - "examples": [true] - }, - "is_callback_from_queue": { - "type": "boolean", - "examples": [null] - }, - "is_call_forwarding": { - "type": "boolean", - "examples": [false] - }, - "is_if_no_answer": { - "type": "boolean", - "examples": [false] - }, - "is_transfer": { - "type": "boolean", - "examples": [false] - }, - "is_external_transfer": { - "type": "boolean", - "examples": [false] - }, - "handling_user_name": { - "type": ["string", "null"], - "examples": ["Mark Jones"] - }, - "handling_user_id": { - "type": ["string", "null"], - "examples": ["asdf1492daf3xa32424"] - }, - "handling_user_email": { - "type": ["string", "null"], - "examples": ["mark@talkdesk.com"] - }, - "recording_url": { - "type": ["string", "null"], - "examples": ["/recordings/cads13d23123asdfa413"] - }, - "csat_score": { - "type": ["integer", "null"], - "examples": [1] - }, - "csat_survey_time": { - "type": ["string", "null"], - "examples": ["2016-01-29T10:22:16Z"] - }, - "mood": { - "type": "string", - "examples": ["happy"] - }, - "is_mood_prompted": { - "type": ["boolean", "null"], - "examples": [true] - }, - "team_id": { - "type": ["string", "null"], - "examples": ["69bd4bed944f4de99cb79736f2ca15b1"] - }, - "team_name": { - "type": ["string", "null"], - "examples": ["The A-Team"] - } - } -} diff --git a/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/schemas/contacts.json b/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/schemas/contacts.json deleted file mode 100644 index 541cbf48d7cb..000000000000 --- a/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/schemas/contacts.json +++ /dev/null @@ -1,180 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "title": "Contacts", - "description": "The Contacts report provides an extra level of granularity with details on each contact within an interaction.", - "properties": { - "interaction_id": { - 
"type": "string", - "examples": ["9a7c39081c3a4606b82462a092a6c2e1"] - }, - "contact_id": { - "type": "string", - "examples": ["a1ea1e41d2cb1d85d184fc0c1f9c1462"] - }, - "company_number": { - "type": "string", - "examples": ["+345345345"] - }, - "phone_display_name": { - "type": ["string", "null"], - "examples": ["Phone neame - Caller"] - }, - "contact_person_number": { - "type": "string", - "examples": ["+4343434"] - }, - "external_phone_number": { - "type": ["string", "null"], - "examples": ["None"] - }, - "direction": { - "type": "string", - "examples": ["IN"] - }, - "contact_type": { - "type": "string", - "examples": ["Answered"] - }, - "started_at": { - "type": "string", - "format": "date-time", - "examples": ["2022-01-19 09:59:42"] - }, - "wait_time": { - "type": ["integer", "null"], - "examples": [29] - }, - "time_to_missed": { - "type": ["integer", "null"], - "examples": [0] - }, - "abandon_time": { - "type": ["integer", "null"], - "examples": [0] - }, - "short_abandon_time": { - "type": ["integer", "null"], - "examples": [0] - }, - "time_to_voicemail": { - "type": ["integer", "null"], - "examples": [0] - }, - "ring_time": { - "type": ["integer", "null"], - "examples": [27] - }, - "connect_time": { - "type": ["integer", "null"], - "examples": [0] - }, - "answered_at": { - "type": ["string", "null"], - "format": "date-time", - "examples": ["2022-01-19 10:00:12"] - }, - "connected_at": { - "type": ["string", "null"], - "format": "date-time", - "examples": ["2022-01-19 10:00:12"] - }, - "talk_time": { - "type": ["integer", "null"], - "examples": [268] - }, - "hard_hold_time": { - "type": ["integer", "null"], - "examples": [0] - }, - "soft_hold_time": { - "type": ["integer", "null"], - "examples": [0] - }, - "hold_time": { - "type": ["integer", "null"], - "examples": [0] - }, - "after_call_work_time": { - "type": ["integer", "null"], - "examples": [30] - }, - "finished_at": { - "type": ["string", "null"], - "format": "date-time", - "examples": ["2022-01-19 10:05:11"] - }, - "duration": { - "type": ["integer", "null"], - "examples": [328] - }, - "handle_time": { - "type": ["integer", "null"], - "examples": [228] - }, - "inside_business_hours": { - "type": "string", - "examples": ["Yes"] - }, - "inside_service_level": { - "type": "string", - "examples": ["Yes"] - }, - "within_service_level_threshold": { - "type": "string", - "examples": ["Yes"] - }, - "transfer_out": { - "type": "string", - "examples": ["No"] - }, - "transfer_out_type": { - "type": ["string", "null"], - "examples": ["None"] - }, - "transfer_in": { - "type": ["string", "null"], - "examples": ["No"] - }, - "transfer_in_type": { - "type": ["string", "null"], - "examples": ["None"] - }, - "callback": { - "type": ["string", "null"], - "examples": ["None"] - }, - "ring_groups": { - "type": ["string", "null"], - "examples": ["cs_storyous_cz"] - }, - "user_name": { - "type": ["string", "null"], - "examples": ["The User"] - }, - "direct_assignment_user": { - "type": ["string", "null"], - "examples": ["None"] - }, - "team_name": { - "type": ["string", "null"], - "examples": ["None"] - }, - "user_id": { - "type": ["string", "null"], - "examples": ["618255e0fba4741asad4fsdf"] - }, - "direct_assignment_ids": { - "type": ["string", "null"], - "examples": ["None"] - }, - "team_id": { - "type": ["string", "null"], - "examples": ["None"] - }, - "handling_ring_groups": { - "type": ["string", "null"], - "examples": ["awesome_ring_group"] - } - } -} diff --git 
a/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/schemas/ring_attempts.json b/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/schemas/ring_attempts.json deleted file mode 100644 index 4aa7ef007ddf..000000000000 --- a/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/schemas/ring_attempts.json +++ /dev/null @@ -1,54 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "title": "Ring Attempts", - "description": "The Ring Attempts report provides detailed information regarding ring attempts.", - "properties": { - "interaction_id": { - "type": "string", - "examples": ["a1428598ddce4554af93d7b163a67d2f"] - }, - "contact_id": { - "type": "string", - "examples": ["2a003e24939d3523a54ddc028c005672"] - }, - "batch_ring_id": { - "type": "string", - "examples": ["520dbf1887a44ce095ec52467abf799e"] - }, - "ring_attempt_id": { - "type": "string", - "examples": ["1ca68b85ae9c4f84d3630310f25c98b44510021f"] - }, - "attempt_type": { - "type": ["null", "string"], - "examples": ["Ignored"] - }, - "ring_started_at_time": { - "type": ["null", "string"], - "format": "date-time", - "examples": ["2022-01-19 09:54:52"] - }, - "ring_finished_at_time": { - "type": ["null", "string"], - "format": "date-time", - "examples": ["2022-01-19 09:54:52"] - }, - "ring_attempt_duration": { - "type": ["null", "integer"], - "examples": [20] - }, - "user_name": { - "type": ["null", "string"], - "examples": ["The Unicorn"] - }, - "user_email": { - "type": ["null", "string"], - "examples": ["unicorn@talkdesk.com"] - }, - "team_name": { - "type": ["null", "string"], - "examples": ["None"] - } - } -} diff --git a/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/schemas/studio_flow_execution.json b/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/schemas/studio_flow_execution.json deleted file mode 100644 index 62aed95cd1fd..000000000000 --- a/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/schemas/studio_flow_execution.json +++ /dev/null @@ -1,62 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "title": "Studio Flow Execution", - "description": "The Studio Flow Execution is a detailed report that contains, at the step level, all the information related to the interaction.", - "properties": { - "call_sid": { - "type": "string", - "examples": ["CAe0280e1d972d19a6469bd50cc4bdb40a"] - }, - "interaction_id": { - "type": "string", - "examples": ["7ba953869d144d92b18e82d7d857ec43"] - }, - "destination_number": { - "type": "string", - "examples": ["+4545342444"] - }, - "origin_number": { - "type": "string", - "examples": ["+23423234234"] - }, - "flow_name": { - "type": "string", - "examples": ["My unique Flow"] - }, - "flow_id": { - "type": "string", - "examples": ["1344e9fa793349adbf1df3a625ee94ed"] - }, - "component_title": { - "type": "string", - "examples": ["Calls"] - }, - "step_name": { - "type": "string", - "examples": ["Talkdesk step"] - }, - "exit": { - "type": "string", - "examples": ["ok"] - }, - "time_in_step": { - "type": "number", - "examples": [6.712963e-7] - }, - "timestamp": { - "type": "string", - "format": "date-time", - "examples": ["2022-01-19 09:57:46"] - }, - "studio_flow_executions_aggregated.flow_execution_finished_time": { - "type": "string", - "format": "date-time", - "examples": ["2022-01-19 09:57:54"] - }, - "step_execution_order": { - "type": 
"integer", - "examples": [0] - } - } -} diff --git a/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/schemas/user_status.json b/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/schemas/user_status.json deleted file mode 100644 index ed947b685478..000000000000 --- a/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/schemas/user_status.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "title": "User status", - "description": "The User Status report provides a raw log of agent status changes occurring in Talkdesk.", - "properties": { - "user_id": { - "type": "string", - "examples": ["13da-3ccd-3423"] - }, - "user_name": { - "type": "string", - "examples": ["Foo bar"] - }, - "user_email": { - "type": "string", - "examples": ["robert@talkdesk.com"] - }, - "status_label": { - "type": "string", - "examples": ["Away"] - }, - "status_start_at": { - "type": "string", - "format": "date-time", - "examples": ["2016-01-29 10:30:00"] - }, - "status_end_at": { - "type": "string", - "format": "date-time", - "examples": ["2016-01-29 10:30:14"] - }, - "status_time": { - "type": "integer", - "examples": [14] - }, - "is_user_active": { - "type": "boolean", - "examples": [true] - }, - "team_id": { - "type": ["string", "null"], - "examples": ["69bd4bed944f4de99cb79736f2ca15b1"] - }, - "team_name": { - "type": ["string", "null"], - "examples": ["The A-Team"] - } - } -} diff --git a/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/source.py b/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/source.py deleted file mode 100644 index be60740dbaad..000000000000 --- a/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/source.py +++ /dev/null @@ -1,50 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from typing import Any, List, Mapping, Tuple - -from airbyte_cdk.logger import AirbyteLogger -from airbyte_cdk.sources import AbstractSource -from airbyte_cdk.sources.streams import Stream -from airbyte_cdk.sources.streams.http.auth import TokenAuthenticator - -from .streams import Calls, Contacts, RingAttempts, StudioFlowExecution, UserStatus -from .talkdesk_auth import TalkdeskAuth - - -class SourceTalkdeskExplore(AbstractSource): - def check_connection(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> Tuple[bool, Any]: - talkdesk_auth = TalkdeskAuth(config) - token_request = talkdesk_auth.request_bearer_token() - - # Check for valid token and scope - if "access_token" not in token_request.keys(): - return False, "Unable to retrieve access token. Check your credentials." - elif "data-reports:read" and "data-reports:write" not in token_request["scope"]: - return ( - False, - "Provided credential does not have necessary privileges to read data. 
Required scope: data-reports:read AND data-reports:write", - ) - else: - return True, None - - def streams(self, config: Mapping[str, Any]) -> List[Stream]: - talkdesk_auth = TalkdeskAuth(config) - token_request = talkdesk_auth.request_bearer_token() - talkdesk_auth_token = token_request.get("access_token", None) - - authenticator = TokenAuthenticator(token=talkdesk_auth_token) - - start_date = config.get("start_date", None) - timezone = config.get("timezone", None) - - streams_ = [ - Calls(start_date=start_date, timezone=timezone, authenticator=authenticator), - UserStatus(start_date=start_date, timezone=timezone, authenticator=authenticator), - StudioFlowExecution(start_date=start_date, timezone=timezone, authenticator=authenticator), - Contacts(start_date=start_date, timezone=timezone, authenticator=authenticator), - RingAttempts(start_date=start_date, timezone=timezone, authenticator=authenticator), - ] - - return streams_ diff --git a/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/spec.json b/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/spec.json deleted file mode 100644 index 281ebf9fad24..000000000000 --- a/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/spec.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "documentationUrl": "https://docsurl.com", - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Talkdesk Explore API Spec", - "type": "object", - "required": ["start_date", "auth_url", "api_key"], - "additionalProperties": false, - "properties": { - "start_date": { - "type": "string", - "title": "START DATE", - "description": "The date from which you'd like to replicate data for Talkdesk Explore API, in the format YYYY-MM-DDT00:00:00. All data generated after this date will be replicated.", - "examples": ["2020-10-15T00:00:00"], - "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$", - "order": 0 - }, - "timezone": { - "type": "string", - "title": "TIMEZONE", - "description": "Timezone to use when generating reports. Only IANA timezones are supported (https://nodatime.org/TimeZones)", - "examples": ["Europe/London", "America/Los_Angeles"], - "default": "UTC", - "order": 1 - }, - "auth_url": { - "title": "AUTH URL", - "type": "string", - "description": "Talkdesk Auth URL. Only 'client_credentials' auth type supported at the moment.", - "examples": [ - "https://xxxxxx.talkdeskid.com/oauth/token?grant_type=client_credentials" - ], - "order": 2 - }, - "api_key": { - "title": "API KEY", - "type": "string", - "description": "Talkdesk API key.", - "order": 3 - } - } - } -} diff --git a/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/streams.py b/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/streams.py deleted file mode 100644 index 8999e04ab754..000000000000 --- a/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/streams.py +++ /dev/null @@ -1,248 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - -import json -from abc import abstractmethod -from datetime import datetime -from typing import Any, Iterable, Mapping, MutableMapping, Optional - -import requests -from airbyte_cdk.logger import AirbyteLogger -from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources.streams.http import HttpStream - -logger = AirbyteLogger() - - -class GenerateReportStream(HttpStream): - """This stream is specifically for generating the report in Talkdesk. - - HTTP method: POST - - Returns: ID of the generated report - - """ - - primary_key = None - - def __init__(self, base_path, start_date, timezone, **kwargs): - super().__init__(**kwargs) - self.base_path = base_path - self.start_date = start_date - self.timezone = timezone - - @property - def url_base(self) -> str: - return "https://api.talkdeskapp.com/data/" - - @property - def http_method(self) -> str: - return "POST" - - def path(self, **kwargs) -> str: - return self.base_path - - def request_body_json( - self, - stream_state: Mapping[str, Any], - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None, - ) -> Optional[Mapping]: - now = datetime.now().strftime("%Y-%m-%dT%H:%M:%S") - logger.info(f"Generating {self.base_path} report from '{self.start_date}' to '{now}'") - return { - "format": "json", - "timespan": { - "from": self.start_date, - "to": now, - "timezone": self.timezone, - }, - } - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - return None - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - response_json = response.json() - id_obj = {"id": response_json["job"]["id"]} - logger.info(f"Generated report with ID '{id_obj['id']}'") - - return [id_obj] - - -class ReadReportStream(HttpStream): - primary_key = None - - def __init__(self, start_date, timezone, **kwargs): - super().__init__(**kwargs) - self.start_date = start_date - self.timezone = timezone - - @property - def url_base(self) -> str: - return "https://api.talkdeskapp.com/data/" - - def path(self, **kwargs) -> str: - latest_state = kwargs.get("stream_state").get(self.cursor_field, None) - - if not latest_state: - latest_state = self.start_date - - # Check and set latest_state to necessary date-time format - try: - datetime.strptime(latest_state, "%Y-%m-%dT%H:%M:%S") - except ValueError: - try: - datetime.strptime(latest_state, "%Y-%m-%d %H:%M:%S") - latest_state = latest_state.replace(" ", "T") - except ValueError: - logger.error("stream_state is in unhandled date-time format. Required format: %Y-%m-%dT%H:%M:%S") - - generate_report = GenerateReportStream( - base_path=self.base_path, start_date=latest_state, timezone=self.timezone, authenticator=self.authenticator - ) - report_id = next(generate_report.read_records(SyncMode.full_refresh)) - - return self.base_path + f"/{report_id['id']}" - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - return None - - def should_retry(self, response: requests.Response) -> bool: - """ - Retry conditions: - 1. By default, back off on the following HTTP response statuses: - - 429 (Too Many Requests) indicating rate limiting - - 500s to handle transient server errors - - Unexpected but transient exceptions (connection timeout, DNS resolution failed, etc..) are retried by default. - 2. 
When the report is requested but is not ready to be fetched: - - In that case, the response will have the following format: - ``` - {"job": {"id": "369f88a5-d5a3-42c6-a135-8aec4215553e", "name": "Calls", - "created_at": "2022-01-13T10:17:15", "status": "processing", "type": "calls", "format": "json", ...}} - ``` - The retry function will be looking for a response in this format with 'status' different than 'completed'. - Please refer to the docs to read more about executing a report: https://docs.talkdesk.com/docs/executing-report. - - """ - if response.status_code == 429 or 500 <= response.status_code < 600: - return True - else: - response_obj = response.json() - try: - report_status = response_obj["job"]["status"] - if report_status != "completed": - logger.info("Requested report is in uncompleted status. Waiting for it to be completed...") - return True - else: - return False - except KeyError: - # Report failures - if response.status_code in [400, 401, 403]: - logger.error(f"Report returned an invalid response: {json.dumps(response_obj)}") - raise ValueError("Requested report is in invalid/failed state.") - # TODO: implement handling of other response types here. - return False - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - response_json = response.json() - try: - yield from response_json["entries"] - except KeyError: - logger.warn("No entries found in requested report. Setting it to null.") - yield from [] - - -class IncrementalReadReportStream(ReadReportStream): - """ - Incremental append for the ReadReportStream. This class introduces the 'cursor_field' - and 'get_updated_state' methods. - - """ - - @property - @abstractmethod - def cursor_field(self) -> str: - """ - Defining a cursor field indicates that a stream is incremental, so any incremental stream must extend this class - and define a cursor field. - """ - pass - - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: - """ - Return the latest state by comparing the cursor value in the latest record with the stream's most recent state object - and returning an updated state object. 
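- For example, with cursor_field "end_at", a stored state of {"end_at": "2022-01-18 00:00:00"} and a latest record containing {"end_at": "2022-01-19 10:05:11"} yield {"end_at": "2022-01-19 10:05:11"}; max() compares these fixed-format timestamp strings lexicographically, which matches chronological order.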
- """ - latest_state = latest_record.get(self.cursor_field) - current_state = current_stream_state.get(self.cursor_field) or latest_state - return {self.cursor_field: max(latest_state, current_state)} - - -class Calls(IncrementalReadReportStream): - @property - def primary_key(self) -> str: - return "call_id" - - @property - def base_path(self) -> str: - return "reports/calls/jobs" - - @property - def cursor_field(self) -> str: - return "end_at" - - -class UserStatus(IncrementalReadReportStream): - @property - def primary_key(self) -> str: - return "user_id" - - @property - def base_path(self) -> str: - return "reports/user_status/jobs" - - @property - def cursor_field(self) -> str: - return "status_end_at" - - -class StudioFlowExecution(IncrementalReadReportStream): - @property - def primary_key(self) -> str: - return "flow_id" - - @property - def base_path(self) -> str: - return "reports/studio_flow_execution/jobs" - - @property - def cursor_field(self) -> str: - return "studio_flow_executions_aggregated.flow_execution_finished_time" - - -class Contacts(IncrementalReadReportStream): - @property - def primary_key(self) -> str: - return "contact_id" - - @property - def base_path(self) -> str: - return "reports/contacts/jobs" - - @property - def cursor_field(self) -> str: - return "finished_at" - - -class RingAttempts(IncrementalReadReportStream): - @property - def primary_key(self) -> str: - return "ring_attempt_id" - - @property - def base_path(self) -> str: - return "reports/ring_attempts/jobs" - - @property - def cursor_field(self) -> str: - return "ring_finished_at_time" diff --git a/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/talkdesk_auth.py b/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/talkdesk_auth.py deleted file mode 100644 index fb85db9da64a..000000000000 --- a/airbyte-integrations/connectors/source-talkdesk-explore/source_talkdesk_explore/talkdesk_auth.py +++ /dev/null @@ -1,44 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import base64 -from typing import Any, Dict, Mapping - -import requests - - -class TalkdeskAuth: - """Main class for handling Talkdesk Authentication. - Only 'client_credentials' auth method supported at the moment. - - # TODO: Implement 'Signed JWT' and 'Authorization Code' auth methods. - - Docs: https://docs.talkdesk.com/docs/authentication - - """ - - def __init__(self, config: Mapping[str, Any]): - self.api_key = config.get("api_key", None) - self.auth_url = config.get("auth_url", None) - - def _encode_key(self, key: str) -> bytes: - """Encode 'str' API key to bytes""" - base64_bytes = base64.b64encode(key.encode("ascii")) - return base64_bytes.decode("ascii") - - def request_bearer_token(self) -> Dict: - headers = { - "Authorization": f"Basic {self._encode_key(self.api_key)}", - "Content-Type": "application/x-www-form-urlencoded", - } - try: - response = requests.request( - "POST", - url=self.auth_url, - headers=headers, - ) - except Exception as exc: - raise exc - - return response.json() diff --git a/airbyte-integrations/connectors/source-talkdesk-explore/unit_tests/unit_test.py b/airbyte-integrations/connectors/source-talkdesk-explore/unit_tests/unit_test.py deleted file mode 100644 index 219ae0142c72..000000000000 --- a/airbyte-integrations/connectors/source-talkdesk-explore/unit_tests/unit_test.py +++ /dev/null @@ -1,7 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - - -def test_example_method(): - assert True diff --git a/airbyte-integrations/connectors/source-zuora/.dockerignore b/airbyte-integrations/connectors/source-zuora/.dockerignore deleted file mode 100644 index cc1164ce1bf8..000000000000 --- a/airbyte-integrations/connectors/source-zuora/.dockerignore +++ /dev/null @@ -1,7 +0,0 @@ -* -!Dockerfile -!Dockerfile.test -!main.py -!source_zuora -!setup.py -!secrets diff --git a/airbyte-integrations/connectors/source-zuora/BOOTSTRAP.md b/airbyte-integrations/connectors/source-zuora/BOOTSTRAP.md deleted file mode 100644 index 71ac8829810c..000000000000 --- a/airbyte-integrations/connectors/source-zuora/BOOTSTRAP.md +++ /dev/null @@ -1,19 +0,0 @@ -# Zuora - -SOAP API docs (more info on queries with ZOQL) are [here](https://knowledgecenter.zuora.com/Central_Platform/API/G_SOAP_API). - -REST API docs are [here](https://www.zuora.com/developer/api-reference/). - -The Zuora API exposes a SQL-like interface ([ZOQL](https://knowledgecenter.zuora.com/Central_Platform/Query/ZOQL)) for customers to pull their data on subscriptions, users, transactions, etc. An example of a query may be: -select * from account where updateddate >= TIMESTAMP - -Zuora has [various base endpoints](https://www.zuora.com/developer/api-reference/#section/Introduction/Access-to-the-API), differentiated by production/sandbox, US/EU, etc. - -It operates on a POST-check-GET mechanism: the ZOQL query is first sent in an initial POST request; the id of that request can then be polled to check status, and the result is consumed with another request once the job has completed (a minimal sketch of this flow appears after the connector README below). - -The information about all streams can be pulled dynamically using the ZOQL queries SHOW TABLES and DESCRIBE {table}. - -Auth = OAuth2 client_credentials: the token request sends "grant_type" set to "client_credentials" in the form-encoded request body (not a header), and no refresh token is issued - - -See [this](https://docs.airbyte.io/integrations/sources/zuora) link for the nuances about the connector. diff --git a/airbyte-integrations/connectors/source-zuora/Dockerfile b/airbyte-integrations/connectors/source-zuora/Dockerfile deleted file mode 100644 index e7925278a3db..000000000000 --- a/airbyte-integrations/connectors/source-zuora/Dockerfile +++ /dev/null @@ -1,37 +0,0 @@ -FROM python:3.9.11-alpine3.15 as base - -# build and load all requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -# upgrade pip to the latest version -RUN apk --no-cache upgrade \ - && pip install --upgrade pip \ - && apk --no-cache add tzdata build-base - -COPY setup.py ./ -# install necessary packages to a temporary folder -RUN pip install --prefix=/install . - -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# bash is installed for more convenient debugging.
-RUN apk --no-cache add bash - -# copy payload code only -COPY main.py ./ -COPY source_zuora ./source_zuora - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=0.1.3 -LABEL io.airbyte.name=airbyte/source-zuora \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-zuora/README.md b/airbyte-integrations/connectors/source-zuora/README.md deleted file mode 100644 index e0e56908c9d3..000000000000 --- a/airbyte-integrations/connectors/source-zuora/README.md +++ /dev/null @@ -1,100 +0,0 @@ -# Zuora Source - -This is the repository for the Zuora source connector, written in Python. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/zuora). - -## Local development - -### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** - -#### Minimum Python version required `= 3.7.0` - -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python3 -m venv .venv -``` - -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt -``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. - -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. - -#### Create credentials -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/zuora) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_zuora/spec.json` file. -Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. - -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source zuora test creds` -and place them into `secrets/config.json`. - -### Locally running the connector -``` -python main.py spec -python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json -``` - -### Locally running the connector docker image - - -#### Build -**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** -```bash -airbyte-ci connectors --name=source-zuora build -``` - -An image will be built with the tag `airbyte/source-zuora:dev`. - -**Via `docker build`:** -```bash -docker build -t airbyte/source-zuora:dev . 
-``` - -#### Run -Then run any of the connector commands as follows: -``` -docker run --rm airbyte/source-zuora:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-zuora:dev check --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-zuora:dev discover --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-zuora:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` - - -## Testing -You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): -```bash -airbyte-ci connectors --name=source-zuora test -``` - -### Customizing Acceptance Tests -Customize the `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. -If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py. - -## Dependency Management -All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. -We split dependencies into two groups: -* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list. -* dependencies required for testing go in the `TEST_REQUIREMENTS` list - -### Publishing a new version of the connector -You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? -1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-zuora test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). -3. Make sure the `metadata.yaml` content is up to date. -4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/sources/zuora.md`). -5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). -6. Pat yourself on the back for being an awesome contributor. -7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
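The README above asks for a `secrets/config.json` conforming to `source_zuora/spec.json`. A minimal sketch of that file, assuming the same fields that appear in `integration_tests/invalid_config.json` further below (the credential values are placeholders):

```json
{
  "start_date": "2020-01-01",
  "window_in_days": "30",
  "tenant_endpoint": "US Cloud API Sandbox",
  "data_query": "Live",
  "client_id": "<your-oauth-client-id>",
  "client_secret": "<your-oauth-client-secret>"
}
```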
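The POST-check-GET mechanism described in `BOOTSTRAP.md` above can be sketched with plain `requests`. The endpoint paths and response fields used here (`/query/jobs`, `queryStatus`, `dataFile`) follow Zuora's public Data Query API documentation rather than this connector's code, so treat them as assumptions:

```python
# Minimal sketch of Zuora's POST-check-GET query flow, assuming the public
# Data Query API shape: POST /query/jobs, poll GET /query/jobs/{id}, then
# download the produced data file. Base URL, statuses, and field names are
# assumptions taken from Zuora's public docs.
import time

import requests

BASE = "https://rest.apisandbox.zuora.com"  # assumed sandbox base endpoint


def run_zoql_query(token: str, zoql: str) -> list:
    headers = {"Authorization": f"Bearer {token}"}
    # 1) POST: submit the ZOQL query as an asynchronous job
    job = requests.post(
        f"{BASE}/query/jobs",
        json={"query": zoql, "outputFormat": "JSON", "compression": "NONE"},
        headers=headers,
    ).json()["data"]
    # 2) check: poll the job id until it reaches a terminal status
    while job["queryStatus"] not in ("completed", "failed", "cancelled"):
        time.sleep(5)
        job = requests.get(f"{BASE}/query/jobs/{job['id']}", headers=headers).json()["data"]
    if job["queryStatus"] != "completed":
        raise RuntimeError(f"Query job ended in state {job['queryStatus']}")
    # 3) GET: download the jsonl data file the job produced
    return requests.get(job["dataFile"]).text.splitlines()
```

This mirrors, in plain-function form, what the connector's `ZuoraSubmitJob`, `ZuoraJobStatusCheck`, and `ZuoraGetJobResult` stream classes (exercised by the integration tests below) do.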
- diff --git a/airbyte-integrations/connectors/source-zuora/acceptance-test-config.yml b/airbyte-integrations/connectors/source-zuora/acceptance-test-config.yml deleted file mode 100644 index d833a920c49e..000000000000 --- a/airbyte-integrations/connectors/source-zuora/acceptance-test-config.yml +++ /dev/null @@ -1,27 +0,0 @@ -# See [Connector Acceptance Tests](https://docs.airbyte.com/contributing-to-airbyte/building-new-connector/connector-acceptance-tests) -# for more information about how to configure these tests -connector_image: airbyte/source-zuora:dev -tests: - spec: - - spec_path: "source_zuora/spec.json" - connection: - - config_path: "secrets/config.json" - status: "succeed" - - config_path: "integration_tests/invalid_config.json" - status: "failed" - discovery: - - config_path: "secrets/config.json" - timeout_seconds: 3600 - basic_read: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" - timeout_seconds: 3600 - full_refresh: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" - timeout_seconds: 3600 - incremental: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" - future_state_path: "integration_tests/abnormal_state.json" - timeout_seconds: 3600 diff --git a/airbyte-integrations/connectors/source-zuora/integration_tests/__init__.py b/airbyte-integrations/connectors/source-zuora/integration_tests/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/airbyte-integrations/connectors/source-zuora/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-zuora/integration_tests/abnormal_state.json deleted file mode 100644 index 934e0ce55cf4..000000000000 --- a/airbyte-integrations/connectors/source-zuora/integration_tests/abnormal_state.json +++ /dev/null @@ -1,173 +0,0 @@ -{ - "account": { - "updateddate": "2025-10-01T12:27:40Z" - }, - "accountingcode": { - "updateddate": "2025-09-29T08:51:48Z" - }, - "accountingperiod": { - "updateddate": "2025-10-01T12:24:33Z" - }, - "amendment": { - "updateddate": "2025-09-30T14:37:50Z" - }, - "applicationgroup": { - "updateddate": "2025-10-01T12:27:25Z" - }, - "aquatasklog": { - "updateddate": "2025-10-02T15:18:15Z" - }, - "billingrun": { - "updateddate": "2025-09-30T15:07:13Z" - }, - "chargemetrics": { - "updateddate": "2025-09-30T14:37:54.237Z" - }, - "contact": { - "updateddate": "2025-09-30T14:01:42Z" - }, - "contactsnapshot": { - "updateddate": "2025-09-30T14:01:42Z" - }, - "country": { - "createddate": "2025-09-13T05:56:56Z" - }, - "creditmemo": { - "updateddate": "2025-10-01T12:27:25Z" - }, - "creditmemoapplication": { - "updateddate": "2025-10-01T12:27:25Z" - }, - "creditmemoapplicationitem": { - "updateddate": "2025-10-01T12:27:25Z" - }, - "creditmemoitem": { - "updateddate": "2025-10-01T12:22:11Z" - }, - "creditmemopart": { - "updateddate": "2025-10-01T12:27:25Z" - }, - "creditmemopartitem": { - "updateddate": "2025-10-01T12:27:25Z" - }, - "debitmemo": { - "updateddate": "2025-10-01T12:27:40Z" - }, - "debitmemoitem": { - "updateddate": "2025-10-01T12:23:54Z" - }, - "invoice": { - "updateddate": "2025-10-01T08:24:15Z" - }, - "invoicehistory": { - "createddate": "2025-09-30T15:07:13Z" - }, - "invoiceitem": { - "updateddate": "2025-10-01T08:24:15Z" - }, - "journalentry": { - "updateddate": "2025-10-01T12:18:36Z" - }, - "journalentryitem": { - "updateddate": "2025-10-01T12:18:36Z" - }, - "journalrun": { - 
"updateddate": "2025-10-01T12:25:08Z" - }, - "memohistory": { - "createddate": "2025-10-01T12:27:41Z" - }, - "orderaction": { - "updateddate": "2025-09-30T14:37:49Z" - }, - "orderactionrateplan": { - "updateddate": "2025-09-30T14:37:50Z" - }, - "orderdeltamrr": { - "updateddate": "2025-09-30T14:37:52.362Z" - }, - "orderdeltatcb": { - "updateddate": "2025-09-30T14:37:52.365Z" - }, - "orderdeltatcv": { - "updateddate": "2025-09-30T14:37:52.363Z" - }, - "orders": { - "updateddate": "2025-09-30T14:37:50Z" - }, - "payment": { - "updateddate": "2025-10-01T08:25:23Z" - }, - "paymentapplication": { - "updateddate": "2025-10-01T08:25:23Z" - }, - "paymentmethod": { - "updateddate": "2025-10-01T08:25:23Z" - }, - "paymentmethodsnapshot": { - "updateddate": "2025-10-01T08:25:23Z" - }, - "paymentmethodtransactionlog": { - "createddate": "2025-09-30T14:32:49Z" - }, - "paymentpart": { - "updateddate": "2025-10-01T08:25:23Z" - }, - "paymentrun": { - "updateddate": "2025-10-01T06:52:02Z" - }, - "paymenttransactionlog": { - "createddate": "2025-10-01T08:25:23Z" - }, - "product": { - "updateddate": "2025-09-30T09:35:59Z" - }, - "productrateplan": { - "updateddate": "2025-09-30T09:35:50Z" - }, - "productrateplancharge": { - "updateddate": "2025-09-30T09:35:50Z" - }, - "productrateplanchargetier": { - "updateddate": "2025-09-30T09:35:51Z" - }, - "productrateplancurrency": { - "updateddate": "2025-09-30T09:35:50Z" - }, - "rateplan": { - "updateddate": "2025-09-30T14:37:50Z" - }, - "rateplancharge": { - "updateddate": "2025-09-30T14:37:50Z" - }, - "rateplanchargetier": { - "updateddate": "2025-09-30T14:37:50Z" - }, - "refund": { - "updateddate": "2025-07-06T03:26:45-07:00" - }, - "revenueeventtype": { - "updateddate": "2025-09-13T05:56:56Z" - }, - "state": { - "createddate": "2025-09-13T05:56:56Z" - }, - "subscription": { - "updateddate": "2025-09-30T14:37:50Z" - }, - "user": { - "createddate": "2025-06-14T01:49:11-07:00" - }, - "workflow": { - "updateddate": "2025-10-01T12:36:16.354Z" - }, - "workflow_definition": { - "updateddate": "2025-10-01T12:36:16.36Z" - }, - "workflow_linkage": { - "updateddate": "2025-10-01T12:36:16.459Z" - }, - "workflow_task": { - "updateddate": "2025-10-01T12:36:16.487Z" - } -} diff --git a/airbyte-integrations/connectors/source-zuora/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-zuora/integration_tests/acceptance.py deleted file mode 100644 index 82823254d266..000000000000 --- a/airbyte-integrations/connectors/source-zuora/integration_tests/acceptance.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - - -import pytest - -pytest_plugins = ("connector_acceptance_test.plugin",) - - -@pytest.fixture(scope="session", autouse=True) -def connector_setup(): - """This fixture is a placeholder for external resources that acceptance test might require.""" - yield diff --git a/airbyte-integrations/connectors/source-zuora/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-zuora/integration_tests/configured_catalog.json deleted file mode 100644 index da751bb89255..000000000000 --- a/airbyte-integrations/connectors/source-zuora/integration_tests/configured_catalog.json +++ /dev/null @@ -1,739 +0,0 @@ -{ - "streams": [ - { - "stream": { - "name": "account", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "accountingcode", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "accountingperiod", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "amendment", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "applicationgroup", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "aquatasklog", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "billingrun", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "chargemetrics", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "contact", - "json_schema": {}, - "supported_sync_modes": 
["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "contactsnapshot", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "country", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "creditmemo", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "creditmemoapplication", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "creditmemoapplicationitem", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "creditmemoitem", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "creditmemopart", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "creditmemopartitem", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "debitmemo", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "debitmemoitem", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - 
"source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "invoice", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "invoicehistory", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["createddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["createddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "invoiceitem", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "journalentry", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "journalentryitem", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "journalrun", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "memohistory", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["createddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["createddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "orderaction", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "orderactionrateplan", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "orderdeltamrr", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": 
[["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "orderdeltatcb", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "orderdeltatcv", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "orders", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "payment", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "paymentapplication", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "paymentmethod", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "paymentmethodsnapshot", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "paymentmethodtransactionlog", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["createddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["createddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "paymentpart", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "paymentrun", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - 
"sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "paymenttransactionlog", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["createddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["createddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "product", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "productrateplan", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "productrateplancharge", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "productrateplanchargetier", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "productrateplancurrency", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "rateplan", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "rateplancharge", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "rateplanchargetier", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "refund", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - 
}, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "revenueeventtype", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "state", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "subscription", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "user", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["createddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["createddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "workflow", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "workflow_definition", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "workflow_linkage", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - }, - { - "stream": { - "name": "workflow_task", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["updateddate"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "cursor_field": ["updateddate"], - "destination_sync_mode": "append" - } - ] -} diff --git a/airbyte-integrations/connectors/source-zuora/integration_tests/integration_test.py b/airbyte-integrations/connectors/source-zuora/integration_tests/integration_test.py deleted file mode 100644 index 8e52c671db5e..000000000000 --- a/airbyte-integrations/connectors/source-zuora/integration_tests/integration_test.py +++ /dev/null @@ -1,241 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - -import json -from typing import Any, Dict, Mapping - -import pendulum -import pytest -from airbyte_cdk import AirbyteLogger -from source_zuora.source import ( - SourceZuora, - ZuoraDescribeObject, - ZuoraGetJobResult, - ZuoraJobStatusCheck, - ZuoraListObjects, - ZuoraObjectsBase, - ZuoraSubmitJob, -) -from source_zuora.zuora_auth import ZuoraAuthenticator -from source_zuora.zuora_excluded_streams import ZUORA_EXCLUDED_STREAMS - - -def get_config(config_path: str) -> Mapping[str, Any]: - """ - Get the config from /test_input - """ - with open(config_path, "r") as f: - return json.loads(f.read()) - - -def client(config: Dict): - """ - Create client by extending config dict with authenticator and url_base - """ - auth = ZuoraAuthenticator(config) - config["authenticator"] = auth.get_auth() - config["url_base"] = auth.url_base - return config - - -class TestZuora: - """ - This test class provides a set of tests for the custom classes of the Airbyte Zuora connector. - The tests are driven by the input parameters below, so feel free to change them. - - Class attributes marked with (!) are required. - - :: (!) config - the Dict holding the authenticator and url_base parameters. - :: (!) test_stream - the name of the Zuora object - :: (!) test_cursor_field - basically the date field used to filter and query the data from the Zuora test object; - the default, 'updateddate', is available for most Zuora objects, but some use 'createddate', - so check this before setting the parameter. - :: (!) test_schema_fields - the list of fields / columns available for the test_stream; - check this before setting the parameter. - :: (!) test_fields_schema_types - essentially the expected output of get_json_schema() for the test_stream; - if schema discovery is performed correctly, we should see these types of fields in the output, - and the connector should then be able to normalize data as expected. - - Issues that could potentially take place: - :: If any of the tests fail, - - check the input parameters first, - - check start_date in secrets/config.json; this should be a valid date range where data is 100% available to be read, - - check the availability of the test_stream in the Zuora account and your subscription plan, - - check other errors in the test output. - """ - - # create client - config = client(config=get_config("secrets/config.json")) - # create client with Data Query Type == "Unlimited" option - unlimited_config = client(config=get_config("secrets/config.json")) - unlimited_config["data_query"] = "Unlimited" - - # Define common test input - test_stream = "account" - test_cursor_field = "updateddate" - test_schema_fields = ["id", "creditbalance", "allowinvoiceedit"] - test_fields_schema_types = { - "id": {"type": ["string", "null"]}, - "creditbalance": {"type": ["number", "null"]}, - "allowinvoiceedit": {"type": ["boolean", "null"]}, - } - - def _prepare_date_slice(self): - """ - Helper method for the other tests; it builds the test_date_slice used within them. - It also exercises the ZuoraObjectsBase.to_datetime_str method for the correct datetime formatting needed for the query to run. - """ - start_date = ZuoraObjectsBase.to_datetime_str(pendulum.parse(self.config["start_date"]).astimezone()) - end_date = ZuoraObjectsBase.to_datetime_str(pendulum.now().astimezone()) - test_date_slice = {"start_date": start_date, "end_date": end_date} - return test_date_slice - - def test_zuora_connection(self): - """ - Test checks the connection to the Zuora API.
- """ - connection = SourceZuora.check_connection(self, logger=AirbyteLogger, config=self.config) - assert connection == (True, None) - - @pytest.mark.parametrize("config", [(config)], ids=["LIVE"]) - def test_list_all_zuora_objects(self, config): - """ - Test retrieves all the objects (streams) available from Zuora Account and checks if test_stream is in the list. - """ - zuora_objects_list = ZuoraListObjects(config).read_records(sync_mode=None) - assert self.test_stream in zuora_objects_list - - @pytest.mark.parametrize("config", [(config)], ids=["LIVE"]) - def test_excluded_streams_are_not_in_the_list(self, config): - """ - Test retrieves all the objects (streams) available from Zuora Account and checks if excluded streams are not in the list. - """ - zuora_streams_list = SourceZuora.streams(self, config=config) - # extract stream names from auto-generated stream class - generated_stream_class_names = [] - for stream in zuora_streams_list: - generated_stream_class_names.append(stream.__class__.__name__) - # check if excluded streams are not in the final list of stream classes - for excluded_stream in ZUORA_EXCLUDED_STREAMS: - assert False if excluded_stream in generated_stream_class_names else True - - @pytest.mark.parametrize("config", [(config)], ids=["LIVE"]) - def test_get_json_schema(self, config): - """ - Test of getting schema from Zuora endpoint, check converted JsonSchema Types are correct. - """ - schema = list(ZuoraDescribeObject(self.test_stream, config=config).read_records(sync_mode=None)) - schema = {key: d[key] for d in schema for key in d} - - # Filter the schema up to the test_schema_fields - output_converted_schema_types = {key: value for key, value in schema.items() if key in self.test_schema_fields} - - # Return True if all is correct - assert self.test_fields_schema_types == output_converted_schema_types - - def test_query(self): - """ - The ZuoraObjectsBase.query() works with date_slices as input, - we test if date_slices are formed and passed correctly. - """ - # Prepare date_slice - test_date_slice = self._prepare_date_slice() - - # Making example query using input - example_query = f""" - select * - from {self.test_stream} where - {self.test_cursor_field} >= TIMESTAMP '{test_date_slice.get("start_date")}' and - {self.test_cursor_field} <= TIMESTAMP '{test_date_slice.get("end_date")}' - order by {self.test_cursor_field} asc - """ - - # Making test query using query() method - test_query = ZuoraObjectsBase.query( - self, stream_name=self.test_stream, cursor_field=self.test_cursor_field, date_slice=test_date_slice - ) - - # If the query is correctly build using connector class return True - assert example_query == test_query - - def test_query_full_object(self): - """ - The ZuoraObjectsBase.query() works with streams that doesn't support any of the cursor available, - such as `UpdatedDate` or `CreatedDate`. In this case, we cannot filter the object by date, - so we pull the whole object. - """ - - # Making example query using input - example_query = f"""select * from {self.test_stream}""" - - # Making test query using query() method - test_query = ZuoraObjectsBase.query(self, stream_name=self.test_stream, full_object=True) - - # If the query is correctly build using connector class return True - assert example_query == test_query - - @pytest.mark.parametrize("config", [(config)], ids=["LIVE"]) - def test_submit_job(self, config): - """ - Test submits the job to the server and returns the `job_id` as confirmation that the job was submitted successfully. 
-        """
-
-        # Prepare date_slice
-        test_date_slice = self._prepare_date_slice()
-
-        # Submitting the job to the server
-        job_id = ZuoraSubmitJob(
-            ZuoraObjectsBase.query(self, stream_name=self.test_stream, cursor_field=self.test_cursor_field, date_slice=test_date_slice),
-            config,
-        ).read_records(sync_mode=None)
-
-        # Return True if we have a submitted job_id
-        assert len(list(job_id)) > 0
-
-    @pytest.mark.parametrize("config", [(config)], ids=["LIVE"])
-    def test_check_job_status(self, config):
-        """
-        Test checks the submitted job for status; if the status is "completed", job_data_url will contain the URL for the jsonl dataFile.
-        Otherwise, if the status of the job is in ["failed", "canceled", "aborted"], it will raise the error message to the output,
-        describing what type of error occurred.
-        """
-
-        # Prepared date_slice
-        test_date_slice = self._prepare_date_slice()
-
-        # Submitting a job first
-        job_id = ZuoraSubmitJob(
-            ZuoraObjectsBase.query(self, stream_name=self.test_stream, cursor_field=self.test_cursor_field, date_slice=test_date_slice),
-            config,
-        ).read_records(sync_mode=None)
-
-        # checking iteratively if the job is completed, then return the URL with the jsonl datafile
-        job_data_url = ZuoraJobStatusCheck(list(job_id)[0], self.config).read_records(sync_mode=None)
-
-        # Return True if there is a URL leading to a file
-        assert "https://" in list(job_data_url)[0]
-
-    @pytest.mark.parametrize("config", [(config)], ids=["LIVE"])
-    def test_get_job_result(self, config):
-        """
-        Test reads the dataFile from the URL of a submitted, checked, and successfully completed job.
-        """
-
-        # Prepared date_slice
-        test_date_slice = self._prepare_date_slice()
-
-        # Submitting a job first
-        job_id = ZuoraSubmitJob(
-            ZuoraObjectsBase.query(self, stream_name=self.test_stream, cursor_field=self.test_cursor_field, date_slice=test_date_slice),
-            config,
-        ).read_records(sync_mode=None)
-
-        # checking iteratively if the job is completed, then return the URL with the jsonl datafile
-        job_data_url = ZuoraJobStatusCheck(list(job_id)[0], self.config).read_records(sync_mode=None)
-
-        # read records from the completed job
-        job_result = ZuoraGetJobResult(list(job_data_url)[0]).read_records(sync_mode=None)
-        # Return True if we have successfully read records from the completed job
-        assert len(list(job_result)) > 0
diff --git a/airbyte-integrations/connectors/source-zuora/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-zuora/integration_tests/invalid_config.json
deleted file mode 100644
index 5529e73ba380..000000000000
--- a/airbyte-integrations/connectors/source-zuora/integration_tests/invalid_config.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-  "start_date": "2020-01-01",
-  "window_in_days": "30",
-  "tenant_endpoint": "US Cloud API Sandbox",
-  "data_query": "Live",
-  "client_id": "some_client_id",
-  "client_secret": "some_client_secret"
-}
diff --git a/airbyte-integrations/connectors/source-zuora/main.py b/airbyte-integrations/connectors/source-zuora/main.py
deleted file mode 100644
index 404a72854a9e..000000000000
--- a/airbyte-integrations/connectors/source-zuora/main.py
+++ /dev/null
@@ -1,8 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
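For reference, the job lifecycle exercised by the tests above reduces to three REST calls. The sketch below is illustrative only: it assumes a pre-fetched bearer token, and it reuses the endpoint paths, status values, and response fields shown in the connector code further down in this patch; it is not part of the connector itself.

import json
import time

import requests


def run_data_query(url_base: str, token: str, query: str) -> list:
    headers = {"Authorization": f"Bearer {token}"}
    # 1. Submit the ZOQL query as a Data Query job (POST /query/jobs).
    job = requests.post(
        f"{url_base}/query/jobs",
        headers=headers,
        json={"compression": "NONE", "output": {"target": "S3"}, "outputFormat": "JSON", "query": query},
    ).json()["data"]
    # 2. Poll the job until it reaches a terminal status.
    while job["queryStatus"] not in ("completed", "failed", "canceled", "aborted"):
        time.sleep(1)
        job = requests.get(f"{url_base}/query/jobs/{job['id']}", headers=headers).json()["data"]
    if job["queryStatus"] != "completed":
        raise RuntimeError(job.get("errorMessage", "Data Query job failed"))
    # 3. Download the JSONL dataFile and parse one record per line.
    return [json.loads(line) for line in requests.get(job["dataFile"]).text.splitlines()]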
-#
-
-from source_zuora.run import run
-
-if __name__ == "__main__":
-    run()
diff --git a/airbyte-integrations/connectors/source-zuora/requirements.txt b/airbyte-integrations/connectors/source-zuora/requirements.txt
deleted file mode 100644
index d6e1198b1ab1..000000000000
--- a/airbyte-integrations/connectors/source-zuora/requirements.txt
+++ /dev/null
@@ -1 +0,0 @@
--e .
diff --git a/airbyte-integrations/connectors/source-zuora/setup.py b/airbyte-integrations/connectors/source-zuora/setup.py
deleted file mode 100644
index 6cec429e1996..000000000000
--- a/airbyte-integrations/connectors/source-zuora/setup.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
-
-
-from setuptools import find_packages, setup
-
-MAIN_REQUIREMENTS = [
-    "airbyte-cdk",
-    "pendulum",
-]
-
-TEST_REQUIREMENTS = [
-    "requests-mock~=1.9.3",
-    "pytest-mock~=3.6.1",
-]
-
-setup(
-    entry_points={
-        "console_scripts": [
-            "source-zuora=source_zuora.run:run",
-        ],
-    },
-    name="source_zuora",
-    description="Airbyte source-connector for Zuora.",
-    author="Airbyte",
-    author_email="contact@airbyte.io",
-    packages=find_packages(),
-    install_requires=MAIN_REQUIREMENTS,
-    package_data={
-        "": [
-            # Include yaml files in the package (if any)
-            "*.yml",
-            "*.yaml",
-            # Include all json files in the package, up to 4 levels deep
-            "*.json",
-            "*/*.json",
-            "*/*/*.json",
-            "*/*/*/*.json",
-            "*/*/*/*/*.json",
-        ]
-    },
-    extras_require={
-        "tests": TEST_REQUIREMENTS,
-    },
-)
diff --git a/airbyte-integrations/connectors/source-zuora/source_zuora/__init__.py b/airbyte-integrations/connectors/source-zuora/source_zuora/__init__.py
deleted file mode 100644
index 2786f91d417f..000000000000
--- a/airbyte-integrations/connectors/source-zuora/source_zuora/__init__.py
+++ /dev/null
@@ -1,27 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2020 Airbyte
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-"""
-
-from .source import SourceZuora
-
-__all__ = ["SourceZuora"]
diff --git a/airbyte-integrations/connectors/source-zuora/source_zuora/run.py b/airbyte-integrations/connectors/source-zuora/source_zuora/run.py
deleted file mode 100644
index 58495d4d67e3..000000000000
--- a/airbyte-integrations/connectors/source-zuora/source_zuora/run.py
+++ /dev/null
@@ -1,14 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
-
-
-import sys
-
-from airbyte_cdk.entrypoint import launch
-from source_zuora import SourceZuora
-
-
-def run():
-    source = SourceZuora()
-    launch(source, sys.argv[1:])
diff --git a/airbyte-integrations/connectors/source-zuora/source_zuora/source.py b/airbyte-integrations/connectors/source-zuora/source_zuora/source.py
deleted file mode 100644
index 3f2c2ba73d23..000000000000
--- a/airbyte-integrations/connectors/source-zuora/source_zuora/source.py
+++ /dev/null
@@ -1,534 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
-
-
-import json
-from abc import ABC
-from datetime import datetime
-from typing import Any, Dict, Iterable, List, Mapping, MutableMapping, Optional, Tuple
-
-import pendulum
-import requests
-from airbyte_cdk.logger import AirbyteLogger
-from airbyte_cdk.models import AirbyteStream, SyncMode
-from airbyte_cdk.sources import AbstractSource
-from airbyte_cdk.sources.streams.http import HttpStream
-
-from .zuora_auth import ZuoraAuthenticator
-from .zuora_errors import (
-    QueryWindowError,
-    ZOQLQueryCannotProcessObject,
-    ZOQLQueryFailed,
-    ZOQLQueryFieldCannotResolveAltCursor,
-    ZOQLQueryFieldCannotResolveCursor,
-)
-from .zuora_excluded_streams import ZUORA_EXCLUDED_STREAMS
-
-
-class ZuoraStream(HttpStream, ABC):
-    """
-    Parent class for all other classes, except SourceZuora.
-    """
-
-    # Define primary key
-    primary_key = "id"
-
-    # Define possible cursor_fields
-    cursor_field = "updateddate"
-    alt_cursor_field = "createddate"
-
-    def __init__(self, config: Dict):
-        super().__init__(authenticator=config["authenticator"])
-        self._config = config
-
-    @property
-    def url_base(self) -> str:
-        return self._config["url_base"]
-
-    @property
-    def window_in_days(self) -> float:
-        """
-        Converts the `Query Window` config parameter from string type into float.
-        """
-        try:
-            value = self._config["window_in_days"]
-            return float(value)
-        except ValueError:
-            raise QueryWindowError(value)
-
-    def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]:
-        """Abstractmethod HTTPStream CDK dependency"""
-        return None
-
-    def request_params(self, stream_state: Mapping[str, Any], **kwargs) -> MutableMapping[str, Any]:
-        """Abstractmethod HTTPStream CDK dependency"""
-        return {}
-
-    def base_query_params(self) -> MutableMapping[str, Any]:
-        """
-        Returns base query parameters for the default CDK request_json_body method
-        """
-        params = {"compression": "NONE", "output": {"target": "S3"}, "outputFormat": "JSON"}
-        if self._config["data_query"] == "Unlimited":
-            params["sourceData"] = "DATAHUB"
-        return params
-
-
-class ZuoraBase(ZuoraStream):
-    """
-    Base child class, provides main functionality for the following classes:
-    - ZuoraObjectsBase, ZuoraListObjects, ZuoraDescribeObject
-    """
-
-    def path(self, **kwargs) -> str:
-        """Abstractmethod HTTPStream CDK dependency"""
-        return ""
-
-    def request_kwargs(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> Mapping[str, Any]:
-        """
-        Override of default CDK method to return date_slices as stream_slices
-        """
-        return stream_slice if stream_slice else {}
-
-    def get_zuora_data(self, date_slice: Dict, config: Dict, full_object: bool = False) -> Iterable[Mapping[str, Any]]:
-        """
-        This is the wrapper for the 'Submit > Check > Get' operation.
-
-        :: job_id - string with the submitted job_id EXAMPLE: '5a96ee43-e874-4a25-a9b4-004b39fe82a4'
-           for more information see: ZuoraSubmitJob
-        :: job_data_url - response object with:
-           - 'queryStatus': ["completed", "in_progress", "failed", "canceled", "aborted"],
-           - 'errorMessage': if there is any error on the server side during job execution
-           - 'dataFile': if the execution was successful, returns the URL for the jsonl file
-           for more information see: ZuoraJobStatusCheck
-        :: ZuoraGetJobResult - reads the 'dataFile' URL and outputs the data records for the completed job
-           for more information see: ZuoraGetJobResult
-        :: full_object - boolean, indicates whether to fetch the whole object without any filtering, default `False`
-
-        """
-        if full_object:
-            # If the cursor is not available, we fetch the whole object
-            job_query = self.query(stream_name=self.name, full_object=True)
-        else:
-            # Default prepared job with Cursor
-            job_query = self.query(stream_name=self.name, cursor_field=self.cursor_field, date_slice=date_slice)
-
-        job_id: List[str] = ZuoraSubmitJob(job_query, config).read_records(sync_mode=None)
-        job_data_url: List = ZuoraJobStatusCheck(list(job_id)[0], config).read_records(sync_mode=None)
-        yield from ZuoraGetJobResult(list(job_data_url)[0]).read_records(sync_mode=None)
-
-    def _send_request(self, request: requests.PreparedRequest, request_kwargs: Mapping[str, Any]) -> requests.Response:
-        """
-        Override for the _send_request CDK method to send HTTP requests to the Zuora API
-        """
-        try:
-            # try to fetch with the default cursor_field = UpdatedDate
-            yield from self.get_zuora_data(date_slice=request_kwargs, config=self._config)
-        except ZOQLQueryCannotProcessObject:
-            # do nothing if we cannot resolve the object
-            pass
-        except ZOQLQueryFieldCannotResolveCursor:
-            """
-            The default cursor_field is "updateddate", but sometimes it's not supported by certain streams.
-            We need to switch the default cursor field to the alternative one and retry the whole operation, submitting a new job to the server.
-            We also need to save the state at the end of the sync.
-            So this switch is needed as a fast and easy way of resolving the cursor_field for streams that support only "createddate".
-            """
-            # cursor_field switch to alternative = CreatedDate
-            self.cursor_field = self.alt_cursor_field
-            try:
-                """
-                The alternative cursor_field is "createddate"; it might also not be available for some custom objects.
-                In this case, we fetch the whole object without any filtering.
-                """
-                # retry the whole operation with the alternative cursor
-                yield from self.get_zuora_data(date_slice=request_kwargs, config=self._config)
-            except ZOQLQueryFieldCannotResolveAltCursor:
-                # if we fail to use the alternative cursor - fetch the whole object
-                # retry the whole operation
-                yield from self.get_zuora_data(date_slice=request_kwargs, config=self._config, full_object=True)
-            except ZOQLQueryCannotProcessObject:
-                # do nothing if we cannot resolve the object
-                pass
-
-    def parse_response(self, response: requests.Response, **kwargs) -> str:
-        yield from response
-
-
-class ZuoraObjectsBase(ZuoraBase):
-    """
-    Main class for all the Zuora data streams (Zuora Object names),
-    provides functionality for dynamically created classes as streams of data.
-    """
-
-    @property
-    def state_checkpoint_interval(self) -> float:
-        return self.window_in_days
-
-    @staticmethod
-    def to_datetime_str(date: datetime) -> str:
-        """
-        Custom method.
-        Returns the formatted datetime string in a way the Zuora API endpoint recognises as a timestamp.
-        :: Output example: '2021-07-15 07:45:55 -07:00' FORMAT: "%Y-%m-%d %H:%M:%S.%f %Z"
-        """
-        return date.strftime("%Y-%m-%d %H:%M:%S.%f %Z")
-
-    def get_cursor_from_schema(self, schema: Dict) -> str:
-        """
-        Gets the cursor_field from the stream's schema rather than taking it from the class attribute.
-        If the stream doesn't support 'updateddate', then we use 'createddate'.
-        If the stream doesn't support 'createddate', then the stream is `full_refresh` only.
-        """
-        if self.cursor_field in schema:
-            # when UpdatedDate is available
-            return self.cursor_field
-        elif self.alt_cursor_field in schema:
-            # when CreatedDate is available
-            return self.alt_cursor_field
-        else:
-            return None
-
-    def get_json_schema(self) -> Mapping[str, Any]:
-        """
-        Override of the get_json_schema CDK method to retrieve the schema information for the Zuora Object dynamically.
-        """
-        schema = list(ZuoraDescribeObject(self.name, config=self._config).read_records(sync_mode=None))
-        return {"type": "object", "properties": {key: d[key] for d in schema for key in d}}
-
-    def as_airbyte_stream(self) -> AirbyteStream:
-        """
-        Override of the as_airbyte_stream CDK method to replace the default 'default_cursor_field' behaviour:
-        :: We use the cursor_field defined inside the schema instead of using the class attribute by default.
-        :: But we still need the default class attribute 'cursor_field' so that CDK read_records works properly.
-        """
-        stream = super().as_airbyte_stream()
-        stream_cursor = self.get_cursor_from_schema(stream.json_schema["properties"])
-        if stream_cursor:
-            stream.default_cursor_field = [stream_cursor]
-        else:
-            # When there is no cursor available in the stream, we do Full-Refresh only.
-            stream.supported_sync_modes = [SyncMode.full_refresh]
-            stream.source_defined_cursor = True  # default CDK for full-refresh
-            stream.default_cursor_field = []  # default CDK for full-refresh
-        return stream
-
-    def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]:
-        """
-        Update the state value, default CDK method.
-        """
-        updated_state = max(latest_record.get(self.cursor_field, ""), current_stream_state.get(self.cursor_field, ""))
-        return {self.cursor_field: updated_state} if updated_state else {}
-
-    def query(self, stream_name: str, cursor_field: str = None, date_slice: Dict = None, full_object: bool = False) -> str:
-        """
-        Custom method. Returns the SQL-like query in the way the Zuora API endpoint accepts jobs.
-        """
-        if full_object:
-            return f"""select * from {stream_name}"""
-
-        return f"""
-            select *
-            from {stream_name} where
-            {cursor_field} >= TIMESTAMP '{date_slice.get('start_date')}' and
-            {cursor_field} <= TIMESTAMP '{date_slice.get('end_date')}'
-            order by {cursor_field} asc
-            """
-
-    def stream_slices(self, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[Mapping[str, Any]]]:
-        """
-        Override of the default stream_slices CDK method to provide date_slices as page chunks for the data fetch.
-        Returns a list of dicts, for example:
-            {
-                "start_date": "2020-01-01 00:00:00 -07:00",
-                "end_date": "2021-12-31 00:00:00 -07:00"
-            },
-            ...
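A minimal standalone sketch of the slicing rule this docstring describes, assuming only pendulum; the window arithmetic mirrors stream_slices and to_datetime_str and adds no new behaviour.

import pendulum


def date_slices(start_date: str, window_in_days: float) -> list:
    """Chunk [start_date, now] into window_in_days-sized slices, as stream_slices does."""

    def fmt(date) -> str:
        # Same format string as ZuoraObjectsBase.to_datetime_str
        return date.strftime("%Y-%m-%d %H:%M:%S.%f %Z")

    start = pendulum.parse(start_date).astimezone()
    end = pendulum.now().astimezone()
    start = min(start, end)  # the API fails if start_date is in the future
    slices = []
    while start <= end:
        chunk_end = start.add(days=window_in_days)
        slices.append({"start_date": fmt(start), "end_date": fmt(chunk_end)})
        start = chunk_end
    return slices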
-        """
-
-        start_date = pendulum.parse(self._config["start_date"]).astimezone()
-        end_date = pendulum.now().astimezone()
-
-        # Determine stream_state; if there is no stream_state we use start_date
-        if stream_state:
-            state = stream_state.get(self.cursor_field, stream_state.get(self.alt_cursor_field))
-            start_date = pendulum.parse(state) if state else self._config["start_date"]
-
-        # use the lowest date between start_date and end_date, otherwise the API fails if start_date is in the future
-        start_date = min(start_date, end_date)
-
-        while start_date <= end_date:
-            end_date_slice = start_date.add(days=self.window_in_days)
-            yield {"start_date": self.to_datetime_str(start_date), "end_date": self.to_datetime_str(end_date_slice)}
-            start_date = end_date_slice
-
-
-class ZuoraListObjects(ZuoraBase):
-    """
-    Provides functionality to retrieve the list of Zuora Objects as a list of object names.
-    """
-
-    def query(self, **kwargs) -> str:
-        return "SHOW TABLES"
-
-    def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]:
-        return [name["Table"] for name in response]
-
-
-class ZuoraDescribeObject(ZuoraBase):
-    """
-    Provides functionality to retrieve the Zuora Object (stream) schema dynamically from the endpoint
-    and convert it into JSONSchema types for the connector's catalog.
-    """
-
-    def __init__(self, zuora_object_name: str, config: Dict):
-        super().__init__(config)
-        self.zuora_object_name = zuora_object_name
-
-    def query(self, **kwargs) -> str:
-        return f"DESCRIBE {self.zuora_object_name}"
-
-    def parse_response(self, response: requests.Response, **kwargs) -> List[Dict]:
-        """
-        Response example:
-        [
-            {'Column': 'taxexempteffectivedate', 'Type': 'date', 'Extra': '', 'Comment': 'TaxExemptEffectiveDate'},
-            {'Column': 'invoicetemplateid', 'Type': 'varchar', 'Extra': '', 'Comment': 'InvoiceTemplateId'}...
-        ]
-        """
-        type_number = ["number", "null"]
-        type_string = ["string", "null"]
-        type_object = ["object", "null"]
-        type_array = ["array", "null"]
-        type_bool = ["boolean", "null"]
-
-        type_mapping = {
-            "decimal(22,9)": type_number,
-            "decimal": type_number,
-            "integer": type_number,
-            "int": type_number,
-            "bigint": type_number,
-            "smallint": type_number,
-            "double": type_number,
-            "float": type_number,
-            "timestamp": type_number,
-            "date": type_string,
-            "datetime": type_string,
-            "timestamp with time zone": type_string,
-            "picklist": type_string,
-            "text": type_string,
-            "varchar": type_string,
-            "zoql": type_object,
-            "binary": type_object,
-            "json": type_object,
-            "xml": type_object,
-            "blob": type_object,
-            "list": type_array,
-            "array": type_array,
-            "boolean": type_bool,
-            "bool": type_bool,
-        }
-
-        json_schema = {}
-        for field in response:
-            json_type = type_mapping.get(field.get("Type"), type_string)
-            json_schema[field.get("Column")] = {"type": json_type}
-
-        return [json_schema]
-
-
-class ZuoraSubmitJob(ZuoraStream):
-    """
-    Provides functionality to submit a ZOQL Data Query job on the server.
-    Returns the job_id as confirmation that the job was submitted successfully.
-    """
-
-    http_method = "POST"
-
-    def __init__(self, query: str, config: Dict):
-        super().__init__(config)
-        self.query = query
-
-    def path(self, **kwargs) -> str:
-        return "/query/jobs"
-
-    def request_body_json(self, **kwargs) -> Optional[Mapping]:
-        """
-        Override of the default CDK method to return the SQL-like query and use it in the _send_request method.
-        """
-        params = self.base_query_params()
-        params["query"] = self.query
-        return params
-
-    def parse_response(self, response: requests.Response, **kwargs) -> List[str]:
-
-        """
-        Response example:
-        {'data':
-            {
-                'id': 'c6f25f91-5357-4fec-a00d-9009cc1ae856',
-                'query': 'DESCRIBE account',  # This could be a SELECT statement or DESCRIBE or SHOW {object}
-                'useIndexJoin': False,
-                'sourceData': 'LIVE',
-                'queryStatus': 'accepted',
-                'remainingRetries': 3,
-                'retries': 3,
-                'updatedOn': '2021-07-26T15:33:48.287Z',
-                'createdBy': '84f78cea-8a5b-4332-933f-27439fe3b87b'
-            }
-        }
-        """
-        return [response.json()["data"]["id"]]
-
-
-class ZuoraJobStatusCheck(ZuoraStream):
-    """
-    Provides functionality to check the status of a submitted job on the server.
-    :: There are ["completed", "in_progress", "failed", "canceled", "aborted"] statuses available in the check response.
-       The check operation returns either the dataFile URL or an error message describing the error.
-    """
-
-    def __init__(self, job_id: str, config: Dict):
-        super().__init__(config)
-        self.job_id = job_id
-
-    def path(self, **kwargs) -> str:
-        return f"/query/jobs/{self.job_id}"
-
-    def parse_response(self, response: requests.Response, **kwargs) -> List[str]:
-        return [response.json()["data"]["dataFile"]]
-
-    def _send_request(self, request: requests.PreparedRequest, request_kwargs: Mapping[str, Any]) -> requests.Response:
-
-        """
-        Override of the default CDK method _send_request to check the status of the submitted job iteratively,
-        until it's either "completed" or "failed" or "canceled" for any reason.
-
-        Response example:
-        {'data':
-            {
-                'id': 'c6f25f91-5357-4fec-a00d-9009cc1ae856',
-                'query': 'DESCRIBE account',
-                'useIndexJoin': False,
-                'sourceData': 'LIVE',
-                'queryStatus': 'completed',
-                'dataFile': 'https://owl-auw2-sbx01-query-result.s3.us-west-2.amazonaws.com/c6f25f91-5357-4fec-a00d-9009cc1ae856_2779514650704989.jsonl?....',
-                'outputRows': 53,
-                'processingTime': 516,
-                'remainingRetries': 3,
-                'retries': 3,
-                'updatedOn': '2021-07-26T15:33:48.803Z',
-                'createdBy': '84f78cea-8a5b-4332-933f-27439fe3b87b'
-            }
-        }
-        """
-
-        # Define the job error statuses
-        errors = ["failed", "canceled", "aborted"]
-        # Error msg: the cursor_field cannot be resolved
-        cursor_error = f"Column '{self.cursor_field}' cannot be resolved"
-        alt_cursor_error = f"Column '{self.alt_cursor_field}' cannot be resolved"
-        # Error msg: cannot process object
-        obj_read_error = "failed to process object"
-
-        status = None
-        success = "completed"
-        while status != success:
-            """
-            There is no risk of an infinite loop, because the operation is performed on the server side
-            and there are query run-time limitations: if the query time is longer than 120 min,
-            the server will output the error with the corresponding message for the user
-            by raising the `ZOQLQueryFailed` exception.
-            """
-
-            response: requests.Response = self._session.send(request, **request_kwargs)
-            job_check = response.json()
-            status = job_check["data"]["queryStatus"]
-            if status in errors and cursor_error in job_check["data"]["errorMessage"]:
-                raise ZOQLQueryFieldCannotResolveCursor
-            elif status in errors and obj_read_error in job_check["data"]["errorMessage"]:
-                raise ZOQLQueryCannotProcessObject
-            elif status in errors and alt_cursor_error in job_check["data"]["errorMessage"]:
-                raise ZOQLQueryFieldCannotResolveAltCursor
-            elif status in errors:
-                raise ZOQLQueryFailed(response)
-        return response
-
-
-class ZuoraGetJobResult(HttpStream):
-    """
-    Provides functionality to retrieve the records from the file produced by a submitted and successfully completed job.
-    DataFile URL example:
-    {'data':
-        {
-            'id': 'c6f25f91-5357-4fec-a00d-9009cc1ae856',
-            ...,
-            ...,
-            'dataFile': 'https://owl-auw2-sbx01-query-result.s3.us-west-2.amazonaws.com/c6f25f91-5357-4fec-a00d-9009cc1ae856_2779514650704989.jsonl?....',
-            ...,
-            ...,
-            'createdBy': '84f78cea-8a5b-4332-933f-27439fe3b87b'
-        }
-    }
-    """
-
-    primary_key = None
-
-    def __init__(self, url: str):
-        super().__init__()  # initiate authenticator = NoAuth(), _session
-        self.url = url  # accept incoming dataFile URL
-
-    @property
-    def url_base(self):
-        return self.url
-
-    def path(self, **kwargs) -> str:
-        """Abstractmethod HTTPStream CDK dependency"""
-        return ""
-
-    def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]:
-        """Abstractmethod HTTPStream CDK dependency"""
-        return None
-
-    def parse_response(self, response: requests.Response, **kwargs) -> str:
-        """
-        Return records from the JSONLines file at the dataFile URL.
-        """
-        for line in response.text.splitlines():
-            yield json.loads(line)
-
-
-class SourceZuora(AbstractSource):
-    def check_connection(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> Tuple[bool, any]:
-        """
-        Tests connection availability for the connector by obtaining the token.
-        """
-        auth = ZuoraAuthenticator(config).get_auth()
-        try:
-            auth.get_auth_header()
-            return True, None
-        except Exception as e:
-            return False, e
-
-    def streams(self, config: Mapping[str, Any]) -> List[ZuoraStream]:
-        """
-        Maps the user input configuration as defined in the connector spec.
-        Defines the streams to run by building stream classes dynamically.
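The dynamic-class pattern this docstring refers to, reduced to a hypothetical standalone example (the base class and config here are made up for illustration): type(name, bases, namespace) builds one stream class per discovered object name.

class BaseStream:
    def __init__(self, config: dict):
        self.config = config


# One subclass per discovered Zuora object name, e.g. ["account", "country", "user"].
stream_classes = [type(name, (BaseStream,), {}) for name in ["account", "country", "user"]]
streams = [cls({"start_date": "2021-01-01"}) for cls in stream_classes]
assert streams[0].__class__.__name__ == "account"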
-        """
-        auth = ZuoraAuthenticator(config)
-        config["authenticator"] = auth.get_auth()
-        config["url_base"] = auth.url_base
-
-        # List the available object (stream) names from Zuora
-        # Example: zuora_stream_names = ["account", "country", "user"]
-        zuora_stream_names = ZuoraListObjects(config).read_records(sync_mode=None)
-
-        streams: List[ZuoraStream] = []
-        for stream_name in zuora_stream_names:
-            if stream_name not in ZUORA_EXCLUDED_STREAMS:
-                # construct a ZuoraReadStreams sub-class for each stream_name
-                stream_class = type(stream_name, (ZuoraObjectsBase,), {})
-                # instantiate a stream with config
-                stream_instance = stream_class(config)
-                streams.append(stream_instance)
-        return streams
diff --git a/airbyte-integrations/connectors/source-zuora/source_zuora/spec.json b/airbyte-integrations/connectors/source-zuora/source_zuora/spec.json
deleted file mode 100644
index f9d2dc6262dc..000000000000
--- a/airbyte-integrations/connectors/source-zuora/source_zuora/spec.json
+++ /dev/null
@@ -1,66 +0,0 @@
-{
-  "documentationUrl": "https://docs.airbyte.com/integrations/sources/zuora",
-  "connectionSpecification": {
-    "$schema": "http://json-schema.org/draft-07/schema#",
-    "title": "Zuora Connector Configuration",
-    "type": "object",
-    "required": [
-      "start_date",
-      "tenant_endpoint",
-      "data_query",
-      "client_id",
-      "client_secret"
-    ],
-    "properties": {
-      "start_date": {
-        "type": "string",
-        "title": "Start Date",
-        "description": "Start Date in format: YYYY-MM-DD",
-        "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
-      },
-      "window_in_days": {
-        "type": "string",
-        "title": "Query Window (in days)",
-        "description": "The amount of days for each data chunk, beginning from start_date. The bigger the value, the faster the fetch. (0.1 - as for a couple of hours, 1 - as for a Day; 364 - as for a Year).",
-        "examples": ["0.5", "1", "30", "60", "90", "120", "200", "364"],
-        "pattern": "^(0|[1-9]\\d*)(\\.\\d+)?$",
-        "default": "90"
-      },
-      "tenant_endpoint": {
-        "title": "Tenant Endpoint Location",
-        "type": "string",
-        "description": "Please choose the right endpoint where your Tenant is located. More info at this Link",
-        "enum": [
-          "US Production",
-          "US Cloud Production",
-          "US API Sandbox",
-          "US Cloud API Sandbox",
-          "US Central Sandbox",
-          "US Performance Test",
-          "EU Production",
-          "EU API Sandbox",
-          "EU Central Sandbox"
-        ]
-      },
-      "data_query": {
-        "title": "Data Query Type",
-        "type": "string",
-        "description": "Choose between `Live` or `Unlimited` - the optimized, replicated database at 12 hours freshness for high volume extraction Link",
-        "enum": ["Live", "Unlimited"],
-        "default": "Live"
-      },
-      "client_id": {
-        "type": "string",
-        "title": "Client ID",
-        "description": "Your OAuth user Client ID",
-        "airbyte_secret": true
-      },
-      "client_secret": {
-        "type": "string",
-        "title": "Client Secret",
-        "description": "Your OAuth user Client Secret",
-        "airbyte_secret": true
-      }
-    }
-  }
-}
diff --git a/airbyte-integrations/connectors/source-zuora/source_zuora/zuora_auth.py b/airbyte-integrations/connectors/source-zuora/source_zuora/zuora_auth.py
deleted file mode 100644
index c5adcd9666c2..000000000000
--- a/airbyte-integrations/connectors/source-zuora/source_zuora/zuora_auth.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
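A minimal sketch of the client-credentials exchange that the authenticator defined below delegates to the CDK's Oauth2Authenticator. The /oauth/token path and the client_credentials grant come from the deleted code; the access_token response field is a standard-OAuth assumption, not something confirmed by this patch.

import requests


def fetch_access_token(url_base: str, client_id: str, client_secret: str) -> str:
    # Zuora has no refresh token: every call is a fresh client_credentials grant.
    response = requests.post(
        f"{url_base}/oauth/token",
        data={"grant_type": "client_credentials", "client_id": client_id, "client_secret": client_secret},
    )
    response.raise_for_status()
    return response.json()["access_token"]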
-#
-
-
-from typing import Any, Dict, Mapping
-
-from airbyte_cdk.sources.streams.http.requests_native_auth.oauth import Oauth2Authenticator
-
-from .zuora_endpoint import get_url_base
-
-
-class OAuth(Oauth2Authenticator):
-    def __init__(self, **kwargs):
-        super().__init__(**kwargs)
-
-    def get_refresh_request_body(self) -> Mapping[str, Any]:
-        payload = super().get_refresh_request_body()
-        payload["grant_type"] = "client_credentials"
-        payload.pop("refresh_token")  # Zuora doesn't have Refresh Token parameter
-        return payload
-
-
-class ZuoraAuthenticator:
-    def __init__(self, config: Dict):
-        self.config = config
-
-    @property
-    def url_base(self) -> str:
-        return get_url_base(self.config["tenant_endpoint"])
-
-    def get_auth(self) -> OAuth:
-        return OAuth(
-            token_refresh_endpoint=f"{self.url_base}/oauth/token",
-            client_id=self.config["client_id"],
-            client_secret=self.config["client_secret"],
-            refresh_token=None,  # Zuora doesn't have Refresh Token parameter
-        )
diff --git a/airbyte-integrations/connectors/source-zuora/source_zuora/zuora_endpoint.py b/airbyte-integrations/connectors/source-zuora/source_zuora/zuora_endpoint.py
deleted file mode 100644
index 0747850486fb..000000000000
--- a/airbyte-integrations/connectors/source-zuora/source_zuora/zuora_endpoint.py
+++ /dev/null
@@ -1,26 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
-
-
-from typing import Dict
-
-ZUORA_TENANT_ENDPOINT_MAP: Dict = {
-    # Production
-    "US Production": "https://rest.zuora.com",
-    "US Cloud Production": "https://rest.na.zuora.com",
-    "EU Production": "https://rest.eu.zuora.com",
-    # Sandbox
-    "US API Sandbox": "https://rest.apisandbox.zuora.com",
-    "US Cloud API Sandbox": "https://rest.sandbox.na.zuora.com",
-    "US Central Sandbox": "https://rest.test.zuora.com",
-    "EU API Sandbox": "https://rest.sandbox.eu.zuora.com",
-    "EU Central Sandbox": "https://rest.test.eu.zuora.com",
-    # Performance Test
-    "US Performance Test": "https://rest.pt1.zuora.com",
-}
-
-
-def get_url_base(tenant_endpoint: str) -> str:
-    """Define the URL Base from user's input with respect to the ZUORA_TENANT_ENDPOINT_MAP"""
-    return ZUORA_TENANT_ENDPOINT_MAP.get(tenant_endpoint)
diff --git a/airbyte-integrations/connectors/source-zuora/source_zuora/zuora_errors.py b/airbyte-integrations/connectors/source-zuora/source_zuora/zuora_errors.py
deleted file mode 100644
index f13f16015a0a..000000000000
--- a/airbyte-integrations/connectors/source-zuora/source_zuora/zuora_errors.py
+++ /dev/null
@@ -1,77 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
-
-
-import sys
-from typing import Any
-
-import requests
-from airbyte_cdk.logger import AirbyteLogger
-
-
-class Error(Exception):
-    """Base Error class for other exceptions"""
-
-    # Define the instance of the Native Airbyte Logger
-    logger = AirbyteLogger()
-
-
-class QueryWindowError(Error):
-    def __init__(self, value: Any):
-        self.message = f"`Query Window` is set to '{value}', please make sure you use float or integer, not string."
-        super().__init__(self.logger.info(self.message))
-        # Exit with non-zero status
-        sys.exit(1)
-
-
-class ZOQLQueryError(Error):
-    """Base class for ZOQL EXPORT query errors"""
-
-    def __init__(self, response: requests.Response = None):
-        if response:
-            self.response = response.json()
-            self.error_msg = self.response["data"]["errorMessage"]
-            self.query = self.response["data"]["query"]
-            super().__init__(self.logger.error(f"{self.error_msg}, QUERY: {self.query}"))
-        # Exit with non-zero status
-        sys.exit(1)
-
-
-class ZOQLQueryFailed(ZOQLQueryError):
-    """Failed to execute the query on the server side"""
-
-
-class ZOQLQueryFieldCannotResolveCursor(Error):
-    """
-    Failed to execute the query on the server side because a certain field could not be resolved.
-    This exception is used to switch the default cursor_field inside the query.
-    """
-
-    def __init__(self, message: str = "Cursor 'UpdatedDate' is not available. Switching cursor to 'CreatedDate'"):
-        super().__init__(self.logger.info(message))
-
-
-class ZOQLQueryFieldCannotResolveAltCursor(Error):
-    """
-    Failed to execute the query on the server side because a certain field could not be resolved.
-    This exception is used to switch the default cursor_field inside the query.
-    """
-
-    def __init__(self, message: str = "Cursor 'CreatedDate' is not available. Fetching whole object"):
-        super().__init__(self.logger.info(message))
-
-
-class ZOQLQueryCannotProcessObject(Error):
-    """
-    This error is raised when the user doesn't have the right permissions to read a certain Zuora Object,
-    or the object cannot be read for technical reasons; we receive something like a 'failed to process object' msg.
-    We try to skip reading this stream, return [] as output, and continue to read the other streams.
-    """
-
-    def __init__(
-        self,
-        message: str = "The stream cannot be processed, check Zuora Object's Permissions / Subscription Plan / API User Permissions, etc. This warning is not critical, and could be ignored.",
-    ):
-        super().__init__(self.logger.warn(message))
-        pass
diff --git a/airbyte-integrations/connectors/source-zuora/source_zuora/zuora_excluded_streams.py b/airbyte-integrations/connectors/source-zuora/source_zuora/zuora_excluded_streams.py
deleted file mode 100644
index 4b65bb1fffc3..000000000000
--- a/airbyte-integrations/connectors/source-zuora/source_zuora/zuora_excluded_streams.py
+++ /dev/null
@@ -1,16 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
-
-
-from typing import List
-
-"""
-This list holds the Zuora Object names (API object names) that could not be processed, or that
-play a service role for other objects and don't hold actual data when called via the API.
-Extend this list if needed.
-"""
-
-ZUORA_EXCLUDED_STREAMS: List = [
-    "aggregatedataqueryslowdata",
-]
diff --git a/airbyte-integrations/connectors/source-zuora/unit_tests/unit_test.py b/airbyte-integrations/connectors/source-zuora/unit_tests/unit_test.py
deleted file mode 100644
index 5bda1c50d4ad..000000000000
--- a/airbyte-integrations/connectors/source-zuora/unit_tests/unit_test.py
+++ /dev/null
@@ -1,8 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-# - - -def test_example(): - """Example of unit test""" - pass From 358a5005df3dfb71072ea5e28bcf684aa15c9cdf Mon Sep 17 00:00:00 2001 From: evantahler Date: Fri, 1 Mar 2024 10:24:00 -0800 Subject: [PATCH 053/172] source-recurly is archived - remove from OSS registry --- airbyte-integrations/connectors/source-recurly/metadata.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airbyte-integrations/connectors/source-recurly/metadata.yaml b/airbyte-integrations/connectors/source-recurly/metadata.yaml index ba3b93c18dfc..9cbaf180675e 100644 --- a/airbyte-integrations/connectors/source-recurly/metadata.yaml +++ b/airbyte-integrations/connectors/source-recurly/metadata.yaml @@ -18,7 +18,7 @@ data: cloud: enabled: false oss: - enabled: true + enabled: false releaseStage: alpha remoteRegistries: pypi: From 7277fc5170caf35005ee08324bbdd7af245993b2 Mon Sep 17 00:00:00 2001 From: Gireesh Sreepathi Date: Fri, 1 Mar 2024 11:03:05 -0800 Subject: [PATCH 054/172] Use a separate concurrency group for Java CDK GH workflow. (#35758) --- .github/workflows/publish-java-cdk-command.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish-java-cdk-command.yml b/.github/workflows/publish-java-cdk-command.yml index 878f7273faf5..b43a7224aa7c 100644 --- a/.github/workflows/publish-java-cdk-command.yml +++ b/.github/workflows/publish-java-cdk-command.yml @@ -35,7 +35,7 @@ on: required: false concurrency: - group: publish-airbyte-cdk + group: publish-java-cdk cancel-in-progress: false env: From 4efc065e86e7556ce88c2ac3e7fb2ba7cbdbd0bd Mon Sep 17 00:00:00 2001 From: Edward Gao Date: Fri, 1 Mar 2024 11:04:24 -0800 Subject: [PATCH 055/172] DV2 destinations: Build DestinationState / Migration framework (#35303) Signed-off-by: Gireesh Sreepathi Co-authored-by: Gireesh Sreepathi --- airbyte-cdk/java/airbyte-cdk/README.md | 5 +- .../src/main/resources/version.properties | 2 +- .../jdbc/AbstractJdbcDestination.java | 20 +- .../JdbcDestinationHandler.java | 159 ++++++- .../jdbc/AbstractJdbcDestinationTest.java | 2 +- .../JdbcSqlGeneratorIntegrationTest.java | 4 +- .../BaseDestinationV1V2Migrator.java | 4 +- .../typing_deduping/DefaultTyperDeduper.java | 89 ++-- .../typing_deduping/DestinationHandler.java | 13 +- .../DestinationInitialState.java | 23 - .../DestinationInitialStateImpl.java | 14 - .../DestinationInitialStatus.kt | 12 + .../DestinationV1V2Migrator.java | 2 +- ...eState.java => InitialRawTableStatus.java} | 2 +- .../NoOpDestinationV1V2Migrator.java | 2 +- .../NoOpTyperDeduperWithV1V2Migrations.java | 38 +- .../TypeAndDedupeTransaction.java | 4 +- .../typing_deduping/TyperDeduperUtil.kt | 139 ++++-- .../typing_deduping/V2TableMigrator.java | 4 + .../typing_deduping/migrators/Migration.kt | 46 ++ .../migrators/MinimumDestinationState.kt | 43 ++ .../DefaultTyperDeduperTest.java | 414 ++++++++++++++++-- .../DestinationV1V2MigratorTest.java | 2 +- .../BaseSqlGeneratorIntegrationTest.java | 87 ++-- 24 files changed, 908 insertions(+), 222 deletions(-) delete mode 100644 airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationInitialState.java delete mode 100644 airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationInitialStateImpl.java create mode 100644 airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationInitialStatus.kt rename 
airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/{InitialRawTableState.java => InitialRawTableStatus.java} (58%)
 create mode 100644 airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/migrators/Migration.kt
 create mode 100644 airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/migrators/MinimumDestinationState.kt

diff --git a/airbyte-cdk/java/airbyte-cdk/README.md b/airbyte-cdk/java/airbyte-cdk/README.md
index 8d941ed946f2..bcfd9ebb4524 100644
--- a/airbyte-cdk/java/airbyte-cdk/README.md
+++ b/airbyte-cdk/java/airbyte-cdk/README.md
@@ -166,9 +166,10 @@ MavenLocal debugging steps:

 | Version | Date | Pull Request | Subject |
 |:--------|:-----------|:-----------------------------------------------------------|:-----------------------------------------------------------------------------|
-| 0.23.9 | 2024-03-01 | [\#35720](https://github.com/airbytehq/airbyte/pull/35720) | various improvements for tests TestDataHolder |
+| 0.23.10 | 2024-03-01 | [\#35303](https://github.com/airbytehq/airbyte/pull/35303) | DV2 destinations: build DestinationState / Migration framework |
+| 0.23.9 | 2024-03-01 | [\#35720](https://github.com/airbytehq/airbyte/pull/35720) | various improvements for tests TestDataHolder |
 | 0.23.8 | 2024-02-28 | [\#35529](https://github.com/airbytehq/airbyte/pull/35529) | Refactor on state iterators |
 | 0.23.7 | 2024-02-28 | [\#35376](https://github.com/airbytehq/airbyte/pull/35376) | Extract typereduper migrations to separte method |
 | 0.23.6 | 2024-02-26 | [\#35647](https://github.com/airbytehq/airbyte/pull/35647) | Add a getNamespace into TestDataHolder |
 | 0.23.5 | 2024-02-26 | [\#35512](https://github.com/airbytehq/airbyte/pull/35512) | Remove @DisplayName from all CDK tests. |
 | 0.23.4 | 2024-02-26 | [\#35507](https://github.com/airbytehq/airbyte/pull/35507) | Add more logs into TestDatabase.
| diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties index 586cdf0b8210..c02c9de6a0a6 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties @@ -1 +1 @@ -version=0.23.9 +version=0.23.10 diff --git a/airbyte-cdk/java/airbyte-cdk/db-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/AbstractJdbcDestination.java b/airbyte-cdk/java/airbyte-cdk/db-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/AbstractJdbcDestination.java index b12fd56c93f7..86935bbe2937 100644 --- a/airbyte-cdk/java/airbyte-cdk/db-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/AbstractJdbcDestination.java +++ b/airbyte-cdk/java/airbyte-cdk/db-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/AbstractJdbcDestination.java @@ -4,6 +4,7 @@ package io.airbyte.cdk.integrations.destination.jdbc; +import static io.airbyte.cdk.integrations.base.JavaBaseConstants.DEFAULT_AIRBYTE_INTERNAL_NAMESPACE; import static io.airbyte.cdk.integrations.base.errors.messages.ErrorMessage.getErrorMessage; import static io.airbyte.cdk.integrations.util.ConfiguredCatalogUtilKt.addDefaultNamespaceToStreams; @@ -17,7 +18,6 @@ import io.airbyte.cdk.integrations.base.AirbyteMessageConsumer; import io.airbyte.cdk.integrations.base.AirbyteTraceMessageUtility; import io.airbyte.cdk.integrations.base.Destination; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; import io.airbyte.cdk.integrations.base.SerializedAirbyteMessageConsumer; import io.airbyte.cdk.integrations.base.TypingAndDedupingFlag; import io.airbyte.cdk.integrations.destination.NamingConventionTransformer; @@ -37,6 +37,7 @@ import io.airbyte.integrations.base.destination.typing_deduping.NoopV2TableMigrator; import io.airbyte.integrations.base.destination.typing_deduping.ParsedCatalog; import io.airbyte.integrations.base.destination.typing_deduping.TyperDeduper; +import io.airbyte.integrations.base.destination.typing_deduping.migrators.MinimumDestinationState; import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; import io.airbyte.protocol.models.v0.AirbyteConnectionStatus.Status; import io.airbyte.protocol.models.v0.AirbyteMessage; @@ -45,6 +46,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Optional; import java.util.UUID; import java.util.function.Consumer; import javax.sql.DataSource; @@ -93,7 +95,7 @@ public AirbyteConnectionStatus check(final JsonNode config) { attemptTableOperations(outputSchema, database, namingResolver, sqlOperations, false); if (TypingAndDedupingFlag.isDestinationV2()) { final var v2RawSchema = namingResolver.getIdentifier(TypingAndDedupingFlag.getRawNamespaceOverride(RAW_SCHEMA_OVERRIDE) - .orElse(JavaBaseConstants.DEFAULT_AIRBYTE_INTERNAL_NAMESPACE)); + .orElse(DEFAULT_AIRBYTE_INTERNAL_NAMESPACE)); attemptTableOperations(v2RawSchema, database, namingResolver, sqlOperations, false); destinationSpecificTableOperations(database); } @@ -252,7 +254,9 @@ private void assertCustomParametersDontOverwriteDefaultParameters(final Map getDestinationHandler(final String databaseName, + final JdbcDatabase database, + final String rawTableSchema); /** * "database" key at root of the config json, for any other variants in config, override this @@ -309,21 +313,23 @@ public SerializedAirbyteMessageConsumer 
getSerializedMessageConsumer(final JsonN */ private TyperDeduper getV2TyperDeduper(final JsonNode config, final ConfiguredAirbyteCatalog catalog, final JdbcDatabase database) { final JdbcSqlGenerator sqlGenerator = getSqlGenerator(); - final ParsedCatalog parsedCatalog = TypingAndDedupingFlag.getRawNamespaceOverride(RAW_SCHEMA_OVERRIDE) + Optional rawNamespaceOverride = TypingAndDedupingFlag.getRawNamespaceOverride(RAW_SCHEMA_OVERRIDE); + final ParsedCatalog parsedCatalog = rawNamespaceOverride .map(override -> new CatalogParser(sqlGenerator, override)) .orElse(new CatalogParser(sqlGenerator)) .parseCatalog(catalog); final String databaseName = getDatabaseName(config); final var migrator = new JdbcV1V2Migrator(namingResolver, database, databaseName); final NoopV2TableMigrator v2TableMigrator = new NoopV2TableMigrator(); - final DestinationHandler destinationHandler = getDestinationHandler(databaseName, database); + final DestinationHandler destinationHandler = + getDestinationHandler(databaseName, database, rawNamespaceOverride.orElse(DEFAULT_AIRBYTE_INTERNAL_NAMESPACE)); final boolean disableTypeDedupe = config.has(DISABLE_TYPE_DEDUPE) && config.get(DISABLE_TYPE_DEDUPE).asBoolean(false); final TyperDeduper typerDeduper; if (disableTypeDedupe) { - typerDeduper = new NoOpTyperDeduperWithV1V2Migrations(sqlGenerator, destinationHandler, parsedCatalog, migrator, v2TableMigrator); + typerDeduper = new NoOpTyperDeduperWithV1V2Migrations<>(sqlGenerator, destinationHandler, parsedCatalog, migrator, v2TableMigrator, List.of()); } else { typerDeduper = - new DefaultTyperDeduper(sqlGenerator, destinationHandler, parsedCatalog, migrator, v2TableMigrator); + new DefaultTyperDeduper<>(sqlGenerator, destinationHandler, parsedCatalog, migrator, v2TableMigrator, List.of()); } return typerDeduper; } diff --git a/airbyte-cdk/java/airbyte-cdk/db-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/typing_deduping/JdbcDestinationHandler.java b/airbyte-cdk/java/airbyte-cdk/db-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/typing_deduping/JdbcDestinationHandler.java index 1aa0b687f8c5..f68595dd6ecf 100644 --- a/airbyte-cdk/java/airbyte-cdk/db-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/typing_deduping/JdbcDestinationHandler.java +++ b/airbyte-cdk/java/airbyte-cdk/db-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/typing_deduping/JdbcDestinationHandler.java @@ -8,12 +8,15 @@ import static io.airbyte.cdk.integrations.base.JavaBaseConstants.COLUMN_NAME_AB_META; import static io.airbyte.cdk.integrations.base.JavaBaseConstants.COLUMN_NAME_AB_RAW_ID; import static io.airbyte.cdk.integrations.base.JavaBaseConstants.V2_FINAL_TABLE_METADATA_COLUMNS; +import static java.util.stream.Collectors.toMap; import static org.jooq.impl.DSL.exists; import static org.jooq.impl.DSL.field; import static org.jooq.impl.DSL.name; -import static org.jooq.impl.DSL.select; +import static org.jooq.impl.DSL.quotedName; import static org.jooq.impl.DSL.selectOne; +import static org.jooq.impl.DSL.table; +import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.cdk.db.jdbc.JdbcDatabase; import io.airbyte.cdk.integrations.destination.jdbc.ColumnDefinition; import io.airbyte.cdk.integrations.destination.jdbc.TableDefinition; @@ -21,23 +24,26 @@ import io.airbyte.commons.concurrency.CompletableFutures; import io.airbyte.commons.exceptions.SQLRuntimeException; import io.airbyte.commons.functional.Either; +import io.airbyte.commons.json.Jsons; 
import io.airbyte.integrations.base.destination.typing_deduping.AirbyteProtocolType; import io.airbyte.integrations.base.destination.typing_deduping.AirbyteType; import io.airbyte.integrations.base.destination.typing_deduping.DestinationHandler; -import io.airbyte.integrations.base.destination.typing_deduping.DestinationInitialState; -import io.airbyte.integrations.base.destination.typing_deduping.DestinationInitialStateImpl; -import io.airbyte.integrations.base.destination.typing_deduping.InitialRawTableState; +import io.airbyte.integrations.base.destination.typing_deduping.DestinationInitialStatus; +import io.airbyte.integrations.base.destination.typing_deduping.InitialRawTableStatus; import io.airbyte.integrations.base.destination.typing_deduping.Sql; import io.airbyte.integrations.base.destination.typing_deduping.StreamConfig; import io.airbyte.integrations.base.destination.typing_deduping.StreamId; import io.airbyte.integrations.base.destination.typing_deduping.Struct; +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Timestamp; import java.time.Instant; +import java.time.OffsetDateTime; import java.time.temporal.ChronoUnit; import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.UUID; @@ -45,23 +51,45 @@ import java.util.concurrent.CompletionStage; import java.util.stream.Stream; import lombok.extern.slf4j.Slf4j; +import org.jetbrains.annotations.NotNull; +import org.jooq.Condition; +import org.jooq.DSLContext; +import org.jooq.InsertValuesStep4; +import org.jooq.Record; +import org.jooq.SQLDialect; import org.jooq.conf.ParamType; import org.jooq.impl.DSL; +import org.jooq.impl.SQLDataType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @Slf4j -public abstract class JdbcDestinationHandler implements DestinationHandler { +public abstract class JdbcDestinationHandler implements DestinationHandler { private static final Logger LOGGER = LoggerFactory.getLogger(JdbcDestinationHandler.class); + private static final String DESTINATION_STATE_TABLE_NAME = "_airbyte_destination_state"; + private static final String DESTINATION_STATE_TABLE_COLUMN_NAME = "name"; + private static final String DESTINATION_STATE_TABLE_COLUMN_NAMESPACE = "namespace"; + private static final String DESTINATION_STATE_TABLE_COLUMN_STATE = "destination_state"; + private static final String DESTINATION_STATE_TABLE_COLUMN_UPDATED_AT = "updated_at"; protected final String databaseName; protected final JdbcDatabase jdbcDatabase; + protected final String rawTableSchemaName; + private final SQLDialect dialect; public JdbcDestinationHandler(final String databaseName, - final JdbcDatabase jdbcDatabase) { + final JdbcDatabase jdbcDatabase, + final String rawTableSchemaName, + final SQLDialect dialect) { this.databaseName = databaseName; this.jdbcDatabase = jdbcDatabase; + this.rawTableSchemaName = rawTableSchemaName; + this.dialect = dialect; + } + + protected DSLContext getDslContext() { + return DSL.using(dialect); } private Optional findExistingTable(final StreamId id) throws Exception { @@ -70,15 +98,15 @@ private Optional findExistingTable(final StreamId id) throws Ex private boolean isFinalTableEmpty(final StreamId id) throws Exception { return !jdbcDatabase.queryBoolean( - select( + getDslContext().select( field(exists( selectOne() .from(name(id.finalNamespace(), id.finalName())) .limit(1)))) - .getSQL(ParamType.INLINED)); + 
.getSQL(ParamType.INLINED)); } - private InitialRawTableState getInitialRawTableState(final StreamId id) throws Exception { + private InitialRawTableStatus getInitialRawTableState(final StreamId id) throws Exception { boolean tableExists = jdbcDatabase.executeMetadataQuery(dbmetadata -> { LOGGER.info("Retrieving table from Db metadata: {} {} {}", databaseName, id.rawNamespace(), id.rawName()); try (final ResultSet table = dbmetadata.getTables(databaseName, id.rawNamespace(), id.rawName(), null)) { @@ -91,7 +119,7 @@ private InitialRawTableState getInitialRawTableState(final StreamId id) throws E if (!tableExists) { // There's no raw table at all. Therefore there are no unprocessed raw records, and this sync // should not filter raw records by timestamp. - return new InitialRawTableState(false, Optional.empty()); + return new InitialRawTableStatus(false, false, Optional.empty()); } // And use two explicit queries because COALESCE might not short-circuit evaluation. // This first query tries to find the oldest raw record with loaded_at = NULL. @@ -99,7 +127,7 @@ private InitialRawTableState getInitialRawTableState(final StreamId id) throws E // but it's also the only method in the JdbcDatabase interface to return non-string/int types try (final Stream timestampStream = jdbcDatabase.unsafeQuery( conn -> conn.prepareStatement( - select(field("MIN(_airbyte_extracted_at)").as("min_timestamp")) + getDslContext().select(field("MIN(_airbyte_extracted_at)").as("min_timestamp")) .from(name(id.rawNamespace(), id.rawName())) .where(DSL.condition("_airbyte_loaded_at IS NULL")) .getSQL()), @@ -111,20 +139,20 @@ record -> record.getTimestamp("min_timestamp"))) { final Optional ts = minUnloadedTimestamp .map(Timestamp::toInstant) .map(i -> i.minus(1, ChronoUnit.SECONDS)); - return new InitialRawTableState(true, ts); + return new InitialRawTableStatus(true, true, ts); } } // If there are no unloaded raw records, then we can safely skip all existing raw records. // This second query just finds the newest raw record. try (final Stream timestampStream = jdbcDatabase.unsafeQuery( conn -> conn.prepareStatement( - select(field("MAX(_airbyte_extracted_at)").as("min_timestamp")) + getDslContext().select(field("MAX(_airbyte_extracted_at)").as("min_timestamp")) .from(name(id.rawNamespace(), id.rawName())) .getSQL()), record -> record.getTimestamp("min_timestamp"))) { // Filter for nonNull values in case the query returned NULL (i.e. no raw records at all). 
final Optional minUnloadedTimestamp = timestampStream.filter(Objects::nonNull).findFirst(); - return new InitialRawTableState(false, minUnloadedTimestamp.map(Timestamp::toInstant)); + return new InitialRawTableStatus(true, false, minUnloadedTimestamp.map(Timestamp::toInstant)); } } @@ -149,16 +177,60 @@ public void execute(final Sql sql) throws Exception { } @Override - public List gatherInitialState(List streamConfigs) throws Exception { - final List> initialStates = streamConfigs.stream() - .map(this::retrieveState) + public List> gatherInitialState(List streamConfigs) throws Exception { + // Use stream n/ns pair because we don't want to build the full StreamId here + CompletableFuture> destinationStatesFuture = CompletableFuture.supplyAsync(() -> { + try { + return getAllDestinationStates(); + } catch (SQLException e) { + throw new RuntimeException(e); + } + }); + + final List>> initialStates = streamConfigs.stream() + .map(streamConfig -> retrieveState(destinationStatesFuture, streamConfig)) .toList(); - final List> states = CompletableFutures.allOf(initialStates).toCompletableFuture().join(); + final List>> states = + CompletableFutures.allOf(initialStates).toCompletableFuture().join(); return ConnectorExceptionUtil.getResultsOrLogAndThrowFirst("Failed to retrieve initial state", states); } - private CompletionStage retrieveState(final StreamConfig streamConfig) { - return CompletableFuture.supplyAsync(() -> { + @NotNull + protected Map getAllDestinationStates() throws SQLException { + // Guarantee the table exists. + jdbcDatabase.execute( + getDslContext().createTableIfNotExists(quotedName(rawTableSchemaName, DESTINATION_STATE_TABLE_NAME)) + .column(quotedName(DESTINATION_STATE_TABLE_COLUMN_NAME), SQLDataType.VARCHAR) + .column(quotedName(DESTINATION_STATE_TABLE_COLUMN_NAMESPACE), SQLDataType.VARCHAR) + // Just use a string type, even if the destination has a json type. + // We're never going to query this column in a fancy way - all our processing can happen + // client-side. + .column(quotedName(DESTINATION_STATE_TABLE_COLUMN_STATE), SQLDataType.VARCHAR) + // Add an updated_at field. We don't actually need it yet, but it can't hurt! + .column(quotedName(DESTINATION_STATE_TABLE_COLUMN_UPDATED_AT), SQLDataType.TIMESTAMPWITHTIMEZONE) + .getSQL(ParamType.INLINED)); + // Fetch all records from it. We _could_ filter down to just our streams... but meh. This is small + // data. + return jdbcDatabase.queryJsons( + getDslContext().select( + field(quotedName(DESTINATION_STATE_TABLE_COLUMN_NAME)), + field(quotedName(DESTINATION_STATE_TABLE_COLUMN_NAMESPACE)), + field(quotedName(DESTINATION_STATE_TABLE_COLUMN_STATE))).from(quotedName(rawTableSchemaName, DESTINATION_STATE_TABLE_NAME)) + .getSQL()) + .stream().collect(toMap( + record -> { + final JsonNode nameNode = record.get(DESTINATION_STATE_TABLE_COLUMN_NAME); + final JsonNode namespaceNode = record.get(DESTINATION_STATE_TABLE_COLUMN_NAMESPACE); + return new AirbyteStreamNameNamespacePair( + nameNode != null ? nameNode.asText() : null, + namespaceNode != null ? 
namespaceNode.asText() : null); + }, + record -> toDestinationState(Jsons.deserialize(record.get(DESTINATION_STATE_TABLE_COLUMN_STATE).asText())))); + } + + private CompletionStage> retrieveState(final CompletableFuture> destinationStatesFuture, + final StreamConfig streamConfig) { + return destinationStatesFuture.thenApply(destinationStates -> { try { final Optional finalTableDefinition = findExistingTable(streamConfig.id()); final boolean isSchemaMismatch; @@ -172,9 +244,10 @@ private CompletionStage retrieveState(final StreamConfi isSchemaMismatch = false; isFinalTableEmpty = true; } - final InitialRawTableState initialRawTableState = getInitialRawTableState(streamConfig.id()); - return new DestinationInitialStateImpl(streamConfig, finalTableDefinition.isPresent(), initialRawTableState, - isSchemaMismatch, isFinalTableEmpty); + final InitialRawTableStatus initialRawTableState = getInitialRawTableState(streamConfig.id()); + DestinationState destinationState = destinationStates.getOrDefault(streamConfig.id().asPair(), toDestinationState(Jsons.emptyObject())); + return new DestinationInitialStatus<>(streamConfig, finalTableDefinition.isPresent(), initialRawTableState, + isSchemaMismatch, isFinalTableEmpty, destinationState); } catch (Exception e) { throw new RuntimeException(e); } @@ -257,6 +330,44 @@ protected boolean existingSchemaMatchesStreamConfig(final StreamConfig stream, f return actualColumns.equals(intendedColumns); } + @Override + public void commitDestinationStates(final Map destinationStates) throws Exception { + if (destinationStates.isEmpty()) { + return; + } + + // Delete all state records where the stream name+namespace match one of our states + String deleteStates = getDslContext().deleteFrom(table(quotedName(rawTableSchemaName, DESTINATION_STATE_TABLE_NAME))) + .where(destinationStates.keySet().stream() + .map(streamId -> field(quotedName(DESTINATION_STATE_TABLE_COLUMN_NAME)).eq(streamId.originalName()) + .and(field(quotedName(DESTINATION_STATE_TABLE_COLUMN_NAMESPACE)).eq(streamId.originalNamespace()))) + .reduce( + DSL.falseCondition(), + Condition::or)) + .getSQL(ParamType.INLINED); + + // Reinsert all of our states + @NotNull + InsertValuesStep4 insertStatesStep = + getDslContext().insertInto(table(quotedName(rawTableSchemaName, DESTINATION_STATE_TABLE_NAME))) + .columns( + field(quotedName(DESTINATION_STATE_TABLE_COLUMN_NAME), String.class), + field(quotedName(DESTINATION_STATE_TABLE_COLUMN_NAMESPACE), String.class), + field(quotedName(DESTINATION_STATE_TABLE_COLUMN_STATE), String.class), + // This field is a timestamptz, but it's easier to just insert a string + // and assume the destination can cast it appropriately. + // Destination-specific timestamp syntax is weird and annoying. 
+ field(quotedName(DESTINATION_STATE_TABLE_COLUMN_UPDATED_AT), String.class)); + for (Map.Entry destinationState : destinationStates.entrySet()) { + final StreamId streamId = destinationState.getKey(); + final String stateJson = Jsons.serialize(destinationState.getValue()); + insertStatesStep = insertStatesStep.values(streamId.originalName(), streamId.originalNamespace(), stateJson, OffsetDateTime.now().toString()); + } + String insertStates = insertStatesStep.getSQL(ParamType.INLINED); + + jdbcDatabase.executeWithinTransaction(List.of(deleteStates, insertStates)); + } + /** * Convert to the TYPE_NAME retrieved from {@link java.sql.DatabaseMetaData#getColumns} * @@ -265,4 +376,6 @@ protected boolean existingSchemaMatchesStreamConfig(final StreamConfig stream, f */ protected abstract String toJdbcTypeName(final AirbyteType airbyteType); + protected abstract DestinationState toDestinationState(final JsonNode json); + } diff --git a/airbyte-cdk/java/airbyte-cdk/db-destinations/src/test/java/io/airbyte/cdk/integrations/destination/jdbc/AbstractJdbcDestinationTest.java b/airbyte-cdk/java/airbyte-cdk/db-destinations/src/test/java/io/airbyte/cdk/integrations/destination/jdbc/AbstractJdbcDestinationTest.java index a5a07903ad48..f92102c79119 100644 --- a/airbyte-cdk/java/airbyte-cdk/db-destinations/src/test/java/io/airbyte/cdk/integrations/destination/jdbc/AbstractJdbcDestinationTest.java +++ b/airbyte-cdk/java/airbyte-cdk/db-destinations/src/test/java/io/airbyte/cdk/integrations/destination/jdbc/AbstractJdbcDestinationTest.java @@ -142,7 +142,7 @@ protected JdbcSqlGenerator getSqlGenerator() { } @Override - protected JdbcDestinationHandler getDestinationHandler(String databaseName, JdbcDatabase database) { + protected JdbcDestinationHandler getDestinationHandler(String databaseName, JdbcDatabase database, String rawTableSchema) { return null; } diff --git a/airbyte-cdk/java/airbyte-cdk/db-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/destination/typing_deduping/JdbcSqlGeneratorIntegrationTest.java b/airbyte-cdk/java/airbyte-cdk/db-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/destination/typing_deduping/JdbcSqlGeneratorIntegrationTest.java index df4ca42e004b..2688b6d6e42b 100644 --- a/airbyte-cdk/java/airbyte-cdk/db-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/destination/typing_deduping/JdbcSqlGeneratorIntegrationTest.java +++ b/airbyte-cdk/java/airbyte-cdk/db-destinations/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/destination/typing_deduping/JdbcSqlGeneratorIntegrationTest.java @@ -22,6 +22,7 @@ import io.airbyte.integrations.base.destination.typing_deduping.AirbyteProtocolType; import io.airbyte.integrations.base.destination.typing_deduping.BaseSqlGeneratorIntegrationTest; import io.airbyte.integrations.base.destination.typing_deduping.StreamId; +import io.airbyte.integrations.base.destination.typing_deduping.migrators.MinimumDestinationState; import java.sql.SQLException; import java.util.Arrays; import java.util.List; @@ -36,7 +37,8 @@ import org.jooq.impl.DSL; import org.jooq.impl.SQLDataType; -public abstract class JdbcSqlGeneratorIntegrationTest extends BaseSqlGeneratorIntegrationTest { +public abstract class JdbcSqlGeneratorIntegrationTest + extends BaseSqlGeneratorIntegrationTest { protected abstract JdbcDatabase getDatabase(); diff --git 
a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/BaseDestinationV1V2Migrator.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/BaseDestinationV1V2Migrator.java index 95c5841241b7..1f33b9074952 100644 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/BaseDestinationV1V2Migrator.java +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/BaseDestinationV1V2Migrator.java @@ -20,7 +20,7 @@ public abstract class BaseDestinationV1V2Migrator implem @Override public void migrateIfNecessary( final SqlGenerator sqlGenerator, - final DestinationHandler destinationHandler, + final DestinationHandler destinationHandler, final StreamConfig streamConfig) throws Exception { LOGGER.info("Assessing whether migration is necessary for stream {}", streamConfig.id().finalName()); @@ -60,7 +60,7 @@ protected boolean shouldMigrate(final StreamConfig streamConfig) throws Exceptio * @param streamConfig the stream to migrate the raw table of */ public void migrate(final SqlGenerator sqlGenerator, - final DestinationHandler destinationHandler, + final DestinationHandler destinationHandler, final StreamConfig streamConfig) throws TableNotMigratedException { final var namespacedTableName = convertToV1RawName(streamConfig); diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DefaultTyperDeduper.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DefaultTyperDeduper.java index 2dbd9f1e8498..9becbb9aa2b9 100644 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DefaultTyperDeduper.java +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DefaultTyperDeduper.java @@ -6,13 +6,16 @@ import static io.airbyte.cdk.integrations.base.IntegrationRunner.TYPE_AND_DEDUPE_THREAD_NAME; import static io.airbyte.cdk.integrations.util.ConnectorExceptionUtil.getResultsOrLogAndThrowFirst; -import static io.airbyte.integrations.base.destination.typing_deduping.FutureUtils.*; +import static io.airbyte.integrations.base.destination.typing_deduping.FutureUtils.getCountOfTypeAndDedupeThreads; import static io.airbyte.integrations.base.destination.typing_deduping.FutureUtils.reduceExceptions; import static java.util.Collections.singleton; +import static java.util.stream.Collectors.toMap; import io.airbyte.cdk.integrations.destination.StreamSyncSummary; import io.airbyte.commons.concurrency.CompletableFutures; import io.airbyte.commons.functional.Either; +import io.airbyte.integrations.base.destination.typing_deduping.migrators.Migration; +import io.airbyte.integrations.base.destination.typing_deduping.migrators.MinimumDestinationState; import io.airbyte.protocol.models.v0.DestinationSyncMode; import io.airbyte.protocol.models.v0.StreamDescriptor; import java.util.HashSet; @@ -49,7 +52,7 @@ * Note that #prepareTables() initializes some internal state. The other methods will throw an * exception if that method was not called. 
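* (A hedged summary of the expected call order, as exercised by the unit tests: prepareSchemasAndRunMigrations() first, then prepareFinalTables(), then the per-stream typeAndDedupe tasks.)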
*/ -public class DefaultTyperDeduper implements TyperDeduper { +public class DefaultTyperDeduper implements TyperDeduper { private static final Logger LOGGER = LoggerFactory.getLogger(TyperDeduper.class); @@ -57,14 +60,15 @@ public class DefaultTyperDeduper implements TyperDeduper { private static final String TMP_OVERWRITE_TABLE_SUFFIX = "_airbyte_tmp"; private final SqlGenerator sqlGenerator; - private final DestinationHandler destinationHandler; + private final DestinationHandler destinationHandler; private final DestinationV1V2Migrator v1V2Migrator; private final V2TableMigrator v2TableMigrator; + private final List> migrations; private final ParsedCatalog parsedCatalog; private Set overwriteStreamsWithTmpTable; private final Set> streamsWithSuccessfulSetup; - private final Map initialRawTableStateByStream; + private final Map initialRawTableStateByStream; // We only want to run a single instance of T+D per stream at a time. These objects are used for // synchronization per stream. // Use a read-write lock because we need the same semantics: @@ -77,17 +81,20 @@ public class DefaultTyperDeduper implements TyperDeduper { private final Map internalTdLocks; private final ExecutorService executorService; + private List> destinationInitialStatuses; public DefaultTyperDeduper(final SqlGenerator sqlGenerator, - final DestinationHandler destinationHandler, + final DestinationHandler destinationHandler, final ParsedCatalog parsedCatalog, final DestinationV1V2Migrator v1V2Migrator, - final V2TableMigrator v2TableMigrator) { + final V2TableMigrator v2TableMigrator, + final List> migrations) { this.sqlGenerator = sqlGenerator; this.destinationHandler = destinationHandler; this.parsedCatalog = parsedCatalog; this.v1V2Migrator = v1V2Migrator; this.v2TableMigrator = v2TableMigrator; + this.migrations = migrations; this.initialRawTableStateByStream = new ConcurrentHashMap<>(); this.streamsWithSuccessfulSetup = ConcurrentHashMap.newKeySet(parsedCatalog.streams().size()); this.tdLocks = new ConcurrentHashMap<>(); @@ -96,20 +103,45 @@ public DefaultTyperDeduper(final SqlGenerator sqlGenerator, new BasicThreadFactory.Builder().namingPattern(TYPE_AND_DEDUPE_THREAD_NAME).build()); } - public DefaultTyperDeduper(final SqlGenerator sqlGenerator, - final DestinationHandler destinationHandler, + public DefaultTyperDeduper( + final SqlGenerator sqlGenerator, + final DestinationHandler destinationHandler, final ParsedCatalog parsedCatalog, - final DestinationV1V2Migrator v1V2Migrator) { - this(sqlGenerator, destinationHandler, parsedCatalog, v1V2Migrator, new NoopV2TableMigrator()); + final DestinationV1V2Migrator v1V2Migrator, + final List> migrations) { + this(sqlGenerator, destinationHandler, parsedCatalog, v1V2Migrator, new NoopV2TableMigrator(), migrations); } @Override - public void prepareSchemasAndRunMigrations() { + public void prepareSchemasAndRunMigrations() throws Exception { // Technically kind of weird to call this here, but it's the best place we have. // Ideally, we'd create just airbyte_internal here, and defer creating the final table schemas // until prepareFinalTables... but it doesn't really matter. 
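// (In the unit tests this renders, via MockSqlGenerator, as one "CREATE SCHEMA <namespace>" statement per distinct raw/final namespace; real SqlGenerator implementations are assumed to emit dialect-specific DDL.)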
TyperDeduperUtil.prepareSchemas(sqlGenerator, destinationHandler, parsedCatalog); - TyperDeduperUtil.executeRawTableMigrations(executorService, sqlGenerator, destinationHandler, v1V2Migrator, v2TableMigrator, parsedCatalog); + + TyperDeduperUtil.executeWeirdMigrations( + executorService, + sqlGenerator, + destinationHandler, + v1V2Migrator, + v2TableMigrator, + parsedCatalog); + + destinationInitialStatuses = TyperDeduperUtil.executeRawTableMigrations( + executorService, + destinationHandler, + migrations, + destinationHandler.gatherInitialState(parsedCatalog.streams())); + + // Commit our destination states immediately. + // Technically, migrations aren't done until we execute the soft reset. + // However, our state contains a needsSoftReset flag, so we can commit that we already executed the + // migration + // and even if we fail to run the soft reset in this sync, future syncs will see the soft reset flag + // and finish it for us. + destinationHandler.commitDestinationStates(destinationInitialStatuses.stream().collect(toMap( + state -> state.streamConfig().id(), + DestinationInitialStatus::destinationState))); } @Override @@ -120,25 +152,17 @@ public void prepareFinalTables() throws Exception { overwriteStreamsWithTmpTable = ConcurrentHashMap.newKeySet(); LOGGER.info("Preparing tables"); - final List initialStates = destinationHandler.gatherInitialState(parsedCatalog.streams()); final List> prepareTablesFutureResult = CompletableFutures.allOf( - initialStates.stream().map(this::prepareTablesFuture).toList()).toCompletableFuture().join(); + destinationInitialStatuses.stream().map(this::prepareTablesFuture).toList()).toCompletableFuture().join(); getResultsOrLogAndThrowFirst("The following exceptions were thrown attempting to prepare tables:\n", prepareTablesFutureResult); - } - private CompletionStage runMigrationsAsync(StreamConfig streamConfig) { - return CompletableFuture.runAsync(() -> { - try { - // Migrate the Raw Tables if this is the first v2 sync after a v1 sync - v1V2Migrator.migrateIfNecessary(sqlGenerator, destinationHandler, streamConfig); - v2TableMigrator.migrateIfNecessary(streamConfig); - } catch (Exception e) { - throw new RuntimeException(e); - } - }, this.executorService); + destinationHandler.commitDestinationStates(destinationInitialStatuses.stream().collect(toMap( + state -> state.streamConfig().id(), + // If we get here, then we've executed all soft resets. Force the soft reset flag to false. + state -> state.destinationState().withSoftReset(false)))); } - private CompletionStage prepareTablesFuture(final DestinationInitialState initialState) { + private CompletionStage prepareTablesFuture(final DestinationInitialStatus initialState) { // For each stream, make sure that its corresponding final table exists. // Also, for OVERWRITE streams, decide if we're writing directly to the final table, or into an // _airbyte_tmp table. @@ -160,9 +184,10 @@ private CompletionStage prepareTablesFuture(final DestinationInitialState LOGGER.info("Final Table for stream {} is empty and matches the expected v2 format, writing to table directly", stream.id().finalName()); } - - } else if (initialState.isSchemaMismatch()) { - // We're loading data directly into the existing table. Make sure it has the right schema. + } else if (initialState.isSchemaMismatch() || initialState.destinationState().needsSoftReset()) { + // We're loading data directly into the existing table. + // Make sure it has the right schema. 
+ // Also, if a raw table migration wants us to do a soft reset, do that here. TypeAndDedupeTransaction.executeSoftReset(sqlGenerator, destinationHandler, stream); } } else { @@ -171,7 +196,7 @@ private CompletionStage<Void> prepareTablesFuture(final DestinationInitialState destinationHandler.execute(sqlGenerator.createTable(stream, NO_SUFFIX, false)); } - initialRawTableStateByStream.put(stream.id(), initialState.initialRawTableState()); + initialRawTableStateByStream.put(stream.id(), initialState.initialRawTableStatus()); streamsWithSuccessfulSetup.add(Pair.of(stream.id().originalNamespace(), stream.id().originalName())); @@ -247,12 +272,12 @@ public CompletableFuture<Optional<Exception>> typeAndDedupeTask(final StreamConf final Lock externalLock = tdLocks.get(streamConfig.id()).writeLock(); externalLock.lock(); try { - final InitialRawTableState initialRawTableState = initialRawTableStateByStream.get(streamConfig.id()); + final InitialRawTableStatus initialRawTableStatus = initialRawTableStateByStream.get(streamConfig.id()); TypeAndDedupeTransaction.executeTypeAndDedupe( sqlGenerator, destinationHandler, streamConfig, - initialRawTableState.maxProcessedTimestamp(), + initialRawTableStatus.maxProcessedTimestamp(), getFinalTableSuffix(streamConfig.id())); } finally { LOGGER.info("Allowing other threads to proceed for {}.{}", originalNamespace, originalName); diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationHandler.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationHandler.java index f75f0fc9a040..9deec7bca0b1 100644 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationHandler.java +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationHandler.java @@ -5,11 +5,20 @@ package io.airbyte.integrations.base.destination.typing_deduping; import java.util.List; +import java.util.Map; -public interface DestinationHandler { +public interface DestinationHandler<DestinationState> { void execute(final Sql sql) throws Exception; - List<DestinationInitialState> gatherInitialState(List<StreamConfig> streamConfigs) throws Exception; + /** + * Fetch the current state of the destination for the given streams. This method MUST create the + * airbyte_internal.state table if it does not exist. This method MAY assume the airbyte_internal + * schema already exists (substitute the appropriate raw table schema if the user is overriding + * it). + */ + List<DestinationInitialStatus<DestinationState>> gatherInitialState(List<StreamConfig> streamConfigs) throws Exception; + + void commitDestinationStates(final Map<StreamId, DestinationState> destinationStates) throws Exception; } diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationInitialState.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationInitialState.java deleted file mode 100644 index 31aa25770790..000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationInitialState.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -/** - * Interface representing the initial state of a destination table.
- * - */ -public interface DestinationInitialState { - - StreamConfig streamConfig(); - - boolean isFinalTablePresent(); - - InitialRawTableState initialRawTableState(); - - boolean isSchemaMismatch(); - - boolean isFinalTableEmpty(); - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationInitialStateImpl.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationInitialStateImpl.java deleted file mode 100644 index e1fa315c703e..000000000000 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationInitialStateImpl.java +++ /dev/null @@ -1,14 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.base.destination.typing_deduping; - -public record DestinationInitialStateImpl(StreamConfig streamConfig, - boolean isFinalTablePresent, - InitialRawTableState initialRawTableState, - boolean isSchemaMismatch, - boolean isFinalTableEmpty) - implements DestinationInitialState { - -} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationInitialStatus.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationInitialStatus.kt new file mode 100644 index 000000000000..713a79c9a771 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationInitialStatus.kt @@ -0,0 +1,12 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+ */ +package io.airbyte.integrations.base.destination.typing_deduping + +@JvmRecord +data class DestinationInitialStatus(val streamConfig: StreamConfig, + val isFinalTablePresent: Boolean, + val initialRawTableStatus: InitialRawTableStatus, + val isSchemaMismatch: Boolean, + val isFinalTableEmpty: Boolean, + val destinationState: DestinationState) diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationV1V2Migrator.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationV1V2Migrator.java index 5e1e26e804f1..1a31d04b9a70 100644 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationV1V2Migrator.java +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationV1V2Migrator.java @@ -18,7 +18,7 @@ public interface DestinationV1V2Migrator { */ void migrateIfNecessary( final SqlGenerator sqlGenerator, - final DestinationHandler destinationHandler, + final DestinationHandler destinationHandler, final StreamConfig streamConfig) throws TableNotMigratedException, UnexpectedSchemaException, Exception; diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/InitialRawTableState.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/InitialRawTableStatus.java similarity index 58% rename from airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/InitialRawTableState.java rename to airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/InitialRawTableStatus.java index a037daebfa40..b39abf1cba29 100644 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/InitialRawTableState.java +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/InitialRawTableStatus.java @@ -7,6 +7,6 @@ import java.time.Instant; import java.util.Optional; -public record InitialRawTableState(boolean hasUnprocessedRecords, Optional maxProcessedTimestamp) { +public record InitialRawTableStatus(boolean rawTableExists, boolean hasUnprocessedRecords, Optional maxProcessedTimestamp) { } diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoOpDestinationV1V2Migrator.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoOpDestinationV1V2Migrator.java index f7f5b275768f..a32f214cec49 100644 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoOpDestinationV1V2Migrator.java +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoOpDestinationV1V2Migrator.java @@ -8,7 +8,7 @@ public class NoOpDestinationV1V2Migrator implements DestinationV1V2Migrator { @Override public void migrateIfNecessary(final SqlGenerator sqlGenerator, - final DestinationHandler destinationHandler, + final DestinationHandler destinationHandler, final StreamConfig streamConfig) throws 
TableNotMigratedException, UnexpectedSchemaException { // Do nothing diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoOpTyperDeduperWithV1V2Migrations.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoOpTyperDeduperWithV1V2Migrations.java index 1d06b9a49b61..3b7d35a3fbd8 100644 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoOpTyperDeduperWithV1V2Migrations.java +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoOpTyperDeduperWithV1V2Migrations.java @@ -6,9 +6,13 @@ import static io.airbyte.cdk.integrations.base.IntegrationRunner.TYPE_AND_DEDUPE_THREAD_NAME; import static io.airbyte.integrations.base.destination.typing_deduping.FutureUtils.getCountOfTypeAndDedupeThreads; +import static java.util.stream.Collectors.toMap; import io.airbyte.cdk.integrations.destination.StreamSyncSummary; +import io.airbyte.integrations.base.destination.typing_deduping.migrators.Migration; +import io.airbyte.integrations.base.destination.typing_deduping.migrators.MinimumDestinationState; import io.airbyte.protocol.models.v0.StreamDescriptor; +import java.util.List; import java.util.Map; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; @@ -22,33 +26,55 @@ * json->string migrations in the raw tables. */ @Slf4j -public class NoOpTyperDeduperWithV1V2Migrations implements TyperDeduper { +public class NoOpTyperDeduperWithV1V2Migrations implements TyperDeduper { private final DestinationV1V2Migrator v1V2Migrator; private final V2TableMigrator v2TableMigrator; + private final List> migrations; private final ExecutorService executorService; private final ParsedCatalog parsedCatalog; private final SqlGenerator sqlGenerator; - private final DestinationHandler destinationHandler; + private final DestinationHandler destinationHandler; public NoOpTyperDeduperWithV1V2Migrations(final SqlGenerator sqlGenerator, - final DestinationHandler destinationHandler, + final DestinationHandler destinationHandler, final ParsedCatalog parsedCatalog, final DestinationV1V2Migrator v1V2Migrator, - final V2TableMigrator v2TableMigrator) { + final V2TableMigrator v2TableMigrator, + final List> migrations) { this.sqlGenerator = sqlGenerator; this.destinationHandler = destinationHandler; this.parsedCatalog = parsedCatalog; this.v1V2Migrator = v1V2Migrator; this.v2TableMigrator = v2TableMigrator; + this.migrations = migrations; this.executorService = Executors.newFixedThreadPool(getCountOfTypeAndDedupeThreads(), new BasicThreadFactory.Builder().namingPattern(TYPE_AND_DEDUPE_THREAD_NAME).build()); } @Override - public void prepareSchemasAndRunMigrations() { + public void prepareSchemasAndRunMigrations() throws Exception { TyperDeduperUtil.prepareSchemas(sqlGenerator, destinationHandler, parsedCatalog); - TyperDeduperUtil.executeRawTableMigrations(executorService, sqlGenerator, destinationHandler, v1V2Migrator, v2TableMigrator, parsedCatalog); + + TyperDeduperUtil.executeWeirdMigrations( + executorService, + sqlGenerator, + destinationHandler, + v1V2Migrator, + v2TableMigrator, + parsedCatalog); + + List> destinationInitialStatuses = TyperDeduperUtil.executeRawTableMigrations( + executorService, + destinationHandler, + migrations, + destinationHandler.gatherInitialState(parsedCatalog.streams())); + + // Commit 
the updated destination states. + // We don't need to trigger any soft resets, because we don't have any final tables. + destinationHandler.commitDestinationStates(destinationInitialStatuses.stream().collect(toMap( + state -> state.streamConfig().id(), + DestinationInitialStatus::destinationState))); } @Override diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TypeAndDedupeTransaction.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TypeAndDedupeTransaction.java index f350c83e76ca..63bb3b6470c5 100644 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TypeAndDedupeTransaction.java +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TypeAndDedupeTransaction.java @@ -27,7 +27,7 @@ public class TypeAndDedupeTransaction { * @throws Exception if the safe query fails */ public static void executeTypeAndDedupe(final SqlGenerator sqlGenerator, - final DestinationHandler destinationHandler, + final DestinationHandler destinationHandler, final StreamConfig streamConfig, final Optional minExtractedAt, final String suffix) @@ -63,7 +63,7 @@ public static void executeTypeAndDedupe(final SqlGenerator sqlGenerator, * @throws Exception if the safe query fails */ public static void executeSoftReset(final SqlGenerator sqlGenerator, - final DestinationHandler destinationHandler, + final DestinationHandler destinationHandler, final StreamConfig streamConfig) throws Exception { LOGGER.info("Attempting soft reset for stream {} {}", streamConfig.id().originalNamespace(), streamConfig.id().originalName()); diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TyperDeduperUtil.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TyperDeduperUtil.kt index 59d829cb79b9..df1d4277d13d 100644 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TyperDeduperUtil.kt +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TyperDeduperUtil.kt @@ -3,34 +3,112 @@ package io.airbyte.integrations.base.destination.typing_deduping import com.google.common.collect.Streams import io.airbyte.cdk.integrations.util.ConnectorExceptionUtil.getResultsOrLogAndThrowFirst import io.airbyte.commons.concurrency.CompletableFutures +import io.airbyte.integrations.base.destination.typing_deduping.migrators.Migration +import io.airbyte.integrations.base.destination.typing_deduping.migrators.MinimumDestinationState +import org.slf4j.Logger +import org.slf4j.LoggerFactory import java.util.* import java.util.concurrent.CompletableFuture import java.util.concurrent.CompletionStage import java.util.concurrent.ExecutorService - +import java.util.stream.Collectors.toMap class TyperDeduperUtil { companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(TyperDeduperUtil::class.java) @JvmStatic - fun executeRawTableMigrations( + fun executeRawTableMigrations( executorService: ExecutorService, - sqlGenerator: SqlGenerator, - destinationHandler: DestinationHandler, - v1V2Migrator: DestinationV1V2Migrator, - v2TableMigrator: V2TableMigrator, - parsedCatalog: ParsedCatalog - ) { + 
destinationHandler: DestinationHandler, + migrations: List>, + initialStates: List> + ): List> { // TODO: Either the migrations run the soft reset and create v2 tables or the actual prepare tables. // unify the logic // with current state of raw tables & final tables. This is done first before gather initial state // to avoid recreating // final tables later again. - val runMigrationsResult = - CompletableFutures.allOf(parsedCatalog.streams().stream() - .map { streamConfig -> runMigrationsAsync(executorService, sqlGenerator, destinationHandler, v1V2Migrator, v2TableMigrator, streamConfig) } - .toList()).toCompletableFuture().join() - getResultsOrLogAndThrowFirst("The following exceptions were thrown attempting to run migrations:\n", runMigrationsResult) + + // Run migrations in lockstep. Some migrations may require us to refetch the initial state. + // We want to be able to batch those calls together across streams. + // If a migration runs on one stream, it's likely to also run on other streams. + // So we can bundle the gatherInitialState calls together. + var currentStates = initialStates + for (migration in migrations) { + // Execute the migration on all streams in parallel + val futures: Map>> = currentStates.stream() + .collect(toMap( + { it.streamConfig.id }, + { initialState -> runMigrationsAsync(executorService, destinationHandler, migration, initialState) } + )) + val migrationResultFutures = CompletableFutures.allOf(futures.values.toList()).toCompletableFuture().join() + getResultsOrLogAndThrowFirst("The following exceptions were thrown attempting to run migrations:\n", migrationResultFutures) + val migrationResults: Map> = futures.mapValues { it.value.toCompletableFuture().join() } + + // Check if we need to refetch DestinationInitialState + val invalidatedStreams: Set = migrationResults.filter { it.value.invalidateInitialState }.keys + val updatedStates: List> + if (invalidatedStreams.isNotEmpty()) { + LOGGER.info("Refetching initial state for streams: $invalidatedStreams") + updatedStates = destinationHandler.gatherInitialState(currentStates.filter{invalidatedStreams.contains(it.streamConfig.id)}.map {it.streamConfig}) + LOGGER.info("Updated states: $updatedStates") + } else { + updatedStates = emptyList() + } + + // Update the DestinationInitialStates with the new DestinationStates, + // and also update initialStates with the refetched states. + currentStates = currentStates.map { initialState -> + // migrationResults will always contain an entry for each stream, so we can safely use !! + val updatedDestinationState = migrationResults[initialState.streamConfig.id]!!.updatedDestinationState + if (invalidatedStreams.contains(initialState.streamConfig.id)) { + // We invalidated this stream's DestinationInitialState. + // Find the updated DestinationInitialState, and update it with our new DestinationState + return@map updatedStates.filter{updatedState -> updatedState.streamConfig.id.equals(initialState.streamConfig.id)} + .first() + .copy(destinationState = updatedDestinationState) + } else { + // Just update the original DestinationInitialState with the new DestinationState. + return@map initialState.copy(destinationState = updatedDestinationState) + } + } + + } + return currentStates + } + + /** + * The legacy-style migrations (V1V2Migrator, V2TableMigrator) need to run before we gather + * initial state, because they're dumb and weird. + * (specifically: SnowflakeV2TableMigrator inspects the final tables and triggers a soft reset + * directly within the migration). 
+ * TODO: Migrate these migrations to the new migration system. + * This will also reduce the number of times we need to query DB metadata, since (a) we can rely + * on the gatherInitialState values, and (b) we can add a DestinationState field for these migrations. + * It also enables us to not trigger multiple soft resets in a single sync. + */ + @JvmStatic + fun executeWeirdMigrations( + executorService: ExecutorService, + sqlGenerator: SqlGenerator, + destinationHandler: DestinationHandler, + v1V2Migrator: DestinationV1V2Migrator, + v2TableMigrator: V2TableMigrator, + parsedCatalog: ParsedCatalog + ) { + val futures = parsedCatalog.streams.map { + CompletableFuture.supplyAsync( + { + v1V2Migrator.migrateIfNecessary(sqlGenerator, destinationHandler, it) + v2TableMigrator.migrateIfNecessary(it) + }, + executorService + ) + } + getResultsOrLogAndThrowFirst( + "The following exceptions were thrown attempting to run migrations:\n", + CompletableFutures.allOf(futures.toList()).toCompletableFuture().join()) } /** @@ -38,9 +116,9 @@ class TyperDeduperUtil { * exist in the Destination Database. */ @JvmStatic - fun prepareSchemas( + fun prepareSchemas( sqlGenerator: SqlGenerator, - destinationHandler: DestinationHandler, + destinationHandler: DestinationHandler, parsedCatalog: ParsedCatalog) { val rawSchema = parsedCatalog.streams.stream().map { it.id.rawNamespace } val finalSchema = parsedCatalog.streams.stream().map { it.id.finalNamespace } @@ -52,20 +130,25 @@ class TyperDeduperUtil { destinationHandler.execute(Sql.concat(createAllSchemasSql)) } - private fun runMigrationsAsync( + private fun runMigrationsAsync( executorService: ExecutorService, - sqlGenerator: SqlGenerator, - destinationHandler: DestinationHandler, - v1V2Migrator: DestinationV1V2Migrator, - v2TableMigrator: V2TableMigrator, - streamConfig: StreamConfig): CompletionStage { - return CompletableFuture.runAsync({ - try { - v1V2Migrator.migrateIfNecessary(sqlGenerator, destinationHandler, streamConfig) - v2TableMigrator.migrateIfNecessary(streamConfig) - } catch (e: java.lang.Exception) { - throw RuntimeException(e) - } + destinationHandler: DestinationHandler, + migration: Migration, + initialStatus: DestinationInitialStatus + ): CompletionStage> { + return CompletableFuture.supplyAsync({ + LOGGER.info("Maybe executing ${migration.javaClass.simpleName} migration for stream ${initialStatus.streamConfig.id.originalNamespace}.${initialStatus.streamConfig.id.originalName}.") + + // We technically don't need to track this, but might as well hedge against migrations + // accidentally setting softReset=false + val softReset = initialStatus.destinationState.needsSoftReset() + val migrationResult = migration.migrateIfNecessary( + destinationHandler, + initialStatus.streamConfig, + initialStatus) + val updatedNeedsSoftReset = softReset || migrationResult.updatedDestinationState.needsSoftReset() + return@supplyAsync migrationResult.copy( + updatedDestinationState = migrationResult.updatedDestinationState.withSoftReset(updatedNeedsSoftReset)) }, executorService) } } diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/V2TableMigrator.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/V2TableMigrator.java index 27b056d5d399..ecc2d4ddd74c 100644 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/V2TableMigrator.java +++ 
b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/V2TableMigrator.java @@ -4,6 +4,10 @@ package io.airbyte.integrations.base.destination.typing_deduping; +/** + * Prefer {@link io.airbyte.integrations.base.destination.typing_deduping.migrators.Migration} + * instead. + */ public interface V2TableMigrator { void migrateIfNecessary(final StreamConfig streamConfig) throws Exception; diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/migrators/Migration.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/migrators/Migration.kt new file mode 100644 index 000000000000..8a38628594fd --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/migrators/Migration.kt @@ -0,0 +1,46 @@ +package io.airbyte.integrations.base.destination.typing_deduping.migrators + +import io.airbyte.integrations.base.destination.typing_deduping.DestinationHandler +import io.airbyte.integrations.base.destination.typing_deduping.DestinationInitialStatus +import io.airbyte.integrations.base.destination.typing_deduping.StreamConfig + +/** + * Migrations may do two things: + * 1. Modify the raw table + * 2. Trigger a soft reset + * + * The raw table modification should happen in {@link #migrateIfNecessary(Object, StreamConfig)}. However, + * if multiple migrations want to trigger a soft reset, we should only trigger a single soft reset, + * because soft resets are idempotent. There's no reason to trigger multiple soft resets in sequence, + * and it would be a waste of warehouse compute to do so. Migrations MUST NOT directly run a soft reset + * within {@link #migrateIfNecessary(Object, StreamConfig)}. + *
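+ * A hedged sketch of a typical implementation (the class name and the DDL are illustrative,
+ * not part of the CDK):
+ * ```kotlin
+ * class ExampleRawTableMigration : Migration<MinimumDestinationState.Impl> {
+ *     override fun migrateIfNecessary(
+ *         destinationHandler: DestinationHandler<MinimumDestinationState.Impl>,
+ *         stream: StreamConfig,
+ *         state: DestinationInitialStatus<MinimumDestinationState.Impl>
+ *     ): Migration.MigrationResult<MinimumDestinationState.Impl> {
+ *         // A real migration MUST verify against the database that the change is still needed.
+ *         destinationHandler.execute(Sql.of("ALTER TABLE ..."))
+ *         // Request a soft reset via the state flag instead of running one directly;
+ *         // invalidateInitialState stays false because the gathered initial state is still accurate.
+ *         return Migration.MigrationResult(state.destinationState.withSoftReset(true), false)
+ *     }
+ * }
+ * ```
+ *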

+ * Migrations are encouraged to store something into the destination State blob. This allows us to make + * fewer queries into customer data. However, migrations MUST NOT rely solely on the state blob to trigger + * migrations. It's possible for a state to not be committed after a migration runs (e.g. a well-timed + * OOMKill). Therefore, if the state blob indicates that a migration is necessary, migrations must still + * confirm against the database that the migration is necessary. + */ +interface Migration<DestinationState : MinimumDestinationState> { + + /** + * Perform the migration if it's necessary. Implementations of this method MUST check against the database + * to confirm that the migration is still necessary, in case a previous migration ran but failed + * to update the state. + * + * Migrations MUST NOT set the `needsSoftReset` flag to false, but they MAY set it to true. + */ + fun migrateIfNecessary( + destinationHandler: DestinationHandler<DestinationState>, + stream: StreamConfig, + state: DestinationInitialStatus<DestinationState> + ): MigrationResult<DestinationState> + + /** + * @param invalidateInitialState If true, the migration modified the raw tables in a way that requires us to re-gather initial state. + */ + data class MigrationResult<DestinationState>( + val updatedDestinationState: DestinationState, + val invalidateInitialState: Boolean + ) +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/migrators/MinimumDestinationState.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/migrators/MinimumDestinationState.kt new file mode 100644 index 000000000000..9fefd2d0ff30 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/migrators/MinimumDestinationState.kt @@ -0,0 +1,43 @@ +package io.airbyte.integrations.base.destination.typing_deduping.migrators + +/** + * All destination states MUST contain a parameter `needsSoftReset`. This allows migrators to track + * whether a soft reset is necessary, and persist that value across syncs in case of sync failure. + */ +interface MinimumDestinationState { + fun needsSoftReset(): Boolean + + /** + * The type parameter should be the subclass itself. We need this so that [withSoftReset] can + * return the correct type. Callers are responsible for passing the correct type parameter + * into this function (e.g. `currentState.withSoftReset(softReset)`). + * + * Implementations generally look like this: (note the unchecked `as T` cast) + * ```kotlin + * data class ExampleState(val needsSoftReset: Boolean): MinimumDestinationState { + * override fun needsSoftReset(): Boolean { + * return needsSoftReset + * } + * + * override fun <T : MinimumDestinationState> withSoftReset(needsSoftReset: Boolean): T { + * return copy(needsSoftReset = needsSoftReset) as T + * } + * } + * ``` + */ + fun <T : MinimumDestinationState> withSoftReset(needsSoftReset: Boolean): T + + /** + * A minimal implementation of [MinimumDestinationState]. This is useful for destinations that don't + * want to bother implementing a full state object.
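+ *
+ * A hedged usage sketch (the type ascription drives the `T` inference; the unchecked cast
+ * happens inside [withSoftReset]):
+ * ```kotlin
+ * val state = MinimumDestinationState.Impl(needsSoftReset = false)
+ * val flagged: MinimumDestinationState.Impl = state.withSoftReset(true)
+ * check(flagged.needsSoftReset())
+ * ```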
+ */ + data class Impl(val needsSoftReset: Boolean): MinimumDestinationState { + override fun needsSoftReset(): Boolean { + return needsSoftReset + } + + override fun <T : MinimumDestinationState> withSoftReset(needsSoftReset: Boolean): T { + return copy(needsSoftReset = needsSoftReset) as T + } + } +} diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/DefaultTyperDeduperTest.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/DefaultTyperDeduperTest.java index 65f2c127f26e..7eec05efc2d7 100644 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/DefaultTyperDeduperTest.java +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/DefaultTyperDeduperTest.java @@ -20,70 +20,152 @@ import static org.mockito.Mockito.when; import io.airbyte.cdk.integrations.destination.StreamSyncSummary; +import io.airbyte.integrations.base.destination.typing_deduping.migrators.Migration; +import io.airbyte.integrations.base.destination.typing_deduping.migrators.MinimumDestinationState; import io.airbyte.protocol.models.v0.DestinationSyncMode; import io.airbyte.protocol.models.v0.StreamDescriptor; import java.time.Instant; +import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; +import lombok.SneakyThrows; +import org.jetbrains.annotations.NotNull; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; public class DefaultTyperDeduperTest { + private ParsedCatalog parsedCatalog; + private static final StreamConfig OVERWRITE_STREAM_CONFIG = new StreamConfig( + new StreamId("overwrite_ns", "overwrite_stream", "airbyte_internal", "overwrite_stream", "overwrite_ns", "overwrite_stream"), + null, + DestinationSyncMode.OVERWRITE, + null, + null, + null); + private static final StreamConfig APPEND_STREAM_CONFIG = new StreamConfig( + new StreamId("append_ns", "append_stream", "airbyte_internal", "append_stream", "append_ns", "append_stream"), + null, + DestinationSyncMode.APPEND, + null, + null, + null); + private static final StreamConfig DEDUPE_STREAM_CONFIG = new StreamConfig( + new StreamId("dedup_ns", "dedup_stream", "airbyte_internal", "dedup_stream", "dedup_ns", "dedup_stream"), + null, + DestinationSyncMode.APPEND_DEDUP, + null, + null, + null); + + private record MockState( + boolean needsSoftReset, + boolean softResetMigrationCompleted, + boolean nonSoftResetMigrationCompleted) + implements MinimumDestinationState { + + @Override + public MockState withSoftReset(boolean needsSoftReset) { + return new MockState(needsSoftReset, this.softResetMigrationCompleted, this.nonSoftResetMigrationCompleted); + } + + } + private MockSqlGenerator sqlGenerator; - private DestinationHandler destinationHandler; + private DestinationHandler<MockState> destinationHandler; - private List initialStates; + private List<DestinationInitialStatus<MockState>> initialStates; + private Map<StreamId, MockState> updatedStates; private DestinationV1V2Migrator migrator; private TyperDeduper typerDeduper; + private final Migration<MockState> MIGRATION_REQUIRING_SOFT_RESET = new Migration<>() { + + @SneakyThrows + @NotNull + @Override + public MigrationResult<MockState> migrateIfNecessary(DestinationHandler<MockState> destinationHandler, + @NotNull StreamConfig stream, + DestinationInitialStatus<MockState> state) { + destinationHandler.execute(Sql.of("MIGRATE " + stream.id().rawTableId(""))); + return new
MigrationResult<>(new MockState(true, true, state.destinationState().nonSoftResetMigrationCompleted), false); + } + + }; + + private final Migration<MockState> MIGRATION_NOT_REQUIRING_SOFT_RESET = new Migration<>() { + + @NotNull + @Override + public MigrationResult<MockState> migrateIfNecessary(@NotNull DestinationHandler<MockState> destinationHandler, + @NotNull StreamConfig stream, + DestinationInitialStatus<MockState> status) { + return new MigrationResult<>( + new MockState( + status.destinationState().needsSoftReset, + status.destinationState().softResetMigrationCompleted, + true), + false); + } + + }; + + private final Migration<MockState> MIGRATION_NOOP = new Migration<>() { + + @NotNull + @Override + public MigrationResult<MockState> migrateIfNecessary(@NotNull DestinationHandler<MockState> destinationHandler, + @NotNull StreamConfig stream, + DestinationInitialStatus<MockState> status) { + // A true no-op: return the state unchanged, rather than flipping any flags. + return new MigrationResult<>( + new MockState( + status.destinationState().needsSoftReset, + status.destinationState().softResetMigrationCompleted, + status.destinationState().nonSoftResetMigrationCompleted), + false); + } + + }; + @BeforeEach void setup() throws Exception { sqlGenerator = spy(new MockSqlGenerator()); destinationHandler = mock(DestinationHandler.class); - DestinationInitialState overwriteNsState = mock(DestinationInitialState.class); - DestinationInitialState appendNsState = mock(DestinationInitialState.class); - DestinationInitialState dedupeNsState = mock(DestinationInitialState.class); - StreamConfig overwriteStreamConfig = new StreamConfig( - new StreamId("overwrite_ns", "overwrite_stream", null, null, "overwrite_ns", "overwrite_stream"), - null, - DestinationSyncMode.OVERWRITE, - null, - null, - null); - StreamConfig appendStreamConfig = new StreamConfig( - new StreamId("append_ns", "append_stream", null, null, "append_ns", "append_stream"), - null, - DestinationSyncMode.APPEND, - null, - null, - null); - StreamConfig dedupeStreamConfig = new StreamConfig( - new StreamId("dedup_ns", "dedup_stream", null, null, "dedup_ns", "dedup_stream"), - null, - DestinationSyncMode.APPEND_DEDUP, - null, - null, - null); - when(overwriteNsState.streamConfig()).thenReturn(overwriteStreamConfig); - when(appendNsState.streamConfig()).thenReturn(appendStreamConfig); - when(dedupeNsState.streamConfig()).thenReturn(dedupeStreamConfig); + + DestinationInitialStatus overwriteNsState = mock(DestinationInitialStatus.class); + when(overwriteNsState.destinationState()).thenReturn(new MockState(false, false, true)); + when(overwriteNsState.streamConfig()).thenReturn(OVERWRITE_STREAM_CONFIG); + + DestinationInitialStatus appendNsState = mock(DestinationInitialStatus.class); + when(appendNsState.destinationState()).thenReturn(new MockState(false, false, true)); + when(appendNsState.streamConfig()).thenReturn(APPEND_STREAM_CONFIG); + + DestinationInitialStatus dedupeNsState = mock(DestinationInitialStatus.class); + when(dedupeNsState.destinationState()).thenReturn(new MockState(false, false, true)); + when(dedupeNsState.streamConfig()).thenReturn(DEDUPE_STREAM_CONFIG); initialStates = List.of(overwriteNsState, appendNsState, dedupeNsState); when(destinationHandler.gatherInitialState(anyList())) .thenReturn(initialStates); - initialStates.forEach(initialState -> when(initialState.initialRawTableState()).thenReturn(new InitialRawTableState(true, Optional.empty()))); + initialStates + .forEach(initialState -> when(initialState.initialRawTableStatus()).thenReturn(new InitialRawTableStatus(true, true, Optional.empty()))); + + updatedStates = new HashMap<>(); + updatedStates.put(OVERWRITE_STREAM_CONFIG.id(), new MockState(false, false, true)); +
updatedStates.put(APPEND_STREAM_CONFIG.id(), new MockState(false, false, true)); + updatedStates.put(DEDUPE_STREAM_CONFIG.id(), new MockState(false, false, true)); migrator = new NoOpDestinationV1V2Migrator(); - final ParsedCatalog parsedCatalog = new ParsedCatalog(List.of( - overwriteStreamConfig, - appendStreamConfig, - dedupeStreamConfig)); + parsedCatalog = new ParsedCatalog(List.of( + OVERWRITE_STREAM_CONFIG, + APPEND_STREAM_CONFIG, + DEDUPE_STREAM_CONFIG)); - typerDeduper = new DefaultTyperDeduper(sqlGenerator, destinationHandler, parsedCatalog, migrator); + typerDeduper = new DefaultTyperDeduper<>(sqlGenerator, destinationHandler, parsedCatalog, migrator, Collections.emptyList()); } /** @@ -94,12 +176,16 @@ void emptyDestination() throws Exception { initialStates.forEach(initialState -> when(initialState.isFinalTablePresent()).thenReturn(false)); typerDeduper.prepareSchemasAndRunMigrations(); - verify(destinationHandler).execute(separately("CREATE SCHEMA overwrite_ns", "CREATE SCHEMA append_ns", "CREATE SCHEMA dedup_ns")); + verify(destinationHandler) + .execute(separately("CREATE SCHEMA airbyte_internal", "CREATE SCHEMA overwrite_ns", "CREATE SCHEMA append_ns", "CREATE SCHEMA dedup_ns")); + verify(destinationHandler).commitDestinationStates(updatedStates); + clearInvocations(destinationHandler); typerDeduper.prepareFinalTables(); verify(destinationHandler).execute(Sql.of("CREATE TABLE overwrite_ns.overwrite_stream")); verify(destinationHandler).execute(Sql.of("CREATE TABLE append_ns.append_stream")); verify(destinationHandler).execute(Sql.of("CREATE TABLE dedup_ns.dedup_stream")); + verify(destinationHandler).commitDestinationStates(updatedStates); verifyNoMoreInteractions(ignoreStubs(destinationHandler)); clearInvocations(destinationHandler); @@ -129,7 +215,10 @@ void existingEmptyTable() throws Exception { }); typerDeduper.prepareSchemasAndRunMigrations(); - verify(destinationHandler).execute(separately("CREATE SCHEMA overwrite_ns", "CREATE SCHEMA append_ns", "CREATE SCHEMA dedup_ns")); + verify(destinationHandler) + .execute(separately("CREATE SCHEMA airbyte_internal", "CREATE SCHEMA overwrite_ns", "CREATE SCHEMA append_ns", "CREATE SCHEMA dedup_ns")); + verify(destinationHandler).commitDestinationStates(updatedStates); + clearInvocations(destinationHandler); typerDeduper.prepareFinalTables(); verify(destinationHandler).execute(Sql.of("CREATE TABLE overwrite_ns.overwrite_stream_airbyte_tmp")); @@ -139,6 +228,7 @@ void existingEmptyTable() throws Exception { verify(destinationHandler).execute(Sql.of("PREPARE dedup_ns.dedup_stream FOR SOFT RESET")); verify(destinationHandler).execute(Sql.of("UPDATE TABLE dedup_ns.dedup_stream_ab_soft_reset WITHOUT SAFER CASTING")); verify(destinationHandler).execute(Sql.of("OVERWRITE TABLE dedup_ns.dedup_stream FROM dedup_ns.dedup_stream_ab_soft_reset")); + verify(destinationHandler).commitDestinationStates(updatedStates); verifyNoMoreInteractions(ignoreStubs(destinationHandler)); clearInvocations(destinationHandler); @@ -169,7 +259,8 @@ void existingEmptyTableMatchingSchema() throws Exception { }); typerDeduper.prepareSchemasAndRunMigrations(); - verify(destinationHandler).execute(separately("CREATE SCHEMA overwrite_ns", "CREATE SCHEMA append_ns", "CREATE SCHEMA dedup_ns")); + verify(destinationHandler) + .execute(separately("CREATE SCHEMA airbyte_internal", "CREATE SCHEMA overwrite_ns", "CREATE SCHEMA append_ns", "CREATE SCHEMA dedup_ns")); clearInvocations(destinationHandler); typerDeduper.prepareFinalTables(); @@ -186,16 +277,19 @@ void 
existingNonemptyTable() throws Exception { when(initialState.isFinalTablePresent()).thenReturn(true); when(initialState.isFinalTableEmpty()).thenReturn(false); when(initialState.isSchemaMismatch()).thenReturn(true); - when(initialState.initialRawTableState()).thenReturn(new InitialRawTableState(true, Optional.of(Instant.parse("2023-01-01T12:34:56Z")))); + when(initialState.initialRawTableStatus()) + .thenReturn(new InitialRawTableStatus(true, true, Optional.of(Instant.parse("2023-01-01T12:34:56Z")))); }); typerDeduper.prepareSchemasAndRunMigrations(); - verify(destinationHandler).execute(separately("CREATE SCHEMA overwrite_ns", "CREATE SCHEMA append_ns", "CREATE SCHEMA dedup_ns")); + verify(destinationHandler) + .execute(separately("CREATE SCHEMA airbyte_internal", "CREATE SCHEMA overwrite_ns", "CREATE SCHEMA append_ns", "CREATE SCHEMA dedup_ns")); + verify(destinationHandler).commitDestinationStates(updatedStates); + clearInvocations(destinationHandler); typerDeduper.prepareFinalTables(); // NB: We only create a tmp table for the overwrite stream, and do _not_ soft reset the existing // overwrite stream's table. - verify(destinationHandler).execute(Sql.of("CREATE TABLE overwrite_ns.overwrite_stream_airbyte_tmp")); verify(destinationHandler).execute(Sql.of("PREPARE append_ns.append_stream FOR SOFT RESET")); verify(destinationHandler).execute(Sql.of("UPDATE TABLE append_ns.append_stream_ab_soft_reset WITHOUT SAFER CASTING")); @@ -203,6 +297,7 @@ void existingNonemptyTable() throws Exception { verify(destinationHandler).execute(Sql.of("PREPARE dedup_ns.dedup_stream FOR SOFT RESET")); verify(destinationHandler).execute(Sql.of("UPDATE TABLE dedup_ns.dedup_stream_ab_soft_reset WITHOUT SAFER CASTING")); verify(destinationHandler).execute(Sql.of("OVERWRITE TABLE dedup_ns.dedup_stream FROM dedup_ns.dedup_stream_ab_soft_reset")); + verify(destinationHandler).commitDestinationStates(updatedStates); verifyNoMoreInteractions(ignoreStubs(destinationHandler)); clearInvocations(destinationHandler); @@ -233,16 +328,20 @@ void existingNonemptyTableMatchingSchema() throws Exception { when(initialState.isFinalTablePresent()).thenReturn(true); when(initialState.isFinalTableEmpty()).thenReturn(false); when(initialState.isSchemaMismatch()).thenReturn(false); - when(initialState.initialRawTableState()).thenReturn(new InitialRawTableState(true, Optional.of(Instant.now()))); + when(initialState.initialRawTableStatus()).thenReturn(new InitialRawTableStatus(true, true, Optional.of(Instant.now()))); }); typerDeduper.prepareSchemasAndRunMigrations(); - verify(destinationHandler).execute(separately("CREATE SCHEMA overwrite_ns", "CREATE SCHEMA append_ns", "CREATE SCHEMA dedup_ns")); + verify(destinationHandler) + .execute(separately("CREATE SCHEMA airbyte_internal", "CREATE SCHEMA overwrite_ns", "CREATE SCHEMA append_ns", "CREATE SCHEMA dedup_ns")); + verify(destinationHandler).commitDestinationStates(updatedStates); + clearInvocations(destinationHandler); typerDeduper.prepareFinalTables(); // NB: We only create one tmp table here. // Also, we need to alter the existing _real_ table, not the tmp table! 
verify(destinationHandler).execute(Sql.of("CREATE TABLE overwrite_ns.overwrite_stream_airbyte_tmp")); + verify(destinationHandler).commitDestinationStates(updatedStates); verifyNoMoreInteractions(ignoreStubs(destinationHandler)); } @@ -272,7 +371,10 @@ void failedSetup() throws Exception { */ @Test void noUnprocessedRecords() throws Exception { - initialStates.forEach(initialState -> when(initialState.initialRawTableState()).thenReturn(new InitialRawTableState(false, Optional.empty()))); + initialStates + .forEach(initialState -> when(initialState.initialRawTableStatus()).thenReturn(new InitialRawTableStatus(true, false, Optional.empty()))); + + typerDeduper.prepareSchemasAndRunMigrations(); typerDeduper.prepareFinalTables(); clearInvocations(destinationHandler); @@ -295,8 +397,10 @@ void noUnprocessedRecords() throws Exception { */ @Test void unprocessedRecords() throws Exception { - initialStates.forEach(initialState -> when(initialState.initialRawTableState()) - .thenReturn(new InitialRawTableState(true, Optional.of(Instant.parse("2023-01-23T12:34:56Z"))))); + initialStates.forEach(initialState -> when(initialState.initialRawTableStatus()) + .thenReturn(new InitialRawTableStatus(true, true, Optional.of(Instant.parse("2023-01-23T12:34:56Z"))))); + + typerDeduper.prepareSchemasAndRunMigrations(); typerDeduper.prepareFinalTables(); clearInvocations(destinationHandler); @@ -312,4 +416,222 @@ void unprocessedRecords() throws Exception { verify(destinationHandler).execute(Sql.of("UPDATE TABLE dedup_ns.dedup_stream WITHOUT SAFER CASTING WHERE extracted_at > 2023-01-23T12:34:56Z")); } + /** + * A test that tries to trigger multiple soft resets on all three streams. The migration should run, + * and we also detect a schema mismatch. However, only one soft reset should be triggered per + * stream. Additionally, the overwrite stream should not trigger a soft reset. + */ + @Test + void multipleSoftResets() throws Exception { + typerDeduper = new DefaultTyperDeduper<>( + sqlGenerator, + destinationHandler, + parsedCatalog, + migrator, + List.of(MIGRATION_REQUIRING_SOFT_RESET)); + + // Notably: isSchemaMismatch = true, + // and the MockStates have needsSoftReset = false and softResetMigrationCompleted = false.
+ when(destinationHandler.gatherInitialState(anyList())) + .thenReturn(List.of( + new DestinationInitialStatus<>( + OVERWRITE_STREAM_CONFIG, + true, + new InitialRawTableStatus(true, true, Optional.of(Instant.ofEpochMilli(42))), + true, + false, + new MockState(false, false, true)), + new DestinationInitialStatus<>( + APPEND_STREAM_CONFIG, + true, + new InitialRawTableStatus(true, true, Optional.of(Instant.ofEpochMilli(42))), + true, + false, + new MockState(false, false, true)), + new DestinationInitialStatus<>( + DEDUPE_STREAM_CONFIG, + true, + new InitialRawTableStatus(true, true, Optional.of(Instant.ofEpochMilli(42))), + true, + false, + new MockState(false, false, true)))); + + typerDeduper.prepareSchemasAndRunMigrations(); + verify(destinationHandler).execute(Sql.of("MIGRATE airbyte_internal.overwrite_stream")); + verify(destinationHandler).execute(Sql.of("MIGRATE airbyte_internal.append_stream")); + verify(destinationHandler).execute(Sql.of("MIGRATE airbyte_internal.dedup_stream")); + verify(destinationHandler).commitDestinationStates(Map.of( + OVERWRITE_STREAM_CONFIG.id(), new MockState(true, true, true), + APPEND_STREAM_CONFIG.id(), new MockState(true, true, true), + DEDUPE_STREAM_CONFIG.id(), new MockState(true, true, true))); + verify(destinationHandler).gatherInitialState(any()); + verify(destinationHandler) + .execute(separately("CREATE SCHEMA airbyte_internal", "CREATE SCHEMA overwrite_ns", "CREATE SCHEMA append_ns", "CREATE SCHEMA dedup_ns")); + verifyNoMoreInteractions(destinationHandler); + clearInvocations(destinationHandler); + + typerDeduper.prepareFinalTables(); + + // We should trigger a soft reset on the append + dedup streams. + verify(destinationHandler).execute(Sql.of("PREPARE append_ns.append_stream FOR SOFT RESET")); + verify(destinationHandler).execute(Sql.of("UPDATE TABLE append_ns.append_stream_ab_soft_reset WITHOUT SAFER CASTING")); + verify(destinationHandler).execute(Sql.of("OVERWRITE TABLE append_ns.append_stream FROM append_ns.append_stream_ab_soft_reset")); + + verify(destinationHandler).execute(Sql.of("PREPARE dedup_ns.dedup_stream FOR SOFT RESET")); + verify(destinationHandler).execute(Sql.of("UPDATE TABLE dedup_ns.dedup_stream_ab_soft_reset WITHOUT SAFER CASTING")); + verify(destinationHandler).execute(Sql.of("OVERWRITE TABLE dedup_ns.dedup_stream FROM dedup_ns.dedup_stream_ab_soft_reset")); + + // The overwrite stream just gets a new table entirely, instead of a soft reset. + verify(destinationHandler).execute(Sql.of("CREATE TABLE overwrite_ns.overwrite_stream_airbyte_tmp")); + + // And we should commit the states. Note that we now set needsSoftReset=false. + verify(destinationHandler).commitDestinationStates(Map.of( + OVERWRITE_STREAM_CONFIG.id(), new MockState(false, true, true), + APPEND_STREAM_CONFIG.id(), new MockState(false, true, true), + DEDUPE_STREAM_CONFIG.id(), new MockState(false, true, true))); + + verifyNoMoreInteractions(destinationHandler); + } + + /** + * A test where we have multiple migrations. The first migration triggers a soft reset; the second + * migration does nothing. We should correctly trigger the soft reset. 
+ */ + @Test + void migrationsMixedResults() throws Exception { + typerDeduper = new DefaultTyperDeduper<>( + sqlGenerator, + destinationHandler, + parsedCatalog, + migrator, + List.of(MIGRATION_REQUIRING_SOFT_RESET, MIGRATION_NOT_REQUIRING_SOFT_RESET)); + + when(destinationHandler.gatherInitialState(anyList())) + .thenReturn(List.of( + new DestinationInitialStatus<>( + OVERWRITE_STREAM_CONFIG, + true, + new InitialRawTableStatus(true, true, Optional.of(Instant.ofEpochMilli(42))), + false, + false, + new MockState(false, false, false)), + new DestinationInitialStatus<>( + APPEND_STREAM_CONFIG, + true, + new InitialRawTableStatus(true, true, Optional.of(Instant.ofEpochMilli(42))), + false, + false, + new MockState(false, false, false)), + new DestinationInitialStatus<>( + DEDUPE_STREAM_CONFIG, + true, + new InitialRawTableStatus(true, true, Optional.of(Instant.ofEpochMilli(42))), + false, + false, + new MockState(false, false, false)))); + + typerDeduper.prepareSchemasAndRunMigrations(); + verify(destinationHandler).execute(Sql.of("MIGRATE airbyte_internal.overwrite_stream")); + verify(destinationHandler).execute(Sql.of("MIGRATE airbyte_internal.append_stream")); + verify(destinationHandler).execute(Sql.of("MIGRATE airbyte_internal.dedup_stream")); + verify(destinationHandler).commitDestinationStates(Map.of( + OVERWRITE_STREAM_CONFIG.id(), new MockState(true, true, true), + APPEND_STREAM_CONFIG.id(), new MockState(true, true, true), + DEDUPE_STREAM_CONFIG.id(), new MockState(true, true, true))); + verify(destinationHandler).gatherInitialState(any()); + verify(destinationHandler) + .execute(separately("CREATE SCHEMA airbyte_internal", "CREATE SCHEMA overwrite_ns", "CREATE SCHEMA append_ns", "CREATE SCHEMA dedup_ns")); + verifyNoMoreInteractions(destinationHandler); + clearInvocations(destinationHandler); + + typerDeduper.prepareFinalTables(); + + // We should trigger a soft reset on the append + dedup streams. + verify(destinationHandler).execute(Sql.of("PREPARE append_ns.append_stream FOR SOFT RESET")); + verify(destinationHandler).execute(Sql.of("UPDATE TABLE append_ns.append_stream_ab_soft_reset WITHOUT SAFER CASTING")); + verify(destinationHandler).execute(Sql.of("OVERWRITE TABLE append_ns.append_stream FROM append_ns.append_stream_ab_soft_reset")); + + verify(destinationHandler).execute(Sql.of("PREPARE dedup_ns.dedup_stream FOR SOFT RESET")); + verify(destinationHandler).execute(Sql.of("UPDATE TABLE dedup_ns.dedup_stream_ab_soft_reset WITHOUT SAFER CASTING")); + verify(destinationHandler).execute(Sql.of("OVERWRITE TABLE dedup_ns.dedup_stream FROM dedup_ns.dedup_stream_ab_soft_reset")); + + // The overwrite stream just gets a new table + verify(destinationHandler).execute(Sql.of("CREATE TABLE overwrite_ns.overwrite_stream_airbyte_tmp")); + + // And we should commit the states. + verify(destinationHandler).commitDestinationStates(Map.of( + OVERWRITE_STREAM_CONFIG.id(), new MockState(false, true, true), + APPEND_STREAM_CONFIG.id(), new MockState(false, true, true), + DEDUPE_STREAM_CONFIG.id(), new MockState(false, true, true))); + + verifyNoMoreInteractions(destinationHandler); + } + + /** + * A test where a previous sync committed a destination state with needsSoftReset=true. We should + * trigger a soft reset, even though the current sync doesn't need it. + */ + @Test + void previousSyncSoftReset() throws Exception { + // Notably: isSchemaMismatch = false, but the MockStates have needsSoftReset = true. 
+ when(destinationHandler.gatherInitialState(anyList())) + .thenReturn(List.of( + new DestinationInitialStatus<>( + OVERWRITE_STREAM_CONFIG, + true, + new InitialRawTableStatus(true, true, Optional.of(Instant.ofEpochMilli(42))), + false, + false, + new MockState(true, false, false)), + new DestinationInitialStatus<>( + APPEND_STREAM_CONFIG, + true, + new InitialRawTableStatus(true, true, Optional.of(Instant.ofEpochMilli(42))), + false, + false, + new MockState(true, false, false)), + new DestinationInitialStatus<>( + DEDUPE_STREAM_CONFIG, + true, + new InitialRawTableStatus(true, true, Optional.of(Instant.ofEpochMilli(42))), + false, + false, + new MockState(true, false, false)))); + + typerDeduper.prepareSchemasAndRunMigrations(); + // Even though we didn't do anything, we still commit the destination states. + // This is technically unnecessary, but it's a single extra call and it's simpler to just do it. + verify(destinationHandler).commitDestinationStates(Map.of( + OVERWRITE_STREAM_CONFIG.id(), new MockState(true, false, false), + APPEND_STREAM_CONFIG.id(), new MockState(true, false, false), + DEDUPE_STREAM_CONFIG.id(), new MockState(true, false, false))); + verify(destinationHandler).gatherInitialState(any()); + verify(destinationHandler) + .execute(separately("CREATE SCHEMA airbyte_internal", "CREATE SCHEMA overwrite_ns", "CREATE SCHEMA append_ns", "CREATE SCHEMA dedup_ns")); + verifyNoMoreInteractions(destinationHandler); + clearInvocations(destinationHandler); + + typerDeduper.prepareFinalTables(); + + // We should trigger a soft reset on the append + dedup streams. + verify(destinationHandler).execute(Sql.of("PREPARE append_ns.append_stream FOR SOFT RESET")); + verify(destinationHandler).execute(Sql.of("UPDATE TABLE append_ns.append_stream_ab_soft_reset WITHOUT SAFER CASTING")); + verify(destinationHandler).execute(Sql.of("OVERWRITE TABLE append_ns.append_stream FROM append_ns.append_stream_ab_soft_reset")); + + verify(destinationHandler).execute(Sql.of("PREPARE dedup_ns.dedup_stream FOR SOFT RESET")); + verify(destinationHandler).execute(Sql.of("UPDATE TABLE dedup_ns.dedup_stream_ab_soft_reset WITHOUT SAFER CASTING")); + verify(destinationHandler).execute(Sql.of("OVERWRITE TABLE dedup_ns.dedup_stream FROM dedup_ns.dedup_stream_ab_soft_reset")); + + // The overwrite stream just gets a new table entirely, instead of a soft reset. + verify(destinationHandler).execute(Sql.of("CREATE TABLE overwrite_ns.overwrite_stream_airbyte_tmp")); + + // And we should commit the states. Note that we now set needsSoftReset=false. 
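// The retry-across-syncs behavior exercised here hangs off a tiny state interface. A minimal
// sketch, assuming the shape implied by the needsSoftReset()/withSoftReset() call sites in this
// patch (the generic self-type is an inference, not a quote of the CDK source):
interface MinimumDestinationState {

  boolean needsSoftReset();

  // Implementations are immutable; this returns a copy with the flag updated, which is what
  // lets one sync persist "soft reset still owed" for the next sync to pick up.
  <T extends MinimumDestinationState> T withSoftReset(boolean needsSoftReset);

}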
+ verify(destinationHandler).commitDestinationStates(Map.of( + OVERWRITE_STREAM_CONFIG.id(), new MockState(false, false, false), + APPEND_STREAM_CONFIG.id(), new MockState(false, false, false), + DEDUPE_STREAM_CONFIG.id(), new MockState(false, false, false))); + + verifyNoMoreInteractions(destinationHandler); + } + } diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationV1V2MigratorTest.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationV1V2MigratorTest.java index 0e4c80321055..2f582274438b 100644 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationV1V2MigratorTest.java +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/test/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationV1V2MigratorTest.java @@ -74,7 +74,7 @@ public void testMismatchedSchemaThrowsException() throws Exception { public void testMigrate() throws Exception { final var sqlGenerator = new MockSqlGenerator(); final StreamConfig stream = new StreamConfig(STREAM_ID, null, DestinationSyncMode.APPEND_DEDUP, null, null, null); - final DestinationHandler handler = Mockito.mock(DestinationHandler.class); + final DestinationHandler handler = Mockito.mock(DestinationHandler.class); final var sql = sqlGenerator.migrateFromV1toV2(STREAM_ID, "v1_raw_namespace", "v1_raw_table"); // All is well final var migrator = noIssuesMigrator(); diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/testFixtures/java/io/airbyte/integrations/base/destination/typing_deduping/BaseSqlGeneratorIntegrationTest.java b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/testFixtures/java/io/airbyte/integrations/base/destination/typing_deduping/BaseSqlGeneratorIntegrationTest.java index 93e62670a99d..c2be6502365c 100644 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/testFixtures/java/io/airbyte/integrations/base/destination/typing_deduping/BaseSqlGeneratorIntegrationTest.java +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/testFixtures/java/io/airbyte/integrations/base/destination/typing_deduping/BaseSqlGeneratorIntegrationTest.java @@ -17,6 +17,7 @@ import com.google.common.collect.Streams; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.string.Strings; +import io.airbyte.integrations.base.destination.typing_deduping.migrators.MinimumDestinationState; import io.airbyte.protocol.models.v0.DestinationSyncMode; import io.airbyte.protocol.models.v0.SyncMode; import java.time.Instant; @@ -50,7 +51,7 @@ * {@link #getDestinationHandler()} in a {@link org.junit.jupiter.api.BeforeEach} method. 
*/ @Execution(ExecutionMode.CONCURRENT) -public abstract class BaseSqlGeneratorIntegrationTest { +public abstract class BaseSqlGeneratorIntegrationTest { private static final Logger LOGGER = LoggerFactory.getLogger(BaseSqlGeneratorIntegrationTest.class); /** @@ -104,7 +105,7 @@ public abstract class BaseSqlGeneratorIntegrationTest { protected StreamConfig cdcIncrementalAppendStream; protected SqlGenerator generator; - protected DestinationHandler destinationHandler; + protected DestinationHandler destinationHandler; protected String namespace; protected StreamId streamId; @@ -114,7 +115,7 @@ public abstract class BaseSqlGeneratorIntegrationTest { protected abstract SqlGenerator getSqlGenerator(); - protected abstract DestinationHandler getDestinationHandler(); + protected abstract DestinationHandler getDestinationHandler(); /** * Subclasses should override this method if they need to make changes to the stream ID. For @@ -192,7 +193,6 @@ protected Map getFinalMetadataColumnNames() { @BeforeEach public void setup() throws Exception { generator = getSqlGenerator(); - destinationHandler = getDestinationHandler(); final ColumnId id1 = generator.buildColumnId("id1"); final ColumnId id2 = generator.buildColumnId("id2"); @@ -263,6 +263,8 @@ public void setup() throws Exception { Optional.of(cursor), cdcColumns); + destinationHandler = getDestinationHandler(); + LOGGER.info("Running with namespace {}", namespace); createNamespace(namespace); } @@ -272,8 +274,8 @@ public void teardown() throws Exception { teardownNamespace(namespace); } - private DestinationInitialState getDestinationInitialState(StreamConfig streamConfig) throws Exception { - final List initialState = + private DestinationInitialStatus getDestinationInitialState(StreamConfig streamConfig) throws Exception { + final List> initialState = destinationHandler.gatherInitialState(List.of(streamConfig)); assertEquals(1, initialState.size(), "gatherInitialState returned the wrong number of futures"); assertTrue(initialState.getFirst().isFinalTablePresent(), "Destination handler could not find existing table"); @@ -287,9 +289,9 @@ private DestinationInitialState getDestinationInitialState(StreamConfig streamCo public void detectNoSchemaChange() throws Exception { final Sql createTable = generator.createTable(incrementalDedupStream, "", false); destinationHandler.execute(createTable); - final DestinationInitialState destinationInitialState = getDestinationInitialState(incrementalDedupStream); + final DestinationInitialStatus destinationInitialStatus = getDestinationInitialState(incrementalDedupStream); assertFalse( - destinationInitialState.isSchemaMismatch(), + destinationInitialStatus.isSchemaMismatch(), "Unchanged schema was incorrectly detected as a schema change."); } @@ -303,9 +305,9 @@ public void detectColumnAdded() throws Exception { incrementalDedupStream.columns().put( generator.buildColumnId("new_column"), AirbyteProtocolType.STRING); - final DestinationInitialState destinationInitialState = getDestinationInitialState(incrementalDedupStream); + final DestinationInitialStatus destinationInitialStatus = getDestinationInitialState(incrementalDedupStream); assertTrue( - destinationInitialState.isSchemaMismatch(), + destinationInitialStatus.isSchemaMismatch(), "Adding a new column was not detected as a schema change."); } @@ -317,9 +319,9 @@ public void detectColumnRemoved() throws Exception { final Sql createTable = generator.createTable(incrementalDedupStream, "", false); destinationHandler.execute(createTable); 
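// Aside: the DestinationInitialStatus values asserted on in these tests appear to carry six
// components. The ordering below is a sketch inferred from the six-argument constructor calls
// earlier in this patch and the accessors used here; the names are assumptions, not CDK source.
record DestinationInitialStatus<DestinationState>(StreamConfig streamConfig,
                                                  boolean isFinalTablePresent,
                                                  InitialRawTableStatus initialRawTableStatus,
                                                  boolean isSchemaMismatch,
                                                  boolean isFinalTableEmpty,
                                                  DestinationState destinationState) {}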
incrementalDedupStream.columns().remove(generator.buildColumnId("string")); - final DestinationInitialState destinationInitialState = getDestinationInitialState(incrementalDedupStream); + final DestinationInitialStatus destinationInitialStatus = getDestinationInitialState(incrementalDedupStream); assertTrue( - destinationInitialState.isSchemaMismatch(), + destinationInitialStatus.isSchemaMismatch(), "Removing a column was not detected as a schema change."); } @@ -333,9 +335,9 @@ public void detectColumnChanged() throws Exception { incrementalDedupStream.columns().put( generator.buildColumnId("string"), AirbyteProtocolType.INTEGER); - final DestinationInitialState destinationInitialState = getDestinationInitialState(incrementalDedupStream); + final DestinationInitialStatus destinationInitialStatus = getDestinationInitialState(incrementalDedupStream); assertTrue( - destinationInitialState.isSchemaMismatch(), + destinationInitialStatus.isSchemaMismatch(), "Altering a column was not detected as a schema change."); } @@ -373,7 +375,7 @@ public void incrementalDedupSameNameNamespace() throws Exception { verifyRecordCounts(1, rawRecords, 1, finalRecords); } - private DestinationInitialState getOnly(final List initialStates) { + private DestinationInitialStatus getOnly(final List> initialStates) { assertEquals(1, initialStates.size()); return initialStates.getFirst(); } @@ -403,7 +405,7 @@ public void allTypes() throws Exception { streamId, BaseTypingDedupingTest.readRecords("sqlgenerator/alltypes_inputrecords.jsonl")); - DestinationInitialState initialState = getOnly(destinationHandler.gatherInitialState(List.of(incrementalDedupStream))); + DestinationInitialStatus initialState = getOnly(destinationHandler.gatherInitialState(List.of(incrementalDedupStream))); assertTrue(initialState.isFinalTableEmpty(), "Final table should be empty before T+D"); TypeAndDedupeTransaction.executeTypeAndDedupe(generator, destinationHandler, incrementalDedupStream, Optional.empty(), ""); @@ -428,7 +430,7 @@ public void allTypesUnsafe() throws Exception { streamId, BaseTypingDedupingTest.readRecords("sqlgenerator/alltypes_unsafe_inputrecords.jsonl")); - DestinationInitialState initialState = getOnly(destinationHandler.gatherInitialState(List.of(incrementalDedupStream))); + DestinationInitialStatus initialState = getOnly(destinationHandler.gatherInitialState(List.of(incrementalDedupStream))); assertTrue(initialState.isFinalTableEmpty(), "Final table should be empty before T+D"); // Instead of using the full T+D transaction, explicitly run with useSafeCasting=false. 
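The hunk below renames InitialRawTableState to InitialRawTableStatus and widens it from two components to three. A minimal sketch consistent with the constructor calls and accessors in this file follows; the name of the new first component is an assumption (the assertions in minTimestampBehavesCorrectly suggest it records whether the raw table exists at all), so read it as an approximation rather than the CDK source:

import java.time.Instant;
import java.util.Optional;

// A missing raw table reads (false, false, Optional.empty());
// an existing-but-empty one reads (true, false, Optional.empty()).
record InitialRawTableStatus(boolean rawTableExists,
                             boolean hasUnprocessedRecords,
                             Optional<Instant> maxProcessedTimestamp) {}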
@@ -439,11 +441,11 @@ public void allTypesUnsafe() throws Exception { assertFalse(initialState.isFinalTableEmpty(), "Final table should not be empty after T+D"); } - private InitialRawTableState getInitialRawTableState(StreamConfig streamConfig) throws Exception { - List initialStates = + private InitialRawTableStatus getInitialRawTableState(StreamConfig streamConfig) throws Exception { + List> initialStates = destinationHandler.gatherInitialState(List.of(streamConfig)); assertEquals(1, initialStates.size()); - return initialStates.getFirst().initialRawTableState(); + return initialStates.getFirst().initialRawTableStatus(); } /** @@ -453,11 +455,11 @@ private InitialRawTableState getInitialRawTableState(StreamConfig streamConfig) @Test public void minTimestampBehavesCorrectly() throws Exception { // When the raw table doesn't exist, there are no unprocessed records and no timestamp - assertEquals(new InitialRawTableState(false, Optional.empty()), getInitialRawTableState(incrementalAppendStream)); + assertEquals(new InitialRawTableStatus(false, false, Optional.empty()), getInitialRawTableState(incrementalAppendStream)); // When the raw table is empty, there are still no unprocessed records and no timestamp createRawTable(streamId); - assertEquals(new InitialRawTableState(false, Optional.empty()), getInitialRawTableState(incrementalAppendStream)); + assertEquals(new InitialRawTableStatus(true, false, Optional.empty()), getInitialRawTableState(incrementalAppendStream)); // If we insert some raw records with null loaded_at, we should get the min extracted_at insertRawTableRecords( @@ -479,7 +481,7 @@ public void minTimestampBehavesCorrectly() throws Exception { "_airbyte_data": {} } """))); - InitialRawTableState tableState = getInitialRawTableState(incrementalAppendStream); + InitialRawTableStatus tableState = getInitialRawTableState(incrementalAppendStream); assertTrue(tableState.hasUnprocessedRecords(), "When all raw records have null loaded_at, we should recognize that there are unprocessed records"); assertTrue( @@ -493,7 +495,7 @@ public void minTimestampBehavesCorrectly() throws Exception { assertEquals( getInitialRawTableState(incrementalAppendStream), - new InitialRawTableState(false, Optional.of(Instant.parse("2023-01-02T00:00:00Z"))), + new InitialRawTableStatus(true, false, Optional.of(Instant.parse("2023-01-02T00:00:00Z"))), "When all raw records have non-null loaded_at, we should recognize that there are no unprocessed records, and the min timestamp should be equal to the latest extracted_at"); // If we insert another raw record with older extracted_at than the typed records, we should fetch a @@ -549,7 +551,7 @@ public void handlePreexistingRecords() throws Exception { streamId, BaseTypingDedupingTest.readRecords("sqlgenerator/alltypes_inputrecords.jsonl")); - final InitialRawTableState tableState = getInitialRawTableState(incrementalDedupStream); + final InitialRawTableStatus tableState = getInitialRawTableState(incrementalDedupStream); assertAll( () -> assertTrue(tableState.hasUnprocessedRecords(), "After writing some raw records, we should recognize that there are unprocessed records"), @@ -575,7 +577,7 @@ public void handleNoPreexistingRecords() throws Exception { generator.buildColumnId("IamACaseSensitiveColumnName"), AirbyteProtocolType.STRING); createRawTable(streamId); - final InitialRawTableState tableState = getInitialRawTableState(incrementalDedupStream); + final InitialRawTableStatus tableState = getInitialRawTableState(incrementalDedupStream); assertAll( () -> 
assertFalse(tableState.hasUnprocessedRecords(), "With an empty raw table, we should recognize that there are no unprocessed records"), () -> assertEquals(Optional.empty(), tableState.maxProcessedTimestamp(), "With an empty raw table, the min timestamp should be empty")); @@ -900,7 +902,7 @@ public void testCdcOrdering_updateAfterDelete() throws Exception { streamId, BaseTypingDedupingTest.readRecords("sqlgenerator/cdcordering_updateafterdelete_inputrecords.jsonl")); - final InitialRawTableState tableState = getInitialRawTableState(cdcIncrementalDedupStream); + final InitialRawTableStatus tableState = getInitialRawTableState(cdcIncrementalDedupStream); TypeAndDedupeTransaction.executeTypeAndDedupe(generator, destinationHandler, cdcIncrementalDedupStream, tableState.maxProcessedTimestamp(), ""); verifyRecordCounts( @@ -937,7 +939,7 @@ public void testCdcOrdering_insertAfterDelete() throws Exception { "", BaseTypingDedupingTest.readRecords("sqlgenerator/cdcordering_insertafterdelete_inputrecords_final.jsonl")); - final InitialRawTableState tableState = getInitialRawTableState(cdcIncrementalAppendStream); + final InitialRawTableStatus tableState = getInitialRawTableState(cdcIncrementalAppendStream); TypeAndDedupeTransaction.executeTypeAndDedupe(generator, destinationHandler, cdcIncrementalDedupStream, tableState.maxProcessedTimestamp(), ""); verifyRecordCounts( 2, @@ -1245,6 +1247,35 @@ public void testCreateTableForce() throws Exception { getDestinationInitialState(incrementalDedupStream); } + @Test + public void testStateHandling() throws Exception { + // Fetch state from an empty destination. This should not throw an error. + final DestinationInitialStatus initialState = + destinationHandler.gatherInitialState(List.of((incrementalDedupStream))).getFirst(); + // The initial state should not need a soft reset. + assertFalse(initialState.destinationState().needsSoftReset(), "Empty state table should have needsSoftReset = false"); + + // Commit a state that now requires a soft reset. + destinationHandler.commitDestinationStates(Map.of( + incrementalDedupStream.id(), + initialState.destinationState().withSoftReset(true))); + final DestinationInitialStatus updatedState = + destinationHandler.gatherInitialState(List.of((incrementalDedupStream))).getFirst(); + // When we re-fetch the state, it should now need a soft reset. + assertTrue(updatedState.destinationState().needsSoftReset(), "After committing an explicit state, expected needsSoftReset = true"); + + // Commit a state belonging to a different stream + destinationHandler.commitDestinationStates(Map.of( + new StreamId(null, null, null, null, null, "some_other_stream"), + initialState.destinationState().withSoftReset(true))); + + // Verify that we can still retrieve the state for the original stream + final DestinationInitialStatus refetchedState = + destinationHandler.gatherInitialState(List.of((incrementalDedupStream))).getFirst(); + // When we re-fetch the state, it should now need a soft reset. 
+ assertTrue(refetchedState.destinationState().needsSoftReset(), "After committing an unrelated state, expected needsSoftReset = true"); + } + protected void createFinalTable(final StreamConfig stream, final String suffix) throws Exception { final Sql createTable = generator.createTable(stream, suffix, false); destinationHandler.execute(createTable); From 1685b276d8885787928df37196d09729fda9d34f Mon Sep 17 00:00:00 2001 From: Edward Gao Date: Fri, 1 Mar 2024 11:28:50 -0800 Subject: [PATCH 056/172] Java CDK: Staging destinations include timezone in extracted_at in CSV file (#35313) Co-authored-by: Gireesh Sreepathi --- airbyte-cdk/java/airbyte-cdk/README.md | 3 ++- .../core/src/main/resources/version.properties | 2 +- .../s3/csv/StagingDatabaseCsvSheetGenerator.java | 5 ++--- .../destination/s3/csv/S3CsvWriterTest.java | 11 ++++------- 4 files changed, 9 insertions(+), 12 deletions(-) diff --git a/airbyte-cdk/java/airbyte-cdk/README.md b/airbyte-cdk/java/airbyte-cdk/README.md index bcfd9ebb4524..40d555388b29 100644 --- a/airbyte-cdk/java/airbyte-cdk/README.md +++ b/airbyte-cdk/java/airbyte-cdk/README.md @@ -166,7 +166,8 @@ MavenLocal debugging steps: | Version | Date | Pull Request | Subject | |:--------|:-----------|:-----------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------| -| 0.23.10 | 2024-03-01 | [\#35303](https://github.com/airbytehq/airbyte/pull/35303) | various improvements for tests TestDataHolder | +| 0.23.11 | 2024-03-01 | [\#35313](https://github.com/airbytehq/airbyte/pull/35313) | Preserve timezone offset in CSV writer for destinations | +| 0.23.10 | 2024-03-01 | [\#35303](https://github.com/airbytehq/airbyte/pull/35303) | Migration framework with DestinationState for softReset | | 0.23.9 | 2024-03-01 | [\#35720](https://github.com/airbytehq/airbyte/pull/35720) | various improvements for tests TestDataHolder | | 0.23.8 | 2024-02-28 | [\#35529](https://github.com/airbytehq/airbyte/pull/35529) | Refactor on state iterators | | 0.23.7 | 2024-02-28 | [\#35376](https://github.com/airbytehq/airbyte/pull/35376) | Extract typereduper migrations to separte method | diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties index c02c9de6a0a6..b3f1969f9497 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties @@ -1 +1 @@ -version=0.23.10 +version=0.23.11 diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/csv/StagingDatabaseCsvSheetGenerator.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/csv/StagingDatabaseCsvSheetGenerator.java index 32d2e977a26d..9ff3ecb9dab6 100644 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/csv/StagingDatabaseCsvSheetGenerator.java +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/main/java/io/airbyte/cdk/integrations/destination/s3/csv/StagingDatabaseCsvSheetGenerator.java @@ -8,7 +8,6 @@ import io.airbyte.cdk.integrations.base.JavaBaseConstants; import io.airbyte.commons.json.Jsons; import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import java.sql.Timestamp; import java.time.Instant; import 
java.util.Collections; import java.util.LinkedList; @@ -62,14 +61,14 @@ public List getDataRow(final UUID id, final String formattedString, fina if (useDestinationsV2Columns) { return List.of( id, - Timestamp.from(Instant.ofEpochMilli(emittedAt)), + Instant.ofEpochMilli(emittedAt), "", formattedString); } else { return List.of( id, formattedString, - Timestamp.from(Instant.ofEpochMilli(emittedAt))); + Instant.ofEpochMilli(emittedAt)); } } diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvWriterTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvWriterTest.java index 5fe69ffa9923..f56374a04952 100644 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvWriterTest.java +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvWriterTest.java @@ -284,13 +284,10 @@ public void writesContentsCorrectly_when_stagingDatabaseConfig() throws IOExcept // carriage returns are required b/c RFC4180 requires it :( // Dynamically generate the timestamp because we generate in local time. assertEquals( - String.format( - """ - f6767f7d-ce1e-45cc-92db-2ad3dfdd088e,"{""foo"":73}",%s\r - 2b95a13f-d54f-4370-a712-1c7bf2716190,"{""bar"":84}",%s\r - """, - Timestamp.from(Instant.ofEpochMilli(1234)), - Timestamp.from(Instant.ofEpochMilli(2345))), + """ + f6767f7d-ce1e-45cc-92db-2ad3dfdd088e,"{""foo"":73}",1970-01-01T00:00:01.234Z\r + 2b95a13f-d54f-4370-a712-1c7bf2716190,"{""bar"":84}",1970-01-01T00:00:02.345Z\r + """, outputStreams.get(0).toString(StandardCharsets.UTF_8)); } From 2a32688970e96e0f0b05688ab696f1cc1b01f0e7 Mon Sep 17 00:00:00 2001 From: Joe Bell Date: Fri, 1 Mar 2024 12:16:12 -0800 Subject: [PATCH 057/172] Certify Postgres (#35760) --- .../destination-postgres/metadata.yaml | 6 +- .../postgres/PostgresDestination.java | 3 + docs/integrations/destinations/postgres.md | 63 ++++++++++--------- 3 files changed, 38 insertions(+), 34 deletions(-) diff --git a/airbyte-integrations/connectors/destination-postgres/metadata.yaml b/airbyte-integrations/connectors/destination-postgres/metadata.yaml index af88d008829f..9be4fbc5643f 100644 --- a/airbyte-integrations/connectors/destination-postgres/metadata.yaml +++ b/airbyte-integrations/connectors/destination-postgres/metadata.yaml @@ -5,7 +5,7 @@ data: connectorSubtype: database connectorType: destination definitionId: 25c5221d-dce2-4163-ade9-739ef790f503 - dockerImageTag: 2.0.1 + dockerImageTag: 2.0.2 dockerRepository: airbyte/destination-postgres documentationUrl: https://docs.airbyte.com/integrations/destinations/postgres githubIssueLabel: destination-postgres @@ -27,8 +27,8 @@ data: These changes will likely require updates to downstream dbt / SQL models, which we walk through [here](https://docs.airbyte.com/release_notes/upgrading_to_destinations_v2/#updating-downstream-transformations). Selecting `Upgrade` will upgrade **all** connections using this destination at their next sync. For more controlled upgrade [see instructions](https://docs.airbyte.com/release_notes/upgrading_to_destinations_v2/#upgrading-connections-one-by-one-with-dual-writing). 
upgradeDeadline: "2024-05-31" - releaseStage: alpha - supportLevel: community + releaseStage: generally_available + supportLevel: certified supportsDbt: true tags: - language:java diff --git a/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresDestination.java b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresDestination.java index 93c51df74259..8c5f92aa3e7f 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresDestination.java +++ b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresDestination.java @@ -16,6 +16,7 @@ import io.airbyte.cdk.db.factory.DatabaseDriver; import io.airbyte.cdk.db.jdbc.JdbcDatabase; import io.airbyte.cdk.db.jdbc.JdbcUtils; +import io.airbyte.cdk.integrations.base.AirbyteExceptionHandler; import io.airbyte.cdk.integrations.base.Destination; import io.airbyte.cdk.integrations.base.IntegrationRunner; import io.airbyte.cdk.integrations.base.ssh.SshWrappedDestination; @@ -31,6 +32,7 @@ import java.util.HashMap; import java.util.Map; import java.util.Optional; +import org.postgresql.util.PSQLException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -141,6 +143,7 @@ public boolean isV2Destination() { } public static void main(final String[] args) throws Exception { + AirbyteExceptionHandler.addThrowableForDeinterpolation(PSQLException.class); final Destination destination = PostgresDestination.sshWrappedDestination(); LOGGER.info("starting destination: {}", PostgresDestination.class); new IntegrationRunner(destination).run(args); diff --git a/docs/integrations/destinations/postgres.md b/docs/integrations/destinations/postgres.md index 50bd15cc864a..638747a410c3 100644 --- a/docs/integrations/destinations/postgres.md +++ b/docs/integrations/destinations/postgres.md @@ -191,34 +191,35 @@ Now that you have set up the Postgres destination connector, check out the follo ## Changelog -| Version | Date | Pull Request | Subject | -|:--------|:-----------|:-----------------------------------------------------------|:----------------------------------------------------------------------------------------------------| -| 2.0.1 | 2024-02-22 | [35385](https://github.com/airbytehq/airbyte/pull/35385) | Upgrade CDK to 0.23.0; Gathering required initial state upfront | -| 2.0.0 | 2024-02-09 | [35042](https://github.com/airbytehq/airbyte/pull/35042) | GA release V2 destinations format. | -| 0.6.3 | 2024-02-06 | [34891](https://github.com/airbytehq/airbyte/pull/34891) | Remove varchar limit, use system defaults | -| 0.6.2 | 2024-01-30 | [34683](https://github.com/airbytehq/airbyte/pull/34683) | CDK Upgrade 0.16.3; Fix dependency mismatches in slf4j lib | -| 0.6.1 | 2024-01-29 | [34630](https://github.com/airbytehq/airbyte/pull/34630) | CDK Upgrade; Use lowercase raw table in T+D queries. 
| -| 0.6.0 | 2024-01-19 | [34372](https://github.com/airbytehq/airbyte/pull/34372) | Add dv2 flag in spec | -| 0.5.5 | 2024-01-18 | [34236](https://github.com/airbytehq/airbyte/pull/34236) | Upgrade CDK to 0.13.1; Add indexes in raw table for query optimization | -| 0.5.4 | 2024-01-11 | [34177](https://github.com/airbytehq/airbyte/pull/34177) | Add code for DV2 beta (no user-visible changes) | -| 0.5.3 | 2024-01-10 | [34135](https://github.com/airbytehq/airbyte/pull/34135) | Use published CDK missed in previous release | -| 0.5.2 | 2024-01-08 | [33875](https://github.com/airbytehq/airbyte/pull/33875) | Update CDK to get Tunnel heartbeats feature | -| 0.5.1 | 2024-01-04 | [33873](https://github.com/airbytehq/airbyte/pull/33873) | Install normalization to enable DV2 beta | -| 0.5.0 | 2023-12-18 | [33507](https://github.com/airbytehq/airbyte/pull/33507) | Upgrade to latest CDK; Fix DATs and tests | -| 0.4.0 | 2023-06-27 | [\#27781](https://github.com/airbytehq/airbyte/pull/27781) | License Update: Elv2 | -| 0.3.27 | 2023-04-04 | [\#24604](https://github.com/airbytehq/airbyte/pull/24604) | Support for destination checkpointing | -| 0.3.26 | 2022-09-27 | [\#17299](https://github.com/airbytehq/airbyte/pull/17299) | Improve error handling for strict-encrypt postgres destination | -| 0.3.24 | 2022-09-08 | [\#16046](https://github.com/airbytehq/airbyte/pull/16046) | Fix missing database name URL Encoding | -| 0.3.23 | 2022-07-18 | [\#16260](https://github.com/airbytehq/airbyte/pull/16260) | Prevent traffic going on an unsecured channel in strict-encryption version of destination postgres | -| 0.3.22 | 2022-07-18 | [\#13840](https://github.com/airbytehq/airbyte/pull/13840) | Added the ability to connect using different SSL modes and SSL certificates | -| 0.3.21 | 2022-07-06 | [\#14479](https://github.com/airbytehq/airbyte/pull/14479) | Publish amd64 and arm64 versions of the connector | -| 0.3.20 | 2022-05-17 | [\#12820](https://github.com/airbytehq/airbyte/pull/12820) | Improved 'check' operation performance | -| 0.3.19 | 2022-04-25 | [\#12195](https://github.com/airbytehq/airbyte/pull/12195) | Add support for additional JDBC URL Params input | -| 0.3.18 | 2022-04-12 | [\#11729](https://github.com/airbytehq/airbyte/pull/11514) | Bump mina-sshd from 2.7.0 to 2.8.0 | -| 0.3.17 | 2022-04-05 | [\#11729](https://github.com/airbytehq/airbyte/pull/11729) | Fixed bug with dashes in schema name | -| 0.3.15 | 2022-02-25 | [\#10421](https://github.com/airbytehq/airbyte/pull/10421) | Refactor JDBC parameters handling | -| 0.3.14 | 2022-02-14 | [\#10256](https://github.com/airbytehq/airbyte/pull/10256) | (unpublished) Add `-XX:+ExitOnOutOfMemoryError` JVM option | -| 0.3.13 | 2021-12-01 | [\#8371](https://github.com/airbytehq/airbyte/pull/8371) | Fixed incorrect handling "\n" in ssh key | -| 0.3.12 | 2021-11-08 | [\#7719](https://github.com/airbytehq/airbyte/pull/7719) | Improve handling of wide rows by buffering records based on their byte size rather than their count | -| 0.3.11 | 2021-09-07 | [\#5743](https://github.com/airbytehq/airbyte/pull/5743) | Add SSH Tunnel support | -| 0.3.10 | 2021-08-11 | [\#5336](https://github.com/airbytehq/airbyte/pull/5336) | Destination Postgres: fix \u0000\(NULL\) value processing | +| Version | Date | Pull Request | Subject | +|:--------|:-----------|:-------------------------------------------------------------|:----------------------------------------------------------------------------------------------------| +| 2.0.2 | 2024-03-01 | 
[\#35760](https://github.com/airbytehq/airbyte/pull/35760) | Mark as certified, add PSQL exception to deinterpolator | +| 2.0.1 | 2024-02-22 | [\#35385](https://github.com/airbytehq/airbyte/pull/35385) | Upgrade CDK to 0.23.0; Gathering required initial state upfront | +| 2.0.0 | 2024-02-09 | [\#35042](https://github.com/airbytehq/airbyte/pull/35042) | GA release V2 destinations format. | +| 0.6.3 | 2024-02-06 | [\#34891](https://github.com/airbytehq/airbyte/pull/34891) | Remove varchar limit, use system defaults | +| 0.6.2 | 2024-01-30 | [\#34683](https://github.com/airbytehq/airbyte/pull/34683) | CDK Upgrade 0.16.3; Fix dependency mismatches in slf4j lib | +| 0.6.1 | 2024-01-29 | [\#34630](https://github.com/airbytehq/airbyte/pull/34630) | CDK Upgrade; Use lowercase raw table in T+D queries. | +| 0.6.0 | 2024-01-19 | [\#34372](https://github.com/airbytehq/airbyte/pull/34372) | Add dv2 flag in spec | +| 0.5.5 | 2024-01-18 | [\#34236](https://github.com/airbytehq/airbyte/pull/34236) | Upgrade CDK to 0.13.1; Add indexes in raw table for query optimization | +| 0.5.4 | 2024-01-11 | [\#34177](https://github.com/airbytehq/airbyte/pull/34177) | Add code for DV2 beta (no user-visible changes) | +| 0.5.3 | 2024-01-10 | [\#34135](https://github.com/airbytehq/airbyte/pull/34135) | Use published CDK missed in previous release | +| 0.5.2 | 2024-01-08 | [\#33875](https://github.com/airbytehq/airbyte/pull/33875) | Update CDK to get Tunnel heartbeats feature | +| 0.5.1 | 2024-01-04 | [\#33873](https://github.com/airbytehq/airbyte/pull/33873) | Install normalization to enable DV2 beta | +| 0.5.0 | 2023-12-18 | [\#33507](https://github.com/airbytehq/airbyte/pull/33507) | Upgrade to latest CDK; Fix DATs and tests | +| 0.4.0 | 2023-06-27 | [\#27781](https://github.com/airbytehq/airbyte/pull/27781) | License Update: Elv2 | +| 0.3.27 | 2023-04-04 | [\#24604](https://github.com/airbytehq/airbyte/pull/24604) | Support for destination checkpointing | +| 0.3.26 | 2022-09-27 | [\#17299](https://github.com/airbytehq/airbyte/pull/17299) | Improve error handling for strict-encrypt postgres destination | +| 0.3.24 | 2022-09-08 | [\#16046](https://github.com/airbytehq/airbyte/pull/16046) | Fix missing database name URL Encoding | +| 0.3.23 | 2022-07-18 | [\#16260](https://github.com/airbytehq/airbyte/pull/16260) | Prevent traffic going on an unsecured channel in strict-encryption version of destination postgres | +| 0.3.22 | 2022-07-18 | [\#13840](https://github.com/airbytehq/airbyte/pull/13840) | Added the ability to connect using different SSL modes and SSL certificates | +| 0.3.21 | 2022-07-06 | [\#14479](https://github.com/airbytehq/airbyte/pull/14479) | Publish amd64 and arm64 versions of the connector | +| 0.3.20 | 2022-05-17 | [\#12820](https://github.com/airbytehq/airbyte/pull/12820) | Improved 'check' operation performance | +| 0.3.19 | 2022-04-25 | [\#12195](https://github.com/airbytehq/airbyte/pull/12195) | Add support for additional JDBC URL Params input | +| 0.3.18 | 2022-04-12 | [\#11729](https://github.com/airbytehq/airbyte/pull/11514) | Bump mina-sshd from 2.7.0 to 2.8.0 | +| 0.3.17 | 2022-04-05 | [\#11729](https://github.com/airbytehq/airbyte/pull/11729) | Fixed bug with dashes in schema name | +| 0.3.15 | 2022-02-25 | [\#10421](https://github.com/airbytehq/airbyte/pull/10421) | Refactor JDBC parameters handling | +| 0.3.14 | 2022-02-14 | [\#10256](https://github.com/airbytehq/airbyte/pull/10256) | (unpublished) Add `-XX:+ExitOnOutOfMemoryError` JVM option | +| 0.3.13 | 2021-12-01 | 
[\#8371](https://github.com/airbytehq/airbyte/pull/8371) | Fixed incorrect handling "\n" in ssh key | +| 0.3.12 | 2021-11-08 | [\#7719](https://github.com/airbytehq/airbyte/pull/7719) | Improve handling of wide rows by buffering records based on their byte size rather than their count | +| 0.3.11 | 2021-09-07 | [\#5743](https://github.com/airbytehq/airbyte/pull/5743) | Add SSH Tunnel support | +| 0.3.10 | 2021-08-11 | [\#5336](https://github.com/airbytehq/airbyte/pull/5336) | Destination Postgres: fix \u0000\(NULL\) value processing | From 7063ea3fd901601bfd9059f65a59193d7d985449 Mon Sep 17 00:00:00 2001 From: Joe Bell Date: Fri, 1 Mar 2024 13:39:38 -0800 Subject: [PATCH 058/172] Postgres Strict Encrypt (#35762) --- .../destination-postgres-strict-encrypt/metadata.yaml | 5 +++-- .../postgres/PostgresDestinationStrictEncrypt.java | 3 +++ 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/metadata.yaml b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/metadata.yaml index 43e8aa23e427..29a9cd9b6dde 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/metadata.yaml +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/metadata.yaml @@ -2,7 +2,7 @@ data: connectorSubtype: database connectorType: destination definitionId: 25c5221d-dce2-4163-ade9-739ef790f503 - dockerImageTag: 2.0.1 + dockerImageTag: 2.0.2 dockerRepository: airbyte/destination-postgres-strict-encrypt documentationUrl: https://docs.airbyte.com/integrations/destinations/postgres githubIssueLabel: destination-postgres @@ -23,7 +23,8 @@ data: These changes will likely require updates to downstream dbt / SQL models, which we walk through [here](https://docs.airbyte.com/release_notes/upgrading_to_destinations_v2/#updating-downstream-transformations). Selecting `Upgrade` will upgrade **all** connections using this destination at their next sync. For more controlled upgrade [see instructions](https://docs.airbyte.com/release_notes/upgrading_to_destinations_v2/#upgrading-connections-one-by-one-with-dual-writing). 
upgradeDeadline: "2024-05-31" - releaseStage: alpha + releaseStage: generally_available + supportLevel: certified supportsDbt: true tags: - language:java diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/main/java/io/airbyte/integrations/destination/postgres/PostgresDestinationStrictEncrypt.java b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/main/java/io/airbyte/integrations/destination/postgres/PostgresDestinationStrictEncrypt.java index 2ac912ae5962..3eece7cb3ac1 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/main/java/io/airbyte/integrations/destination/postgres/PostgresDestinationStrictEncrypt.java +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/main/java/io/airbyte/integrations/destination/postgres/PostgresDestinationStrictEncrypt.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import io.airbyte.cdk.db.jdbc.JdbcUtils; +import io.airbyte.cdk.integrations.base.AirbyteExceptionHandler; import io.airbyte.cdk.integrations.base.Destination; import io.airbyte.cdk.integrations.base.IntegrationRunner; import io.airbyte.cdk.integrations.base.spec_modification.SpecModifyingDestination; @@ -15,6 +16,7 @@ import io.airbyte.protocol.models.v0.AirbyteConnectionStatus.Status; import io.airbyte.protocol.models.v0.ConnectorSpecification; import java.util.Set; +import org.postgresql.util.PSQLException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -66,6 +68,7 @@ public boolean isV2Destination() { } public static void main(final String[] args) throws Exception { + AirbyteExceptionHandler.addThrowableForDeinterpolation(PSQLException.class); final Destination destination = new PostgresDestinationStrictEncrypt(); LOGGER.info("starting destination: {}", PostgresDestinationStrictEncrypt.class); new IntegrationRunner(destination).run(args); From dc35e13b3bed3228b3f5796d50ada3a75ee6c50e Mon Sep 17 00:00:00 2001 From: Stephane Geneix <147216312+stephane-airbyte@users.noreply.github.com> Date: Fri, 1 Mar 2024 17:31:41 -0800 Subject: [PATCH 059/172] add a timeout to junit test (#35767) Today there's no clear timeout for tests. The only one that exists is at the github action level, and it has to be conservative at 1h. This change introduces a timeout per test method. The default value is set at 5 minutes per method, and can be overriden on a per method basis via the use of the junit @Timeout annotation. We're also fixing a regression in the test ContainerFactory that was sharing less container than was possible. 
Finally, we're introducing an MdcScope.doNothing to allow using MdcScope in try blocks without triggering a compiler warning. --- airbyte-cdk/java/airbyte-cdk/README.md | 3 +- .../src/main/resources/version.properties | 2 +- .../LoggingInvocationInterceptor.java | 33 ++++++++++++++++++- .../cdk/testutils/ContainerFactory.java | 8 +++-- .../destination/s3/csv/S3CsvWriterTest.java | 4 +++ 5 files changed, 44 insertions(+), 6 deletions(-) diff --git a/airbyte-cdk/java/airbyte-cdk/README.md b/airbyte-cdk/java/airbyte-cdk/README.md index 40d555388b29..a32fa872ea2d 100644 --- a/airbyte-cdk/java/airbyte-cdk/README.md +++ b/airbyte-cdk/java/airbyte-cdk/README.md @@ -166,9 +166,10 @@ MavenLocal debugging steps: | Version | Date | Pull Request | Subject | |:--------|:-----------|:-----------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 0.23.12 | 2024-03-01 | [\#35767](https://github.com/airbytehq/airbyte/pull/35767) | Introduce a timeout for Java tests | | 0.23.11 | 2024-03-01 | [\#35313](https://github.com/airbytehq/airbyte/pull/35313) | Preserve timezone offset in CSV writer for destinations | | 0.23.10 | 2024-03-01 | [\#35303](https://github.com/airbytehq/airbyte/pull/35303) | Migration framework with DestinationState for softReset | -| 0.23.9 | 2024-03-01 | [\#35720](https://github.com/airbytehq/airbyte/pull/35720) | various improvements for tests TestDataHolder | +| 0.23.9 | 2024-02-29 | [\#35720](https://github.com/airbytehq/airbyte/pull/35720) | various improvements for tests TestDataHolder | | 0.23.8 | 2024-02-28 | [\#35529](https://github.com/airbytehq/airbyte/pull/35529) | Refactor on state iterators | | 0.23.7 | 2024-02-28 | [\#35376](https://github.com/airbytehq/airbyte/pull/35376) | Extract typereduper migrations to separte method | | 0.23.6 | 2024-02-26 | [\#35647](https://github.com/airbytehq/airbyte/pull/35647) | Add a getNamespace into TestDataHolder | diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties index b3f1969f9497..f182eacffb68 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties @@ -1 +1 @@ -version=0.23.11 +version=0.23.12 diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/extensions/LoggingInvocationInterceptor.java b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/extensions/LoggingInvocationInterceptor.java index a0a2d31640f9..07a2c526045b 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/extensions/LoggingInvocationInterceptor.java +++ b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/extensions/LoggingInvocationInterceptor.java @@ -16,6 +16,9 @@ import java.util.regex.Pattern; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.exception.ExceptionUtils; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Timeout; +import org.junit.jupiter.api.Timeout.ThreadMode; import org.junit.jupiter.api.extension.DynamicTestInvocationContext; import org.junit.jupiter.api.extension.ExtensionContext; import org.junit.jupiter.api.extension.InvocationInterceptor; @@ -33,6 +36,8 @@ */ public class LoggingInvocationInterceptor implements
InvocationInterceptor { + private static final Duration DEFAULT_TIMEOUT = Duration.ofMinutes(5); + private static final class LoggingInvocationInterceptorHandler implements InvocationHandler { private static final Logger LOGGER = LoggerFactory.getLogger(LoggingInvocationInterceptor.class); @@ -68,7 +73,13 @@ public Object invoke(Object proxy, Method method, Object[] args) throws Throwabl LOGGER.info("Junit starting {}", logLineSuffix); try { Instant start = Instant.now(); - Object retVal = invocation.proceed(); + final Object retVal; + Duration timeout = getTimeout(invocationContext); + if (timeout != null) { + retVal = Assertions.assertTimeoutPreemptively(timeout, invocation::proceed); + } else { + retVal = invocation.proceed(); + } long elapsedMs = Duration.between(start, Instant.now()).toMillis(); LOGGER.info("Junit completed {} in {} ms", logLineSuffix, elapsedMs); return retVal; @@ -93,6 +104,26 @@ public Object invoke(Object proxy, Method method, Object[] args) throws Throwabl } } + private static Duration getTimeout(ReflectiveInvocationContext invocationContext) { + Duration timeout = DEFAULT_TIMEOUT; + if (invocationContext.getExecutable()instanceof Method m) { + Timeout timeoutAnnotation = m.getAnnotation(Timeout.class); + if (timeoutAnnotation == null) { + timeoutAnnotation = invocationContext.getTargetClass().getAnnotation(Timeout.class); + } + if (timeoutAnnotation != null) { + if (timeoutAnnotation.threadMode() == ThreadMode.SAME_THREAD) { + return null; + } + timeout = Duration.ofMillis(timeoutAnnotation.unit().toMillis(timeoutAnnotation.value())); + } + } + if (timeout.compareTo(Duration.ofHours(1)) > 0) { + return DEFAULT_TIMEOUT; + } + return timeout; + } + } private final InvocationInterceptor proxy = (InvocationInterceptor) Proxy.newProxyInstance( diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/ContainerFactory.java b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/ContainerFactory.java index 6e89dc7e2f2f..d69b60dab21c 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/ContainerFactory.java +++ b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/ContainerFactory.java @@ -35,7 +35,9 @@ public abstract class ContainerFactory> { private record ContainerKey> (Class clazz, DockerImageName imageName, - List> methods) {}; + List> methods) {} + + ; private static class ContainerOrException { @@ -70,7 +72,7 @@ GenericContainer container() { } - private final ConcurrentMap, ContainerOrException> SHARED_CONTAINERS = new ConcurrentHashMap<>(); + private static final ConcurrentMap, ContainerOrException> SHARED_CONTAINERS = new ConcurrentHashMap<>(); private static final AtomicInteger containerId = new AtomicInteger(0); private final MdcScope.Builder getTestContainerLogMdcBuilder(DockerImageName imageName, @@ -112,7 +114,7 @@ public final C shared(String imageName, List // Container creation can be exceedingly slow. // Furthermore, we need to handle exceptions raised during container creation. ContainerOrException containerOrError = SHARED_CONTAINERS.computeIfAbsent(containerKey, - key -> new ContainerOrException(() -> createAndStartContainer(key.imageName(), key.methods()))); + key -> new ContainerOrException(() -> createAndStartContainer(key.imageName(), ((ContainerKey) key).methods()))); // Instead, the container creation (if applicable) is deferred to here. 
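// The wrapper keeps computeIfAbsent cheap and memoizes the expensive start (and any failure) for
// every later caller. Sketched from its usage here; the real ContainerOrException may differ in
// detail, so treat the names and error handling below as assumptions.
final class LazyContainer<C> {

  private final java.util.function.Supplier<C> creator;
  private C container;
  private RuntimeException failure;

  LazyContainer(final java.util.function.Supplier<C> creator) {
    this.creator = creator;
  }

  synchronized C container() {
    if (failure != null) {
      throw failure; // rethrow the memoized creation error to every subsequent caller
    }
    if (container == null) {
      try {
        container = creator.get(); // the slow start runs at most once, outside the map's per-key lock
      } catch (final RuntimeException e) {
        failure = e;
        throw e;
      }
    }
    return container;
  }

}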
return (C) containerOrError.container(); } diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvWriterTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvWriterTest.java index f56374a04952..747ecb44afd0 100644 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvWriterTest.java +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvWriterTest.java @@ -45,8 +45,12 @@ import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; +import org.junit.jupiter.api.Timeout.ThreadMode; import org.mockito.MockedConstruction; +@Timeout(value = 1, + threadMode = ThreadMode.SAME_THREAD) class S3CsvWriterTest { public static final ConfiguredAirbyteStream CONFIGURED_STREAM = new ConfiguredAirbyteStream() From 44668e45d376c5ac39ffb35847e27b1554ce274d Mon Sep 17 00:00:00 2001 From: Augustin Date: Mon, 4 Mar 2024 17:01:25 +0100 Subject: [PATCH 060/172] airbyte-ci format: run a runner with more disk space (#35790) --- .github/workflows/format_check.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/format_check.yml b/.github/workflows/format_check.yml index 2eabc9a87463..c8e54f6b405c 100644 --- a/.github/workflows/format_check.yml +++ b/.github/workflows/format_check.yml @@ -12,7 +12,7 @@ jobs: format-check: # IMPORTANT: This name must match the require check name on the branch protection settings name: "Check for formatting errors" - runs-on: ubuntu-latest + runs-on: tooling-test-small steps: - name: Checkout Airbyte uses: actions/checkout@v3 From 875705852dc446f4c285307b2b23606b4933fbac Mon Sep 17 00:00:00 2001 From: Anton Karpets Date: Mon, 4 Mar 2024 18:35:45 +0200 Subject: [PATCH 061/172] =?UTF-8?q?=F0=9F=8F=A5Source=20Paypal=20Transacti?= =?UTF-8?q?on:=20fix=20CAT=20(#35781)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../source-paypal-transaction/acceptance-test-config.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/airbyte-integrations/connectors/source-paypal-transaction/acceptance-test-config.yml b/airbyte-integrations/connectors/source-paypal-transaction/acceptance-test-config.yml index 66695c691d17..ba6e42d9f4fc 100644 --- a/airbyte-integrations/connectors/source-paypal-transaction/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-paypal-transaction/acceptance-test-config.yml @@ -40,8 +40,6 @@ acceptance_tests: empty_streams: - name: show_product_details bypass_reason: "Products may not exist" - - name: list_products - bypass_reason: "Product List may be too big causing timeout errors" - name: search_invoices bypass_reason: "Order makes the diff fail." #Have to add for testing PR CI. 
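Returning for a moment to the extracted_at change in PATCH 056 above: java.sql.Timestamp.toString() renders the JVM's local wall-clock time with no zone marker, while java.time.Instant.toString() is ISO-8601 in UTC, which matches the updated S3CsvWriterTest expectation. A minimal, self-contained illustration (the printed values in the comments assume a JVM default zone of UTC-8):

import java.sql.Timestamp;
import java.time.Instant;

public class ExtractedAtFormats {

  public static void main(final String[] args) {
    final Instant emittedAt = Instant.ofEpochMilli(1234);
    // Old behavior: local time, no offset, e.g. "1969-12-31 16:00:01.234" on a UTC-8 JVM.
    System.out.println(Timestamp.from(emittedAt));
    // New behavior: unambiguous UTC instant: "1970-01-01T00:00:01.234Z".
    System.out.println(emittedAt);
  }

}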
From 2b088037fdfbfaf80bb43920b3e71a6a2c5ce831 Mon Sep 17 00:00:00 2001 From: Anton Karpets Date: Mon, 4 Mar 2024 18:46:54 +0200 Subject: [PATCH 062/172] =?UTF-8?q?=F0=9F=8F=A5Source=20Google=20Search=20?= =?UTF-8?q?Console:=20fix=20expected=20records=20(#35787)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../source-google-search-console/README.md | 2 +- .../integration_tests/expected_records.jsonl | 24 +++++++++---------- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/airbyte-integrations/connectors/source-google-search-console/README.md b/airbyte-integrations/connectors/source-google-search-console/README.md index 0ff6251cecd7..6ed565336b99 100755 --- a/airbyte-integrations/connectors/source-google-search-console/README.md +++ b/airbyte-integrations/connectors/source-google-search-console/README.md @@ -30,7 +30,7 @@ See `sample_files/sample_config.json` for a sample config file. poetry run source-google-search-console spec poetry run source-google-search-console check --config secrets/config.json poetry run source-google-search-console discover --config secrets/config.json -poetry run source-google-search-console read --config secrets/config.json --catalog sample_files/configured_catalog.json +poetry run source-google-search-console read --config secrets/config.json --catalog integration_tests/configured_catalog.json ``` ### Running unit tests diff --git a/airbyte-integrations/connectors/source-google-search-console/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-google-search-console/integration_tests/expected_records.jsonl index 1b96b467d4bb..e05006e04b63 100644 --- a/airbyte-integrations/connectors/source-google-search-console/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-google-search-console/integration_tests/expected_records.jsonl @@ -3,15 +3,15 @@ {"stream": "sitemaps", "data": {"path": "https://airbyte.io/sitemap.xml", "lastSubmitted": "2021-09-10T23:02:22.258Z", "isPending": false, "isSitemapsIndex": false, "type": "sitemap", "lastDownloaded": "2023-03-02T03:42:19.607Z", "warnings": "0", "errors": "0", "contents": [{"type": "web", "submitted": "5165", "indexed": "0"}]}, "emitted_at": 1677799186044} {"stream": "search_analytics_by_date", "data": {"clicks": 160, "impressions": 6097, "ctr": 0.026242414302115796, "position": 27.335410857798916, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-22"}, "emitted_at": 1709284338937} {"stream": "search_analytics_by_date", "data": {"clicks": 227, "impressions": 7309, "ctr": 0.031057600218908195, "position": 25.308523737857435, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-23"}, "emitted_at": 1709284338938} -{"stream": "search_analytics_by_country", "data": {"clicks": 102, "impressions": 3190, "ctr": 0.03197492163009404, "position": 18.926018808777428, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-20", "country": "usa"}, "emitted_at": 1709284488094} -{"stream": "search_analytics_by_country", "data": {"clicks": 85, "impressions": 1270, "ctr": 0.06692913385826772, "position": 15.401574803149606, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-20", "country": "ind"}, "emitted_at": 1709284488095} -{"stream": "search_analytics_by_device", "data": {"clicks": 576, "impressions": 13543, "ctr": 0.04253119692830244, "position": 17.01343867680721, "site_url": "sc-domain:airbyte.io", "search_type": "web", 
"date": "2022-10-20", "device": "DESKTOP"}, "emitted_at": 1709284601472} -{"stream": "search_analytics_by_device", "data": {"clicks": 43, "impressions": 1213, "ctr": 0.03544929925803792, "position": 27.912613355317394, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-20", "device": "MOBILE"}, "emitted_at": 1709284601472} -{"stream": "search_analytics_by_page", "data": {"clicks": 13, "impressions": 176, "ctr": 0.07386363636363637, "position": 7.5227272727272725, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-20", "page": "https://discuss.airbyte.io/t/error-io-grpc-statusruntimeexception-deadline-exceeded-deadline-exceeded-after/254"}, "emitted_at": 1709284712124} -{"stream": "search_analytics_by_page", "data": {"clicks": 13, "impressions": 28, "ctr": 0.4642857142857143, "position": 6.678571428571429, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-20", "page": "https://discuss.airbyte.io/t/user-management-oauth-authentication/1287"}, "emitted_at": 1709284712124} -{"stream": "search_analytics_by_query", "data": {"clicks": 5, "impressions": 5, "ctr": 1, "position": 1, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-20", "query": "airbyte discourse"}, "emitted_at": 1709284850261} -{"stream": "search_analytics_by_query", "data": {"clicks": 4, "impressions": 36, "ctr": 0.1111111111111111, "position": 5.027777777777778, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-19", "query": "airbyte connectors"}, "emitted_at": 1709284850262} -{"stream": "search_analytics_all_fields", "data": {"clicks": 2, "impressions": 18, "ctr": 0.1111111111111111, "position": 4.944444444444445, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-19", "country": "usa", "device": "DESKTOP", "page": "https://demo.airbyte.io/", "query": "airbyte connectors"}, "emitted_at": 1709284987285} -{"stream": "search_analytics_all_fields", "data": {"clicks": 2, "impressions": 2, "ctr": 1, "position": 3.5, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-20", "country": "swe", "device": "MOBILE", "page": "https://discuss.airbyte.io/t/advice-for-custom-destination-connector-for-reverse-etl/678", "query": "airbyte reverse etl"}, "emitted_at": 1709284987285} -{"stream": "custom_dimensions", "data": {"clicks": 91, "impressions": 2925, "ctr": 0.03111111111111111, "position": 18.23145299145299, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-20", "country": "usa", "device": "DESKTOP"}, "emitted_at": 1709285094714} -{"stream": "custom_dimensions", "data": {"clicks": 81, "impressions": 1155, "ctr": 0.07012987012987013, "position": 14.841558441558442, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-20", "country": "ind", "device": "DESKTOP"}, "emitted_at": 1709285094714} +{"stream": "search_analytics_by_country", "data": {"clicks": 37, "impressions": 1246, "ctr": 0.02969502407704655, "position": 31.96548956661316, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-22", "country": "usa"}, "emitted_at": 1709558064452} +{"stream": "search_analytics_by_country", "data": {"clicks": 31, "impressions": 1282, "ctr": 0.02418096723868955, "position": 30.254290171606865, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-23", "country": "usa"}, "emitted_at": 1709558064452} +{"stream": "search_analytics_by_device", "data": {"clicks": 203, "impressions": 6206, "ctr": 
0.03271028037383177, "position": 23.797937479858202, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-23", "device": "DESKTOP"}, "emitted_at": 1709558104602} +{"stream": "search_analytics_by_device", "data": {"clicks": 21, "impressions": 1084, "ctr": 0.01937269372693727, "position": 34.21678966789668, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-23", "device": "MOBILE"}, "emitted_at": 1709558104603} +{"stream": "search_analytics_by_page", "data": {"clicks": 8, "impressions": 197, "ctr": 0.04060913705583756, "position": 8.802030456852792, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-21", "page": "https://discuss.airbyte.io/t/kafka-connection-fails/723"}, "emitted_at": 1709558151837} +{"stream": "search_analytics_by_page", "data": {"clicks": 8, "impressions": 66, "ctr": 0.12121212121212122, "position": 6.96969696969697, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-21", "page": "https://discuss.airbyte.io/t/source-potsgres-connection-org-postgresql-jdbc-pgconnection-34b9fc7d-marked-as-broken-because-of-sqlstate-08006/1800"}, "emitted_at": 1709558151837} +{"stream": "search_analytics_by_query", "data": {"clicks": 2, "impressions": 2, "ctr": 1, "position": 1, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-21", "query": "airbyte authentication"}, "emitted_at": 1709558202703} +{"stream": "search_analytics_by_query", "data": {"clicks": 2, "impressions": 11, "ctr": 0.18181818181818182, "position": 2.090909090909091, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-21", "query": "airbyte cloud"}, "emitted_at": 1709558202703} +{"stream": "search_analytics_all_fields", "data": {"clicks": 1, "impressions": 1, "ctr": 1, "position": 9, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-21", "country": "aut", "device": "DESKTOP", "page": "https://discuss.airbyte.io/t/cannot-build-docker-images-for-python-destination-connector/1454", "query": "fatal error: ffi.h: no such file or directory"}, "emitted_at": 1709558247944} +{"stream": "search_analytics_all_fields", "data": {"clicks": 1, "impressions": 1, "ctr": 1, "position": 1, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-21", "country": "bel", "device": "DESKTOP", "page": "https://cloud.airbyte.io/login", "query": "airbyte login"}, "emitted_at": 1709558247944} +{"stream": "custom_dimensions", "data": {"clicks": 29, "impressions": 521, "ctr": 0.05566218809980806, "position": 11.186180422264876, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-21", "country": "fra", "device": "DESKTOP"}, "emitted_at": 1709559198005} +{"stream": "custom_dimensions", "data": {"clicks": 27, "impressions": 421, "ctr": 0.06413301662707839, "position": 14.931116389548693, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-21", "country": "bra", "device": "DESKTOP"}, "emitted_at": 1709559198006} From 8be5bbf97c386d9a3647906ac1c4412b81a587a2 Mon Sep 17 00:00:00 2001 From: Anton Karpets Date: Mon, 4 Mar 2024 18:47:12 +0200 Subject: [PATCH 063/172] =?UTF-8?q?=F0=9F=8F=A5Source=20Zendesk=20Talk:=20?= =?UTF-8?q?change=20order=20of=20authentication=20methods=20in=20spec=20(#?= =?UTF-8?q?35783)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../connectors/source-zendesk-talk/README.md | 2 +- .../acceptance-test-config.yml | 13 ++++-- 
.../integration_tests/expected_records.jsonl | 3 -- .../source-zendesk-talk/metadata.yaml | 2 +- .../source-zendesk-talk/pyproject.toml | 2 +- .../source_zendesk_talk/spec.json | 46 +++++++++---------- docs/integrations/sources/zendesk-talk.md | 5 +- 7 files changed, 39 insertions(+), 34 deletions(-) diff --git a/airbyte-integrations/connectors/source-zendesk-talk/README.md b/airbyte-integrations/connectors/source-zendesk-talk/README.md index 14b1c4f187d5..3b9ae361dd5f 100644 --- a/airbyte-integrations/connectors/source-zendesk-talk/README.md +++ b/airbyte-integrations/connectors/source-zendesk-talk/README.md @@ -30,7 +30,7 @@ See `sample_files/sample_config.json` for a sample config file. poetry run source-zendesk-talk spec poetry run source-zendesk-talk check --config secrets/config.json poetry run source-zendesk-talk discover --config secrets/config.json -poetry run source-zendesk-talk read --config secrets/config.json --catalog sample_files/configured_catalog.json +poetry run source-zendesk-talk read --config secrets/config.json --catalog integration_tests/configured_catalog.json ``` ### Running unit tests diff --git a/airbyte-integrations/connectors/source-zendesk-talk/acceptance-test-config.yml b/airbyte-integrations/connectors/source-zendesk-talk/acceptance-test-config.yml index 05b42a969d3e..6600e4095132 100644 --- a/airbyte-integrations/connectors/source-zendesk-talk/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-zendesk-talk/acceptance-test-config.yml @@ -29,6 +29,13 @@ acceptance_tests: expect_records: path: "integration_tests/expected_records.jsonl" fail_on_extra_columns: false + empty_streams: + - name: account_overview + bypass_reason: "The stream is not empty, but it makes the test fail due to a frequently changing primary key value" + - name: agents_overview + bypass_reason: "The stream is not empty, but it makes the test fail due to a frequently changing primary key value" + - name: current_queue_activity + bypass_reason: "The stream is not empty, but it makes the test fail due to a frequently changing primary key value" incremental: tests: - config_path: "secrets/config.json" @@ -41,10 +48,10 @@ acceptance_tests: ignored_fields: account_overview: - name: current_timestamp - bypass_reason: Depend on current time + bypass_reason: Depends on current time agents_overview: - name: current_timestamp - bypass_reason: Depend on current time + bypass_reason: Depends on current time current_queue_activity: - name: current_timestamp - bypass_reason: Depend on current time + bypass_reason: Depends on current time diff --git a/airbyte-integrations/connectors/source-zendesk-talk/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-zendesk-talk/integration_tests/expected_records.jsonl index 2620572a5677..e2afb9fc050b 100644 --- a/airbyte-integrations/connectors/source-zendesk-talk/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-zendesk-talk/integration_tests/expected_records.jsonl @@ -79,6 +79,3 @@ {"stream": "ivrs", "data": {"id": 360000012016, "name": "Fake IVR Menu 20", "menus": [{"id": 360000018636, "name": "Main menu", "default": true, "greeting_id": null, "routes": []}], "phone_number_ids": [], "phone_number_names": []}, "emitted_at": 1674159480408} {"stream": "ivrs", "data": {"id": 360000012036, "name": "Fake IVR Menu 21", "menus": [{"id": 360000018656, "name": "Main menu", "default": true, "greeting_id": null, "routes": []}], "phone_number_ids": [], "phone_number_names": []}, "emitted_at":
1674159480408} {"stream": "phone_numbers", "data": {"id": 360000121575, "country_code": "US", "created_at": "2020-12-14T18:44:31Z", "external": false, "number": "+12059531462", "name": "+1 (205) 953-1462", "nickname": null, "display_number": "+1 (205) 953-1462", "location": "AL", "toll_free": false, "transcription": true, "recorded": true, "call_recording_consent": "always", "group_ids": [], "default_group_id": null, "greeting_ids": [], "default_greeting_ids": ["voicemail_en", "available_en", "wait_en", "hold_en", "callback_en", "callback-confirmation_en", "call-recording-opt-out_en", "call-recording-opt-in_en"], "categorised_greetings": {"1": "voicemail_en", "2": "available_en", "3": "wait_en", "4": "hold_en", "6": "callback_en", "7": "callback-confirmation_en", "8": "call-recording-opt-out_en", "9": "call-recording-opt-in_en"}, "categorised_greetings_with_sub_settings": {"1": {"voicemail_on_outside_business_hours": "voicemail_en", "voicemail_on_inside_business_hours": "voicemail_en", "voicemail_off_inside_business_hours": "voicemail_en_voicemail_config", "voicemail_off_outside_business_hours": "voicemail_en_voicemail_config"}, "2": {"voicemail_on": "available_en", "voicemail_off": "available_en_voicemail_config"}, "3": "wait_en", "4": "hold_en", "6": "callback_en", "7": "callback-confirmation_en", "8": "call-recording-opt-out_en", "9": "call-recording-opt-in_en"}, "sms_group_id": null, "capabilities": {"sms": true, "mms": true, "voice": true, "emergency_address": true}, "sms_enabled": false, "voice_enabled": true, "priority": 0, "outbound_enabled": true, "line_type": "phone", "ivr_id": null, "schedule_id": null, "failover_number": null}, "emitted_at": 1674159481130} -{"stream": "account_overview", "data": {"average_call_duration": 0, "average_callback_wait_time": 0, "average_hold_time": 0, "average_queue_wait_time": 0, "average_time_to_answer": 0, "average_wrap_up_time": 0, "max_calls_waiting": 0, "max_queue_wait_time": 0, "total_call_duration": 0, "total_callback_calls": 0, "total_calls": 0, "total_calls_abandoned_in_queue": 0, "total_calls_outside_business_hours": 0, "total_calls_with_exceeded_queue_wait_time": 0, "total_calls_with_requested_voicemail": 0, "total_embeddable_callback_calls": 0, "total_hold_time": 0, "total_inbound_calls": 0, "total_outbound_calls": 0, "total_textback_requests": 0, "total_voicemails": 0, "total_wrap_up_time": 0, "current_timestamp": 1677599290}, "emitted_at": 1677599290211} -{"stream": "agents_overview", "data": {"average_accepted_transfers": 0, "average_available_time": 0, "average_away_time": 0, "average_calls_accepted": 0, "average_calls_denied": 0, "average_calls_missed": 0, "average_calls_put_on_hold": 0, "average_hold_time": 0, "average_online_time": 0, "average_started_transfers": 0, "average_talk_time": 0, "average_transfers_only_time": 0, "average_wrap_up_time": 0, "total_accepted_transfers": 0, "total_calls_accepted": 0, "total_calls_denied": 0, "total_calls_missed": 0, "total_calls_put_on_hold": 0, "total_hold_time": 0, "total_started_transfers": 0, "total_talk_time": 0, "total_wrap_up_time": 0, "current_timestamp": 1677599290}, "emitted_at": 1677599290630} -{"stream": "current_queue_activity", "data": {"agents_online": 0, "average_wait_time": 0, "calls_waiting": 0, "longest_wait_time": 0, "callbacks_waiting": 0, "embeddable_callbacks_waiting": 0, "current_timestamp": 1677599291}, "emitted_at": 1677599291494} diff --git a/airbyte-integrations/connectors/source-zendesk-talk/metadata.yaml 
b/airbyte-integrations/connectors/source-zendesk-talk/metadata.yaml index be1a86e383ae..8c9d2e0dddd2 100644 --- a/airbyte-integrations/connectors/source-zendesk-talk/metadata.yaml +++ b/airbyte-integrations/connectors/source-zendesk-talk/metadata.yaml @@ -11,7 +11,7 @@ data: connectorSubtype: api connectorType: source definitionId: c8630570-086d-4a40-99ae-ea5b18673071 - dockerImageTag: 0.1.12 + dockerImageTag: 0.1.13 dockerRepository: airbyte/source-zendesk-talk documentationUrl: https://docs.airbyte.com/integrations/sources/zendesk-talk githubIssueLabel: source-zendesk-talk diff --git a/airbyte-integrations/connectors/source-zendesk-talk/pyproject.toml b/airbyte-integrations/connectors/source-zendesk-talk/pyproject.toml index 80585d460f4a..cd56a4f28eaa 100644 --- a/airbyte-integrations/connectors/source-zendesk-talk/pyproject.toml +++ b/airbyte-integrations/connectors/source-zendesk-talk/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "0.1.12" +version = "0.1.13" name = "source-zendesk-talk" description = "Source implementation for Zendesk Talk." authors = [ "Airbyte ",] diff --git a/airbyte-integrations/connectors/source-zendesk-talk/source_zendesk_talk/spec.json b/airbyte-integrations/connectors/source-zendesk-talk/source_zendesk_talk/spec.json index b205a1f064c2..8268b8449a59 100644 --- a/airbyte-integrations/connectors/source-zendesk-talk/source_zendesk_talk/spec.json +++ b/airbyte-integrations/connectors/source-zendesk-talk/source_zendesk_talk/spec.json @@ -18,29 +18,6 @@ "order": 1, "description": "Zendesk service provides two authentication methods. Choose between: `OAuth2.0` or `API token`.", "oneOf": [ - { - "title": "API Token", - "type": "object", - "required": ["email", "api_token"], - "additionalProperties": true, - "properties": { - "auth_type": { - "type": "string", - "const": "api_token" - }, - "email": { - "title": "Email", - "type": "string", - "description": "The user email for your Zendesk account." - }, - "api_token": { - "title": "API Token", - "type": "string", - "description": "The value of the API token generated. See the docs for more information.", - "airbyte_secret": true - } - } - }, { "title": "OAuth2.0", "type": "object", @@ -71,6 +48,29 @@ "airbyte_secret": true } } + }, + { + "title": "API Token", + "type": "object", + "required": ["email", "api_token"], + "additionalProperties": true, + "properties": { + "auth_type": { + "type": "string", + "const": "api_token" + }, + "email": { + "title": "Email", + "type": "string", + "description": "The user email for your Zendesk account." + }, + "api_token": { + "title": "API Token", + "type": "string", + "description": "The value of the API token generated. See the docs for more information.", + "airbyte_secret": true + } + } } ] }, diff --git a/docs/integrations/sources/zendesk-talk.md b/docs/integrations/sources/zendesk-talk.md index 6bb00f8d4f0a..0c544a84e893 100644 --- a/docs/integrations/sources/zendesk-talk.md +++ b/docs/integrations/sources/zendesk-talk.md @@ -74,8 +74,9 @@ The Zendesk connector should not run into Zendesk API limitations under normal u | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:----------------------------------------------------------------------------| -| 0.1.12 | 2024-02-12 | [35156](https://github.com/airbytehq/airbyte/pull/35156) | Manage dependencies with Poetry. 
| -| 0.1.11 | 2024-01-12 | [34204](https://github.com/airbytehq/airbyte/pull/34204) | prepare for airbyte-lib | +| 0.1.13 | 2024-03-04 | [35783](https://github.com/airbytehq/airbyte/pull/35783) | Change order of authentication methods in spec | +| 0.1.12 | 2024-02-12 | [35156](https://github.com/airbytehq/airbyte/pull/35156) | Manage dependencies with Poetry. | +| 0.1.11 | 2024-01-12 | [34204](https://github.com/airbytehq/airbyte/pull/34204) | Prepare for airbyte-lib | | 0.1.10 | 2023-12-04 | [33030](https://github.com/airbytehq/airbyte/pull/33030) | Base image migration: remove Dockerfile and use python-connector-base image | | 0.1.9 | 2023-08-03 | [29031](https://github.com/airbytehq/airbyte/pull/29031) | Reverted `advancedAuth` spec changes | | 0.1.8 | 2023-08-01 | [28910](https://github.com/airbytehq/airbyte/pull/28910) | Updated `advancedAuth` broken references | From c5b822030ac7e514ed02e731ae0ec242e179ee31 Mon Sep 17 00:00:00 2001 From: Artem Inzhyyants <36314070+artem1205@users.noreply.github.com> Date: Mon, 4 Mar 2024 17:50:27 +0100 Subject: [PATCH 064/172] =?UTF-8?q?=F0=9F=90=9B=20Source=20SalesForce:=20f?= =?UTF-8?q?ix=20memory=20leak=20(OOM)=20(#35791)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Artem Inzhyyants --- .../source-salesforce/metadata.yaml | 2 +- .../connectors/source-salesforce/poetry.lock | 63 ++++++++++--------- .../source-salesforce/pyproject.toml | 4 +- docs/integrations/sources/salesforce.md | 1 + 4 files changed, 36 insertions(+), 34 deletions(-) diff --git a/airbyte-integrations/connectors/source-salesforce/metadata.yaml b/airbyte-integrations/connectors/source-salesforce/metadata.yaml index ae18dcd63d4f..bc92039fd1ee 100644 --- a/airbyte-integrations/connectors/source-salesforce/metadata.yaml +++ b/airbyte-integrations/connectors/source-salesforce/metadata.yaml @@ -10,7 +10,7 @@ data: connectorSubtype: api connectorType: source definitionId: b117307c-14b6-41aa-9422-947e34922962 - dockerImageTag: 2.3.2 + dockerImageTag: 2.3.3 dockerRepository: airbyte/source-salesforce documentationUrl: https://docs.airbyte.com/integrations/sources/salesforce githubIssueLabel: source-salesforce diff --git a/airbyte-integrations/connectors/source-salesforce/poetry.lock b/airbyte-integrations/connectors/source-salesforce/poetry.lock index 37eb11ff1200..1b8149e9cedb 100644 --- a/airbyte-integrations/connectors/source-salesforce/poetry.lock +++ b/airbyte-integrations/connectors/source-salesforce/poetry.lock @@ -537,40 +537,40 @@ files = [ [[package]] name = "pandas" -version = "2.2.0" +version = "2.2.1" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" files = [ - {file = "pandas-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8108ee1712bb4fa2c16981fba7e68b3f6ea330277f5ca34fa8d557e986a11670"}, - {file = "pandas-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:736da9ad4033aeab51d067fc3bd69a0ba36f5a60f66a527b3d72e2030e63280a"}, - {file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38e0b4fc3ddceb56ec8a287313bc22abe17ab0eb184069f08fc6a9352a769b18"}, - {file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20404d2adefe92aed3b38da41d0847a143a09be982a31b85bc7dd565bdba0f4e"}, - {file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ea3ee3f125032bfcade3a4cf85131ed064b4f8dd23e5ce6fa16473e48ebcaf5"}, - {file = 
"pandas-2.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9670b3ac00a387620489dfc1bca66db47a787f4e55911f1293063a78b108df1"}, - {file = "pandas-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:5a946f210383c7e6d16312d30b238fd508d80d927014f3b33fb5b15c2f895430"}, - {file = "pandas-2.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a1b438fa26b208005c997e78672f1aa8138f67002e833312e6230f3e57fa87d5"}, - {file = "pandas-2.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8ce2fbc8d9bf303ce54a476116165220a1fedf15985b09656b4b4275300e920b"}, - {file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2707514a7bec41a4ab81f2ccce8b382961a29fbe9492eab1305bb075b2b1ff4f"}, - {file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85793cbdc2d5bc32620dc8ffa715423f0c680dacacf55056ba13454a5be5de88"}, - {file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cfd6c2491dc821b10c716ad6776e7ab311f7df5d16038d0b7458bc0b67dc10f3"}, - {file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a146b9dcacc3123aa2b399df1a284de5f46287a4ab4fbfc237eac98a92ebcb71"}, - {file = "pandas-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:fbc1b53c0e1fdf16388c33c3cca160f798d38aea2978004dd3f4d3dec56454c9"}, - {file = "pandas-2.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a41d06f308a024981dcaa6c41f2f2be46a6b186b902c94c2674e8cb5c42985bc"}, - {file = "pandas-2.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:159205c99d7a5ce89ecfc37cb08ed179de7783737cea403b295b5eda8e9c56d1"}, - {file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb1e1f3861ea9132b32f2133788f3b14911b68102d562715d71bd0013bc45440"}, - {file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:761cb99b42a69005dec2b08854fb1d4888fdf7b05db23a8c5a099e4b886a2106"}, - {file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a20628faaf444da122b2a64b1e5360cde100ee6283ae8effa0d8745153809a2e"}, - {file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f5be5d03ea2073627e7111f61b9f1f0d9625dc3c4d8dda72cc827b0c58a1d042"}, - {file = "pandas-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:a626795722d893ed6aacb64d2401d017ddc8a2341b49e0384ab9bf7112bdec30"}, - {file = "pandas-2.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9f66419d4a41132eb7e9a73dcec9486cf5019f52d90dd35547af11bc58f8637d"}, - {file = "pandas-2.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:57abcaeda83fb80d447f28ab0cc7b32b13978f6f733875ebd1ed14f8fbc0f4ab"}, - {file = "pandas-2.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e60f1f7dba3c2d5ca159e18c46a34e7ca7247a73b5dd1a22b6d59707ed6b899a"}, - {file = "pandas-2.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb61dc8567b798b969bcc1fc964788f5a68214d333cade8319c7ab33e2b5d88a"}, - {file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:52826b5f4ed658fa2b729264d63f6732b8b29949c7fd234510d57c61dbeadfcd"}, - {file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bde2bc699dbd80d7bc7f9cab1e23a95c4375de615860ca089f34e7c64f4a8de7"}, - {file = "pandas-2.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:3de918a754bbf2da2381e8a3dcc45eede8cd7775b047b923f9006d5f876802ae"}, - {file = "pandas-2.2.0.tar.gz", hash = "sha256:30b83f7c3eb217fb4d1b494a57a2fda5444f17834f5df2de6b2ffff68dc3c8e2"}, + {file = 
"pandas-2.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8df8612be9cd1c7797c93e1c5df861b2ddda0b48b08f2c3eaa0702cf88fb5f88"}, + {file = "pandas-2.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0f573ab277252ed9aaf38240f3b54cfc90fff8e5cab70411ee1d03f5d51f3944"}, + {file = "pandas-2.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f02a3a6c83df4026e55b63c1f06476c9aa3ed6af3d89b4f04ea656ccdaaaa359"}, + {file = "pandas-2.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c38ce92cb22a4bea4e3929429aa1067a454dcc9c335799af93ba9be21b6beb51"}, + {file = "pandas-2.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c2ce852e1cf2509a69e98358e8458775f89599566ac3775e70419b98615f4b06"}, + {file = "pandas-2.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:53680dc9b2519cbf609c62db3ed7c0b499077c7fefda564e330286e619ff0dd9"}, + {file = "pandas-2.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:94e714a1cca63e4f5939cdce5f29ba8d415d85166be3441165edd427dc9f6bc0"}, + {file = "pandas-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f821213d48f4ab353d20ebc24e4faf94ba40d76680642fb7ce2ea31a3ad94f9b"}, + {file = "pandas-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c70e00c2d894cb230e5c15e4b1e1e6b2b478e09cf27cc593a11ef955b9ecc81a"}, + {file = "pandas-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e97fbb5387c69209f134893abc788a6486dbf2f9e511070ca05eed4b930b1b02"}, + {file = "pandas-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101d0eb9c5361aa0146f500773395a03839a5e6ecde4d4b6ced88b7e5a1a6403"}, + {file = "pandas-2.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7d2ed41c319c9fb4fd454fe25372028dfa417aacb9790f68171b2e3f06eae8cd"}, + {file = "pandas-2.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:af5d3c00557d657c8773ef9ee702c61dd13b9d7426794c9dfeb1dc4a0bf0ebc7"}, + {file = "pandas-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:06cf591dbaefb6da9de8472535b185cba556d0ce2e6ed28e21d919704fef1a9e"}, + {file = "pandas-2.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:88ecb5c01bb9ca927ebc4098136038519aa5d66b44671861ffab754cae75102c"}, + {file = "pandas-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:04f6ec3baec203c13e3f8b139fb0f9f86cd8c0b94603ae3ae8ce9a422e9f5bee"}, + {file = "pandas-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a935a90a76c44fe170d01e90a3594beef9e9a6220021acfb26053d01426f7dc2"}, + {file = "pandas-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c391f594aae2fd9f679d419e9a4d5ba4bce5bb13f6a989195656e7dc4b95c8f0"}, + {file = "pandas-2.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9d1265545f579edf3f8f0cb6f89f234f5e44ba725a34d86535b1a1d38decbccc"}, + {file = "pandas-2.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11940e9e3056576ac3244baef2fedade891977bcc1cb7e5cc8f8cc7d603edc89"}, + {file = "pandas-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:4acf681325ee1c7f950d058b05a820441075b0dd9a2adf5c4835b9bc056bf4fb"}, + {file = "pandas-2.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9bd8a40f47080825af4317d0340c656744f2bfdb6819f818e6ba3cd24c0e1397"}, + {file = "pandas-2.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:df0c37ebd19e11d089ceba66eba59a168242fc6b7155cba4ffffa6eccdfb8f16"}, + {file = "pandas-2.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:739cc70eaf17d57608639e74d63387b0d8594ce02f69e7a0b046f117974b3019"}, + {file = "pandas-2.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9d3558d263073ed95e46f4650becff0c5e1ffe0fc3a015de3c79283dfbdb3df"}, + {file = "pandas-2.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4aa1d8707812a658debf03824016bf5ea0d516afdea29b7dc14cf687bc4d4ec6"}, + {file = "pandas-2.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:76f27a809cda87e07f192f001d11adc2b930e93a2b0c4a236fde5429527423be"}, + {file = "pandas-2.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:1ba21b1d5c0e43416218db63037dbe1a01fc101dc6e6024bcad08123e48004ab"}, + {file = "pandas-2.2.1.tar.gz", hash = "sha256:0ab90f87093c13f3e8fa45b48ba9f39181046e8f3317d3aadb2fffbb1b978572"}, ] [package.dependencies] @@ -601,6 +601,7 @@ parquet = ["pyarrow (>=10.0.1)"] performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] plot = ["matplotlib (>=3.6.3)"] postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] spss = ["pyreadstat (>=1.2.0)"] sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] @@ -1197,4 +1198,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "2e04a4463d839afab2cd36da228d97580143a8d4ef83c3b80fcb4cb78e836d2b" +content-hash = "bcf80ea642eccd247e3804a1445935df891810324d047a52571f7c1b28e3450e" diff --git a/airbyte-integrations/connectors/source-salesforce/pyproject.toml b/airbyte-integrations/connectors/source-salesforce/pyproject.toml index f81862b62b1c..aabb267b93a5 100644 --- a/airbyte-integrations/connectors/source-salesforce/pyproject.toml +++ b/airbyte-integrations/connectors/source-salesforce/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "2.3.2" +version = "2.3.3" name = "source-salesforce" description = "Source implementation for Salesforce." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_salesforce" [tool.poetry.dependencies] python = "^3.9,<3.12" -pandas = "==2.2.0" +pandas = "2.2.1" airbyte-cdk = "^0.63.2" [tool.poetry.scripts] diff --git a/docs/integrations/sources/salesforce.md b/docs/integrations/sources/salesforce.md index 54edec588742..3ce8568ca1ff 100644 --- a/docs/integrations/sources/salesforce.md +++ b/docs/integrations/sources/salesforce.md @@ -193,6 +193,7 @@ Now that you have set up the Salesforce source connector, check out the followin | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------| +| 2.3.3 | 2024-03-04 | [35791](https://github.com/airbytehq/airbyte/pull/35791) | Fix memory leak (OOM) | | 2.3.2 | 2024-02-19 | [35421](https://github.com/airbytehq/airbyte/pull/35421) | Add Stream Slice Step option to specification | | 2.3.1 | 2024-02-12 | [35147](https://github.com/airbytehq/airbyte/pull/35147) | Manage dependencies with Poetry. 
| | 2.3.0 | 2023-12-15 | [33522](https://github.com/airbytehq/airbyte/pull/33522) | Sync streams concurrently in all sync modes | From e82ff2e221246d4bd713baf853df0af467a04800 Mon Sep 17 00:00:00 2001 From: Anton Karpets Date: Mon, 4 Mar 2024 18:52:20 +0200 Subject: [PATCH 065/172] =?UTF-8?q?=F0=9F=8F=A5Source=20Pinterest:=20fix?= =?UTF-8?q?=20CAT=20(#35788)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../source-pinterest/acceptance-test-config.yml | 9 +++++++++ .../integration_tests/expected_records.jsonl | 4 ---- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/airbyte-integrations/connectors/source-pinterest/acceptance-test-config.yml b/airbyte-integrations/connectors/source-pinterest/acceptance-test-config.yml index c450976d9deb..f65e8eeb9db4 100644 --- a/airbyte-integrations/connectors/source-pinterest/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-pinterest/acceptance-test-config.yml @@ -42,6 +42,15 @@ acceptance_tests: bypass_reason: Not possible to add data - name: keyword_report bypass_reason: Not possible to add data + # Remove when https://github.com/airbytehq/airbyte-internal-issues/issues/6438 completed + - name: ad_group_analytics + bypass_reason: Data needs to be added to account + - name: campaign_analytics + bypass_reason: Data needs to be added to account + - name: ad_analytics + bypass_reason: Data needs to be added to account + - name: ad_account_analytics + bypass_reason: Data needs to be added to account timeout_seconds: 1200 expect_records: path: "integration_tests/expected_records.jsonl" diff --git a/airbyte-integrations/connectors/source-pinterest/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-pinterest/integration_tests/expected_records.jsonl index 010cf0912179..f1f73cd033f2 100644 --- a/airbyte-integrations/connectors/source-pinterest/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-pinterest/integration_tests/expected_records.jsonl @@ -1,15 +1,11 @@ {"stream": "ad_accounts", "data": {"id": "549761668032", "name": "Airbyte", "owner": {"username": "integrationtest0375", "id": "666744057242074926"}, "country": "US", "currency": "USD", "permissions": ["OWNER"], "created_time": 1603772920, "updated_time": 1623173784}, "emitted_at": 1688461289470} -{"stream": "ad_account_analytics", "data": {"TOTAL_IMPRESSION_FREQUENCY": 1.0, "TOTAL_IMPRESSION_USER": 1.0, "ADVERTISER_ID": "549761668032", "DATE": "2023-12-04", "IMPRESSION_2": 1.0, "AD_ACCOUNT_ID": "549761668032"}, "emitted_at": 1708094337349} {"stream": "ads", "data": {"id": "687218400118", "ad_group_id": "2680068678965", "ad_account_id": "549761668032", "android_deep_link": null, "campaign_id": "626744128956", "carousel_android_deep_links": null, "carousel_destination_urls": null, "carousel_ios_deep_links": null, "click_tracking_url": null, "collection_items_destination_url_template": null, "created_time": 1623245885, "creative_type": "REGULAR", "destination_url": "https://airbyte.io/", "ios_deep_link": null, "is_pin_deleted": false, "is_removable": false, "name": "2021-06-09 | Traffic | Keywords | Data Integration", "pin_id": "666743919837294988", "rejected_reasons": [], "rejection_labels": [], "review_status": "APPROVED", "status": "PAUSED", "summary_status": "PAUSED", "tracking_urls": null, "type": "ad", "updated_time": 1699394846, "view_tracking_url": null, "lead_form_id": null, "grid_click_type": "DIRECT_TO_DESTINATION", "customizable_cta_type": null}, 
"emitted_at": 1708094430503} -{"stream": "ad_analytics", "data": {"PIN_ID": 6.66743919837295e+17, "AD_GROUP_ID": "2680068678993", "AD_GROUP_ENTITY_STATUS": "1", "CAMPAIGN_ENTITY_STATUS": 1.0, "TOTAL_IMPRESSION_FREQUENCY": 1.0, "CAMPAIGN_LIFETIME_SPEND_CAP": 0.0, "TOTAL_IMPRESSION_USER": 1.0, "CAMPAIGN_DAILY_SPEND_CAP": 25000000.0, "AD_ID": "687218400210", "ADVERTISER_ID": "549761668032", "PIN_PROMOTION_ID": 687218400210.0, "DATE": "2023-12-04", "IMPRESSION_2": 1.0, "AD_ACCOUNT_ID": "549761668032", "CAMPAIGN_ID": 626744128982.0, "CAMPAIGN_NAME": "2021-06-08 09:08 UTC | Brand awareness"}, "emitted_at": 1708094509254} {"stream": "ad_groups", "data": {"id": "2680068678965", "created_time": 1623245885.0, "updated_time": 1699394439.0, "start_time": null, "end_time": null, "bid_in_micro_currency": null, "budget_in_micro_currency": null, "campaign_id": "626744128956", "ad_account_id": "549761668032", "auto_targeting_enabled": true, "type": "adgroup", "budget_type": "CBO_ADGROUP", "billable_event": "CLICKTHROUGH", "status": "ACTIVE", "lifetime_frequency_cap": -1.0, "targeting_spec": {"GENDER": ["female", "male", "unknown"], "APPTYPE": ["web", "web_mobile", "iphone", "ipad", "android_mobile", "android_tablet"], "LOCALE": ["cs", "da", "de", "el", "en", "es", "fi", "fr", "hu", "id", "it", "ja", "ko", "nb", "nl", "pl", "pt", "ro", "ru", "sk", "sv", "tr", "uk", "zh"], "TARGETING_STRATEGY": ["CHOOSE_YOUR_OWN"], "LOCATION": ["US"]}, "name": "2021-06-09 | Traffic | Keywords | Data Integration", "placement_group": "ALL", "pacing_delivery_type": "STANDARD", "tracking_urls": null, "conversion_learning_mode_type": null, "summary_status": "COMPLETED", "feed_profile_id": "0", "placement_traffic_type": null, "optimization_goal_metadata": {}, "bid_strategy_type": "AUTOMATIC_BID", "targeting_template_ids": null}, "emitted_at": 1708094572628} -{"stream": "ad_group_analytics", "data": {"AD_GROUP_ID": "2680068678993", "AD_GROUP_ENTITY_STATUS": "1", "CAMPAIGN_ENTITY_STATUS": 1.0, "TOTAL_IMPRESSION_FREQUENCY": 1.0, "CAMPAIGN_LIFETIME_SPEND_CAP": 0.0, "TOTAL_IMPRESSION_USER": 1.0, "CAMPAIGN_DAILY_SPEND_CAP": 25000000.0, "ADVERTISER_ID": "549761668032", "DATE": "2023-12-04", "IMPRESSION_2": 1.0, "AD_ACCOUNT_ID": "549761668032", "CAMPAIGN_ID": 626744128982.0, "CAMPAIGN_NAME": "2021-06-08 09:08 UTC | Brand awareness"}, "emitted_at": 1708094657011} {"stream": "boards", "data": {"created_at": "2021-06-08T09:37:18", "privacy": "PUBLIC", "board_pins_modified_at": "2021-10-25T11:17:56.715000", "media": {"pin_thumbnail_urls": [], "image_cover_url": "https://i.pinimg.com/400x300/c6/b6/0d/c6b60d6b5f2ec04db7748d35fb1a8004.jpg"}, "collaborator_count": 0, "description": "", "follower_count": 3, "id": "666743988523388559", "pin_count": 1, "name": "business", "owner": {"username": "integrationtest0375"}}, "emitted_at": 1708094711013} {"stream": "board_pins", "data": {"description": "Data Integration", "board_owner": {"username": "integrationtest0375"}, "product_tags": [], "has_been_promoted": true,"link":"http://airbyte.io/", "created_at": "2021-06-08T09:37:30", "board_id": "666743988523388559", "note": "", "creative_type": "REGULAR", "parent_pin_id": null, "title": "Airbyte", "alt_text": null, "pin_metrics": null, "dominant_color": "#cacafe", "id": "666743919837294988", "is_owner": true, "board_section_id": "5195034916661798218", "is_standard": true}, "emitted_at": 1698398201666} {"stream": "board_sections", "data": {"name": "Airbyte_board_section_new", "id": "5195035116725909603"}, "emitted_at": 1699893323493} {"stream": 
"board_section_pins","data":{"id":"666743919837294988","dominant_color":"#cacafe","pin_metrics":null,"title":"Airbyte","creative_type":"REGULAR","link":"http://airbyte.io/","board_id":"666743988523388559","created_at":"2021-06-08T09:37:30","is_owner":true,"description":"Data Integration","note":"","alt_text":null,"board_section_id":"5195034916661798218","parent_pin_id":null,"product_tags":[],"board_owner":{"username":"integrationtest0375"},"is_standard":true,"has_been_promoted":true},"emitted_at":1699893364884} {"stream": "campaigns", "data": {"id": "626744128956", "ad_account_id": "549761668032", "name": "2021-06-09 | Traffic | Keywords | Data Integration", "status": "ACTIVE", "objective_type": "CONSIDERATION", "lifetime_spend_cap": 0, "daily_spend_cap": 3000000, "order_line_id": null, "tracking_urls": null, "created_time": 1623245885, "updated_time": 1691447502, "type": "campaign", "is_flexible_daily_budgets": false, "summary_status": "COMPLETED", "is_campaign_budget_optimization": true, "start_time": 1623196800, "end_time": 1624060800}, "emitted_at": 1699393571700} -{"stream": "campaign_analytics", "data": {"TOTAL_IMPRESSION_FREQUENCY": 1.0, "CAMPAIGN_LIFETIME_SPEND_CAP": 0.0, "TOTAL_IMPRESSION_USER": 1.0, "CAMPAIGN_ENTITY_STATUS": 1.0, "CAMPAIGN_DAILY_SPEND_CAP": 25000000.0, "ADVERTISER_ID": 549761668032.0, "DATE": "2023-12-04", "IMPRESSION_2": 1.0, "AD_ACCOUNT_ID": "549761668032", "CAMPAIGN_ID": 626744128982.0, "CAMPAIGN_NAME": "2021-06-08 09:08 UTC | Brand awareness"}, "emitted_at": 1708094774057} {"stream": "campaign_analytics_report", "data": {"ADVERTISER_ID": 549761668032.0, "AD_ACCOUNT_ID": "549761668032", "CAMPAIGN_DAILY_SPEND_CAP": 25000000.0, "CAMPAIGN_ENTITY_STATUS": "ACTIVE", "CAMPAIGN_ID": 626744128982.0, "CAMPAIGN_LIFETIME_SPEND_CAP": 0.0, "CAMPAIGN_NAME": "2021-06-08 09:08 UTC | Brand awareness", "IMPRESSION_2": 3.0, "TOTAL_IMPRESSION_FREQUENCY": 1.5, "TOTAL_IMPRESSION_USER": 2.0, "DATE": "2023-07-14"}, "emitted_at": 1690299367301} {"stream": "campaign_targeting_report", "data": {"ADVERTISER_ID": 549761668032.0, "AD_ACCOUNT_ID": "549761668032", "CAMPAIGN_DAILY_SPEND_CAP": 25000000.0, "CAMPAIGN_ENTITY_STATUS": "ACTIVE", "CAMPAIGN_ID": 626744128982.0, "CAMPAIGN_LIFETIME_SPEND_CAP": 0.0, "CAMPAIGN_NAME": "2021-06-08 09:08 UTC | Brand awareness", "IMPRESSION_2": 1.0, "TARGETING_VALUE": "TWOCOLUMN_FEED", "TARGETING_TYPE": "FEED_TYPE", "DATE": "2023-10-29"}, "emitted_at": 1699894287823} {"stream": "user_account_analytics", "data": {"date": "2024-02-18", "data_status": "READY", "metrics": {"SAVE": 6.0, "VIDEO_MRC_VIEW": 0, "VIDEO_10S_VIEW": 0, "PIN_CLICK": 15, "OUTBOUND_CLICK_RATE": 0.0017123287671232876, "QUARTILE_95_PERCENT_VIEW": 0, "ENGAGEMENT_RATE": 0.03767123287671233, "VIDEO_START": 0, "IMPRESSION": 584.0, "OUTBOUND_CLICK": 1, "ENGAGEMENT": 22.0, "PIN_CLICK_RATE": 0.025684931506849314, "VIDEO_V50_WATCH_TIME": 0, "SAVE_RATE": 0.010273972602739725, "VIDEO_AVG_WATCH_TIME": 0.0}}, "emitted_at": 1708476886035} From 62abce9c10732e9fd37db6b127bcf09fa1a66436 Mon Sep 17 00:00:00 2001 From: Christo Grabowski <108154848+ChristoGrab@users.noreply.github.com> Date: Mon, 4 Mar 2024 12:15:48 -0500 Subject: [PATCH 066/172] Source Recurly: Unarchive and release as 1.0.0 (#35763) --- .../connectors/source-recurly/.dockerignore | 6 + .../connectors/source-recurly/README.md | 104 ++ .../source-recurly/acceptance-test-config.yml | 40 + .../integration_tests/acceptance.py | 14 + .../integration_tests/configured_catalog.json | 205 ++++ .../integration_tests/future_state.json | 114 ++ 
.../integration_tests/invalid_config.json | 3 + .../integration_tests/sample_config.json | 3 + .../connectors/source-recurly/main.py | 8 + .../connectors/source-recurly/metadata.yaml | 13 +- .../connectors/source-recurly/poetry.lock | 1045 +++++++++++++++++ .../connectors/source-recurly/pyproject.toml | 29 + .../sample_files/configured_catalog.json | 16 + .../sample_files/sample_catalog.json | 102 ++ .../sample_files/sample_config.json | 3 + .../source-recurly/source_recurly/__init__.py | 3 + .../source-recurly/source_recurly/run.py | 14 + .../schemas/account_coupon_redemptions.json | 46 + .../source_recurly/schemas/account_notes.json | 30 + .../source_recurly/schemas/accounts.json | 182 +++ .../source_recurly/schemas/add_ons.json | 151 +++ .../source_recurly/schemas/billing_infos.json | 3 + .../source_recurly/schemas/coupons.json | 3 + .../schemas/credit_payments.json | 123 ++ .../source_recurly/schemas/export_dates.json | 13 + .../source_recurly/schemas/invoices.json | 377 ++++++ .../source_recurly/schemas/line_items.json | 3 + .../schemas/measured_units.json | 41 + .../source_recurly/schemas/plans.json | 191 +++ .../schemas/shared/account_details.json | 35 + .../schemas/shared/billing_infos.json | 213 ++++ .../schemas/shared/coupon_redemptions.json | 85 ++ .../schemas/shared/coupons.json | 194 +++ .../schemas/shared/external_accounts.json | 24 + .../schemas/shared/line_items.json | 293 +++++ .../schemas/shared/shipping_addresses.json | 91 ++ .../schemas/shared/tax_info.json | 44 + .../schemas/shared/unique_coupons.json | 66 ++ .../source_recurly/schemas/shared/users.json | 33 + .../schemas/shipping_addresses.json | 3 + .../schemas/shipping_methods.json | 54 + .../source_recurly/schemas/subscriptions.json | 368 ++++++ .../source_recurly/schemas/transactions.json | 345 ++++++ .../schemas/unique_coupons.json | 3 + .../source-recurly/source_recurly/source.py | 80 ++ .../source-recurly/source_recurly/spec.json | 33 + .../source-recurly/source_recurly/streams.py | 337 ++++++ .../source-recurly/unit_tests/__init__.py | 0 .../source-recurly/unit_tests/test_streams.py | 203 ++++ .../sources/recurly-migrations.md | 57 + docs/integrations/sources/recurly.md | 1 + 51 files changed, 5444 insertions(+), 3 deletions(-) create mode 100644 airbyte-integrations/connectors/source-recurly/.dockerignore create mode 100644 airbyte-integrations/connectors/source-recurly/README.md create mode 100644 airbyte-integrations/connectors/source-recurly/acceptance-test-config.yml create mode 100644 airbyte-integrations/connectors/source-recurly/integration_tests/acceptance.py create mode 100644 airbyte-integrations/connectors/source-recurly/integration_tests/configured_catalog.json create mode 100644 airbyte-integrations/connectors/source-recurly/integration_tests/future_state.json create mode 100644 airbyte-integrations/connectors/source-recurly/integration_tests/invalid_config.json create mode 100644 airbyte-integrations/connectors/source-recurly/integration_tests/sample_config.json create mode 100644 airbyte-integrations/connectors/source-recurly/main.py create mode 100644 airbyte-integrations/connectors/source-recurly/poetry.lock create mode 100644 airbyte-integrations/connectors/source-recurly/pyproject.toml create mode 100644 airbyte-integrations/connectors/source-recurly/sample_files/configured_catalog.json create mode 100644 airbyte-integrations/connectors/source-recurly/sample_files/sample_catalog.json create mode 100644 airbyte-integrations/connectors/source-recurly/sample_files/sample_config.json create 
mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/__init__.py create mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/run.py create mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/account_coupon_redemptions.json create mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/account_notes.json create mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/accounts.json create mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/add_ons.json create mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/billing_infos.json create mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/coupons.json create mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/credit_payments.json create mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/export_dates.json create mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/invoices.json create mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/line_items.json create mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/measured_units.json create mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/plans.json create mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/account_details.json create mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/billing_infos.json create mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/coupon_redemptions.json create mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/coupons.json create mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/external_accounts.json create mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/line_items.json create mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/shipping_addresses.json create mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/tax_info.json create mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/unique_coupons.json create mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/users.json create mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shipping_addresses.json create mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shipping_methods.json create mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/subscriptions.json create mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/transactions.json create mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/schemas/unique_coupons.json create mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/source.py create mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/spec.json create mode 100644 airbyte-integrations/connectors/source-recurly/source_recurly/streams.py create mode 100644 airbyte-integrations/connectors/source-recurly/unit_tests/__init__.py create mode 100644 
airbyte-integrations/connectors/source-recurly/unit_tests/test_streams.py create mode 100644 docs/integrations/sources/recurly-migrations.md diff --git a/airbyte-integrations/connectors/source-recurly/.dockerignore b/airbyte-integrations/connectors/source-recurly/.dockerignore new file mode 100644 index 000000000000..7ac167d6e945 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_recurly +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-recurly/README.md b/airbyte-integrations/connectors/source-recurly/README.md new file mode 100644 index 000000000000..936201b1a143 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/README.md @@ -0,0 +1,104 @@ +# Recurly source connector + +This is the repository for the Recurly source connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/recurly). + +## Local development + +### Prerequisites + +* Python (~=3.9) +* Poetry (~=1.7) - installation instructions [here](https://python-poetry.org/docs/#installation) + +### Installing the connector + +From this connector directory, run: + +```bash +poetry install --with dev +``` + +### Creating credentials + +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/recurly) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_recurly/spec.json` file. +Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source recurly test creds` +and place them into `secrets/config.json`. + +### Locally running the connector + +```bash +poetry run source-recurly spec +poetry run source-recurly check --config secrets/config.json +poetry run source-recurly discover --config secrets/config.json +poetry run source-recurly read --config secrets/config.json --catalog sample_files/configured_catalog.json +``` + +### Running unit tests + +To run unit tests locally, from the connector directory run: + +```bash +poetry run pytest unit_tests +``` + +### Building the docker image + +1. Install [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) +2. Run the following command to build the docker image: + +```bash +airbyte-ci connectors --name=source-recurly build +``` + +An image will be available on your host with the tag `airbyte/source-recurly:dev`. 
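+ +You can confirm the build succeeded by listing the image, for example: + +```bash +docker images airbyte/source-recurly:dev +```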
+ +### Running the docker container + +Then run any of the connector commands as follows: + +```bash +docker run --rm airbyte/source-recurly:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-recurly:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-recurly:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-recurly:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` + +### Running our CI test suite + +You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): + +```bash +airbyte-ci connectors --name=source-recurly test +``` + +### Customizing acceptance tests + +Customize the `acceptance-test-config.yml` file to configure acceptance tests. See our [Connector Acceptance Tests reference](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. +If your connector requires you to create or destroy resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`. + +### Dependency Management + +All of your dependencies should be managed via Poetry. To add a new dependency, run: + +```bash +poetry add <package-name> +``` + +Please commit the changes to the `pyproject.toml` and `poetry.lock` files. + +## Publishing a new version of the connector + +You've checked out the repo, implemented a million-dollar feature, and you're ready to share your changes with the world. Now what? + +1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-recurly test` +2. Bump the connector version listed as `dockerImageTag` in `metadata.yaml`. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). +3. Make sure the `metadata.yaml` content is up to date. +4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/sources/recurly.md`). +5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). +6. Pat yourself on the back for being an awesome contributor. +7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. +8. Once your PR is merged, the new version of the connector will be automatically published to Docker Hub and our connector registry.
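+ +For step 2, the version is bumped in lockstep in `metadata.yaml` and `pyproject.toml`; for example, releasing a hypothetical version `1.0.1` would look like: + +```yaml +# metadata.yaml (excerpt) +dockerImageTag: 1.0.1 +``` + +```toml +# pyproject.toml (excerpt) +version = "1.0.1" +```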
diff --git a/airbyte-integrations/connectors/source-recurly/acceptance-test-config.yml b/airbyte-integrations/connectors/source-recurly/acceptance-test-config.yml new file mode 100644 index 000000000000..c1e9dfe6ee73 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/acceptance-test-config.yml @@ -0,0 +1,40 @@ +# See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-recurly:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_recurly/spec.json" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: + - name: "add_ons" + bypass_reason: "Cannot seed this stream with a free sandbox account" + - name: "billing_infos" + bypass_reason: "Cannot seed this stream with a free sandbox account" + - name: "credit_payments" + bypass_reason: "Cannot seed this stream with a free sandbox account" + - name: "shipping_methods" + bypass_reason: "Cannot seed this stream with a free sandbox account" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + incremental: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + future_state: + future_state_path: "integration_tests/future_state.json" + skip_comprehensive_incremental_tests: true diff --git a/airbyte-integrations/connectors/source-recurly/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-recurly/integration_tests/acceptance.py new file mode 100644 index 000000000000..82823254d266 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/integration_tests/acceptance.py @@ -0,0 +1,14 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+# + + +import pytest + +pytest_plugins = ("connector_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + yield diff --git a/airbyte-integrations/connectors/source-recurly/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-recurly/integration_tests/configured_catalog.json new file mode 100644 index 000000000000..75bcfeaf58ad --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/integration_tests/configured_catalog.json @@ -0,0 +1,205 @@ +{ + "streams": [ + { + "stream": { + "name": "accounts", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "account_coupon_redemptions", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "account_notes", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["created_at"], + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "add_ons", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "billing_infos", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "coupons", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "credit_payments", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "export_dates", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "invoices", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "line_items", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + 
"default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "measured_units", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "plans", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "shipping_addresses", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "shipping_methods", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "subscriptions", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "transactions", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "unique_coupons", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-recurly/integration_tests/future_state.json b/airbyte-integrations/connectors/source-recurly/integration_tests/future_state.json new file mode 100644 index 000000000000..1c9442f7c6d3 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/integration_tests/future_state.json @@ -0,0 +1,114 @@ +[ + { + "type": "STREAM", + "stream": { + "stream_state": { "updated_at": "2036-07-19T22:21:37Z" }, + "stream_descriptor": { "name": "accounts" } + } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { "updated_at": "2036-07-19T22:21:37Z" }, + "stream_descriptor": { "name": "account_coupon_redemptions" } + } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { "created_at": "2036-07-19T22:21:37Z" }, + "stream_descriptor": { "name": "account_notes" } + } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { "updated_at": "2036-07-19T22:21:37Z" }, + "stream_descriptor": { "name": "add_ons" } + } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { "updated_at": "2036-07-19T22:21:37Z" }, + "stream_descriptor": { "name": "billing_infos" } + } + 
}, + { + "type": "STREAM", + "stream": { + "stream_state": { "updated_at": "2036-07-19T22:21:37Z" }, + "stream_descriptor": { "name": "coupons" } + } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { "updated_at": "2036-07-19T22:21:37Z" }, + "stream_descriptor": { "name": "credit_payments" } + } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { "updated_at": "2036-07-19T22:21:37Z" }, + "stream_descriptor": { "name": "invoices" } + } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { "updated_at": "2036-07-19T22:21:37Z" }, + "stream_descriptor": { "name": "line_items" } + } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { "updated_at": "2036-07-19T22:21:37Z" }, + "stream_descriptor": { "name": "measured_units" } + } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { "updated_at": "2036-07-19T22:21:37Z" }, + "stream_descriptor": { "name": "plans" } + } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { "updated_at": "2036-07-19T22:21:37Z" }, + "stream_descriptor": { "name": "shipping_addresses" } + } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { "updated_at": "2036-07-19T22:21:37Z" }, + "stream_descriptor": { "name": "shipping_methods" } + } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { "updated_at": "2036-07-19T22:21:37Z" }, + "stream_descriptor": { "name": "subscriptions" } + } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { "updated_at": "2036-07-19T22:21:37Z" }, + "stream_descriptor": { "name": "transactions" } + } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { "updated_at": "2036-07-19T22:21:37Z" }, + "stream_descriptor": { "name": "unique_coupons" } + } + } +] diff --git a/airbyte-integrations/connectors/source-recurly/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-recurly/integration_tests/invalid_config.json new file mode 100644 index 000000000000..6016942564e8 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/integration_tests/invalid_config.json @@ -0,0 +1,3 @@ +{ + "api_key": "wrong-api-key" +} diff --git a/airbyte-integrations/connectors/source-recurly/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-recurly/integration_tests/sample_config.json new file mode 100644 index 000000000000..f0f3959f86a3 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/integration_tests/sample_config.json @@ -0,0 +1,3 @@ +{ + "api_key": "api-key" +} diff --git a/airbyte-integrations/connectors/source-recurly/main.py b/airbyte-integrations/connectors/source-recurly/main.py new file mode 100644 index 000000000000..ba5c26176fde --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/main.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + +from source_recurly.run import run + +if __name__ == "__main__": + run() diff --git a/airbyte-integrations/connectors/source-recurly/metadata.yaml b/airbyte-integrations/connectors/source-recurly/metadata.yaml index 9cbaf180675e..f82b443f1c35 100644 --- a/airbyte-integrations/connectors/source-recurly/metadata.yaml +++ b/airbyte-integrations/connectors/source-recurly/metadata.yaml @@ -7,7 +7,7 @@ data: connectorSubtype: api connectorType: source definitionId: cd42861b-01fc-4658-a8ab-5d11d0510f01 - dockerImageTag: 0.5.0 + dockerImageTag: 1.0.0 dockerRepository: airbyte/source-recurly documentationUrl: https://docs.airbyte.com/integrations/sources/recurly githubIssueLabel: source-recurly @@ -16,9 +16,16 @@ data: name: Recurly registries: cloud: - enabled: false + enabled: true oss: - enabled: false + enabled: true + releases: + breakingChanges: + 1.0.0: + message: + Version 1.0.0 introduces a number of schema updates to the Recurly connector. + To ensure a smooth upgrade, please refresh your schemas and reset your data before resuming syncs. + upgradeDeadline: "2024-03-05" releaseStage: alpha remoteRegistries: pypi: diff --git a/airbyte-integrations/connectors/source-recurly/poetry.lock b/airbyte-integrations/connectors/source-recurly/poetry.lock new file mode 100644 index 000000000000..e3d56310c931 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/poetry.lock @@ -0,0 +1,1045 @@ +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. + +[[package]] +name = "airbyte-cdk" +version = "0.67.0" +description = "A framework for writing Airbyte Connectors." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte-cdk-0.67.0.tar.gz", hash = "sha256:cbbff1b3895c89313764a721870bb293a396c74bad8dd6e5c36a0c3b0a2f6a10"}, + {file = "airbyte_cdk-0.67.0-py3-none-any.whl", hash = "sha256:2082c859536a2450c03b89dba1bbdab21bad314fbf5ef6d2e86fefc4ba935373"}, +] + +[package.dependencies] +airbyte-protocol-models = "0.5.1" +backoff = "*" +cachetools = "*" +Deprecated = ">=1.2,<2.0" +dpath = ">=2.0.1,<2.1.0" +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<1.0" +jsonschema = ">=3.2.0,<3.3.0" +pendulum = "<3.0.0" +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +python-dateutil = "*" +PyYAML = ">=6.0.1" +requests = "*" +requests-cache = "*" +wcmatch = "8.4" + +[package.extras] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.5.1" +description = "Declares the Airbyte Protocol." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "atomicwrites" +version = "1.4.1" +description = "Atomic file writes." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = 
"sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = 
"sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonref" +version = "0.3.0" +description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +optional = false +python-versions = ">=3.3,<4.0" +files = [ + {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, + {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = 
"MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = 
"MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = 
"pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + +[[package]] +name = "pydantic" +version = "1.10.14" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", 
hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, + {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, + {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, + {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, + {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, + {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, + {file = 
"pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, + {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, + {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, + {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + 
{file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytest" +version = "6.2.5" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, +] + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +toml = "*" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +name = "pytest-mock" +version = "3.12.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, + {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, +] + +[package.dependencies] +pytest = ">=5.0" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = 
"PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file 
= "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "recurly" +version = "4.10.0" +description = "Recurly v4" +optional = false +python-versions = "*" +files = [ + {file = "recurly-4.10.0-py3-none-any.whl", hash = "sha256:b8e3b1ec58f7b1e1b91286f2db864f6ba4053837ad920d0c2868508020442aaf"}, + {file = "recurly-4.10.0.tar.gz", hash = "sha256:a8dddab76bb38f76a715644448f45499227bfd00529ef33f7945b3bcc5a8f3a2"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "requests-mock" +version = "1.11.0" +description = "Mock out responses from the requests package" +optional = false +python-versions = "*" +files = [ + {file = "requests-mock-1.11.0.tar.gz", 
hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, + {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, +] + +[package.dependencies] +requests = ">=2.3,<3" +six = "*" + +[package.extras] +fixture = ["fixtures"] +test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] + +[[package]] +name = "setuptools" +version = "69.1.1" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56"}, + {file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "typing-extensions" +version = "4.10.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, +] + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = 
"sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = 
"wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = 
"wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = 
"wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "f37d79c9bfb96e8cbd4ac45629e6df6fbdcc4afc2854ece9f0711a7a561dc5b8" diff --git a/airbyte-integrations/connectors/source-recurly/pyproject.toml b/airbyte-integrations/connectors/source-recurly/pyproject.toml new file mode 100644 index 000000000000..fcf2625d1e89 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/pyproject.toml @@ -0,0 +1,29 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +version = "1.0.0" +name = "source-recurly" +description = "Source implementation for Recurly." 
+authors = [ "Airbyte ",] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/sources/recurly" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" +[[tool.poetry.packages]] +include = "source_recurly" + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +airbyte-cdk = "==0.67.0" +recurly = "==4.10.0" + +[tool.poetry.scripts] +source-recurly = "source_recurly.run:run" + +[tool.poetry.group.dev.dependencies] +requests-mock = "^1.9.3" +pytest-mock = "^3.6.1" +pytest = "^6.1" diff --git a/airbyte-integrations/connectors/source-recurly/sample_files/configured_catalog.json b/airbyte-integrations/connectors/source-recurly/sample_files/configured_catalog.json new file mode 100644 index 000000000000..dc424c32f070 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/sample_files/configured_catalog.json @@ -0,0 +1,16 @@ +{ + "streams": [ + { + "stream": { + "name": "unique_coupons", + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": [["id"]], + "json_schema": {} + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-recurly/sample_files/sample_catalog.json b/airbyte-integrations/connectors/source-recurly/sample_files/sample_catalog.json new file mode 100644 index 000000000000..5ee733426146 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/sample_files/sample_catalog.json @@ -0,0 +1,102 @@ +{ + "streams": [ + { + "name": "accounts", + "supported_sync_modes": ["full_refresh"], + "source_defined_cursor": false, + "json_schema": { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "object": { + "type": "string" + }, + "code": { + "type": "string" + }, + "parent_account_id": { + "type": "string" + }, + "bill_to": { + "type": "string" + }, + "state": { + "type": "string" + }, + "username": { + "type": "string" + }, + "email": { + "type": "string" + }, + "cc_emails": { + "type": "string" + }, + "preferred_locale": { + "type": "string" + }, + "first_name": { + "type": "string" + }, + "last_name": { + "type": "string" + }, + "company": { + "type": "string" + }, + "vat_number": { + "type": "string" + }, + "tax_exempt": { + "type": "boolean" + }, + "exemption_certificate": { + "type": "string" + }, + "address": { + "type": "object" + }, + "billing_info": { + "type": "object" + }, + "shipping_addresses": { + "type": "array" + }, + "custom_fields": { + "type": "array" + }, + "has_live_subscription": { + "type": "boolean" + }, + "has_active_subscription": { + "type": "boolean" + }, + "has_future_subscription": { + "type": "boolean" + }, + "has_canceled_subscription": { + "type": "boolean" + }, + "has_paused_subscription": { + "type": "boolean" + }, + "has_past_due_invoice": { + "type": "boolean" + }, + "created_at": { + "type": "string" + }, + "updated_at": { + "type": "string" + }, + "deleted_at": { + "type": "string" + } + } + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-recurly/sample_files/sample_config.json b/airbyte-integrations/connectors/source-recurly/sample_files/sample_config.json new file mode 100644 index 000000000000..c411c99a0bc4 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/sample_files/sample_config.json @@ -0,0 +1,3 @@ +{ + "api_key": "" +} diff --git 
a/airbyte-integrations/connectors/source-recurly/source_recurly/__init__.py b/airbyte-integrations/connectors/source-recurly/source_recurly/__init__.py new file mode 100644 index 000000000000..48116a0807b8 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/__init__.py @@ -0,0 +1,3 @@ +from .source import SourceRecurly + +__all__ = ["SourceRecurly"] diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/run.py b/airbyte-integrations/connectors/source-recurly/source_recurly/run.py new file mode 100644 index 000000000000..746b6556605c --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/run.py @@ -0,0 +1,14 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_recurly import SourceRecurly + + +def run(): + source = SourceRecurly() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/account_coupon_redemptions.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/account_coupon_redemptions.json new file mode 100644 index 000000000000..d9e894d584d3 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/account_coupon_redemptions.json @@ -0,0 +1,46 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"], + "maxLength": 13 + }, + "object": { + "type": ["null", "string"] + }, + "account": { + "$ref": "account_details.json" + }, + "subscription_id": { + "type": ["null", "string"], + "maxLength": 13 + }, + "coupon": { + "$ref": "coupons.json" + }, + "state": { + "type": ["null", "string"], + "maxLength": 256 + }, + "currency": { + "type": ["null", "string"], + "maxLength": 3 + }, + "discounted": { + "type": ["null", "number"] + }, + "created_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "updated_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "removed_at": { + "type": ["null", "string"], + "format": "date-time" + } + } +} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/account_notes.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/account_notes.json new file mode 100644 index 000000000000..ee68e82c8973 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/account_notes.json @@ -0,0 +1,30 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": "string", + "maxLength": 13, + "readOnly": true + }, + "object": { + "type": ["null", "string"] + }, + "account_id": { + "type": "string", + "maxLength": 13 + }, + "user": { + "$ref": "users.json" + }, + "message": { + "type": ["null", "string"], + "maxLength": 2048 + }, + "created_at": { + "type": "string", + "format": "date-time", + "readOnly": true + } + } +} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/accounts.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/accounts.json new file mode 100644 index 000000000000..c9f1c5b84953 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/accounts.json @@ -0,0 +1,182 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"], + "maxLength": 13 + }, + "object": { + "type": 
["null", "string"] + }, + "hosted_login_token": { + "type": ["null", "string"] + }, + "code": { + "type": ["null", "string"], + "maxLength": 256 + }, + "parent_account_id": { + "type": ["null", "string"], + "maxLength": 13 + }, + "bill_to": { + "type": ["null", "string"], + "maxLength": 6 + }, + "state": { + "type": ["null", "string"], + "maxLength": 256 + }, + "username": { + "type": ["null", "string"], + "maxLength": 256 + }, + "email": { + "type": ["null", "string"], + "maxLength": 256 + }, + "cc_emails": { + "type": ["null", "string"], + "maxLength": 256 + }, + "preferred_locale": { + "type": ["null", "string"], + "maxLength": 12 + }, + "first_name": { + "type": ["null", "string"], + "maxLength": 256 + }, + "last_name": { + "type": ["null", "string"], + "maxLength": 256 + }, + "company": { + "type": ["null", "string"], + "maxLength": 50 + }, + "vat_number": { + "type": ["null", "string"], + "maxLength": 20 + }, + "tax_exempt": { + "type": ["null", "boolean"] + }, + "exemption_certificate": { + "type": ["null", "string"], + "maxLength": 30 + }, + "address": { + "type": "object", + "properties": { + "phone": { + "type": "string", + "title": "Phone number", + "maxLength": 256 + }, + "street1": { + "type": "string", + "title": "Street 1", + "maxLength": 256 + }, + "street2": { + "type": "string", + "title": "Street 2", + "maxLength": 256 + }, + "city": { + "type": "string", + "title": "City", + "maxLength": 256 + }, + "region": { + "type": "string", + "title": "State/Province", + "description": "State or province.", + "maxLength": 256 + }, + "postal_code": { + "type": "string", + "title": "Zip/Postal code", + "description": "Zip or postal code.", + "maxLength": 256 + }, + "country": { + "type": "string", + "title": "Country", + "description": "Country, 2-letter ISO 3166-1 alpha-2 code.", + "maxLength": 2 + }, + "geo_code": { + "type": ["null", "string"] + } + } + }, + "custom_fields": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "additionalProperties": true + } + }, + "has_live_subscription": { + "type": ["null", "boolean"] + }, + "has_active_subscription": { + "type": ["null", "boolean"] + }, + "has_future_subscription": { + "type": ["null", "boolean"] + }, + "has_canceled_subscription": { + "type": ["null", "boolean"] + }, + "has_paused_subscription": { + "type": ["null", "boolean"] + }, + "has_past_due_invoice": { + "type": ["null", "boolean"] + }, + "dunning_campaign_id": { + "type": ["null", "string"], + "maxLength": 256 + }, + "created_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "updated_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "deleted_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "billing_info": { + "$ref": "billing_infos.json" + }, + "external_accounts": { + "type": ["null", "array"], + "items": { + "$ref": "external_accounts.json" + } + }, + "invoice_template_id": { + "type": ["null", "string"] + }, + "override_business_entity_id": { + "type": ["null", "string"] + }, + "preferred_time_zone": { + "type": ["null", "string"] + }, + "shipping_addresses": { + "type": ["null", "array"], + "items": { + "$ref": "shipping_addresses.json" + } + } + } +} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/add_ons.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/add_ons.json new file mode 100644 index 000000000000..ffeea5d2f1be --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/add_ons.json @@ 
-0,0 +1,151 @@ +{ + "$schema": "http://json-schema.org/schema#", + "type": "object", + "properties": { + "id": { + "type": "string", + "title": "Add-on ID", + "maxLength": 13, + "readOnly": true + }, + "plan_id": { + "type": "string", + "title": "Plan ID", + "maxLength": 13, + "readOnly": true + }, + "code": { + "type": "string", + "title": "Add-on code", + "description": "The unique identifier for the add-on within its plan.", + "maxLength": 50 + }, + "state": { + "title": "State", + "description": "Add-ons can be either active or inactive.", + "readOnly": true, + "type": "string", + "maxLength": 256 + }, + "name": { + "type": "string", + "title": "Name", + "description": "Describes your add-on and will appear in subscribers' invoices.", + "maxLength": 255 + }, + "add_on_type": { + "type": ["null", "string"], + "title": "Add-on Type", + "description": "Whether the add-on type is fixed, or usage-based.", + "maxLength": 256 + }, + "usage_type": { + "type": "string", + "title": "Usage Type", + "description": "Type of usage, returns usage type if `add_on_type` is `usage`.", + "maxLength": 256 + }, + "usage_percentage": { + "type": ["null", "number"], + "format": "float", + "title": "Usage Percentage", + "description": "The percentage taken of the monetary amount of usage tracked. This can be up to 4 decimal places. A value between 0.0 and 100.0." + }, + "measured_unit_id": { + "type": ["null", "string"], + "title": "Measured Unit ID", + "description": "System-generated unique identifier for a measured unit associated with the add-on.", + "maxLength": 13 + }, + "accounting_code": { + "type": ["null", "string"], + "title": "Accounting code", + "description": "Accounting code for invoice line items for this add-on. If no value is provided, it defaults to the add-on's code.", + "maxLength": 256 + }, + "revenue_schedule_type": { + "title": "Revenue schedule type", + "description": "When this add-on is invoiced, the line item will use this revenue schedule. If `item_code`/`item_id` is part of the request then `revenue_schedule_type` must be absent in the request as the value will be set from the item.", + "type": "string", + "maxLength": 256 + }, + "avalara_transaction_type": { + "type": ["string", "integer"], + "title": "Avalara Transaction Type", + "description": "Used by Avalara for Communications taxes. The transaction type in combination with the service type describe how the add-on is taxed. Refer to [the documentation](https://help.avalara.com/AvaTax_for_Communications/Tax_Calculation/AvaTax_for_Communications_Tax_Engine/Mapping_Resources/TM_00115_AFC_Modules_Corresponding_Transaction_Types) for more available t/s types.", + "minimum": 0 + }, + "avalara_service_type": { + "type": ["string", "integer"], + "title": "Avalara Service Type", + "description": "Used by Avalara for Communications taxes. The transaction type in combination with the service type describe how the add-on is taxed. Refer to [the documentation](https://help.avalara.com/AvaTax_for_Communications/Tax_Calculation/AvaTax_for_Communications_Tax_Engine/Mapping_Resources/TM_00115_AFC_Modules_Corresponding_Transaction_Types) for more available t/s types.", + "minimum": 0 + }, + "tax_code": { + "type": ["null", "string"], + "title": "Tax code", + "description": "Used by Avalara, Vertex, and Recurly\u2019s EU VAT tax feature. The tax code values are specific to each tax system.
If you are using Recurly\u2019s EU VAT feature you can use `unknown`, `physical`, or `digital`.", + "maxLength": 50 + }, + "display_quantity": { + "type": ["null", "boolean"], + "title": "Display quantity?", + "description": "Determines if the quantity field is displayed on the hosted pages for the add-on." + }, + "default_quantity": { + "type": ["null", "integer"], + "title": "Default quantity", + "description": "Default quantity for the hosted pages." + }, + "optional": { + "type": ["null", "boolean"], + "title": "Optional", + "description": "Whether the add-on is optional for the customer to include in their purchase on the hosted payment page. If false, the add-on will be included when a subscription is created through the Recurly UI. However, the add-on will not be included when a subscription is created through the API." + }, + "currencies": { + "type": "array", + "description": "This is only present when `type=fixed`.", + "items": { + "type": ["null", "object"], + "properties": { + "currency": { + "type": "string", + "title": "Currency", + "description": "3-letter ISO 4217 currency code.", + "maxLength": 3 + }, + "unit_amount": { + "type": "number", + "format": "float", + "title": "Discount Amount", + "description": "Value of the fixed discount that this coupon applies." + } + } + } + }, + "tier_type": { + "type": ["null", "string"], + "title": "Tier type", + "description": "The pricing model for the add-on. For more information,\n[click here](https://docs.recurly.com/docs/billing-models#section-quantity-based). See our\n[Guide](https://developers.recurly.com/guides/item-addon-guide.html) for an overview of how\nto configure quantity-based pricing models.\n", + "maxLength": 256 + }, + "created_at": { + "type": "string", + "format": "date-time", + "title": "Created at", + "readOnly": true + }, + "updated_at": { + "type": "string", + "format": "date-time", + "title": "Last updated at", + "readOnly": true + }, + "deleted_at": { + "type": "string", + "format": "date-time", + "title": "Deleted at", + "readOnly": true + } + } +} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/billing_infos.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/billing_infos.json new file mode 100644 index 000000000000..5dd179d84f39 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/billing_infos.json @@ -0,0 +1,3 @@ +{ + "$ref": "billing_infos.json" +} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/coupons.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/coupons.json new file mode 100644 index 000000000000..9c262d6773c3 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/coupons.json @@ -0,0 +1,3 @@ +{ + "$ref": "coupons.json" +} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/credit_payments.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/credit_payments.json new file mode 100644 index 000000000000..98d827bb6074 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/credit_payments.json @@ -0,0 +1,123 @@ +{ + "$schema": "http://json-schema.org/schema#", + "type": "object", + "properties": { + "id": { + "type": "string", + "title": "Credit Payment ID", + "maxLength": 13 + }, + "uuid": { + "type": "string", + "title": "Recurly UUID", + "description": "The UUID is useful for matching data with the CSV exports and 
building URLs into Recurly's UI.", + "maxLength": 32 + }, + "action": { + "title": "Action", + "description": "The action for which the credit was created.", + "type": "string", + "maxLength": 256 + }, + "account": { + "type": "object", + "title": "Account mini details", + "properties": { + "id": { + "type": "string", + "maxLength": 13, + "readOnly": true + }, + "code": { + "type": "string", + "description": "The unique identifier of the account.", + "maxLength": 50 + } + } + }, + "applied_to_invoice": { + "type": ["null", "object"], + "title": "Invoice mini details", + "properties": { + "id": { + "type": "string", + "title": "Invoice ID", + "maxLength": 13 + }, + "number": { + "type": "string", + "title": "Invoice number", + "maxLength": 256 + } + } + }, + "original_invoice": { + "type": ["null", "object"], + "title": "Invoice mini details", + "properties": { + "id": { + "type": "string", + "title": "Invoice ID", + "maxLength": 13 + }, + "number": { + "type": "string", + "title": "Invoice number", + "maxLength": 256 + } + } + }, + "currency": { + "type": "string", + "title": "Currency", + "description": "3-letter ISO 4217 currency code.", + "maxLength": 3 + }, + "amount": { + "type": "number", + "format": "float", + "title": "Amount", + "description": "Total credit payment amount applied to the charge invoice." + }, + "original_credit_payment_id": { + "type": ["null", "string"], + "title": "Original Credit Payment ID", + "description": "For credit payments with action `refund`, this is the credit payment that was refunded.", + "maxLength": 13 + }, + "refund_transaction": { + "type": ["null", "object"], + "properties": { + "id": { + "type": "string", + "title": "Transaction ID", + "maxLength": 13 + }, + "uuid": { + "type": "string", + "title": "Recurly UUID", + "description": "The UUID is useful for matching data with the CSV exports and building URLs into Recurly's UI.", + "maxLength": 32 + } + } + }, + "created_at": { + "type": "string", + "title": "Created at", + "format": "date-time", + "readOnly": true + }, + "updated_at": { + "type": "string", + "title": "Last updated at", + "format": "date-time", + "readOnly": true + }, + "voided_at": { + "type": ["null", "string"], + "title": "Voided at", + "format": "date-time", + "readOnly": true + } + } +} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/export_dates.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/export_dates.json new file mode 100644 index 000000000000..f63e37989dea --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/export_dates.json @@ -0,0 +1,13 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "dates": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"], + "maxLength": 256 + } + } + } +} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/invoices.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/invoices.json new file mode 100644 index 000000000000..6e5f4732e079 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/invoices.json @@ -0,0 +1,377 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"], + "title": "Invoice ID", + "readOnly": true, + "maxLength": 13 + }, + "uuid": { + "type": ["null", "string"] + }, + "object": { + "type": ["null", "string"] + }, + "type": { + 
"title": "Invoice type", + "description": "Invoices are either charge, credit, or legacy invoices.", + "type": ["null", "string"], + "maxLength": 256 + }, + "origin": { + "type": ["null", "string"], + "title": "Origin", + "description": "The event that created the invoice.", + "maxLength": 256 + }, + "state": { + "title": "Invoice state", + "type": ["null", "string"], + "maxLength": 256 + }, + "account": { + "$ref": "account_details.json" + }, + "billing_info_id": { + "type": ["null", "string"], + "title": "Billing info ID", + "description": "The `billing_info_id` is the value that represents a specific billing info for an end customer. When `billing_info_id` is used to assign billing info to the subscription, all future billing events for the subscription will bill to the specified billing info. `billing_info_id` can ONLY be used for sites utilizing the Wallet feature.", + "maxLength": 256 + }, + "subscription_ids": { + "type": ["null", "array"], + "title": "Subscription IDs", + "description": "If the invoice is charging or refunding for one or more subscriptions, these are their IDs.", + "items": { + "type": ["null", "string"], + "title": "Subscription ID", + "maxLength": 13 + } + }, + "previous_invoice_id": { + "type": ["null", "string"], + "title": "Previous invoice ID", + "description": "On refund invoices, this value will exist and show the invoice ID of the purchase invoice the refund was created from.", + "maxLength": 13 + }, + "number": { + "type": ["null", "string"], + "title": "Invoice number", + "description": "If VAT taxation and the Country Invoice Sequencing feature are enabled, invoices will have country-specific invoice numbers for invoices billed to EU countries (ex: FR1001). Non-EU invoices will continue to use the site-level invoice number sequence.", + "maxLength": 256 + }, + "collection_method": { + "type": ["null", "string"], + "title": "Collection method", + "description": "An automatic invoice means a corresponding transaction is run using the account's billing information at the same time the invoice is created. Manual invoices are created without a corresponding transaction. The merchant must enter a manual payment transaction or have the customer pay the invoice with an automatic method, like credit card, PayPal, Amazon, or ACH bank payment.", + "maxLength": 256 + }, + "po_number": { + "type": ["null", "string"], + "title": "Purchase order number", + "description": "For manual invoicing, this identifies the PO number associated with the subscription.", + "maxLength": 50 + }, + "net_terms": { + "type": ["null", "integer"], + "title": "Net terms", + "description": "Integer representing the number of days after an invoice's creation that the invoice will become past due. If an invoice's net terms are set to '0', it is due 'On Receipt' and will become past due 24 hours after it\u2019s created. 
If an invoice is due net 30, it will become past due at 31 days exactly.", + "minimum": 0, + "default": 0 + }, + "address": { + "type": ["null", "object"], + "properties": { + "name_on_account": { + "type": ["null", "string"], + "title": "Name on account", + "maxLength": 256 + }, + "company": { + "type": ["null", "string"], + "title": "Company", + "maxLength": 256 + }, + "phone": { + "type": ["null", "string"], + "title": "Phone number", + "maxLength": 256 + }, + "street1": { + "type": ["null", "string"], + "title": "Street 1", + "maxLength": 256 + }, + "street2": { + "type": ["null", "string"], + "title": "Street 2", + "maxLength": 256 + }, + "city": { + "type": ["null", "string"], + "title": "City", + "maxLength": 256 + }, + "region": { + "type": ["null", "string"], + "title": "State/Province", + "description": "State or province.", + "maxLength": 256 + }, + "postal_code": { + "type": ["null", "string"], + "title": "Zip/Postal code", + "description": "Zip or postal code.", + "maxLength": 256 + }, + "country": { + "type": ["null", "string"], + "title": "Country", + "description": "Country, 2-letter ISO 3166-1 alpha-2 code.", + "maxLength": 2 + }, + "first_name": { + "type": ["null", "string"], + "maxLength": 256 + }, + "last_name": { + "type": ["null", "string"], + "maxLength": 256 + } + } + }, + "shipping_address": { + "type": ["null", "object"], + "properties": { + "id": { + "type": ["null", "string"], + "title": "Shipping Address ID", + "maxLength": 13, + "readOnly": true + } + } + }, + "currency": { + "type": ["null", "string"], + "title": "Currency", + "description": "3-letter ISO 4217 currency code.", + "maxLength": 3 + }, + "discount": { + "type": ["null", "number"], + "format": "float", + "title": "Discount", + "description": "Total discounts applied to this invoice." + }, + "subtotal": { + "type": ["null", "number"], + "format": "float", + "title": "Subtotal", + "description": "The summation of charges and credits, before discounts and taxes." + }, + "tax": { + "type": ["null", "number"], + "format": "float", + "title": "Tax", + "description": "The total tax on this invoice." + }, + "total": { + "type": ["null", "number"], + "format": "float", + "title": "Total", + "description": "The final total on this invoice. The summation of invoice charges, discounts, credits, and tax." + }, + "refundable_amount": { + "type": ["null", "number"], + "format": "float", + "title": "Refundable amount", + "description": "The refundable amount on a charge invoice. It will be null for all other invoices." + }, + "paid": { + "type": ["null", "number"], + "format": "float", + "title": "Paid", + "description": "The total amount of successful payment transactions on this invoice." + }, + "balance": { + "type": ["null", "number"], + "format": "float", + "title": "Balance", + "description": "The outstanding balance remaining on this invoice." + }, + "tax_info": { + "type": ["null", "object"], + "title": "Tax info", + "properties": { + "type": { + "type": ["null", "string"], + "title": "Type", + "description": "Provides the tax type as \"vat\" for EU VAT, \"usst\" for U.S. Sales Tax, or the 2 letter country code for country level tax types like Canada, Australia, New Zealand, Israel, and all non-EU European countries.", + "maxLength": 256 + }, + "region": { + "type": ["null", "string"], + "title": "Region", + "description": "Provides the tax region applied on an invoice. For U.S. Sales Tax, this will be the 2 letter state code. For EU VAT this will be the 2 letter country code.
For all country level tax types, this will display the regional tax, like VAT, GST, or PST." + }, + "rate": { + "type": ["null", "number"], + "format": "float", + "title": "Rate" + }, + "tax_details": { + "type": "array", + "description": "Provides additional tax details for Canadian Sales Tax when there is tax applied at both the country and province levels. This will only be populated for the Invoice response when fetching a single invoice and not for the InvoiceList or LineItem.", + "items": { + "type": "object", + "title": "Tax detail", + "properties": { + "type": { + "type": ["null", "string"], + "title": "Type", + "description": "Provides the tax type for the region. For Canadian Sales Tax, this will be GST, HST, QST or PST.", + "maxLength": 256 + }, + "region": { + "type": ["null", "string"], + "title": "Region", + "description": "Provides the tax region applied on an invoice. For Canadian Sales Tax, this will be either the 2 letter province code or country code.", + "maxLength": 256 + }, + "rate": { + "type": ["null", "number"], + "format": "float", + "title": "Rate", + "description": "Provides the tax rate for the region." + }, + "tax": { + "type": ["null", "number"], + "format": "float", + "title": "Tax", + "description": "The total tax applied for this tax type." + } + } + } + } + } + }, + "used_tax_service": { + "type": ["null", "boolean"] + }, + "vat_number": { + "type": ["null", "string"], + "title": "VAT number", + "description": "VAT registration number for the customer on this invoice. This will come from the VAT Number field in the Billing Info or the Account Info depending on your tax settings and the invoice collection method.", + "maxLength": 20 + }, + "vat_reverse_charge_notes": { + "type": ["null", "string"], + "title": "VAT reverse charge notes", + "description": "VAT Reverse Charge Notes only appear if you have EU VAT enabled or are using your own Avalara AvaTax account and the customer is in the EU, has a VAT number, and is in a different country than your own. This will default to the VAT Reverse Charge Notes text specified on the Tax Settings page in your Recurly admin, unless custom notes were created with the original subscription.", + "maxLength": 1024 + }, + "terms_and_conditions": { + "type": ["null", "string"], + "title": "Terms and conditions", + "description": "This will default to the Terms and Conditions text specified on the Invoice Settings page in your Recurly admin. Specify custom notes to add or override Terms and Conditions.", + "maxLength": 16384 + }, + "customer_notes": { + "type": ["null", "string"], + "title": "Customer notes", + "description": "This will default to the Customer Notes text specified on the Invoice Settings. 
Specify custom notes to add or override Customer Notes.", + "maxLength": 2048 + }, + "line_items": { + "type": ["null", "array"], + "title": "Line Items", + "items": { + "$ref": "line_items.json" + } + }, + "has_more_line_items": { + "type": ["null", "boolean"] + }, + "transactions": { + "type": ["null", "array"], + "title": "Transactions", + "items": { + "type": ["null", "object"], + "properties": { + "id": { + "type": "string", + "title": "Transaction ID", + "maxLength": 13 + }, + "uuid": { + "type": "string", + "title": "Recurly UUID", + "description": "The UUID is useful for matching data with the CSV exports and building URLs into Recurly's UI.", + "maxLength": 32 + } + } + } + }, + "credit_payments": { + "type": ["null", "array"], + "title": "Credit payments", + "items": { + "type": ["null", "object"], + "properties": { + "id": { + "type": "string", + "title": "Credit Payment ID", + "maxLength": 13 + }, + "uuid": { + "type": "string", + "title": "Recurly UUID", + "description": "The UUID is useful for matching data with the CSV exports and building URLs into Recurly's UI.", + "maxLength": 32 + } + } + } + }, + "created_at": { + "type": ["null", "string"], + "format": "date-time", + "title": "Created at", + "readOnly": true + }, + "updated_at": { + "type": ["null", "string"], + "format": "date-time", + "title": "Last updated at", + "readOnly": true + }, + "due_at": { + "type": ["null", "string"], + "format": "date-time", + "title": "Due at", + "description": "Date invoice is due. This is the date the net terms are reached." + }, + "closed_at": { + "type": ["null", "string"], + "format": "date-time", + "title": "Closed at", + "description": "Date invoice was marked paid or failed." + }, + "dunning_campaign_id": { + "type": ["null", "string"], + "title": "Dunning Campaign ID", + "description": "Unique ID to identify the dunning campaign used when dunning the invoice. Available when the Dunning Campaigns feature is enabled. 
For sites without multiple dunning campaigns enabled, this will always be the default dunning campaign.", + "maxLength": 256 + }, + "dunning_events_sent": { + "type": ["null", "integer"] + }, + "final_dunning_event": { + "type": ["null", "boolean"] + }, + "business_entity_id": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/line_items.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/line_items.json new file mode 100644 index 000000000000..85370cc75850 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/line_items.json @@ -0,0 +1,3 @@ +{ + "$ref": "line_items.json" +} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/measured_units.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/measured_units.json new file mode 100644 index 000000000000..7865d44d3079 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/measured_units.json @@ -0,0 +1,41 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"], + "maxLength": 13 + }, + "object": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"], + "maxLength": 256 + }, + "display_name": { + "type": ["null", "string"], + "maxLength": 255 + }, + "state": { + "type": ["null", "string"], + "maxLength": 255 + }, + "description": { + "type": ["null", "string"], + "maxLength": 1024 + }, + "created_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "updated_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "deleted_at": { + "type": ["null", "string"], + "format": "date-time" + } + } +} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/plans.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/plans.json new file mode 100644 index 000000000000..aabac321be53 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/plans.json @@ -0,0 +1,191 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"], + "maxLength": 13 + }, + "object": { + "type": ["null", "string"] + }, + "code": { + "type": ["null", "string"], + "maxLength": 256 + }, + "state": { + "type": ["null", "string"], + "maxLength": 256 + }, + "name": { + "type": ["null", "string"], + "maxLength": 256 + }, + "description": { + "type": ["null", "string"], + "maxLength": 1024 + }, + "interval_unit": { + "type": ["null", "string"], + "maxLength": 256 + }, + "interval_length": { + "type": ["null", "number"] + }, + "trial_unit": { + "type": ["null", "string"], + "maxLength": 256 + }, + "trial_length": { + "type": ["null", "number"] + }, + "trial_requires_billing_info": { + "type": ["null", "boolean"] + }, + "total_billing_cycles": { + "type": ["null", "number"] + }, + "auto_renew": { + "type": ["null", "boolean"] + }, + "pricing_model": { + "type": ["null", "string"] + }, + "ramp_intervals": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "starting_billing_cycle": { + "type": ["null", "integer"] + }, + "currencies": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "currency": { + "type": ["null", "string"] + }, + "unit_amount": { + "type": ["null", "number"] + } + } + } + } + } + } 
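The `ramp_intervals` array closing here models stepped plan pricing: each interval names a `starting_billing_cycle` plus per-currency `unit_amount` values. A minimal sketch of resolving the price in effect for a given cycle, assuming 1-based cycles and that an interval applies from its starting cycle until the next interval begins (the `ramp_unit_amount` helper is illustrative, not part of this connector):

from typing import Optional

def ramp_unit_amount(ramp_intervals: list, billing_cycle: int, currency: str = "USD") -> Optional[float]:
    # Walk intervals in ascending starting_billing_cycle order; the last interval
    # whose starting cycle has been reached supplies the effective unit price.
    amount: Optional[float] = None
    for interval in sorted(ramp_intervals or [], key=lambda i: i.get("starting_billing_cycle") or 0):
        if (interval.get("starting_billing_cycle") or 0) <= billing_cycle:
            for price in interval.get("currencies") or []:
                if price.get("currency") == currency:
                    amount = price.get("unit_amount")
    return amount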
+ }, + "custom_fields": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "name": { + "type": ["null", "string"] + }, + "value": { + "type": ["null", "string"] + } + } + } + }, + "accounting_code": { + "type": ["null", "string"], + "maxLength": 256 + }, + "revenue_schedule_type": { + "type": ["null", "string"], + "maxLength": 256 + }, + "setup_fee_revenue_schedule_type": { + "type": ["null", "string"], + "maxLength": 256 + }, + "setup_fee_accounting_code": { + "type": ["null", "string"], + "maxLength": 256 + }, + "avalara_transaction_type": { + "type": ["null", "number"] + }, + "avalara_service_type": { + "type": ["null", "number"] + }, + "tax_code": { + "type": ["null", "string"], + "maxLength": 256 + }, + "tax_exempt": { + "type": ["null", "boolean"] + }, + "currencies": { + "type": "array", + "title": "Pricing", + "items": { + "type": "object", + "properties": { + "currency": { + "type": "string", + "title": "Currency", + "description": "3-letter ISO 4217 currency code.", + "maxLength": 3 + }, + "setup_fee": { + "type": "number", + "format": "float", + "title": "Setup fee", + "description": "Amount of one-time setup fee automatically charged at the beginning of a subscription billing cycle. For subscription plans with a trial, the setup fee will be charged at the time of signup. Setup fees do not increase with the quantity of a subscription plan.", + "minimum": 0, + "maximum": 1000000 + }, + "unit_amount": { + "type": "number", + "format": "float", + "title": "Unit price", + "minimum": 0, + "maximum": 1000000 + } + } + } + }, + "hosted_pages": { + "type": "object", + "properties": { + "success_url": { + "type": ["null", "string"], + "maxLength": 2048 + }, + "cancel_url": { + "type": ["null", "string"], + "maxLength": 2048 + }, + "bypass_confirmation": { + "type": ["null", "boolean"] + }, + "display_quantity": { + "type": ["null", "boolean"] + } + } + }, + "allow_any_item_on_subscriptions": { + "type": ["null", "boolean"] + }, + "dunning_campaign_id": { + "type": ["null", "string"], + "maxLength": 256 + }, + "created_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "updated_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "deleted_at": { + "type": ["null", "string"], + "format": "date-time" + } + } +} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/account_details.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/account_details.json new file mode 100644 index 000000000000..9d3dc5d71945 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/account_details.json @@ -0,0 +1,35 @@ +{ + "type": ["null", "object"], + "properties": { + "id": { + "type": "string" + }, + "object": { + "type": ["null", "string"] + }, + "code": { + "type": ["null", "string"] + }, + "email": { + "type": ["null", "string"] + }, + "first_name": { + "type": ["null", "string"] + }, + "last_name": { + "type": ["null", "string"] + }, + "company": { + "type": ["null", "string"] + }, + "parent_account_id": { + "type": ["null", "string"] + }, + "bill_to": { + "type": ["null", "string"] + }, + "dunning_campaign_id": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/billing_infos.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/billing_infos.json new file mode 100644 index 000000000000..dbf207f589b4 --- /dev/null 
+++ b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/billing_infos.json @@ -0,0 +1,213 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": "string", + "maxLength": 13, + "readOnly": true + }, + "object": { + "type": ["null", "string"] + }, + "account_id": { + "type": "string", + "maxLength": 13, + "readOnly": true + }, + "first_name": { + "type": ["null", "string"], + "maxLength": 50 + }, + "last_name": { + "type": ["null", "string"], + "maxLength": 50 + }, + "company": { + "type": ["null", "string"], + "maxLength": 100 + }, + "address": { + "type": "object", + "properties": { + "phone": { + "type": ["null", "string"], + "title": "Phone number", + "maxLength": 256 + }, + "street1": { + "type": ["null", "string"], + "title": "Street 1", + "maxLength": 256 + }, + "street2": { + "type": ["null", "string"], + "title": "Street 2", + "maxLength": 256 + }, + "city": { + "type": ["null", "string"], + "title": "City", + "maxLength": 256 + }, + "region": { + "type": ["null", "string"], + "title": "State/Province", + "description": "State or province.", + "maxLength": 256 + }, + "postal_code": { + "type": ["null", "string"], + "title": "Zip/Postal code", + "description": "Zip or postal code.", + "maxLength": 256 + }, + "country": { + "type": ["null", "string"], + "title": "Country", + "description": "Country, 2-letter ISO 3166-1 alpha-2 code.", + "maxLength": 2 + } + } + }, + "vat_number": { + "type": ["null", "string"], + "description": "Customer's VAT number (to avoid having the VAT applied). This is only used for automatically collected invoices.", + "maxLength": 20 + }, + "valid": { + "type": "boolean", + "readOnly": true + }, + "payment_method": { + "type": "object", + "properties": { + "card_type": { + "description": "Visa, MasterCard, American Express, Discover, JCB, etc.", + "type": ["null", "string"], + "maxLength": 256 + }, + "object": { + "type": ["null", "string"] + }, + "first_six": { + "type": ["null", "string"], + "description": "Credit card number's first six digits.", + "maxLength": 6 + }, + "last_four": { + "type": ["null", "string"], + "description": "Credit card number's last four digits. Will refer to bank account if payment method is ACH.", + "maxLength": 4 + }, + "last_two": { + "type": ["null", "string"], + "description": "The IBAN bank account's last two digits.", + "maxLength": 2 + }, + "exp_month": { + "type": ["null", "integer"], + "description": "Expiration month.", + "maxLength": 2 + }, + "exp_year": { + "type": ["null", "integer"], + "description": "Expiration year.", + "maxLength": 4 + }, + "gateway_token": { + "type": ["null", "string"], + "description": "A token used in place of a credit card in order to perform transactions.", + "maxLength": 50 + }, + "cc_bin_country": { + "type": ["null", "string"], + "description": "The 2-letter ISO 3166-1 alpha-2 country code associated with the credit card BIN, if known by Recurly. Available on the BillingInfo object only. Available when the BIN country lookup feature is enabled.", + "maxLength": 256 + }, + "gateway_code": { + "type": ["null", "string"], + "description": "An identifier for a specific payment gateway.", + "maxLength": 13 + }, + "billing_agreement_id": { + "type": ["null", "string"], + "description": "Billing Agreement identifier. 
Only present for Amazon or Paypal payment methods.", + "maxLength": 256 + }, + "name_on_account": { + "type": ["null", "string"], + "description": "The name associated with the bank account.", + "maxLength": 256 + }, + "account_type": { + "description": "The bank account type. Only present for ACH payment methods.", + "type": ["null", "string"], + "maxLength": 256 + }, + "routing_number": { + "type": ["null", "string"], + "description": "The bank account's routing number. Only present for ACH payment methods.", + "maxLength": 256 + }, + "routing_number_bank": { + "type": ["null", "string"], + "description": "The bank name of this routing number.", + "maxLength": 256 + } + } + }, + "fraud": { + "type": ["null", "object"], + "title": "Fraud information", + "description": "Most recent fraud result.", + "readOnly": true, + "properties": { + "score": { + "type": ["null", "integer"], + "title": "Kount score" + }, + "decision": { + "title": "Kount decision", + "maxLength": 10, + "type": ["null", "string"] + }, + "risk_rules_triggered": { + "type": "object", + "title": "Kount rules" + } + } + }, + "primary_payment_method": { + "type": "boolean", + "description": "The `primary_payment_method` field is used to indicate the primary billing info on the account. The first billing info created on an account will always become primary. This payment method will be used" + }, + "backup_payment_method": { + "type": "boolean", + "description": "The `backup_payment_method` field is used to indicate a billing info as a backup on the account that will be tried if the initial billing info used for an invoice is declined." + }, + "created_at": { + "type": "string", + "format": "date-time", + "description": "When the billing information was created.", + "readOnly": true + }, + "updated_at": { + "type": "string", + "format": "date-time", + "description": "When the billing information was last changed.", + "readOnly": true + }, + "updated_by": { + "type": ["null", "object"], + "properties": { + "ip": { + "type": ["null", "string"] + }, + "country": { + "type": ["null", "string"] + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/coupon_redemptions.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/coupon_redemptions.json new file mode 100644 index 000000000000..e74f00e93f57 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/coupon_redemptions.json @@ -0,0 +1,85 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": ["null", "object"], + "properties": { + "id": { + "type": ["null", "string"] + }, + "object": { + "type": ["null", "string"] + }, + "coupon": { + "type": ["null", "object"], + "properties": { + "id": { + "type": ["null", "string"] + }, + "object": { + "type": ["null", "string"] + }, + "code": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "state": { + "type": ["null", "string"] + }, + "discount": { + "type": ["null", "object"], + "properties": { + "type": { + "type": ["null", "string"] + }, + "percent": { + "type": ["null", "integer"] + }, + "currencies": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "currency": { + "type": ["null", "string"] + }, + "amount": { + "type": ["null", "number"] + } + } + } + }, + "trial": { + "type": ["null", "object"], + "properties": { + "unit": { + "type": ["null", "string"] + }, + "length": { + "type": ["null", "integer"] + } + } + } + } 
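The `discount` object above distinguishes percent, fixed-amount, and free-trial coupons through its `type` field. A rough sketch of converting such a record into a monetary value against an invoice subtotal, assuming `type` is one of `percent`, `fixed`, or `free_trial` and that fixed discounts are clamped to the subtotal (`coupon_discount_value` is illustrative only):

def coupon_discount_value(discount: dict, subtotal: float, currency: str = "USD") -> float:
    # Percent coupons scale the subtotal; fixed coupons look up a per-currency amount.
    if discount.get("type") == "percent":
        return subtotal * (discount.get("percent") or 0) / 100.0
    if discount.get("type") == "fixed":
        for entry in discount.get("currencies") or []:
            if entry.get("currency") == currency:
                return min(entry.get("amount") or 0.0, subtotal)
    # Free-trial coupons grant time rather than money, so they reduce nothing here.
    return 0.0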
+ }, + "coupon_type": { + "type": ["null", "string"] + }, + "expired_at": { + "type": ["null", "string"], + "format": "date-time" + } + } + }, + "state": { + "type": ["null", "string"] + }, + "discounted": { + "type": ["null", "number"] + }, + "created_at": { + "type": ["null", "string"], + "format": "date-time" + } + } +} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/coupons.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/coupons.json new file mode 100644 index 000000000000..50b91db59b4f --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/coupons.json @@ -0,0 +1,194 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"], + "maxLength": 13 + }, + "object": { + "type": ["null", "string"] + }, + "code": { + "type": ["null", "string"], + "maxLength": 256 + }, + "name": { + "type": ["null", "string"], + "maxLength": 256 + }, + "state": { + "type": ["null", "string"], + "maxLength": 256 + }, + "max_redemptions": { + "type": ["null", "number"] + }, + "max_redemptions_per_account": { + "type": ["null", "number"] + }, + "unique_coupon_codes_count": { + "type": ["null", "number"] + }, + "unique_code_template": { + "type": ["null", "string"], + "maxLength": 256 + }, + "unique_coupon_code": { + "$ref": "unique_coupons.json" + }, + "duration": { + "type": ["null", "string"], + "maxLength": 256 + }, + "temporal_amount": { + "type": ["null", "number"] + }, + "temporal_unit": { + "type": ["null", "string"], + "maxLength": 256 + }, + "free_trial_unit": { + "type": ["null", "string"], + "maxLength": 256 + }, + "free_trial_amount": { + "type": ["null", "number"] + }, + "applies_to_all_plans": { + "type": ["null", "boolean"] + }, + "applies_to_all_items": { + "type": ["null", "boolean"] + }, + "applies_to_non_plan_charges": { + "type": ["null", "boolean"] + }, + "plans": { + "type": ["null", "array"], + "title": "Plans", + "description": "A list of plans for which this coupon applies. This will be `null` if `applies_to_all_plans=true`.", + "items": { + "type": "object", + "title": "Plan mini details", + "description": "Just the important parts.", + "properties": { + "id": { + "type": "string", + "title": "Plan ID", + "maxLength": 13, + "readOnly": true + }, + "code": { + "type": "string", + "title": "Plan code", + "description": "Unique code to identify the plan. This is used in Hosted Payment Page URLs and in the invoice exports.", + "maxLength": 13 + } + } + } + }, + "items": { + "type": ["null", "array"], + "title": "Items", + "description": "A list of items for which this coupon applies. This will be\n`null` if `applies_to_all_items=true`.\n", + "items": { + "type": ["null", "object"], + "title": "Item mini details", + "description": "Just the important parts.", + "properties": { + "id": { + "type": "string", + "title": "Item ID", + "maxLength": 13, + "readOnly": true + } + } + } + }, + "redemption_resource": { + "type": ["null", "string"], + "maxLength": 256 + }, + "discount": { + "type": ["null", "object"], + "description": "Details of the discount a coupon applies. 
Will contain a `type`\nproperty and one of the following properties: `percent`, `fixed`, `trial`.\n", + "properties": { + "type": { + "type": "string", + "maxLength": 256 + }, + "percent": { + "description": "This is only present when `type=percent`.", + "type": "integer" + }, + "currencies": { + "type": "array", + "description": "This is only present when `type=fixed`.", + "items": { + "type": ["null", "object"], + "properties": { + "currency": { + "type": "string", + "title": "Currency", + "description": "3-letter ISO 4217 currency code.", + "maxLength": 3 + }, + "amount": { + "type": "number", + "format": "float", + "title": "Discount Amount", + "description": "Value of the fixed discount that this coupon applies." + } + } + } + }, + "trial": { + "type": "object", + "description": "This is only present when `type=free_trial`.", + "properties": { + "unit": { + "title": "Trial unit", + "description": "Temporal unit of the free trial", + "type": "string", + "maxLength": 256 + }, + "length": { + "type": "integer", + "title": "Trial length", + "description": "Trial length measured in the units specified by the sibling `unit` property" + } + } + } + } + }, + "coupon_type": { + "type": ["null", "string"], + "maxLength": 256 + }, + "hosted_page_description": { + "type": ["null", "string"], + "maxLength": 1024 + }, + "invoice_description": { + "type": ["null", "string"], + "maxLength": 1024 + }, + "redeem_by": { + "type": ["null", "string"], + "maxLength": 256 + }, + "created_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "updated_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "expired_at": { + "type": ["null", "string"], + "format": "date-time" + } + } +} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/external_accounts.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/external_accounts.json new file mode 100644 index 000000000000..29e6292263b7 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/external_accounts.json @@ -0,0 +1,24 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "object": { + "type": ["null", "string"] + }, + "id": { + "type": ["null", "string"] + }, + "external_account_code": { + "type": ["null", "string"] + }, + "external_connection_type": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "string"] + }, + "updated_at": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/line_items.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/line_items.json new file mode 100644 index 000000000000..33e1fb8809d8 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/line_items.json @@ -0,0 +1,293 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": ["null", "object"], + "title": "Line item", + "properties": { + "id": { + "type": "string", + "title": "Line item ID", + "maxLength": 13 + }, + "object": { + "type": ["null", "string"] + }, + "uuid": { + "type": "string", + "title": "UUID", + "description": "The UUID is useful for matching data with the CSV exports and building URLs into Recurly's UI.", + "maxLength": 32 + }, + "type": { + "type": "string", + "title": "Line item type", + "description": "Charges are positive line items that debit the account. 
Credits are negative line items that credit the account.", + "maxLength": 256 + }, + "item_code": { + "type": ["null", "string"], + "title": "Item Code", + "description": "Unique code to identify an item. Available when the Credit Invoices and Subscription Billing Terms features are enabled.", + "maxLength": 50 + }, + "item_id": { + "type": ["null", "string"], + "title": "Item ID", + "description": "System-generated unique identifier for an item. Available when the Credit Invoices and Subscription Billing Terms features are enabled.", + "maxLength": 13 + }, + "external_sku": { + "type": ["null", "string"], + "title": "External SKU", + "description": "Optional Stock Keeping Unit assigned to an item. Available when the Credit Invoices and Subscription Billing Terms features are enabled.", + "maxLength": 50 + }, + "revenue_schedule_type": { + "type": ["null", "string"], + "title": "Revenue schedule type", + "maxLength": 256 + }, + "state": { + "type": "string", + "title": "Current state of the line item", + "description": "Pending line items are charges or credits on an account that have not been applied to an invoice yet. Invoiced line items will always have an `invoice_id` value.", + "maxLength": 256 + }, + "legacy_category": { + "type": ["null", "string"], + "title": "Legacy category", + "description": "Category to describe the role of a line item on a legacy invoice:\n- \"charges\" refers to charges being billed for on this invoice.\n- \"credits\" refers to refund or proration credits. This portion of the invoice can be considered a credit memo.\n- \"applied_credits\" refers to previous credits applied to this invoice. See their original_line_item_id to determine where the credit first originated.\n- \"carryforwards\" can be ignored. They exist to consume any remaining credit balance. A new credit with the same amount will be created and placed back on the account.\n" + }, + "account": { + "$ref": "account_details.json" + }, + "bill_for_account_id": { + "type": "string", + "title": "Bill For Account ID", + "maxLength": 13, + "description": "The UUID of the account responsible for originating the line item." + }, + "subscription_id": { + "type": ["null", "string"], + "title": "Subscription ID", + "description": "If the line item is a charge or credit for a subscription, this is its ID.", + "maxLength": 13 + }, + "plan_id": { + "type": ["null", "string"], + "title": "Plan ID", + "description": "If the line item is a charge or credit for a plan or add-on, this is the plan's ID.", + "maxLength": 13 + }, + "plan_code": { + "type": ["null", "string"], + "title": "Plan code", + "description": "If the line item is a charge or credit for a plan or add-on, this is the plan's code.", + "maxLength": 50 + }, + "add_on_id": { + "type": ["null", "string"], + "title": "Add-on ID", + "description": "If the line item is a charge or credit for an add-on this is its ID.", + "maxLength": 13 + }, + "add_on_code": { + "type": ["null", "string"], + "title": "Add-on code", + "description": "If the line item is a charge or credit for an add-on, this is its code.", + "maxLength": 50 + }, + "invoice_id": { + "type": ["null", "string"], + "title": "Invoice ID", + "description": "Once the line item has been invoiced this will be the invoice's ID.", + "maxLength": 13 + }, + "invoice_number": { + "type": ["null", "string"], + "title": "Invoice number", + "description": "Once the line item has been invoiced this will be the invoice's number. 
If VAT taxation and the Country Invoice Sequencing feature are enabled, invoices will have country-specific invoice numbers for invoices billed to EU countries (ex: FR1001). Non-EU invoices will continue to use the site-level invoice number sequence.", + "maxLength": 256 + }, + "previous_line_item_id": { + "type": ["null", "string"], + "title": "Previous line item ID", + "description": "Will only have a value if the line item is a credit created from a previous credit, or if the credit was created from a charge refund.", + "maxLength": 13 + }, + "original_line_item_invoice_id": { + "type": ["null", "string"], + "title": "Original line item's invoice ID", + "description": "The invoice where the credit originated. Will only have a value if the line item is a credit created from a previous credit, or if the credit was created from a charge refund.", + "maxLength": 13 + }, + "origin": { + "type": "string", + "title": "Origin of line item", + "description": "A credit created from an original charge will have the value of the charge's origin.", + "maxLength": 256 + }, + "accounting_code": { + "type": "string", + "title": "Accounting code", + "description": "Internal accounting code to help you reconcile your revenue to the correct ledger. Line items created as part of a subscription invoice will use the plan or add-on's accounting code, otherwise the value will only be present if you define an accounting code when creating the line item.", + "maxLength": 20 + }, + "product_code": { + "type": "string", + "title": "Product code", + "description": "For plan-related line items this will be the plan's code, for add-on related line items it will be the add-on's code. For item-related line items it will be the item's `external_sku`.", + "maxLength": 50 + }, + "credit_reason_code": { + "type": ["null", "string"], + "title": "Credit reason code", + "description": "The reason the credit was given when line item is `type=credit`.", + "default": "general", + "maxLength": 256 + }, + "currency": { + "type": "string", + "title": "Currency", + "description": "3-letter ISO 4217 currency code.", + "maxLength": 3 + }, + "amount": { + "type": "number", + "format": "float", + "title": "Total after discounts and taxes", + "description": "`(quantity * unit_amount) - (discount + tax)`" + }, + "description": { + "type": "string", + "title": "Description", + "description": "Description that appears on the invoice. For subscription related items this will be filled in automatically.", + "maxLength": 255 + }, + "quantity": { + "type": "integer", + "title": "Quantity", + "description": "This number will be multiplied by the unit amount to compute the subtotal before any discounts or taxes.", + "default": 1 + }, + "unit_amount": { + "type": "number", + "format": "float", + "title": "Unit amount", + "description": "Positive amount for a charge, negative amount for a credit." + }, + "unit_amount_decimal": { + "type": ["null", "string"], + "title": "Unit amount decimal", + "description": "Positive amount for a charge, negative amount for a credit." + }, + "subtotal": { + "type": "number", + "format": "float", + "title": "Total before discounts and taxes", + "description": "`quantity * unit_amount`" + }, + "discount": { + "type": ["null", "number"], + "format": "float", + "title": "Discount", + "description": "The discount applied to the line item." + }, + "tax": { + "type": ["null", "number"], + "format": "float", + "title": "Tax", + "description": "The tax amount for the line item." 
+ }, + "taxable": { + "type": "boolean", + "title": "Taxable?", + "description": "`true` if the line item is taxable, `false` if it is not." + }, + "tax_exempt": { + "type": "boolean", + "title": "Tax exempt?", + "description": "`true` exempts tax on charges, `false` applies tax on charges. If not defined, then defaults to the Plan and Site settings. This attribute does not work for credits (negative line items). Credits are always applied post-tax. Pre-tax discounts should use the Coupons feature." + }, + "tax_code": { + "type": ["null", "string"], + "title": "Tax code", + "description": "Used by Avalara, Vertex, and Recurly\u2019s EU VAT tax feature. The tax code values are specific to each tax system. If you are using Recurly\u2019s EU VAT feature you can use `unknown`, `physical`, or `digital`.", + "maxLength": 50 + }, + "tax_info": { + "$ref": "tax_info.json" + }, + "proration_rate": { + "type": ["null", "number"], + "format": "float", + "title": "Proration rate", + "description": "When a line item has been prorated, this is the rate of the proration. Proration rates were made available for line items created after March 30, 2017. For line items created prior to that date, the proration rate will be `null`, even if the line item was prorated.", + "minimum": 0, + "maximum": 1 + }, + "refund": { + "type": "boolean", + "title": "Refund?" + }, + "refunded_quantity": { + "type": ["null", "integer"], + "title": "Refunded Quantity", + "description": "For refund charges, the quantity being refunded. For non-refund charges, the total quantity refunded (possibly over multiple refunds)." + }, + "credit_applied": { + "type": ["null", "number"], + "format": "float", + "title": "Credit Applied", + "description": "The amount of credit from this line item that was applied to the invoice." + }, + "shipping_address": { + "type": ["null", "object"], + "properties": { + "id": { + "type": "string", + "title": "Shipping Address ID", + "maxLength": 13, + "readOnly": true + } + } + }, + "start_date": { + "type": ["null", "string"], + "format": "date-time", + "title": "Start date", + "description": "If an end date is present, this value indicates the beginning of a billing time range. If no end date is present it indicates billing for a specific date." + }, + "end_date": { + "type": ["null", "string"], + "format": "date-time", + "title": "End date", + "description": "If this date is provided, it indicates the end of a time range." + }, + "custom_fields": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "name": { + "type": ["null", "string"] + }, + "value": { + "type": ["null", "string"] + } + } + } + }, + "created_at": { + "type": "string", + "format": "date-time", + "title": "Created at", + "description": "When the line item was created." + }, + "updated_at": { + "type": "string", + "format": "date-time", + "title": "Last updated at", + "description": "When the line item was last changed."
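The `subtotal` and `amount` descriptions above spell out the line-item arithmetic: `subtotal = quantity * unit_amount` and `amount = subtotal - (discount + tax)`. A two-line sketch of the same calculation, handy for spot-checking synced records (the function name is illustrative):

def line_item_amount(quantity: int, unit_amount: float, discount: float = 0.0, tax: float = 0.0) -> float:
    subtotal = quantity * unit_amount  # "Total before discounts and taxes"
    return subtotal - (discount + tax)  # "Total after discounts and taxes"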
+ } + } +} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/shipping_addresses.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/shipping_addresses.json new file mode 100644 index 000000000000..e0f8091cdbe6 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/shipping_addresses.json @@ -0,0 +1,91 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": ["null", "object"], + "properties": { + "id": { + "type": "string", + "title": "Shipping Address ID", + "maxLength": 13, + "readOnly": true + }, + "object": { + "type": ["null", "string"] + }, + "account_id": { + "type": "string", + "title": "Account ID", + "maxLength": 13, + "readOnly": true + }, + "nickname": { + "type": "string", + "maxLength": 255 + }, + "first_name": { + "type": "string", + "maxLength": 255 + }, + "last_name": { + "type": "string", + "maxLength": 255 + }, + "company": { + "type": "string", + "maxLength": 255 + }, + "email": { + "type": "string", + "maxLength": 255 + }, + "vat_number": { + "type": "string", + "maxLength": 20 + }, + "phone": { + "type": "string", + "maxLength": 30 + }, + "street1": { + "type": "string", + "maxLength": 255 + }, + "street2": { + "type": "string", + "maxLength": 255 + }, + "city": { + "type": "string", + "maxLength": 255 + }, + "region": { + "type": "string", + "maxLength": 255, + "description": "State or province." + }, + "postal_code": { + "type": "string", + "maxLength": 20, + "description": "Zip or postal code." + }, + "country": { + "type": "string", + "maxLength": 50, + "description": "Country, 2-letter ISO 3166-1 alpha-2 code." + }, + "geo_code": { + "type": ["null", "string"] + }, + "created_at": { + "type": "string", + "title": "Created at", + "format": "date-time", + "readOnly": true + }, + "updated_at": { + "type": "string", + "title": "Updated at", + "format": "date-time", + "readOnly": true + } + } +} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/tax_info.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/tax_info.json new file mode 100644 index 000000000000..13502eb46241 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/tax_info.json @@ -0,0 +1,44 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": ["null", "object"], + "properties": { + "type": { + "type": ["null", "string"] + }, + "region": { + "type": ["null", "string"] + }, + "rate": { + "type": ["null", "number"] + }, + "tax_details": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "type": { + "type": ["null", "string"] + }, + "region": { + "type": ["null", "string"] + }, + "rate": { + "type": ["null", "number"] + }, + "tax": { + "type": ["null", "number"] + }, + "name": { + "type": ["null", "string"] + }, + "level": { + "type": ["null", "string"] + }, + "billable": { + "type": ["null", "boolean"] + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/unique_coupons.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/unique_coupons.json new file mode 100644 index 000000000000..1d2a0a3a117c --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/unique_coupons.json @@ -0,0 +1,66 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": ["null", "object"], + 
"description": "A unique coupon code for a bulk coupon.", + "properties": { + "id": { + "type": "string", + "title": "Unique Coupon Code ID", + "readOnly": true, + "maxLength": 13 + }, + "object": { + "type": "string" + }, + "code": { + "type": "string", + "title": "Coupon code", + "description": "The code the customer enters to redeem the coupon.", + "maxLength": 256 + }, + "state": { + "type": ["null", "string"], + "title": "State", + "description": "Indicates if the unique coupon code is redeemable or why not.", + "maxLength": 256 + }, + "bulk_coupon_id": { + "type": ["null", "string"], + "title": "Bulk Coupon ID", + "description": "The Coupon ID of the parent Bulk Coupon", + "readOnly": true, + "maxLength": 13 + }, + "bulk_coupon_code": { + "type": ["null", "string"], + "title": "Bulk Coupon code", + "description": "The Coupon code of the parent Bulk Coupon", + "maxLength": 256 + }, + "created_at": { + "type": "string", + "title": "Created at", + "format": "date-time", + "readOnly": true + }, + "updated_at": { + "type": "string", + "title": "Updated at", + "format": "date-time", + "readOnly": true + }, + "redeemed_at": { + "type": ["null", "string"], + "title": "Redeemed at", + "description": "The date and time the unique coupon code was redeemed.", + "format": "date-time", + "readOnly": true + }, + "expired_at": { + "type": ["null", "string"], + "title": "Expired at", + "description": "The date and time the coupon was expired early or reached its `max_redemptions`.", + "format": "date-time" + } + } +} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/users.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/users.json new file mode 100644 index 000000000000..70b7c49ac122 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shared/users.json @@ -0,0 +1,33 @@ +{ + "type": ["null", "object"], + "properties": { + "id": { + "type": "string" + }, + "object": { + "type": ["null", "string"] + }, + "email": { + "type": ["null", "string"] + }, + "first_name": { + "type": ["null", "string"] + }, + "last_name": { + "type": ["null", "string"] + }, + "time_zone": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "string"], + "format": "date-time", + "airbyte_type": "timestamp_without_timezone" + }, + "deleted_at": { + "type": ["null", "string"], + "format": "date-time", + "airbyte_type": "timestamp_without_timezone" + } + } +} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shipping_addresses.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shipping_addresses.json new file mode 100644 index 000000000000..bcbb555d47a4 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shipping_addresses.json @@ -0,0 +1,3 @@ +{ + "$ref": "shipping_addresses.json" +} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shipping_methods.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shipping_methods.json new file mode 100644 index 000000000000..23ba22deb2a2 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/shipping_methods.json @@ -0,0 +1,54 @@ +{ + "$schema": "http://json-schema.org/schema#", + "type": "object", + "properties": { + "id": { + "type": "string", + "title": "Shipping Method ID", + "readOnly": true, + "maxLength": 13 + }, + "code": { + "type": "string", + "title": "Code", + 
"description": "The internal name used identify the shipping method.", + "maxLength": 50 + }, + "name": { + "type": "string", + "title": "Name", + "description": "The name of the shipping method displayed to customers.", + "maxLength": 100 + }, + "accounting_code": { + "type": "string", + "title": "Accounting Code", + "description": "Accounting code for shipping method.", + "maxLength": 20 + }, + "tax_code": { + "type": "string", + "title": "Tax code", + "description": "Used by Avalara, Vertex, and Recurly\u2019s built-in tax feature. The tax\ncode values are specific to each tax system. If you are using Recurly\u2019s\nbuilt-in taxes the values are:\n\n- `FR` \u2013 Common Carrier FOB Destination\n- `FR022000` \u2013 Common Carrier FOB Origin\n- `FR020400` \u2013 Non Common Carrier FOB Destination\n- `FR020500` \u2013 Non Common Carrier FOB Origin\n- `FR010100` \u2013 Delivery by Company Vehicle Before Passage of Title\n- `FR010200` \u2013 Delivery by Company Vehicle After Passage of Title\n- `NT` \u2013 Non-Taxable\n", + "maxLength": 50 + }, + "created_at": { + "type": "string", + "format": "date-time", + "title": "Created at", + "readOnly": true + }, + "updated_at": { + "type": "string", + "format": "date-time", + "title": "Last updated at", + "readOnly": true + }, + "deleted_at": { + "type": "string", + "format": "date-time", + "title": "Deleted at", + "readOnly": true + } + } +} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/subscriptions.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/subscriptions.json new file mode 100644 index 000000000000..27c3b0ad4ea1 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/subscriptions.json @@ -0,0 +1,368 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"], + "maxLength": 13 + }, + "object": { + "type": ["null", "string"] + }, + "uuid": { + "type": ["null", "string"], + "maxLength": 32 + }, + "account": { + "type": ["null", "object"], + "properties": { + "id": { + "type": ["null", "string"], + "maxLength": 13 + }, + "object": { + "type": ["null", "string"] + }, + "code": { + "type": ["null", "string"], + "maxLength": 256 + }, + "email": { + "type": ["null", "string"], + "maxLength": 256 + }, + "first_name": { + "type": ["null", "string"] + }, + "last_name": { + "type": ["null", "string"] + }, + "company": { + "type": ["null", "string"] + }, + "parent_account_id": { + "type": ["null", "string"] + }, + "bill_to": { + "type": ["null", "string"] + }, + "dunning_campaign_id": { + "type": ["null", "string"] + } + } + }, + "plan": { + "type": "object", + "properties": { + "id": { + "type": ["null", "string"], + "maxLength": 13 + }, + "object": { + "type": ["null", "string"] + }, + "code": { + "type": ["null", "string"], + "maxLength": 256 + }, + "name": { + "type": ["null", "string"] + } + } + }, + "state": { + "type": ["null", "string"], + "maxLength": 256 + }, + "shipping": { + "type": ["null", "object"], + "properties": { + "object": { + "type": ["null", "string"] + }, + "address": { + "$ref": "shipping_addresses.json" + }, + "method": { + "type": ["null", "object"], + "properties": { + "id": { + "type": "string", + "title": "Shipping Method ID", + "readOnly": true, + "maxLength": 13 + }, + "object": { + "type": ["null", "string"] + }, + "code": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + } + } + }, + "amount": { + "type": ["null", 
"number"] + } + } + }, + "coupon_redemptions": { "$ref": "coupon_redemptions.json" }, + "pending_change": { + "type": ["null", "object"], + "title": "Subscription Change", + "properties": { + "id": { + "type": "string", + "title": "Subscription Change ID", + "description": "The ID of the Subscription Change.", + "maxLength": 13 + }, + "subscription_id": { + "type": "string", + "title": "Subscription ID", + "description": "The ID of the subscription that is going to be changed.", + "maxLength": 13 + }, + "activate_at": { + "type": "string", + "format": "date-time", + "title": "Activated at", + "readOnly": true + }, + "activated": { + "type": "boolean", + "title": "Activated?", + "description": "Returns `true` if the subscription change is activated." + }, + "created_at": { + "type": "string", + "format": "date-time", + "title": "Created at", + "readOnly": true + }, + "updated_at": { + "type": "string", + "format": "date-time", + "title": "Updated at", + "readOnly": true + }, + "deleted_at": { + "type": "string", + "format": "date-time", + "title": "Deleted at", + "readOnly": true + } + } + }, + "current_period_started_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "current_period_ends_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "current_term_started_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "current_term_ends_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "trial_started_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "trial_ends_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "remaining_billing_cycles": { + "type": ["null", "number"] + }, + "total_billing_cycles": { + "type": ["null", "number"] + }, + "renewal_billing_cycles": { + "type": ["null", "number"] + }, + "auto_renew": { + "type": ["null", "boolean"] + }, + "ramp_intervals": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "starting_billing_cycle": { + "type": ["null", "integer"] + }, + "remaining_billing_cycles": { + "type": ["null", "integer"] + }, + "starting_on": { + "type": ["null", "string"], + "format": "date-time" + }, + "ending_on": { + "type": ["null", "string"], + "format": "date-time" + }, + "unit_amount": { + "type": ["null", "number"] + } + } + } + }, + "paused_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "remaining_pause_cycles": { + "type": ["null", "number"] + }, + "currency": { + "type": ["null", "string"], + "maxLength": 3 + }, + "revenue_schedule_type": { + "type": ["null", "string"], + "maxLength": 256 + }, + "unit_amount": { + "type": ["null", "number"] + }, + "tax_inclusive": { + "type": ["null", "boolean"] + }, + "quantity": { + "type": ["null", "number"] + }, + "add_ons": { + "type": ["null", "array"], + "title": "Add-ons", + "items": { + "type": ["null", "object"], + "title": "Subscription Add-on", + "description": "This links an Add-on to a specific Subscription.", + "properties": { + "id": { + "type": "string", + "title": "Subscription Add-on ID", + "maxLength": 13 + }, + "code": { + "type": "string", + "title": "Add-on code", + "description": "The unique identifier for the add-on within its plan.", + "maxLength": 50 + } + } + } + }, + "add_ons_total": { + "type": ["null", "number"] + }, + "subtotal": { + "type": ["null", "number"] + }, + "tax": { + "type": ["null", "number"] + }, + "tax_info": { + "$ref": "tax_info.json" + }, + "total": { + "type": ["null", "number"] + }, + "collection_method": { + "type": 
["null", "string"], + "maxLength": 256 + }, + "po_number": { + "type": ["null", "string"], + "maxLength": 256 + }, + "net_terms": { + "type": ["null", "number"] + }, + "net_terms_type": { + "type": ["null", "string"] + }, + "terms_and_conditions": { + "type": ["null", "string"], + "maxLength": 16384 + }, + "customer_notes": { + "type": ["null", "string"], + "maxLength": 1024 + }, + "expiration_reason": { + "type": ["null", "string"], + "maxLength": 1024 + }, + "custom_fields": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "name": { + "type": ["null", "string"] + }, + "value": { + "type": ["null", "string"] + } + } + } + }, + "created_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "updated_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "activated_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "canceled_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "expires_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "bank_account_authorized_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "gateway_code": { + "type": ["null", "string"], + "maxLength": 256 + }, + "billing_info_id": { + "type": ["null", "string"], + "maxLength": 13 + }, + "active_invoice_id": { + "type": ["null", "string"] + }, + "started_with_gift": { + "type": ["null", "boolean"] + }, + "converted_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "action_result": { + "type": ["null", "object"], + "additionalProperties": true + } + } +} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/transactions.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/transactions.json new file mode 100644 index 000000000000..7b06e0d76cb5 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/transactions.json @@ -0,0 +1,345 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "additionalProperties": true, + "properties": { + "id": { + "type": ["null", "string"], + "maxLength": 13 + }, + "object": { + "type": ["null", "string"] + }, + "uuid": { + "type": ["null", "string"], + "maxLength": 32 + }, + "original_transaction_id": { + "type": ["null", "string"], + "maxLength": 13 + }, + "account": { + "$ref": "account_details.json" + }, + "invoice": { + "type": ["null", "object"], + "properties": { + "id": { + "type": ["null", "string"], + "maxLength": 13 + }, + "number": { + "type": ["null", "string"], + "maxLength": 256 + }, + "business_entity_id": { + "type": ["null", "string"] + }, + "type": { + "type": ["null", "string"] + }, + "state": { + "type": ["null", "string"] + } + } + }, + "voided_by_invoice": { + "type": ["null", "object"], + "properties": { + "id": { + "type": ["null", "string"], + "maxLength": 13 + }, + "object": { + "type": ["null", "string"] + }, + "number": { + "type": ["null", "string"], + "maxLength": 256 + }, + "business_entity_id": { + "type": ["null", "string"] + }, + "type": { + "type": ["null", "string"] + }, + "state": { + "type": ["null", "string"] + } + } + }, + "subscription_ids": { + "type": "array", + "items": { + "type": ["null", "string"], + "maxLength": 13 + } + }, + "type": { + "type": ["null", "string"], + "maxLength": 256 + }, + "origin": { + "type": ["null", "string"], + "maxLength": 256 + }, + "currency": { + "type": ["null", "string"], + "maxLength": 3 + }, + "amount": { + "type": ["null", "number"] + }, + 
"status": { + "type": ["null", "string"], + "maxLength": 256 + }, + "success": { + "type": ["null", "boolean"] + }, + "backup_payment_method_used": { + "type": ["null", "boolean"] + }, + "refunded": { + "type": ["null", "boolean"] + }, + "billing_address": { + "type": "object", + "properties": { + "first_name": { + "type": ["null", "string"], + "maxLength": 256 + }, + "last_name": { + "type": ["null", "string"], + "maxLength": 256 + }, + "phone": { + "type": ["null", "string"], + "maxLength": 256 + }, + "street1": { + "type": ["null", "string"], + "maxLength": 256 + }, + "street2": { + "type": ["null", "string"], + "maxLength": 256 + }, + "city": { + "type": ["null", "string"], + "maxLength": 256 + }, + "region": { + "type": ["null", "string"], + "maxLength": 256 + }, + "postal_code": { + "type": ["null", "string"], + "maxLength": 256 + }, + "country": { + "type": ["null", "string"], + "maxLength": 256 + }, + "geo_code": { + "type": ["null", "string"] + } + } + }, + "collection_method": { + "type": ["null", "string"], + "maxLength": 256 + }, + "payment_method": { + "type": "object", + "properties": { + "object": { + "type": ["null", "string"] + }, + "card_type": { + "type": ["null", "string"], + "maxLength": 256 + }, + "first_six": { + "type": ["null", "string"], + "maxLength": 6 + }, + "last_four": { + "type": ["null", "string"], + "maxLength": 4 + }, + "last_two": { + "type": ["null", "string"], + "maxLength": 2 + }, + "exp_month": { + "type": ["null", "number"] + }, + "exp_year": { + "type": ["null", "number"] + }, + "gateway_token": { + "type": ["null", "string"], + "maxLength": 256 + }, + "cc_bin_country": { + "type": ["null", "string"] + }, + "gateway_code": { + "type": ["null", "string"], + "maxLength": 256 + }, + "billing_agreement_id": { + "type": ["null", "string"], + "maxLength": 256 + }, + "name_on_account": { + "type": ["null", "string"], + "maxLength": 256 + }, + "account_type": { + "type": ["null", "string"], + "maxLength": 256 + }, + "routing_number": { + "type": ["null", "string"], + "maxLength": 256 + }, + "routing_number_bank": { + "type": ["null", "string"], + "maxLength": 256 + }, + "username": { + "type": ["null", "string"] + } + } + }, + "ip_address_v4": { + "type": ["null", "string"], + "maxLength": 256 + }, + "ip_address_country": { + "type": ["null", "string"], + "maxLength": 256 + }, + "status_code": { + "type": ["null", "string"], + "maxLength": 256 + }, + "status_message": { + "type": ["null", "string"], + "maxLength": 1024 + }, + "customer_message": { + "type": ["null", "string"], + "maxLength": 1024 + }, + "customer_message_locale": { + "type": ["null", "string"], + "maxLength": 12 + }, + "payment_gateway": { + "type": "object", + "properties": { + "id": { + "type": ["null", "string"], + "maxLength": 13 + }, + "object": { + "type": ["null", "string"] + }, + "type": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + } + } + }, + "gateway_message": { + "type": ["null", "string"], + "maxLength": 256 + }, + "gateway_reference": { + "type": ["null", "string"], + "maxLength": 256 + }, + "gateway_approval_code": { + "type": ["null", "string"], + "maxLength": 256 + }, + "gateway_response_code": { + "type": ["null", "string"], + "maxLength": 256 + }, + "gateway_response_time": { + "type": ["null", "number"] + }, + "gateway_response_values": { + "type": "object" + }, + "cvv_check": { + "type": ["null", "string"], + "maxLength": 256 + }, + "avs_check": { + "type": ["null", "string"], + "maxLength": 256 + }, + "created_at": { + "type": 
["null", "string"], + "format": "date-time" + }, + "updated_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "voided_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "collected_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "action_result": { + "type": ["null", "object"], + "additionalProperties": true + }, + "vat_number": { + "type": ["null", "string"] + }, + "fraud_info": { + "type": ["null", "object"], + "properties": { + "object": { + "type": ["null", "string"] + }, + "score": { + "type": ["null", "integer"] + }, + "decision": { + "type": ["null", "string"] + }, + "reference": { + "type": ["null", "string"] + }, + "risk_rules_triggered": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "code": { + "type": ["null", "string"] + }, + "message": { + "type": ["null", "string"] + } + } + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/unique_coupons.json b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/unique_coupons.json new file mode 100644 index 000000000000..0458768570ff --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/schemas/unique_coupons.json @@ -0,0 +1,3 @@ +{ + "$ref": "unique_coupons.json" +} diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/source.py b/airbyte-integrations/connectors/source-recurly/source_recurly/source.py new file mode 100644 index 000000000000..e7bd2e9a7e17 --- /dev/null +++ b/airbyte-integrations/connectors/source-recurly/source_recurly/source.py @@ -0,0 +1,80 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from typing import Any, List, Mapping, Optional, Tuple + +from airbyte_cdk.logger import AirbyteLogger +from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.streams import Stream +from recurly import ApiError, Client + +from .streams import ( + AccountCouponRedemptions, + AccountNotes, + Accounts, + AddOns, + BillingInfos, + Coupons, + CreditPayments, + ExportDates, + Invoices, + LineItems, + MeasuredUnits, + Plans, + ShippingAddresses, + ShippingMethods, + Subscriptions, + Transactions, + UniqueCoupons, +) + + +class SourceRecurly(AbstractSource): + """ + Recurly API Reference: https://developers.recurly.com/api/v2021-02-25/ + """ + + def __init__(self): + super(SourceRecurly, self).__init__() + + self.__client = None + + def check_connection(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> Tuple[bool, Optional[Any]]: + try: + # Checking the API key by trying a test API call to get the first account + self._client(config["api_key"]).list_accounts().first() + return True, None + except ApiError as err: + return False, err.args[0] + + def streams(self, config: Mapping[str, Any]) -> List[Stream]: + client = self._client(api_key=config["api_key"]) + + args = {"client": client, "begin_time": config.get("begin_time"), "end_time": config.get("end_time")} + + return [ + Accounts(**args), + AccountCouponRedemptions(**args), + AccountNotes(**args), + AddOns(**args), + BillingInfos(**args), + Coupons(**args), + CreditPayments(**args), + ExportDates(**args), + Invoices(**args), + LineItems(**args), + MeasuredUnits(**args), + Plans(**args), + ShippingAddresses(**args), + ShippingMethods(**args), + Subscriptions(**args), + Transactions(**args), + UniqueCoupons(**args), + ] + + def _client(self, api_key: str) -> Client: + if not self.__client: + self.__client = 
Client(api_key=api_key)
+
+        return self.__client
diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/spec.json b/airbyte-integrations/connectors/source-recurly/source_recurly/spec.json
new file mode 100644
index 000000000000..d2135eb06551
--- /dev/null
+++ b/airbyte-integrations/connectors/source-recurly/source_recurly/spec.json
@@ -0,0 +1,33 @@
+{
+  "documentationUrl": "https://docs.airbyte.com/integrations/sources/recurly",
+  "connectionSpecification": {
+    "$schema": "http://json-schema.org/draft-07/schema#",
+    "title": "Recurly Source Spec",
+    "type": "object",
+    "required": ["api_key"],
+    "additionalProperties": true,
+    "properties": {
+      "api_key": {
+        "type": "string",
+        "title": "API Key",
+        "airbyte_secret": true,
+        "description": "Recurly API Key. See the docs for more information on how to generate this key.",
+        "order": 1
+      },
+      "begin_time": {
+        "type": "string",
+        "description": "ISO8601 timestamp from which the replication from the Recurly API will start.",
+        "examples": ["2021-12-01T00:00:00"],
+        "pattern": "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$",
+        "order": 2
+      },
+      "end_time": {
+        "type": "string",
+        "description": "ISO8601 timestamp at which the replication from the Recurly API will stop. Records after that date won't be imported.",
+        "examples": ["2021-12-01T00:00:00"],
+        "pattern": "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$",
+        "order": 3
+      }
+    }
+  }
+}
diff --git a/airbyte-integrations/connectors/source-recurly/source_recurly/streams.py b/airbyte-integrations/connectors/source-recurly/source_recurly/streams.py
new file mode 100644
index 000000000000..f7526476f4ae
--- /dev/null
+++ b/airbyte-integrations/connectors/source-recurly/source_recurly/streams.py
@@ -0,0 +1,337 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
+
+import re
+from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Union
+
+from airbyte_cdk.models import SyncMode
+from airbyte_cdk.sources.streams import Stream
+from recurly import Client
+from recurly.errors import MissingFeatureError, NotFoundError, ValidationError
+
+DEFAULT_PRIMARY_KEY = "id"
+DEFAULT_CURSOR = "updated_at"
+DEFAULT_SORT_KEY = "updated_at"
+DEFAULT_LIMIT = 200
+
+BEGIN_TIME_PARAM = "begin_time"
+END_TIME_PARAM = "end_time"
+
+CAMEL_CASE_PATTERN = re.compile(r"(?<!^)(?=[A-Z])")
+
+
+class BaseStream(Stream):
+    def __init__(self, client: Client, begin_time: str = None, end_time: str = None):
+        super().__init__()
+
+        self._client = client
+        self.begin_time = begin_time
+        self.end_time = end_time
+
+    @property
+    def client_method_name(self) -> str:
+        """
+        Returns the Recurly client method to call to retrieve the resource data.
+
+        :return: The Recurly client method to call for the Recurly resource. For example `list_accounts` for the
+        Recurly `accounts` resource
+        :rtype: str
+        """
+        return f"list_{self.name}"
+
+    @property
+    def primary_key(self) -> Optional[Union[str, List[str], List[List[str]]]]:
+        """
+        The Recurly resource primary key. Most Recurly resources have `id` as their primary key. Resources with a
+        different primary key or a composite key can override this property.
+
+        :return: The Recurly resource primary key(s)
+        :rtype: Either `str`, list(str) or list(list(str))
+        """
+        return DEFAULT_PRIMARY_KEY
+
+    @property
+    def sort_key(self) -> str:
+        """
+        Sets the sort key when calling the Recurly API. Most of the Recurly API resources accept a `params` dictionary
+        with a `sort` key.
For more details:
+        https://developers.recurly.com/api/v2021-02-25/#section/Getting-Started/Pagination#query-parameters
+
+        :return: The Recurly resource sort key
+        :rtype: `str`
+        """
+        return DEFAULT_SORT_KEY
+
+    @property
+    def limit(self) -> int:
+        """
+        Returns the maximum number of records to request per API call
+        """
+        return DEFAULT_LIMIT
+
+    @property
+    def cursor_field(self) -> Union[str, List[str]]:
+        """
+        Returns the cursor field to be used in the `incremental` sync mode.
+
+        By default, the `incremental` sync mode is enabled for all resources using the `begin_time` field. Any
+        Recurly resource that does not support the `incremental` sync mode (such as `export_dates`) or that uses
+        another cursor can override this property. Note that `begin_time` is not a field on any of the resources;
+        it is just a query parameter sent in the API request and can be considered an alias of the `updated_at`
+        field. That's why, when calling the Recurly API, the cursor field is renamed to `begin_time` by default in
+        :func:`read_records`. For more details:
+        https://developers.recurly.com/api/v2021-02-25/#section/Getting-Started/Pagination#query-parameters
+
+        :return: The cursor field(s) to be used in the `incremental` sync mode.
+        :rtype: Union[str, List[str]]
+        """
+        return DEFAULT_CURSOR
+
+    @property
+    def default_params(self) -> dict:
+        """
+        Returns the parameters to be sent together with the API call to Recurly
+        """
+        return {"order": "asc", "sort": self.sort_key, "limit": self.limit}
+
+    def read_records(
+        self,
+        sync_mode: SyncMode,
+        cursor_field: List[str] = None,
+        stream_slice: Mapping[str, any] = None,
+        stream_state: Mapping[str, Any] = None,
+    ) -> Iterable[Mapping[str, Any]]:
+        """
+        The method to be called to retrieve records from the Recurly API. It uses the Recurly Python client.
+        Resources having different logic (such as `export_dates`) can override this method.
+
+        :return: Iterable of dictionaries representing the Recurly resource
+        :rtype: Iterable
+        """
+        params = self.default_params
+
+        self.begin_time = (stream_state and stream_state[self.cursor_field]) or self.begin_time
+
+        if self.begin_time:
+            params.update({BEGIN_TIME_PARAM: self.begin_time})
+
+        if self.end_time:
+            params.update({END_TIME_PARAM: self.end_time})
+
+        items = getattr(self._client, self.client_method_name)(params=params).items()
+
+        # Iterate through the records returned by the Recurly client and convert each one to a dict
+        for item in items:
+            yield self._item_to_dict(item)
+
+    def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]):
+        """
+        Compares the cursor value in the current stream state with the cursor value of the latest record and returns
+        whichever of the two is the more recent (i.e. the maximum).
+
+        :return: The new value of the cursor
+        :rtype: dict
+        """
+        current_updated_at = (current_stream_state or {}).get(self.cursor_field, "")
+        latest_record_updated_at = latest_record[self.cursor_field].isoformat()
+
+        return {self.cursor_field: max(latest_record_updated_at, current_updated_at)}
+
+    def _item_to_dict(self, resource):
+        """
+        Recursively converts the Recurly resource object to `dict`
+        """
+        if isinstance(resource, dict):
+            return dict((key, self._item_to_dict(value)) for key, value in resource.items())
+        elif hasattr(resource, "__iter__") and not isinstance(resource, str):
+            return [self._item_to_dict(value) for value in resource]
+        elif hasattr(resource, "__dict__"):
+            return dict([(key, self._item_to_dict(value)) for key, value in resource.__dict__.items()])
+        else:
+            return resource
+
+
+class BaseAccountResourceStream(BaseStream):
+    @property
+    def account_params(self) -> dict:
+        """
+        Returns the account API call params
+        """
+        return self.default_params
+
+    def read_records(
+        self,
+        sync_mode: SyncMode,
+        cursor_field: List[str] = None,
+        stream_slice: Mapping[str, any] = None,
+        stream_state: Mapping[str, Any] = None,
+    ) -> Iterable[Mapping[str, Any]]:
+        """
+        The method to be called to retrieve the account sub-resources, such as the account coupon redemptions,
+        shipping addresses, etc., from Recurly. To retrieve an account's sub-resources, a separate call listing all
+        the accounts is made first, so that each `account_id` can be passed to the sub-resource API call.
+
+        :return: Iterable of dictionaries representing the Recurly resource
+        :rtype: Iterable
+        """
+        account_params = self.account_params
+        params = self.default_params
+
+        self.begin_time = (stream_state and stream_state.get(self.cursor_field)) or self.begin_time
+
+        if self.begin_time:
+            account_params.update({BEGIN_TIME_PARAM: self.begin_time})
+            params.update({BEGIN_TIME_PARAM: self.begin_time})
+
+        if self.end_time:
+            account_params.update({END_TIME_PARAM: self.end_time})
+            params.update({END_TIME_PARAM: self.end_time})
+
+        # Call the Recurly client methods
+        accounts = self._client.list_accounts(params=account_params).items()
+
+        # If the API call throws the Recurly client's `MissingFeatureError`, skip loading the resources from Recurly
+        # and log a warning
+        try:
+            for account in accounts:
+                items = getattr(self._client, self.client_method_name)(params=params, account_id=account.id).items()
+                for item in items:
+                    yield self._item_to_dict(item)
+        except MissingFeatureError as error:
+            super().logger.warning(f"Missing feature error {error}")
+
+
+class Accounts(BaseStream):
+    pass
+
+
+class AccountCouponRedemptions(BaseAccountResourceStream):
+    pass
+
+
+class AccountNotes(BaseAccountResourceStream):
+    @property
+    def sort_key(self) -> str:
+        return "created_at"
+
+    @property
+    def cursor_field(self) -> Union[str, List[str]]:
+        return "created_at"
+
+    @property
+    def account_params(self) -> dict:
+        return {"order": "asc", "sort": DEFAULT_SORT_KEY, "limit": self.limit}
+
+
+class AddOns(BaseStream):
+    pass
+
+
+class BillingInfos(BaseAccountResourceStream):
+    pass
+
+
+class Coupons(BaseStream):
+    pass
+
+
+class CreditPayments(BaseStream):
+    pass
+
+
+class ExportDates(BaseStream):
+    cursor_field = []  # Disable `incremental` sync for `export_dates` Recurly API call
+    primary_key = None  # There are no primary keys for automated exports
+
+    def read_records(
+        self,
+        sync_mode: SyncMode,
+        cursor_field: List[str] = None,
+        stream_slice: Mapping[str, any] = None,
stream_state: Mapping[str, Any] = None,
+    ) -> Iterable[Mapping[str, Any]]:
+        """
+        Reads the `export_dates` response from Recurly. This is a special API call different from other Recurly
+        resources and is hence treated differently.
+        """
+        yield {"dates": self._client.get_export_dates().dates or [""]}
+
+
+class Invoices(BaseStream):
+    pass
+
+
+class LineItems(BaseStream):
+    pass
+
+
+class MeasuredUnits(BaseStream):
+    client_method_name = "list_measured_unit"
+
+
+class Plans(BaseStream):
+    pass
+
+
+class ShippingAddresses(BaseAccountResourceStream):
+    pass
+
+
+class ShippingMethods(BaseStream):
+    pass
+
+
+class Subscriptions(BaseStream):
+    pass
+
+
+class Transactions(BaseStream):
+    pass
+
+
+class UniqueCoupons(BaseStream):
+    def read_records(
+        self,
+        sync_mode: SyncMode,
+        cursor_field: List[str] = None,
+        stream_slice: Mapping[str, any] = None,
+        stream_state: Mapping[str, Any] = None,
+    ) -> Iterable[Mapping[str, Any]]:
+        """
+        The method to be called to retrieve the unique coupon codes from Recurly. To retrieve them, a separate call
+        listing all the coupons is made first, so that each `coupon_id` can be passed to the unique coupon codes
+        API call.
+
+        :return: Iterable of dictionaries representing the Recurly resource
+        :rtype: Iterable
+        """
+        params = self.default_params
+
+        self.begin_time = (stream_state and stream_state[self.cursor_field]) or self.begin_time
+
+        if self.begin_time:
+            params.update({BEGIN_TIME_PARAM: self.begin_time})
+
+        if self.end_time:
+            params.update({END_TIME_PARAM: self.end_time})
+
+        # List all coupons
+        coupons = self._client.list_coupons(params=params).items()
+
+        for coupon in coupons:
+            try:
+                items = self._client.list_unique_coupon_codes(params=params, coupon_id=coupon.id).items()
+                for item in items:
+                    yield self._item_to_dict(item)
+            except (NotFoundError, ValidationError):
+                pass
diff --git a/airbyte-integrations/connectors/source-recurly/unit_tests/__init__.py b/airbyte-integrations/connectors/source-recurly/unit_tests/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/airbyte-integrations/connectors/source-recurly/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-recurly/unit_tests/test_streams.py
new file mode 100644
index 000000000000..739d7eb46b18
--- /dev/null
+++ b/airbyte-integrations/connectors/source-recurly/unit_tests/test_streams.py
@@ -0,0 +1,203 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+# + +import unittest +from datetime import datetime, timedelta +from unittest.mock import Mock + +from source_recurly.streams import ( + BEGIN_TIME_PARAM, + DEFAULT_CURSOR, + DEFAULT_LIMIT, + END_TIME_PARAM, + AccountCouponRedemptions, + AccountNotes, + Accounts, + AddOns, + BaseStream, + BillingInfos, + Coupons, + CreditPayments, + ExportDates, + Invoices, + LineItems, + MeasuredUnits, + Plans, + ShippingAddresses, + ShippingMethods, + Subscriptions, + Transactions, + UniqueCoupons, +) + +METHOD_NAME = "list_resource" + + +class TestStream(BaseStream): + name = "test" + client_method_name = METHOD_NAME + + +class TestStreams(unittest.TestCase): + def setUp(self) -> None: + self.client_mock = Mock() + getattr(self.client_mock, METHOD_NAME).return_value.items.return_value = iter([None]) + + self.sync_mode_mock = Mock() + + self.params = {"order": "asc", "sort": DEFAULT_CURSOR, "limit": DEFAULT_LIMIT} + + def test_read_records(self): + stream = TestStream(client=self.client_mock) + + next(iter(stream.read_records(self.sync_mode_mock))) + + getattr(self.client_mock, METHOD_NAME).assert_called_once_with(params=self.params) + + getattr(self.client_mock, METHOD_NAME).return_value.items.assert_called_once() + + def test_read_records_with_begin_time(self): + begin_time_mock = Mock() + stream = TestStream(client=self.client_mock, begin_time=begin_time_mock) + + next(iter(stream.read_records(self.sync_mode_mock))) + + params = {**self.params, BEGIN_TIME_PARAM: begin_time_mock} + + getattr(self.client_mock, METHOD_NAME).assert_called_once_with(params=params) + + def test_read_records_with_end_time(self): + end_time_mock = Mock() + stream = TestStream(client=self.client_mock, end_time=end_time_mock) + + next(iter(stream.read_records(self.sync_mode_mock))) + + params = {**self.params, END_TIME_PARAM: end_time_mock} + + getattr(self.client_mock, METHOD_NAME).assert_called_once_with(params=params) + + def test_get_updated_state(self): + stream = TestStream(client=self.client_mock) + + cursor_field = stream.cursor_field + + now = datetime.now() + yesterday = now - timedelta(days=1) + + current_state = {cursor_field: yesterday.isoformat()} + latest_record = {cursor_field: now} + + expected_date = {cursor_field: now.isoformat()} + + assert stream.get_updated_state(current_state, latest_record) == expected_date + + def test_accounts_methods_client_method_name(self): + stream = Accounts(client=self.client_mock) + + assert stream.client_method_name == "list_accounts" + + def test_account_coupon_redemptions_read_records(self): + stream = AccountCouponRedemptions(client=self.client_mock) + account_id_mock = Mock() + account_mock = Mock(id=account_id_mock) + self.client_mock.list_accounts.return_value.items.return_value = iter([account_mock]) + self.client_mock.list_account_coupon_redemptions.return_value.items.return_value = iter([None]) + + next(iter(stream.read_records(self.sync_mode_mock))) + + self.client_mock.list_accounts.assert_called_once() + self.client_mock.list_account_coupon_redemptions.assert_called_once_with(account_id=account_id_mock, params=self.params) + + def test_account_notes_read_records(self): + stream = AccountNotes(client=self.client_mock) + account_id_mock = Mock() + account_mock = Mock(id=account_id_mock) + self.client_mock.list_accounts.return_value.items.return_value = iter([account_mock]) + self.client_mock.list_account_notes.return_value.items.return_value = iter([None]) + + params = {"order": "asc", "sort": "created_at", "limit": DEFAULT_LIMIT} + + 
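+        # `params` above is what the notes sub-resource call is expected to receive: AccountNotes sorts the
+        # notes themselves by `created_at`, while the parent accounts listing keeps the default `updated_at`
+        # sort (see AccountNotes.account_params).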
next(iter(stream.read_records(self.sync_mode_mock)))
+
+        self.client_mock.list_accounts.assert_called_once()
+        self.client_mock.list_account_notes.assert_called_once_with(account_id=account_id_mock, params=params)
+
+    def test_add_ons_client_method_name(self):
+        stream = AddOns(client=self.client_mock)
+
+        assert stream.client_method_name == "list_add_ons"
+
+    def test_billing_infos_client_method_name(self):
+        stream = BillingInfos(client=self.client_mock)
+
+        assert stream.client_method_name == "list_billing_infos"
+
+    def test_coupons_methods_client_method_name(self):
+        stream = Coupons(client=self.client_mock)
+
+        assert stream.client_method_name == "list_coupons"
+
+    def test_credit_payments_client_method_name(self):
+        stream = CreditPayments(client=self.client_mock)
+
+        assert stream.client_method_name == "list_credit_payments"
+
+    def test_export_dates_read_records(self):
+        stream = ExportDates(client=self.client_mock)
+
+        next(iter(stream.read_records(self.sync_mode_mock)))
+
+        self.client_mock.get_export_dates.assert_called_once()
+
+    def test_invoices_methods_client_method_name(self):
+        stream = Invoices(client=self.client_mock)
+
+        assert stream.client_method_name == "list_invoices"
+
+    def test_line_items_methods_client_method_name(self):
+        stream = LineItems(client=self.client_mock)
+
+        assert stream.client_method_name == "list_line_items"
+
+    def test_measured_unit_client_method_name(self):
+        stream = MeasuredUnits(client=self.client_mock)
+
+        assert stream.client_method_name == "list_measured_unit"
+
+    def test_plans_client_method_name(self):
+        stream = Plans(client=self.client_mock)
+
+        assert stream.client_method_name == "list_plans"
+
+    def test_shipping_addresses_client_method_name(self):
+        stream = ShippingAddresses(client=self.client_mock)
+
+        assert stream.client_method_name == "list_shipping_addresses"
+
+    def test_shipping_methods_client_method_name(self):
+        stream = ShippingMethods(client=self.client_mock)
+
+        assert stream.client_method_name == "list_shipping_methods"
+
+    def test_subscriptions_client_method_name(self):
+        stream = Subscriptions(client=self.client_mock)
+
+        assert stream.client_method_name == "list_subscriptions"
+
+    def test_transactions_client_method_name(self):
+        stream = Transactions(client=self.client_mock)
+
+        assert stream.client_method_name == "list_transactions"
+
+    def test_unique_coupons_read_records(self):
+        stream = UniqueCoupons(client=self.client_mock)
+        coupon_id_mock = Mock()
+        coupon_mock = Mock(id=coupon_id_mock)
+        self.client_mock.list_coupons.return_value.items.return_value = iter([coupon_mock])
+        self.client_mock.list_unique_coupon_codes.return_value.items.return_value = iter([None])
+
+        next(iter(stream.read_records(self.sync_mode_mock)))
+
+        self.client_mock.list_coupons.assert_called_once()
+        self.client_mock.list_unique_coupon_codes.assert_called_once_with(coupon_id=coupon_id_mock, params=self.params)
diff --git a/docs/integrations/sources/recurly-migrations.md b/docs/integrations/sources/recurly-migrations.md
new file mode 100644
index 000000000000..251b70ae1d97
--- /dev/null
+++ b/docs/integrations/sources/recurly-migrations.md
@@ -0,0 +1,57 @@
+# Recurly Migration Guide
+
+## Upgrading to 1.0.0
+
+We recently rolled out an update to the Recurly connector using a newer version of our CDK, as well as introducing several additions to the existing stream schemas. Our aim with these updates is always to enhance the connector's functionality and provide you with a richer set of data to support your integration needs.
+
+While our intention was to make these updates as seamless as possible, we've observed that some users are experiencing issues during the "Discover" step of the sync process. This has led us to re-categorize the recent changes as breaking updates, despite not removing fields or altering property names within the schema.
+
+Once you have migrated to the new version, we highly recommend all users refresh their schemas and reset their data before resuming syncs.
+
+### For Airbyte Open Source: Update the local connector image
+
+Airbyte Open Source users with existing connections must manually update the connector image in their local registry before proceeding with the migration. To do so:
+
+1. Select **Settings** in the main navbar.
+   1. Select **Sources**.
+2. Find Recurly in the list of connectors.
+
+:::note
+You will see two versions listed, the current in-use version and the latest version available.
+:::
+
+3. Select **Change** to update your OSS version to the latest available version.
+
+### Update the connector version
+
+1. Select **Sources** in the main navbar.
+2. Select the instance of the connector you wish to upgrade.
+
+:::note
+Each instance of the connector must be updated separately. If you have created multiple instances of a connector, updating one will not affect the others.
+:::
+
+3. Select **Upgrade**.
+   1. Follow the prompt to confirm you are ready to upgrade to the new version.
+
+### Refresh schemas and reset data
+
+1. Select **Connections** in the main navbar.
+2. Select the connection(s) affected by the update.
+3. Select the **Replication** tab.
+   1. Select **Refresh source schema**.
+   2. Select **OK**.
+
+:::note
+Any detected schema changes will be listed for your review.
+:::
+
+4. Select **Save changes** at the bottom of the page.
+   1. Ensure the **Reset all streams** option is checked.
+5. Select **Save connection**.
+
+:::note
+This will reset the data in your destination and initiate a fresh sync.
+:::
+
+For more information on resetting your data in Airbyte, see [this page](https://docs.airbyte.com/operator-guides/reset).
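For readers tracing the connector code earlier in this patch, the following is a minimal sketch of how the `updated_at` cursor and the `begin_time` query parameter interact across two syncs. It is modeled on the unit tests in `test_streams.py` above; the mocked client and the asserted parameter values follow the defaults defined in `streams.py` and are illustrative only:

```python
from datetime import datetime, timedelta, timezone
from unittest.mock import Mock

from source_recurly.streams import Accounts

client = Mock()
client.list_accounts.return_value.items.return_value = iter([])
stream = Accounts(client=client)

# First sync: no saved state, so no begin_time query parameter is sent to Recurly.
list(stream.read_records(sync_mode=Mock()))
client.list_accounts.assert_called_once_with(params={"order": "asc", "sort": "updated_at", "limit": 200})

# The updated state keeps the more recent of the previous cursor and the latest record's cursor.
now = datetime.now(timezone.utc)
state = stream.get_updated_state({"updated_at": (now - timedelta(days=1)).isoformat()}, {"updated_at": now})
assert state == {"updated_at": now.isoformat()}

# Second sync: the saved `updated_at` cursor is forwarded to Recurly as the `begin_time` parameter.
client.list_accounts.reset_mock()
client.list_accounts.return_value.items.return_value = iter([])
list(stream.read_records(sync_mode=Mock(), stream_state=state))
client.list_accounts.assert_called_once_with(
    params={"order": "asc", "sort": "updated_at", "limit": 200, "begin_time": state["updated_at"]}
)
```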
diff --git a/docs/integrations/sources/recurly.md b/docs/integrations/sources/recurly.md index 2fac238fb528..66bc7eee0bb3 100644 --- a/docs/integrations/sources/recurly.md +++ b/docs/integrations/sources/recurly.md @@ -64,6 +64,7 @@ We recommend creating a restricted, read-only key specifically for Airbyte acces | Version | Date | Pull Request | Subject | |:--------|:-----------| :--------------------------------------------------------| :--------------------------------------------------------------------------------------- | +| 1.0.0 | 2024-03-01 | [35763](https://github.com/airbytehq/airbyte/pull/35763) | Re-introduce updated connector to catalog from archival repo | | 0.5.0 | 2024-02-22 | [34622](https://github.com/airbytehq/airbyte/pull/34622) | Republish connector using base image/Poetry, update schemas | | 0.4.1 | 2022-06-10 | [13685](https://github.com/airbytehq/airbyte/pull/13685) | Add state_checkpoint_interval to Recurly stream | | 0.4.0 | 2022-01-28 | [9866](https://github.com/airbytehq/airbyte/pull/9866) | Revamp Recurly Schema and add more resources | From 24c8b442226f771103d9055ba581051772995123 Mon Sep 17 00:00:00 2001 From: Marius Posta Date: Mon, 4 Mar 2024 09:56:41 -0800 Subject: [PATCH 067/172] airbyte-ci: add kotlin support, update license message template (#35784) --- LICENSE_SHORT | 2 +- .../util/ConfiguredCatalogUtil.kt | 12 +- .../typing_deduping/RawOnlySqlGenerator.kt | 12 +- .../DestinationInitialStatus.kt | 14 +- .../typing_deduping/NoOpRawTableTDLock.kt | 10 +- .../typing_deduping/TyperDeduperUtil.kt | 315 ++++++++++-------- .../typing_deduping/migrators/Migration.kt | 36 +- .../migrators/MinimumDestinationState.kt | 66 ++-- airbyte-ci/connectors/pipelines/README.md | 1 + .../airbyte_ci/format/configuration.py | 7 +- .../connectors/pipelines/pyproject.toml | 2 +- .../test_format/formatted_code/java.java | 2 +- .../test_format/formatted_code/python.py | 2 +- .../debezium/DebeziumMongoDbConnectorTest.kt | 202 ++++++++--- .../src/test/generator/MongoDbInsertClient.kt | 111 ++++-- spotless-maven-pom.xml | 9 + 16 files changed, 525 insertions(+), 278 deletions(-) diff --git a/LICENSE_SHORT b/LICENSE_SHORT index dc7ac8476682..ad0158e9b5c9 100644 --- a/LICENSE_SHORT +++ b/LICENSE_SHORT @@ -1 +1 @@ -Copyright (c) 2023 Airbyte, Inc., all rights reserved. +Copyright (c) 2024 Airbyte, Inc., all rights reserved. diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/util/ConfiguredCatalogUtil.kt b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/util/ConfiguredCatalogUtil.kt index 42183f51fcbe..d33bbabb6280 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/util/ConfiguredCatalogUtil.kt +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/util/ConfiguredCatalogUtil.kt @@ -1,3 +1,7 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + package io.airbyte.cdk.integrations.util import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog @@ -6,10 +10,10 @@ import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog * For streams in [catalog] which do not have a namespace specified, explicitly set their namespace * to the [defaultNamespace] */ - fun addDefaultNamespaceToStreams(catalog: ConfiguredAirbyteCatalog, defaultNamespace: String?) { - if (defaultNamespace == null) { - return - } +fun addDefaultNamespaceToStreams(catalog: ConfiguredAirbyteCatalog, defaultNamespace: String?) 
{ + if (defaultNamespace == null) { + return + } // TODO: This logic exists in all V2 destinations. // This is sad that if we forget to add this, there will be a null pointer during parseCatalog for (catalogStream in catalog.streams) { diff --git a/airbyte-cdk/java/airbyte-cdk/db-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/typing_deduping/RawOnlySqlGenerator.kt b/airbyte-cdk/java/airbyte-cdk/db-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/typing_deduping/RawOnlySqlGenerator.kt index 5c9963f5bad8..47a115ba4eba 100644 --- a/airbyte-cdk/java/airbyte-cdk/db-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/typing_deduping/RawOnlySqlGenerator.kt +++ b/airbyte-cdk/java/airbyte-cdk/db-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/typing_deduping/RawOnlySqlGenerator.kt @@ -1,21 +1,23 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + package io.airbyte.cdk.integrations.destination.jdbc.typing_deduping import io.airbyte.cdk.integrations.destination.NamingConventionTransformer -import io.airbyte.cdk.integrations.destination.jdbc.TableDefinition import io.airbyte.integrations.base.destination.typing_deduping.AirbyteType import io.airbyte.integrations.base.destination.typing_deduping.ColumnId -import io.airbyte.integrations.base.destination.typing_deduping.StreamConfig +import java.util.* import org.jooq.Condition import org.jooq.DataType import org.jooq.Field import org.jooq.SQLDialect -import java.util.* /** * Some Destinations do not support Typing and Deduping but have the updated raw table format * SqlGenerator implementations are only for "final" tables and are a required input for - * TyperDeduper classes. This implementation appeases that requirement but does not implement - * any "final" table operations. + * TyperDeduper classes. This implementation appeases that requirement but does not implement any + * "final" table operations. 
*/ class RawOnlySqlGenerator(private val namingTransformer: NamingConventionTransformer) : JdbcSqlGenerator(namingTransformer) { diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationInitialStatus.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationInitialStatus.kt index 713a79c9a771..ebddcc2805d5 100644 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationInitialStatus.kt +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/DestinationInitialStatus.kt @@ -4,9 +4,11 @@ package io.airbyte.integrations.base.destination.typing_deduping @JvmRecord -data class DestinationInitialStatus(val streamConfig: StreamConfig, - val isFinalTablePresent: Boolean, - val initialRawTableStatus: InitialRawTableStatus, - val isSchemaMismatch: Boolean, - val isFinalTableEmpty: Boolean, - val destinationState: DestinationState) +data class DestinationInitialStatus( + val streamConfig: StreamConfig, + val isFinalTablePresent: Boolean, + val initialRawTableStatus: InitialRawTableStatus, + val isSchemaMismatch: Boolean, + val isFinalTableEmpty: Boolean, + val destinationState: DestinationState +) diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoOpRawTableTDLock.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoOpRawTableTDLock.kt index 9c26e4d605b8..56d4bf53d0f2 100644 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoOpRawTableTDLock.kt +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/NoOpRawTableTDLock.kt @@ -1,10 +1,14 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + package io.airbyte.integrations.base.destination.typing_deduping import java.util.concurrent.TimeUnit import java.util.concurrent.locks.Condition import java.util.concurrent.locks.Lock -class NoOpRawTableTDLock: Lock { +class NoOpRawTableTDLock : Lock { override fun lock() {} override fun lockInterruptibly() {} @@ -17,6 +21,8 @@ class NoOpRawTableTDLock: Lock { override fun newCondition(): Condition { // Always throw exception to avoid callers from using this path - throw UnsupportedOperationException("This lock implementation does not support retrieving a Condition") + throw UnsupportedOperationException( + "This lock implementation does not support retrieving a Condition" + ) } } diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TyperDeduperUtil.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TyperDeduperUtil.kt index df1d4277d13d..1b55216675fa 100644 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TyperDeduperUtil.kt +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/TyperDeduperUtil.kt @@ -1,3 +1,7 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. 
+ */ + package io.airbyte.integrations.base.destination.typing_deduping import com.google.common.collect.Streams @@ -5,151 +9,202 @@ import io.airbyte.cdk.integrations.util.ConnectorExceptionUtil.getResultsOrLogAn import io.airbyte.commons.concurrency.CompletableFutures import io.airbyte.integrations.base.destination.typing_deduping.migrators.Migration import io.airbyte.integrations.base.destination.typing_deduping.migrators.MinimumDestinationState -import org.slf4j.Logger -import org.slf4j.LoggerFactory import java.util.* import java.util.concurrent.CompletableFuture import java.util.concurrent.CompletionStage import java.util.concurrent.ExecutorService import java.util.stream.Collectors.toMap +import org.slf4j.Logger +import org.slf4j.LoggerFactory class TyperDeduperUtil { - companion object { - private val LOGGER: Logger = LoggerFactory.getLogger(TyperDeduperUtil::class.java) + companion object { + private val LOGGER: Logger = LoggerFactory.getLogger(TyperDeduperUtil::class.java) - @JvmStatic - fun executeRawTableMigrations( - executorService: ExecutorService, - destinationHandler: DestinationHandler, - migrations: List>, - initialStates: List> - ): List> { - // TODO: Either the migrations run the soft reset and create v2 tables or the actual prepare tables. - // unify the logic - // with current state of raw tables & final tables. This is done first before gather initial state - // to avoid recreating - // final tables later again. + @JvmStatic + fun executeRawTableMigrations( + executorService: ExecutorService, + destinationHandler: DestinationHandler, + migrations: List>, + initialStates: List> + ): List> { + // TODO: Either the migrations run the soft reset and create v2 tables or the actual + // prepare tables. + // unify the logic + // with current state of raw tables & final tables. This is done first before gather + // initial state + // to avoid recreating + // final tables later again. - // Run migrations in lockstep. Some migrations may require us to refetch the initial state. - // We want to be able to batch those calls together across streams. - // If a migration runs on one stream, it's likely to also run on other streams. - // So we can bundle the gatherInitialState calls together. - var currentStates = initialStates - for (migration in migrations) { - // Execute the migration on all streams in parallel - val futures: Map>> = currentStates.stream() - .collect(toMap( - { it.streamConfig.id }, - { initialState -> runMigrationsAsync(executorService, destinationHandler, migration, initialState) } - )) - val migrationResultFutures = CompletableFutures.allOf(futures.values.toList()).toCompletableFuture().join() - getResultsOrLogAndThrowFirst("The following exceptions were thrown attempting to run migrations:\n", migrationResultFutures) - val migrationResults: Map> = futures.mapValues { it.value.toCompletableFuture().join() } + // Run migrations in lockstep. Some migrations may require us to refetch the initial + // state. + // We want to be able to batch those calls together across streams. + // If a migration runs on one stream, it's likely to also run on other streams. + // So we can bundle the gatherInitialState calls together. 
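+            // Each migration pass may update the per-stream destination state; `currentStates` carries
+            // those updates forward so that later migrations observe them.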
+ var currentStates = initialStates + for (migration in migrations) { + // Execute the migration on all streams in parallel + val futures: + Map>> = + currentStates + .stream() + .collect( + toMap( + { it.streamConfig.id }, + { initialState -> + runMigrationsAsync( + executorService, + destinationHandler, + migration, + initialState + ) + } + ) + ) + val migrationResultFutures = + CompletableFutures.allOf(futures.values.toList()).toCompletableFuture().join() + getResultsOrLogAndThrowFirst( + "The following exceptions were thrown attempting to run migrations:\n", + migrationResultFutures + ) + val migrationResults: Map> = + futures.mapValues { it.value.toCompletableFuture().join() } - // Check if we need to refetch DestinationInitialState - val invalidatedStreams: Set = migrationResults.filter { it.value.invalidateInitialState }.keys - val updatedStates: List> - if (invalidatedStreams.isNotEmpty()) { - LOGGER.info("Refetching initial state for streams: $invalidatedStreams") - updatedStates = destinationHandler.gatherInitialState(currentStates.filter{invalidatedStreams.contains(it.streamConfig.id)}.map {it.streamConfig}) - LOGGER.info("Updated states: $updatedStates") - } else { - updatedStates = emptyList() - } + // Check if we need to refetch DestinationInitialState + val invalidatedStreams: Set = + migrationResults.filter { it.value.invalidateInitialState }.keys + val updatedStates: List> + if (invalidatedStreams.isNotEmpty()) { + LOGGER.info("Refetching initial state for streams: $invalidatedStreams") + updatedStates = + destinationHandler.gatherInitialState( + currentStates + .filter { invalidatedStreams.contains(it.streamConfig.id) } + .map { it.streamConfig } + ) + LOGGER.info("Updated states: $updatedStates") + } else { + updatedStates = emptyList() + } - // Update the DestinationInitialStates with the new DestinationStates, - // and also update initialStates with the refetched states. - currentStates = currentStates.map { initialState -> - // migrationResults will always contain an entry for each stream, so we can safely use !! - val updatedDestinationState = migrationResults[initialState.streamConfig.id]!!.updatedDestinationState - if (invalidatedStreams.contains(initialState.streamConfig.id)) { - // We invalidated this stream's DestinationInitialState. - // Find the updated DestinationInitialState, and update it with our new DestinationState - return@map updatedStates.filter{updatedState -> updatedState.streamConfig.id.equals(initialState.streamConfig.id)} - .first() - .copy(destinationState = updatedDestinationState) - } else { - // Just update the original DestinationInitialState with the new DestinationState. - return@map initialState.copy(destinationState = updatedDestinationState) - } + // Update the DestinationInitialStates with the new DestinationStates, + // and also update initialStates with the refetched states. + currentStates = + currentStates.map { initialState -> + // migrationResults will always contain an entry for each stream, so we can + // safely use !! + val updatedDestinationState = + migrationResults[initialState.streamConfig.id]!!.updatedDestinationState + if (invalidatedStreams.contains(initialState.streamConfig.id)) { + // We invalidated this stream's DestinationInitialState. 
+ // Find the updated DestinationInitialState, and update it with our new + // DestinationState + return@map updatedStates + .filter { updatedState -> + updatedState.streamConfig.id.equals( + initialState.streamConfig.id + ) + } + .first() + .copy(destinationState = updatedDestinationState) + } else { + // Just update the original DestinationInitialState with the new + // DestinationState. + return@map initialState.copy(destinationState = updatedDestinationState) + } + } + } + return currentStates } - } - return currentStates - } - - /** - * The legacy-style migrations (V1V2Migrator, V2TableMigrator) need to run before we gather - * initial state, because they're dumb and weird. - * (specifically: SnowflakeV2TableMigrator inspects the final tables and triggers a soft reset - * directly within the migration). - * TODO: Migrate these migrations to the new migration system. - * This will also reduce the number of times we need to query DB metadata, since (a) we can rely - * on the gatherInitialState values, and (b) we can add a DestinationState field for these migrations. - * It also enables us to not trigger multiple soft resets in a single sync. - */ - @JvmStatic - fun executeWeirdMigrations( - executorService: ExecutorService, - sqlGenerator: SqlGenerator, - destinationHandler: DestinationHandler, - v1V2Migrator: DestinationV1V2Migrator, - v2TableMigrator: V2TableMigrator, - parsedCatalog: ParsedCatalog - ) { - val futures = parsedCatalog.streams.map { - CompletableFuture.supplyAsync( - { - v1V2Migrator.migrateIfNecessary(sqlGenerator, destinationHandler, it) - v2TableMigrator.migrateIfNecessary(it) - }, - executorService - ) - } - getResultsOrLogAndThrowFirst( - "The following exceptions were thrown attempting to run migrations:\n", - CompletableFutures.allOf(futures.toList()).toCompletableFuture().join()) - } + /** + * The legacy-style migrations (V1V2Migrator, V2TableMigrator) need to run before we gather + * initial state, because they're dumb and weird. (specifically: SnowflakeV2TableMigrator + * inspects the final tables and triggers a soft reset directly within the migration). TODO: + * Migrate these migrations to the new migration system. This will also reduce the number of + * times we need to query DB metadata, since (a) we can rely on the gatherInitialState + * values, and (b) we can add a DestinationState field for these migrations. It also enables + * us to not trigger multiple soft resets in a single sync. + */ + @JvmStatic + fun executeWeirdMigrations( + executorService: ExecutorService, + sqlGenerator: SqlGenerator, + destinationHandler: DestinationHandler, + v1V2Migrator: DestinationV1V2Migrator, + v2TableMigrator: V2TableMigrator, + parsedCatalog: ParsedCatalog + ) { + val futures = + parsedCatalog.streams.map { + CompletableFuture.supplyAsync( + { + v1V2Migrator.migrateIfNecessary(sqlGenerator, destinationHandler, it) + v2TableMigrator.migrateIfNecessary(it) + }, + executorService + ) + } + getResultsOrLogAndThrowFirst( + "The following exceptions were thrown attempting to run migrations:\n", + CompletableFutures.allOf(futures.toList()).toCompletableFuture().join() + ) + } - /** - * Extracts all the "raw" and "final" schemas identified in the [parsedCatalog] and ensures they - * exist in the Destination Database. 
- */ - @JvmStatic - fun prepareSchemas( - sqlGenerator: SqlGenerator, - destinationHandler: DestinationHandler, - parsedCatalog: ParsedCatalog) { - val rawSchema = parsedCatalog.streams.stream().map { it.id.rawNamespace } - val finalSchema = parsedCatalog.streams.stream().map { it.id.finalNamespace } - val createAllSchemasSql = Streams.concat(rawSchema, finalSchema) - .filter(Objects::nonNull) - .distinct() - .map(sqlGenerator::createSchema) - .toList() - destinationHandler.execute(Sql.concat(createAllSchemasSql)) - } + /** + * Extracts all the "raw" and "final" schemas identified in the [parsedCatalog] and ensures + * they exist in the Destination Database. + */ + @JvmStatic + fun prepareSchemas( + sqlGenerator: SqlGenerator, + destinationHandler: DestinationHandler, + parsedCatalog: ParsedCatalog + ) { + val rawSchema = parsedCatalog.streams.stream().map { it.id.rawNamespace } + val finalSchema = parsedCatalog.streams.stream().map { it.id.finalNamespace } + val createAllSchemasSql = + Streams.concat(rawSchema, finalSchema) + .filter(Objects::nonNull) + .distinct() + .map(sqlGenerator::createSchema) + .toList() + destinationHandler.execute(Sql.concat(createAllSchemasSql)) + } - private fun runMigrationsAsync( - executorService: ExecutorService, - destinationHandler: DestinationHandler, - migration: Migration, - initialStatus: DestinationInitialStatus - ): CompletionStage> { - return CompletableFuture.supplyAsync({ - LOGGER.info("Maybe executing ${migration.javaClass.simpleName} migration for stream ${initialStatus.streamConfig.id.originalNamespace}.${initialStatus.streamConfig.id.originalName}.") + private fun runMigrationsAsync( + executorService: ExecutorService, + destinationHandler: DestinationHandler, + migration: Migration, + initialStatus: DestinationInitialStatus + ): CompletionStage> { + return CompletableFuture.supplyAsync( + { + LOGGER.info( + "Maybe executing ${migration.javaClass.simpleName} migration for stream ${initialStatus.streamConfig.id.originalNamespace}.${initialStatus.streamConfig.id.originalName}." 
+ ) - // We technically don't need to track this, but might as well hedge against migrations - // accidentally setting softReset=false - val softReset = initialStatus.destinationState.needsSoftReset() - val migrationResult = migration.migrateIfNecessary( - destinationHandler, - initialStatus.streamConfig, - initialStatus) - val updatedNeedsSoftReset = softReset || migrationResult.updatedDestinationState.needsSoftReset() - return@supplyAsync migrationResult.copy( - updatedDestinationState = migrationResult.updatedDestinationState.withSoftReset(updatedNeedsSoftReset)) - }, executorService) + // We technically don't need to track this, but might as well hedge against + // migrations + // accidentally setting softReset=false + val softReset = initialStatus.destinationState.needsSoftReset() + val migrationResult = + migration.migrateIfNecessary( + destinationHandler, + initialStatus.streamConfig, + initialStatus + ) + val updatedNeedsSoftReset = + softReset || migrationResult.updatedDestinationState.needsSoftReset() + return@supplyAsync migrationResult.copy( + updatedDestinationState = + migrationResult.updatedDestinationState.withSoftReset( + updatedNeedsSoftReset + ) + ) + }, + executorService + ) + } } - } } diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/migrators/Migration.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/migrators/Migration.kt index 8a38628594fd..d409f5157458 100644 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/migrators/Migration.kt +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/migrators/Migration.kt @@ -1,3 +1,7 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + package io.airbyte.integrations.base.destination.typing_deduping.migrators import io.airbyte.integrations.base.destination.typing_deduping.DestinationHandler @@ -9,24 +13,23 @@ import io.airbyte.integrations.base.destination.typing_deduping.StreamConfig * 1. Modify the raw table * 2. Trigger a soft reset * - * The raw table modification should happen in {@link #migrateIfNecessary(Object, StreamConfig)}. However, - * if multiple migrations want to trigger a soft reset, we should only trigger a single soft reset, - * because soft resets are idempotent. There's no reason to trigger multiple soft resets in sequence, - * and it would be a waste of warehouse compute to do so. Migrations MUST NOT directly run a soft reset - * within {@link #migrateIfNecessary(Object, StreamConfig)}. - *

- * Migrations are encouraged to store something into the destination State blob. This allows us to make - * fewer queries into customer data. However, migrations MUST NOT rely solely on the state blob to trigger - * migrations. It's possible for a state to not be committed after a migration runs (e.g. a well-timed - * OOMKill). Therefore, if the state blob indicates that a migration is necessary, migrations must still - * confirm against the database that the migration is necessary. + * The raw table modification should happen in {@link #migrateIfNecessary(Object, StreamConfig)}. + * However, if multiple migrations want to trigger a soft reset, we should only trigger a single + * soft reset, because soft resets are idempotent. There's no reason to trigger multiple soft resets + * in sequence, and it would be a waste of warehouse compute to do so. Migrations MUST NOT directly + * run a soft reset within {@link #migrateIfNecessary(Object, StreamConfig)}.

Migrations are + * encouraged to store something into the destination State blob. This allows us to make fewer + * queries into customer data. However, migrations MUST NOT rely solely on the state blob to trigger + * migrations. It's possible for a state to not be committed after a migration runs (e.g. a + * well-timed OOMKill). Therefore, if the state blob indicates that a migration is necessary, + * migrations must still confirm against the database that the migration is necessary. */ -interface Migration { +interface Migration { /** - * Perform the migration if it's necessary. Implementations of this method MUST check against the database - * to confirm the the migration is still necessary, in case a previous migration ran, but failed - * to update the state. + * Perform the migration if it's necessary. Implementations of this method MUST check against + * the database to confirm the the migration is still necessary, in case a previous migration + * ran, but failed to update the state. * * Migrations MUST NOT set the `needsSoftReset` flag to false, but they MAY set it to true. */ @@ -37,7 +40,8 @@ interface Migration { ): MigrationResult /** - * @param invalidateInitialState If true, the migration modified the raw tables in a way that requires us to re-gather initial state. + * @param invalidateInitialState If true, the migration modified the raw tables in a way that + * requires us to re-gather initial state. */ data class MigrationResult( val updatedDestinationState: DestinationState, diff --git a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/migrators/MinimumDestinationState.kt b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/migrators/MinimumDestinationState.kt index 9fefd2d0ff30..2fceb4ad74fc 100644 --- a/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/migrators/MinimumDestinationState.kt +++ b/airbyte-cdk/java/airbyte-cdk/typing-deduping/src/main/java/io/airbyte/integrations/base/destination/typing_deduping/migrators/MinimumDestinationState.kt @@ -1,3 +1,7 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + package io.airbyte.integrations.base.destination.typing_deduping.migrators /** @@ -5,39 +9,39 @@ package io.airbyte.integrations.base.destination.typing_deduping.migrators * whether a soft reset is necessary, and persist that value across syncs in case of sync failure. */ interface MinimumDestinationState { - fun needsSoftReset(): Boolean + fun needsSoftReset(): Boolean - /** - * The type parameter should be the subclass itself. We need this so that [withSoftReset] can - * return the correct type. Callers are responsible for passing the correct type parameter - * into this function (e.g. `currentState.withSoftReset(softReset)`). - * - * Implementations generally look like this: (note the unchecked `as T` cast) - * ```kotlin - * data class ExampleState(val needsSoftReset: Boolean, ): MinimumDestinationState { - * override fun needsSoftReset(): Boolean { - * return needsSoftReset - * } - * - * override fun withSoftReset(needsSoftReset: Boolean): T { - * return copy(needsSoftReset = true) as T - * } - * } - * ``` - */ - fun withSoftReset(needsSoftReset: Boolean): T + /** + * The type parameter should be the subclass itself. We need this so that [withSoftReset] can + * return the correct type. 
Callers are responsible for passing the correct type parameter into + * this function (e.g. `currentState.withSoftReset(softReset)`). + * + * Implementations generally look like this: (note the unchecked `as T` cast) + * ```kotlin + * data class ExampleState(val needsSoftReset: Boolean, ): MinimumDestinationState { + * override fun needsSoftReset(): Boolean { + * return needsSoftReset + * } + * + * override fun withSoftReset(needsSoftReset: Boolean): T { + * return copy(needsSoftReset = true) as T + * } + * } + * ``` + */ + fun withSoftReset(needsSoftReset: Boolean): T - /** - * A minimal implementation of [MinimumDestinationState]. This is useful for destinations that don't - * want to bother implementing a full state object. - */ - data class Impl(val needsSoftReset: Boolean): MinimumDestinationState { - override fun needsSoftReset(): Boolean { - return needsSoftReset - } + /** + * A minimal implementation of [MinimumDestinationState]. This is useful for destinations that + * don't want to bother implementing a full state object. + */ + data class Impl(val needsSoftReset: Boolean) : MinimumDestinationState { + override fun needsSoftReset(): Boolean { + return needsSoftReset + } - override fun withSoftReset(needsSoftReset: Boolean): T { - return copy(needsSoftReset = true) as T + override fun withSoftReset(needsSoftReset: Boolean): T { + return copy(needsSoftReset = true) as T + } } - } } diff --git a/airbyte-ci/connectors/pipelines/README.md b/airbyte-ci/connectors/pipelines/README.md index 7d92fc08cad2..8bd2d171491b 100644 --- a/airbyte-ci/connectors/pipelines/README.md +++ b/airbyte-ci/connectors/pipelines/README.md @@ -644,6 +644,7 @@ E.G.: running Poe tasks on the modified internal packages of the current branch: | Version | PR | Description | | ------- | ---------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------- | +| 4.5.0 | [#35784](https://github.com/airbytehq/airbyte/pull/35784) | Format command supports kotlin | | 4.4.0 | [#35317](https://github.com/airbytehq/airbyte/pull/35317) | Augment java connector reports to include full logs and junit test results | | 4.3.2 | [#35536](https://github.com/airbytehq/airbyte/pull/35536) | Make QA checks run correctly on `*-strict-encrypt` connectors. | | 4.3.1 | [#35437](https://github.com/airbytehq/airbyte/pull/35437) | Do not run QA checks on publish, just MetadataValidation. | diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/format/configuration.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/format/configuration.py index 0ee6e5441b0b..ddbf30e96ab0 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/format/configuration.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/format/configuration.py @@ -27,7 +27,10 @@ class FormatConfiguration: FORMATTERS_CONFIGURATIONS: List[FormatConfiguration] = [ # Run spotless on all java and gradle files. FormatConfiguration( - Formatter.JAVA, ["**/*.java", "**/*.gradle"], format_java_container, ["mvn -f spotless-maven-pom.xml spotless:apply clean"] + Formatter.JAVA, + ["**/*.java", "**/*.kt", "**/*.gradle"], + format_java_container, + ["mvn -f spotless-maven-pom.xml spotless:apply clean"], ), # Run prettier on all json and yaml files. FormatConfiguration( @@ -39,7 +42,7 @@ class FormatConfiguration: # Add license header to java and python files. The license header is stored in LICENSE_SHORT file. 
FormatConfiguration( Formatter.LICENSE, - ["**/*.java", "**/*.py"], + ["**/*.java", "**/*.kt", "**/*.py"], format_license_container, [f"addlicense -c 'Airbyte, Inc.' -l apache -v -f {LICENSE_FILE_NAME} ."], ), diff --git a/airbyte-ci/connectors/pipelines/pyproject.toml b/airbyte-ci/connectors/pipelines/pyproject.toml index cc7abba8285d..629a40b57e4c 100644 --- a/airbyte-ci/connectors/pipelines/pyproject.toml +++ b/airbyte-ci/connectors/pipelines/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pipelines" -version = "4.4.0" +version = "4.5.0" description = "Packaged maintained by the connector operations team to perform CI for connectors' pipelines" authors = ["Airbyte "] diff --git a/airbyte-ci/connectors/pipelines/tests/test_format/formatted_code/java.java b/airbyte-ci/connectors/pipelines/tests/test_format/formatted_code/java.java index 725920dd60c5..4a26a94eec19 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_format/formatted_code/java.java +++ b/airbyte-ci/connectors/pipelines/tests/test_format/formatted_code/java.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. */ public class BadlyFormatted { diff --git a/airbyte-ci/connectors/pipelines/tests/test_format/formatted_code/python.py b/airbyte-ci/connectors/pipelines/tests/test_format/formatted_code/python.py index e9c43d7a4066..1ce53a315384 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_format/formatted_code/python.py +++ b/airbyte-ci/connectors/pipelines/tests/test_format/formatted_code/python.py @@ -1,4 +1,4 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. def my_function(): diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/test/debezium/DebeziumMongoDbConnectorTest.kt b/airbyte-integrations/connectors/source-mongodb-v2/src/test/debezium/DebeziumMongoDbConnectorTest.kt index f896e5d09d48..e02d0ea2ae26 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/test/debezium/DebeziumMongoDbConnectorTest.kt +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/test/debezium/DebeziumMongoDbConnectorTest.kt @@ -1,3 +1,7 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. 
+ */ + package io.airbyte.integrations.source.mongodb import com.mongodb.ConnectionString @@ -8,9 +12,6 @@ import edu.umd.cs.findbugs.annotations.SuppressFBWarnings import io.debezium.connector.mongodb.MongoDbConnector import io.debezium.connector.mongodb.ResumeTokens import io.debezium.engine.DebeziumEngine -import org.bson.BsonDocument -import org.bson.BsonTimestamp -import org.slf4j.LoggerFactory import java.io.IOException import java.io.ObjectInputStream import java.nio.ByteBuffer @@ -31,13 +32,23 @@ import kotlin.collections.component2 import kotlinx.cli.ArgParser import kotlinx.cli.ArgType import kotlinx.cli.required +import org.bson.BsonDocument +import org.bson.BsonTimestamp +import org.slf4j.LoggerFactory - -class DebeziumMongoDbConnectorTest internal constructor(private val connectionString: String, private val databaseName: String, private val collectionName: String, private val username: String, private val password: String) { +class DebeziumMongoDbConnectorTest +internal constructor( + private val connectionString: String, + private val databaseName: String, + private val collectionName: String, + private val username: String, + private val password: String +) { @Throws(InterruptedException::class, IOException::class) fun startTest() { - val queue: LinkedBlockingQueue> = LinkedBlockingQueue>(10000) + val queue: LinkedBlockingQueue> = + LinkedBlockingQueue>(10000) val path = path LOGGER.info("Using offset storage path '{}'.", path) testChangeEventStream() @@ -45,13 +56,19 @@ class DebeziumMongoDbConnectorTest internal constructor(private val connectionSt // will do an initial sync cause offset is null initialSync(queue, path) - // will do an incremental processing cause after the initialSync run the offset must be updated + // will do an incremental processing cause after the initialSync run the offset must be + // updated engineWithIncrementalSnapshot(queue, path) } private fun testChangeEventStream() { - val mongoClientSettings = MongoClientSettings.builder() - .applyConnectionString(ConnectionString("mongodb+srv://$username:$password@cluster0.iqgf8.mongodb.net/")) + val mongoClientSettings = + MongoClientSettings.builder() + .applyConnectionString( + ConnectionString( + "mongodb+srv://$username:$password@cluster0.iqgf8.mongodb.net/" + ) + ) .readPreference(ReadPreference.secondaryPreferred()) .build() MongoClients.create(mongoClientSettings).use { client -> @@ -77,26 +94,41 @@ class DebeziumMongoDbConnectorTest internal constructor(private val connectionSt } @Throws(InterruptedException::class, IOException::class) - private fun initialSync(queue: LinkedBlockingQueue>, path: Path) { + private fun initialSync( + queue: LinkedBlockingQueue>, + path: Path + ) { val executorService = Executors.newSingleThreadExecutor() val thrownError = AtomicReference() val engineLatch = CountDownLatch(1) - val engine: DebeziumEngine> = DebeziumEngine.create(io.debezium.engine.format.Json::class.java) - .using(getDebeziumProperties(path, listOf("$databaseName\\.$collectionName").stream().collect(Collectors.joining(",")))) + val engine: DebeziumEngine> = + DebeziumEngine.create(io.debezium.engine.format.Json::class.java) + .using( + getDebeziumProperties( + path, + listOf("$databaseName\\.$collectionName") + .stream() + .collect(Collectors.joining(",")) + ) + ) .using(io.debezium.engine.spi.OffsetCommitPolicy.AlwaysCommitOffsetPolicy()) - .notifying(Consumer> { e: io.debezium.engine.ChangeEvent -> - // debezium outputs a tombstone event that has a value of null. 
this is an artifact of how it - // interacts with kafka. we want to ignore it. - // more on the tombstone: - // https://debezium.io/documentation/reference/configuration/event-flattening.html - if (e.value() != null) { - LOGGER.debug("{}", e) - var inserted = false - while (!inserted) { - inserted = queue.offer(e) + .notifying( + Consumer> { + e: io.debezium.engine.ChangeEvent -> + // debezium outputs a tombstone event that has a value of null. this is an + // artifact of how it + // interacts with kafka. we want to ignore it. + // more on the tombstone: + // https://debezium.io/documentation/reference/configuration/event-flattening.html + if (e.value() != null) { + LOGGER.debug("{}", e) + var inserted = false + while (!inserted) { + inserted = queue.offer(e) + } } } - }) + ) .using { _: Boolean, message: String?, error: Throwable? -> LOGGER.info("Initial sync Debezium engine shutdown.") if (error != null) { @@ -105,7 +137,7 @@ class DebeziumMongoDbConnectorTest internal constructor(private val connectionSt engineLatch.countDown() thrownError.set(error) } - .build() + .build() executorService.execute(engine) Thread.sleep((45 * 1000).toLong()) engine.close() @@ -119,15 +151,27 @@ class DebeziumMongoDbConnectorTest internal constructor(private val connectionSt } @Throws(InterruptedException::class, IOException::class) - private fun engineWithIncrementalSnapshot(queue: LinkedBlockingQueue>, path: Path) { + private fun engineWithIncrementalSnapshot( + queue: LinkedBlockingQueue>, + path: Path + ) { val executorService2 = Executors.newSingleThreadExecutor() val thrownError2 = AtomicReference() val engineLatch2 = CountDownLatch(1) - val engine2: DebeziumEngine> = DebeziumEngine.create(io.debezium.engine.format.Json::class.java) - .using(getDebeziumProperties(path, listOf("$databaseName\\.$collectionName").stream().collect(Collectors.joining(",")))) + val engine2: DebeziumEngine> = + DebeziumEngine.create(io.debezium.engine.format.Json::class.java) + .using( + getDebeziumProperties( + path, + listOf("$databaseName\\.$collectionName") + .stream() + .collect(Collectors.joining(",")) + ) + ) .using(io.debezium.engine.spi.OffsetCommitPolicy.AlwaysCommitOffsetPolicy()) .notifying { e: io.debezium.engine.ChangeEvent -> - // debezium outputs a tombstone event that has a value of null. this is an artifact of how it + // debezium outputs a tombstone event that has a value of null. this is an + // artifact of how it // interacts with kafka. we want to ignore it. // more on the tombstone: // https://debezium.io/documentation/reference/configuration/event-flattening.html @@ -139,14 +183,19 @@ class DebeziumMongoDbConnectorTest internal constructor(private val connectionSt } } } - .using(io.debezium.engine.DebeziumEngine.CompletionCallback { success: Boolean, message: String?, error: Throwable? -> - LOGGER.info("Incremental snapshot Debezium engine shutdown.") - if (error != null) { - LOGGER.error("error occurred: {}", message, error) + .using( + io.debezium.engine.DebeziumEngine.CompletionCallback { + success: Boolean, + message: String?, + error: Throwable? 
-> + LOGGER.info("Incremental snapshot Debezium engine shutdown.") + if (error != null) { + LOGGER.error("error occurred: {}", message, error) + } + engineLatch2.countDown() + thrownError2.set(error) } - engineLatch2.countDown() - thrownError2.set(error) - }) + ) .build() executorService2.execute(engine2) Thread.sleep((180 * 1000).toLong()) @@ -160,7 +209,10 @@ class DebeziumMongoDbConnectorTest internal constructor(private val connectionSt } } - protected fun getDebeziumProperties(cdcOffsetFilePath: Path, collectionNames: String): Properties { + protected fun getDebeziumProperties( + cdcOffsetFilePath: Path, + collectionNames: String + ): Properties { val props = Properties() LOGGER.info("Included collection names regular expression: '{}'.", collectionNames) props.setProperty("connector.class", MongoDbConnector::class.java.getName()) @@ -180,7 +232,10 @@ class DebeziumMongoDbConnectorTest internal constructor(private val connectionSt props.setProperty("database.include.list", databaseName) // Offset storage configuration - props.setProperty("offset.storage", "org.apache.kafka.connect.storage.FileOffsetBackingStore") + props.setProperty( + "offset.storage", + "org.apache.kafka.connect.storage.FileOffsetBackingStore" + ) props.setProperty("offset.storage.file.filename", cdcOffsetFilePath.toString()) props.setProperty("offset.flush.interval.ms", "1000") @@ -193,7 +248,8 @@ class DebeziumMongoDbConnectorTest internal constructor(private val connectionSt props.setProperty("value.converter.schemas.enable", "false") // By default "decimal.handing.mode=precise" which caused returning this value as a binary. - // The "double" type may cause a loss of precision, so set Debezium's config to store it as a String + // The "double" type may cause a loss of precision, so set Debezium's config to store it as + // a String // explicitly in its Kafka messages for more details see: // https://debezium.io/documentation/reference/1.4/connectors/postgresql.html#postgresql-decimal-types // https://debezium.io/documentation/faq/#how_to_retrieve_decimal_field_from_binary_representation @@ -206,11 +262,12 @@ class DebeziumMongoDbConnectorTest internal constructor(private val connectionSt private val path: Path get() { - val cdcWorkingDir: Path = try { - Files.createTempDirectory(Path.of("/tmp"), "cdc-state-offset") - } catch (e: IOException) { - throw RuntimeException(e) - } + val cdcWorkingDir: Path = + try { + Files.createTempDirectory(Path.of("/tmp"), "cdc-state-offset") + } catch (e: IOException) { + throw RuntimeException(e) + } return cdcWorkingDir.resolve("offset.txt") } @@ -220,7 +277,15 @@ class DebeziumMongoDbConnectorTest internal constructor(private val connectionSt try { ObjectInputStream(Files.newInputStream(path)).use { ois -> val raw = ois.readObject() as Map - raw.entries.forEach(Consumer { (key, value): Map.Entry -> LOGGER.info("{}:{}", String(ByteBuffer.wrap(key).array(), StandardCharsets.UTF_8), String(ByteBuffer.wrap(value).array(), StandardCharsets.UTF_8)) }) + raw.entries.forEach( + Consumer { (key, value): Map.Entry -> + LOGGER.info( + "{}:{}", + String(ByteBuffer.wrap(key).array(), StandardCharsets.UTF_8), + String(ByteBuffer.wrap(value).array(), StandardCharsets.UTF_8) + ) + } + ) } } catch (e: IOException) { LOGGER.error("Unable to read offset file '{}'.", path, e) @@ -235,17 +300,56 @@ class DebeziumMongoDbConnectorTest internal constructor(private val connectionSt @JvmStatic fun main(args: Array) { val parser = ArgParser("Debezium MongoDb Connector Test Harness") - val connectionString 
by parser.option(ArgType.String, fullName = "connection-string", shortName = "cs", description = "MongoDB Connection String").required() - val databaseName by parser.option(ArgType.String, fullName = "database-name", shortName = "d", description = "Database Name").required() - val collectionName by parser.option(ArgType.String, fullName = "collection-name", shortName = "cn", description = "Collection Name").required() - val username by parser.option(ArgType.String, fullName = "username", shortName = "u", description = "Username").required() + val connectionString by + parser + .option( + ArgType.String, + fullName = "connection-string", + shortName = "cs", + description = "MongoDB Connection String" + ) + .required() + val databaseName by + parser + .option( + ArgType.String, + fullName = "database-name", + shortName = "d", + description = "Database Name" + ) + .required() + val collectionName by + parser + .option( + ArgType.String, + fullName = "collection-name", + shortName = "cn", + description = "Collection Name" + ) + .required() + val username by + parser + .option( + ArgType.String, + fullName = "username", + shortName = "u", + description = "Username" + ) + .required() parser.parse(args) println("Enter password: ") val password = readln() - val debeziumEngineTest = DebeziumMongoDbConnectorTest(connectionString, databaseName, collectionName, username, password) + val debeziumEngineTest = + DebeziumMongoDbConnectorTest( + connectionString, + databaseName, + collectionName, + username, + password + ) debeziumEngineTest.startTest() } } diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/test/generator/MongoDbInsertClient.kt b/airbyte-integrations/connectors/source-mongodb-v2/src/test/generator/MongoDbInsertClient.kt index fd2b7f612930..88763e65e9f8 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/test/generator/MongoDbInsertClient.kt +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/test/generator/MongoDbInsertClient.kt @@ -1,3 +1,7 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. 
+ */ + package io.airbyte.integrations.source.mongodb import com.fasterxml.jackson.core.JsonGenerator @@ -6,13 +10,13 @@ import com.fasterxml.jackson.databind.ObjectMapper import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule import com.github.javafaker.Faker import io.github.oshai.kotlinlogging.KotlinLogging +import java.lang.System.currentTimeMillis import kotlinx.cli.ArgParser import kotlinx.cli.ArgType import kotlinx.cli.default import kotlinx.cli.required import org.bson.BsonTimestamp import org.bson.Document -import java.lang.System.currentTimeMillis object MongoDbInsertClient { @@ -23,57 +27,106 @@ object MongoDbInsertClient { @JvmStatic fun main(args: Array) { val parser = ArgParser("MongoDb Insert Client") - val connectionString by parser.option(ArgType.String, fullName = "connection-string", shortName = "cs", description = "MongoDb Connection String").required() - val databaseName by parser.option(ArgType.String, fullName = "database-name", shortName = "d", description = "Database Name").required() - val collectionName by parser.option(ArgType.String, fullName = "collection-name", shortName = "cn", description = "Collection Name").required() - val username by parser.option(ArgType.String, fullName = "username", shortName = "u", description = "Username").required() - val numberOfDocuments by parser.option(ArgType.Int, fullName = "number", shortName = "n", description = "Number of documents to generate").default(10000) + val connectionString by + parser + .option( + ArgType.String, + fullName = "connection-string", + shortName = "cs", + description = "MongoDb Connection String" + ) + .required() + val databaseName by + parser + .option( + ArgType.String, + fullName = "database-name", + shortName = "d", + description = "Database Name" + ) + .required() + val collectionName by + parser + .option( + ArgType.String, + fullName = "collection-name", + shortName = "cn", + description = "Collection Name" + ) + .required() + val username by + parser + .option( + ArgType.String, + fullName = "username", + shortName = "u", + description = "Username" + ) + .required() + val numberOfDocuments by + parser + .option( + ArgType.Int, + fullName = "number", + shortName = "n", + description = "Number of documents to generate" + ) + .default(10000) parser.parse(args) println("Enter password: ") val password = readln() - val config = mapOf(MongoConstants.DATABASE_CONFIG_CONFIGURATION_KEY to + val config = mapOf( - MongoConstants.DATABASE_CONFIGURATION_KEY to databaseName, - MongoConstants.CONNECTION_STRING_CONFIGURATION_KEY to connectionString, - MongoConstants.AUTH_SOURCE_CONFIGURATION_KEY to "admin", - MongoConstants.USERNAME_CONFIGURATION_KEY to username, - MongoConstants.PASSWORD_CONFIGURATION_KEY to password) - ) + MongoConstants.DATABASE_CONFIG_CONFIGURATION_KEY to + mapOf( + MongoConstants.DATABASE_CONFIGURATION_KEY to databaseName, + MongoConstants.CONNECTION_STRING_CONFIGURATION_KEY to connectionString, + MongoConstants.AUTH_SOURCE_CONFIGURATION_KEY to "admin", + MongoConstants.USERNAME_CONFIGURATION_KEY to username, + MongoConstants.PASSWORD_CONFIGURATION_KEY to password + ) + ) - val faker = Faker(); + val faker = Faker() val objectMapper = ObjectMapper().registerModule(JavaTimeModule()) objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) objectMapper.configure(JsonGenerator.Feature.WRITE_BIGDECIMAL_AS_PLAIN, true) val roundTrippedConfig = objectMapper.readTree(objectMapper.writeValueAsBytes(config)) - 
MongoConnectionUtils.createMongoClient(MongoDbSourceConfig(roundTrippedConfig)).use { mongoClient -> + MongoConnectionUtils.createMongoClient(MongoDbSourceConfig(roundTrippedConfig)).use { + mongoClient -> val documents = mutableListOf() - val batches = if (numberOfDocuments > BATCH_SIZE) numberOfDocuments / BATCH_SIZE else 1; - val batchSize = if (numberOfDocuments > BATCH_SIZE) BATCH_SIZE else numberOfDocuments; + val batches = if (numberOfDocuments > BATCH_SIZE) numberOfDocuments / BATCH_SIZE else 1 + val batchSize = if (numberOfDocuments > BATCH_SIZE) BATCH_SIZE else numberOfDocuments logger.info { "Inserting $batches batch(es) of $batchSize document(s) each..." } for (i in 0..batches) { logger.info { "Inserting batch ${i}..." } for (j in 0..batchSize) { - val index = (j+1)+((i+1)*batchSize) - documents += Document().append("name", "Document $index") - .append("title", "${faker.lorem().sentence(10)}") - .append("description", "${faker.lorem().paragraph(25)}") - .append("data", "${faker.lorem().paragraphs(100)}") - .append("paragraph", "${faker.lorem().paragraph(25)}") - .append("doubleField", index.toDouble()) - .append("intField", index) - .append("objectField", mapOf("key" to "value")) - .append("timestamp", BsonTimestamp(currentTimeMillis())) + val index = (j + 1) + ((i + 1) * batchSize) + documents += + Document() + .append("name", "Document $index") + .append("title", "${faker.lorem().sentence(10)}") + .append("description", "${faker.lorem().paragraph(25)}") + .append("data", "${faker.lorem().paragraphs(100)}") + .append("paragraph", "${faker.lorem().paragraph(25)}") + .append("doubleField", index.toDouble()) + .append("intField", index) + .append("objectField", mapOf("key" to "value")) + .append("timestamp", BsonTimestamp(currentTimeMillis())) } - mongoClient.getDatabase(databaseName).getCollection(collectionName).insertMany(documents) + mongoClient + .getDatabase(databaseName) + .getCollection(collectionName) + .insertMany(documents) documents.clear() } } logger.info { "Inserted $numberOfDocuments document(s) to $databaseName.$collectionName" } } -} \ No newline at end of file +} diff --git a/spotless-maven-pom.xml b/spotless-maven-pom.xml index 93da05c12bce..63de55c6766d 100644 --- a/spotless-maven-pom.xml +++ b/spotless-maven-pom.xml @@ -44,6 +44,15 @@ + + + **/*.kt + + + 0.39 + + + From 160ec729375af9ab7c225bb9646da69711b9b24d Mon Sep 17 00:00:00 2001 From: Edward Gao Date: Mon, 4 Mar 2024 11:43:43 -0800 Subject: [PATCH 068/172] Destination bigquery: upgrade cdk (#35315) Signed-off-by: Gireesh Sreepathi Co-authored-by: Gireesh Sreepathi --- .../destination-bigquery/build.gradle | 3 +- .../destination-bigquery/metadata.yaml | 2 +- .../bigquery/BigQueryDestination.java | 31 +-- .../BigQueryStagingConsumerFactory.java | 5 +- .../BigQueryDestinationHandler.java | 182 ++++++++++++++++-- .../typing_deduping/BigQuerySqlGenerator.java | 130 +------------ .../AbstractBigQueryTypingDedupingTest.java | 2 +- .../BigQuerySqlGeneratorIntegrationTest.java | 15 +- .../BigQuerySqlGeneratorTest.java | 114 ----------- .../BigqueryDestinationHandlerTest.java | 132 +++++++++++++ docs/integrations/destinations/bigquery.md | 15 +- 11 files changed, 350 insertions(+), 281 deletions(-) create mode 100644 airbyte-integrations/connectors/destination-bigquery/src/test/java/io/airbyte/integrations/destination/bigquery/typing_deduping/BigqueryDestinationHandlerTest.java diff --git a/airbyte-integrations/connectors/destination-bigquery/build.gradle 
b/airbyte-integrations/connectors/destination-bigquery/build.gradle index f5d1b05d4b54..14da5852eef0 100644 --- a/airbyte-integrations/connectors/destination-bigquery/build.gradle +++ b/airbyte-integrations/connectors/destination-bigquery/build.gradle @@ -1,9 +1,10 @@ plugins { id 'airbyte-java-connector' + id 'org.jetbrains.kotlin.jvm' version '1.9.22' } airbyteJavaConnector { - cdkVersionRequired = '0.20.9' + cdkVersionRequired = '0.23.11' features = [ 'db-destinations', 'datastore-bigquery', diff --git a/airbyte-integrations/connectors/destination-bigquery/metadata.yaml b/airbyte-integrations/connectors/destination-bigquery/metadata.yaml index e3e73d6a5c98..ee0aecd9a385 100644 --- a/airbyte-integrations/connectors/destination-bigquery/metadata.yaml +++ b/airbyte-integrations/connectors/destination-bigquery/metadata.yaml @@ -5,7 +5,7 @@ data: connectorSubtype: database connectorType: destination definitionId: 22f6c74f-5699-40ff-833c-4a879ea40133 - dockerImageTag: 2.4.11 + dockerImageTag: 2.4.12 dockerRepository: airbyte/destination-bigquery documentationUrl: https://docs.airbyte.com/integrations/destinations/bigquery githubIssueLabel: destination-bigquery diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryDestination.java b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryDestination.java index b884fe5dbd23..f2b11b35247b 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryDestination.java +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryDestination.java @@ -61,6 +61,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; @@ -233,9 +234,11 @@ public SerializedAirbyteMessageConsumer getSerializedMessageConsumer(final JsonN final boolean disableTypeDedupe = BigQueryUtils.getDisableTypeDedupFlag(config); final String datasetLocation = BigQueryUtils.getDatasetLocation(config); final BigQuerySqlGenerator sqlGenerator = new BigQuerySqlGenerator(config.get(BigQueryConsts.CONFIG_PROJECT_ID).asText(), datasetLocation); - final ParsedCatalog parsedCatalog = parseCatalog(config, catalog, datasetLocation); + final Optional rawNamespaceOverride = TypingAndDedupingFlag.getRawNamespaceOverride(RAW_DATA_DATASET); + final ParsedCatalog parsedCatalog = parseCatalog(config, catalog, datasetLocation, rawNamespaceOverride); final BigQuery bigquery = getBigQuery(config); - final TyperDeduper typerDeduper = buildTyperDeduper(sqlGenerator, parsedCatalog, bigquery, datasetLocation, disableTypeDedupe); + final TyperDeduper typerDeduper = + buildTyperDeduper(sqlGenerator, parsedCatalog, bigquery, datasetLocation, disableTypeDedupe); AirbyteExceptionHandler.addAllStringsInConfigForDeinterpolation(config); final JsonNode serviceAccountKey = config.get(BigQueryConsts.CONFIG_CREDS); @@ -360,7 +363,6 @@ private SerializedAirbyteMessageConsumer getStandardRecordConsumer(final BigQuer final Consumer outputRecordCollector, final TyperDeduper typerDeduper) throws Exception { - typerDeduper.prepareTables(); final Supplier>> writeConfigs = getUploaderMap( bigquery, config, @@ -372,6 +374,8 @@ private SerializedAirbyteMessageConsumer 
getStandardRecordConsumer(final BigQuer return new BigQueryRecordStandardConsumer( outputRecordCollector, () -> { + typerDeduper.prepareSchemasAndRunMigrations(); + // Set up our raw tables writeConfigs.get().forEach((streamId, uploader) -> { final StreamConfig stream = parsedCatalog.getStream(streamId); @@ -390,6 +394,8 @@ private SerializedAirbyteMessageConsumer getStandardRecordConsumer(final BigQuer uploader.createRawTable(); } }); + + typerDeduper.prepareFinalTables(); }, (hasFailed, streamSyncSummaries) -> { try { @@ -424,11 +430,13 @@ private void setDefaultStreamNamespace(final ConfiguredAirbyteCatalog catalog, f } } - private ParsedCatalog parseCatalog(final JsonNode config, final ConfiguredAirbyteCatalog catalog, final String datasetLocation) { + private ParsedCatalog parseCatalog(final JsonNode config, + final ConfiguredAirbyteCatalog catalog, + final String datasetLocation, + final Optional rawNamespaceOverride) { final BigQuerySqlGenerator sqlGenerator = new BigQuerySqlGenerator(config.get(BigQueryConsts.CONFIG_PROJECT_ID).asText(), datasetLocation); - final CatalogParser catalogParser = TypingAndDedupingFlag.getRawNamespaceOverride(RAW_DATA_DATASET).isPresent() - ? new CatalogParser(sqlGenerator, TypingAndDedupingFlag.getRawNamespaceOverride(RAW_DATA_DATASET).get()) - : new CatalogParser(sqlGenerator); + final CatalogParser catalogParser = rawNamespaceOverride.map(s -> new CatalogParser(sqlGenerator, s)) + .orElseGet(() -> new CatalogParser(sqlGenerator)); return catalogParser.parseCatalog(catalog); } @@ -440,11 +448,13 @@ private TyperDeduper buildTyperDeduper(final BigQuerySqlGenerator sqlGenerator, final boolean disableTypeDedupe) { final BigQueryV1V2Migrator migrator = new BigQueryV1V2Migrator(bigquery, namingResolver); final BigQueryV2TableMigrator v2RawTableMigrator = new BigQueryV2TableMigrator(bigquery); - final BigQueryDestinationHandler destinationHandler = new BigQueryDestinationHandler(bigquery, datasetLocation); + final BigQueryDestinationHandler destinationHandler = new BigQueryDestinationHandler( + bigquery, + datasetLocation); if (disableTypeDedupe) { return new NoOpTyperDeduperWithV1V2Migrations<>( - sqlGenerator, destinationHandler, parsedCatalog, migrator, v2RawTableMigrator, 8); + sqlGenerator, destinationHandler, parsedCatalog, migrator, v2RawTableMigrator, List.of()); } return new DefaultTyperDeduper<>( @@ -453,8 +463,7 @@ private TyperDeduper buildTyperDeduper(final BigQuerySqlGenerator sqlGenerator, parsedCatalog, migrator, v2RawTableMigrator, - 8); - + List.of()); } @Override diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryStagingConsumerFactory.java b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryStagingConsumerFactory.java index a929bfbf095f..5f40d71c4815 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryStagingConsumerFactory.java +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryStagingConsumerFactory.java @@ -135,7 +135,8 @@ private OnStartFunction onStartFunction(final BigQueryStagingOperations bigQuery final TyperDeduper typerDeduper) { return () -> { LOGGER.info("Preparing airbyte_raw tables in destination started for {} streams", writeConfigs.size()); - typerDeduper.prepareTables(); + typerDeduper.prepareSchemasAndRunMigrations(); 
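To make the control-flow change in this commit easier to follow: the single `prepareTables()` call is replaced by a two-phase setup, with the per-stream raw-table work sandwiched in between. A minimal sketch of the sequence, in Kotlin for brevity (the shipped connector code here is Java; `TyperDeduper` and the two method names come from the diff, and the import path and `createRawTables` parameter are assumptions for illustration):

```kotlin
import io.airbyte.integrations.base.destination.typing_deduping.TyperDeduper

// Illustrative start-up sequence; `createRawTables` stands in for the
// per-stream uploader/staging setup shown in the surrounding hunks.
fun onStart(typerDeduper: TyperDeduper, createRawTables: () -> Unit) {
    // Phase 1: create schemas and run destination-state migrations before
    // any table is touched.
    typerDeduper.prepareSchemasAndRunMigrations()

    // Raw (untyped) tables are created or truncated between the phases.
    createRawTables()

    // Phase 2: create, and if needed soft-reset, the typed final tables.
    typerDeduper.prepareFinalTables()
}
```

This split lets migrations run before the final tables are inspected, which is why both consumer factories in this commit call the two methods around their raw-table setup.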
+ for (final BigQueryWriteConfig writeConfig : writeConfigs.values()) { LOGGER.info("Preparing staging are in destination for schema: {}, stream: {}, target table: {}, stage: {}", writeConfig.tableSchema(), writeConfig.streamName(), writeConfig.targetTableId(), writeConfig.streamName()); @@ -156,6 +157,8 @@ private OnStartFunction onStartFunction(final BigQueryStagingOperations bigQuery bigQueryGcsOperations.truncateTableIfExists(rawDatasetId, writeConfig.targetTableId(), writeConfig.tableSchema()); } } + + typerDeduper.prepareFinalTables(); LOGGER.info("Preparing tables in destination completed."); }; } diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/typing_deduping/BigQueryDestinationHandler.java b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/typing_deduping/BigQueryDestinationHandler.java index 8199165d6527..111894b62967 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/typing_deduping/BigQueryDestinationHandler.java +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/typing_deduping/BigQueryDestinationHandler.java @@ -4,8 +4,17 @@ package io.airbyte.integrations.destination.bigquery.typing_deduping; +import static io.airbyte.integrations.base.destination.typing_deduping.CollectionUtils.containsAllIgnoreCase; +import static io.airbyte.integrations.base.destination.typing_deduping.CollectionUtils.containsIgnoreCase; +import static io.airbyte.integrations.base.destination.typing_deduping.CollectionUtils.matchingKey; +import static io.airbyte.integrations.destination.bigquery.typing_deduping.BigQuerySqlGenerator.QUOTE; +import static io.airbyte.integrations.destination.bigquery.typing_deduping.BigQuerySqlGenerator.clusteringColumns; +import static io.airbyte.integrations.destination.bigquery.typing_deduping.BigQuerySqlGenerator.toDialectType; +import static java.util.stream.Collectors.toMap; + import com.google.cloud.bigquery.BigQuery; import com.google.cloud.bigquery.BigQueryException; +import com.google.cloud.bigquery.Field; import com.google.cloud.bigquery.FieldValue; import com.google.cloud.bigquery.Job; import com.google.cloud.bigquery.JobConfiguration; @@ -14,28 +23,46 @@ import com.google.cloud.bigquery.JobStatistics; import com.google.cloud.bigquery.JobStatus; import com.google.cloud.bigquery.QueryJobConfiguration; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.StandardTableDefinition; import com.google.cloud.bigquery.Table; import com.google.cloud.bigquery.TableDefinition; import com.google.cloud.bigquery.TableId; +import com.google.cloud.bigquery.TimePartitioning; +import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.Streams; import io.airbyte.cdk.integrations.base.AirbyteExceptionHandler; +import io.airbyte.cdk.integrations.base.JavaBaseConstants; +import io.airbyte.integrations.base.destination.typing_deduping.AlterTableReport; +import io.airbyte.integrations.base.destination.typing_deduping.ColumnId; import io.airbyte.integrations.base.destination.typing_deduping.DestinationHandler; +import io.airbyte.integrations.base.destination.typing_deduping.DestinationInitialStatus; +import io.airbyte.integrations.base.destination.typing_deduping.InitialRawTableStatus; import 
io.airbyte.integrations.base.destination.typing_deduping.Sql; +import io.airbyte.integrations.base.destination.typing_deduping.StreamConfig; import io.airbyte.integrations.base.destination.typing_deduping.StreamId; +import io.airbyte.integrations.base.destination.typing_deduping.TableNotMigratedException; +import io.airbyte.integrations.base.destination.typing_deduping.migrators.MinimumDestinationState; +import io.airbyte.integrations.base.destination.typing_deduping.migrators.MinimumDestinationState.Impl; import java.math.BigInteger; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; import java.util.Comparator; -import java.util.LinkedHashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.Set; import java.util.UUID; +import java.util.stream.Collectors; import java.util.stream.Stream; import org.apache.commons.text.StringSubstitutor; import org.slf4j.Logger; import org.slf4j.LoggerFactory; // TODO this stuff almost definitely exists somewhere else in our codebase. -public class BigQueryDestinationHandler implements DestinationHandler { +public class BigQueryDestinationHandler implements DestinationHandler { private static final Logger LOGGER = LoggerFactory.getLogger(BigQueryDestinationHandler.class); @@ -47,32 +74,24 @@ public BigQueryDestinationHandler(final BigQuery bq, final String datasetLocatio this.datasetLocation = datasetLocation; } - @Override public Optional findExistingTable(final StreamId id) { final Table table = bq.getTable(id.finalNamespace(), id.finalName()); return Optional.ofNullable(table).map(Table::getDefinition); } - @Override - public LinkedHashMap findExistingFinalTables(List streamIds) throws Exception { - return null; - } - - @Override public boolean isFinalTableEmpty(final StreamId id) { return BigInteger.ZERO.equals(bq.getTable(TableId.of(id.finalNamespace(), id.finalName())).getNumRows()); } - @Override - public InitialRawTableState getInitialRawTableState(final StreamId id) throws Exception { + public InitialRawTableStatus getInitialRawTableState(final StreamId id) throws Exception { final Table rawTable = bq.getTable(TableId.of(id.rawNamespace(), id.rawName())); if (rawTable == null) { // Table doesn't exist. There are no unprocessed records, and no timestamp. - return new InitialRawTableState(false, Optional.empty()); + return new InitialRawTableStatus(false, false, Optional.empty()); } final FieldValue unloadedRecordTimestamp = bq.query(QueryJobConfiguration.newBuilder(new StringSubstitutor(Map.of( - "raw_table", id.rawTableId(BigQuerySqlGenerator.QUOTE))).replace( + "raw_table", id.rawTableId(QUOTE))).replace( // bigquery timestamps have microsecond precision """ SELECT TIMESTAMP_SUB(MIN(_airbyte_extracted_at), INTERVAL 1 MICROSECOND) @@ -84,11 +103,11 @@ SELECT TIMESTAMP_SUB(MIN(_airbyte_extracted_at), INTERVAL 1 MICROSECOND) // If it's not null, then we can return immediately - we've found some unprocessed records and their // timestamp. 
if (!unloadedRecordTimestamp.isNull()) { - return new InitialRawTableState(true, Optional.of(unloadedRecordTimestamp.getTimestampInstant())); + return new InitialRawTableStatus(true, true, Optional.of(unloadedRecordTimestamp.getTimestampInstant())); } final FieldValue loadedRecordTimestamp = bq.query(QueryJobConfiguration.newBuilder(new StringSubstitutor(Map.of( - "raw_table", id.rawTableId(BigQuerySqlGenerator.QUOTE))).replace( + "raw_table", id.rawTableId(QUOTE))).replace( """ SELECT MAX(_airbyte_extracted_at) FROM ${raw_table} @@ -98,10 +117,10 @@ SELECT MAX(_airbyte_extracted_at) // So we just need to get the timestamp of the most recent record. if (loadedRecordTimestamp.isNull()) { // Null timestamp because the table is empty. T+D can process the entire raw table during this sync. - return new InitialRawTableState(false, Optional.empty()); + return new InitialRawTableStatus(true, false, Optional.empty()); } else { // The raw table already has some records. T+D can skip all records with timestamp <= this value. - return new InitialRawTableState(false, Optional.of(loadedRecordTimestamp.getTimestampInstant())); + return new InitialRawTableStatus(true, false, Optional.of(loadedRecordTimestamp.getTimestampInstant())); } } @@ -172,4 +191,133 @@ public void execute(final Sql sql) throws InterruptedException { } } + @Override + public List> gatherInitialState(List streamConfigs) throws Exception { + final List> initialStates = new ArrayList<>(); + for (final StreamConfig streamConfig : streamConfigs) { + final StreamId id = streamConfig.id(); + final Optional finalTable = findExistingTable(id); + final InitialRawTableStatus rawTableState = getInitialRawTableState(id); + initialStates.add(new DestinationInitialStatus<>( + streamConfig, + finalTable.isPresent(), + rawTableState, + finalTable.isPresent() && !existingSchemaMatchesStreamConfig(streamConfig, finalTable.get()), + finalTable.isEmpty() || isFinalTableEmpty(id), + // Return a default state blob since we don't actually track state. + new MinimumDestinationState.Impl(false))); + } + return initialStates; + } + + @Override + public void commitDestinationStates(Map destinationStates) throws Exception { + // Intentionally do nothing. Bigquery doesn't actually support destination states. 
+ } + + private boolean existingSchemaMatchesStreamConfig(final StreamConfig stream, + final TableDefinition existingTable) + throws TableNotMigratedException { + final var alterTableReport = buildAlterTableReport(stream, existingTable); + boolean tableClusteringMatches = false; + boolean tablePartitioningMatches = false; + if (existingTable instanceof final StandardTableDefinition standardExistingTable) { + tableClusteringMatches = clusteringMatches(stream, standardExistingTable); + tablePartitioningMatches = partitioningMatches(standardExistingTable); + } + LOGGER.info("Alter Table Report {} {} {}; Clustering {}; Partitioning {}", + alterTableReport.columnsToAdd(), + alterTableReport.columnsToRemove(), + alterTableReport.columnsToChangeType(), + tableClusteringMatches, + tablePartitioningMatches); + + return alterTableReport.isNoOp() && tableClusteringMatches && tablePartitioningMatches; + } + + public AlterTableReport buildAlterTableReport(final StreamConfig stream, final TableDefinition existingTable) { + final Set pks = getPks(stream); + + final Map streamSchema = stream.columns().entrySet().stream() + .collect(toMap( + entry -> entry.getKey().name(), + entry -> toDialectType(entry.getValue()))); + + final Map existingSchema = existingTable.getSchema().getFields().stream() + .collect(toMap( + field -> field.getName(), + field -> field.getType().getStandardType())); + + // Columns in the StreamConfig that don't exist in the TableDefinition + final Set columnsToAdd = streamSchema.keySet().stream() + .filter(name -> !containsIgnoreCase(existingSchema.keySet(), name)) + .collect(Collectors.toSet()); + + // Columns in the current schema that are no longer in the StreamConfig + final Set columnsToRemove = existingSchema.keySet().stream() + .filter(name -> !containsIgnoreCase(streamSchema.keySet(), name) && !containsIgnoreCase( + JavaBaseConstants.V2_FINAL_TABLE_METADATA_COLUMNS, name)) + .collect(Collectors.toSet()); + + // Columns that are typed differently than the StreamConfig + final Set columnsToChangeType = Stream.concat( + streamSchema.keySet().stream() + // If it's not in the existing schema, it should already be in the columnsToAdd Set + .filter(name -> { + // Big Query Columns are case-insensitive, first find the correctly cased key if it exists + return matchingKey(existingSchema.keySet(), name) + // if it does exist, only include it in this set if the type (the value in each respective map) + // is different between the stream and existing schemas + .map(key -> !existingSchema.get(key).equals(streamSchema.get(name))) + // if there is no matching key, then don't include it because it is probably already in columnsToAdd + .orElse(false); + }), + + // OR columns that used to have a non-null constraint and shouldn't + // (https://github.com/airbytehq/airbyte/pull/31082) + existingTable.getSchema().getFields().stream() + .filter(field -> pks.contains(field.getName())) + .filter(field -> field.getMode() == Field.Mode.REQUIRED) + .map(Field::getName)) + .collect(Collectors.toSet()); + + final boolean isDestinationV2Format = schemaContainAllFinalTableV2AirbyteColumns(existingSchema.keySet()); + + return new AlterTableReport(columnsToAdd, columnsToRemove, columnsToChangeType, isDestinationV2Format); + } + + @VisibleForTesting + public static boolean clusteringMatches(final StreamConfig stream, final StandardTableDefinition existingTable) { + return existingTable.getClustering() != null + && containsAllIgnoreCase( + new HashSet<>(existingTable.getClustering().getFields()), + 
clusteringColumns(stream)); + } + + @VisibleForTesting + public static boolean partitioningMatches(final StandardTableDefinition existingTable) { + return existingTable.getTimePartitioning() != null + && existingTable.getTimePartitioning() + .getField() + .equalsIgnoreCase("_airbyte_extracted_at") + && TimePartitioning.Type.DAY.equals(existingTable.getTimePartitioning().getType()); + } + + /** + * Checks the schema to determine whether the table contains all expected final table airbyte + * columns + * + * @param columnNames the column names of the schema to check + * @return whether all the {@link JavaBaseConstants#V2_FINAL_TABLE_METADATA_COLUMNS} are present + */ + @VisibleForTesting + public static boolean schemaContainAllFinalTableV2AirbyteColumns(final Collection columnNames) { + return JavaBaseConstants.V2_FINAL_TABLE_METADATA_COLUMNS.stream() + .allMatch(column -> containsIgnoreCase(columnNames, column)); + } + + private static Set getPks(final StreamConfig stream) { + return stream.primaryKey() != null ? stream.primaryKey().stream().map(ColumnId::name).collect(Collectors.toSet()) : Collections.emptySet(); + } + } diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/typing_deduping/BigQuerySqlGenerator.java b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/typing_deduping/BigQuerySqlGenerator.java index c4370fc5dc0a..3fe1f2cbb145 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/typing_deduping/BigQuerySqlGenerator.java +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/typing_deduping/BigQuerySqlGenerator.java @@ -4,25 +4,15 @@ package io.airbyte.integrations.destination.bigquery.typing_deduping; -import static io.airbyte.integrations.base.destination.typing_deduping.CollectionUtils.containsAllIgnoreCase; -import static io.airbyte.integrations.base.destination.typing_deduping.CollectionUtils.containsIgnoreCase; -import static io.airbyte.integrations.base.destination.typing_deduping.CollectionUtils.matchingKey; import static io.airbyte.integrations.base.destination.typing_deduping.Sql.separately; import static io.airbyte.integrations.base.destination.typing_deduping.Sql.transactionally; import static io.airbyte.integrations.base.destination.typing_deduping.TypeAndDedupeTransaction.SOFT_RESET_SUFFIX; import static java.util.stream.Collectors.joining; -import com.google.cloud.bigquery.Field; -import com.google.cloud.bigquery.Field.Mode; import com.google.cloud.bigquery.StandardSQLTypeName; -import com.google.cloud.bigquery.StandardTableDefinition; -import com.google.cloud.bigquery.TableDefinition; -import com.google.cloud.bigquery.TimePartitioning; import com.google.common.annotations.VisibleForTesting; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; import io.airbyte.integrations.base.destination.typing_deduping.AirbyteProtocolType; import io.airbyte.integrations.base.destination.typing_deduping.AirbyteType; -import io.airbyte.integrations.base.destination.typing_deduping.AlterTableReport; import io.airbyte.integrations.base.destination.typing_deduping.Array; import io.airbyte.integrations.base.destination.typing_deduping.ColumnId; import io.airbyte.integrations.base.destination.typing_deduping.Sql; @@ -30,28 +20,22 @@ import io.airbyte.integrations.base.destination.typing_deduping.StreamConfig; 
import io.airbyte.integrations.base.destination.typing_deduping.StreamId; import io.airbyte.integrations.base.destination.typing_deduping.Struct; -import io.airbyte.integrations.base.destination.typing_deduping.TableNotMigratedException; import io.airbyte.integrations.base.destination.typing_deduping.Union; import io.airbyte.integrations.base.destination.typing_deduping.UnsupportedOneOf; import io.airbyte.integrations.destination.bigquery.BigQuerySQLNameTransformer; import io.airbyte.protocol.models.v0.DestinationSyncMode; import java.time.Instant; import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; -import java.util.Set; -import java.util.stream.Collectors; import java.util.stream.Stream; import org.apache.commons.lang3.StringUtils; import org.apache.commons.text.StringSubstitutor; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class BigQuerySqlGenerator implements SqlGenerator { +public class BigQuerySqlGenerator implements SqlGenerator { public static final String QUOTE = "`"; private static final BigQuerySQLNameTransformer nameTransformer = new BigQuerySQLNameTransformer(); @@ -95,7 +79,7 @@ public ColumnId buildColumnId(final String name, final String suffix) { nameTransformer.getIdentifier(nameWithSuffix.toLowerCase())); } - public StandardSQLTypeName toDialectType(final AirbyteType type) { + public static StandardSQLTypeName toDialectType(final AirbyteType type) { // switch pattern-matching is still in preview at language level 17 :( if (type instanceof final AirbyteProtocolType p) { return toDialectType(p); @@ -197,7 +181,7 @@ THEN JSON_QUERY(`_airbyte_data`, '$."${column_name}"') // TODO maybe make this a BiMap and elevate this method and its inverse (toDestinationSQLType?) to // the SQLGenerator? - public StandardSQLTypeName toDialectType(final AirbyteProtocolType airbyteProtocolType) { + public static StandardSQLTypeName toDialectType(final AirbyteProtocolType airbyteProtocolType) { return switch (airbyteProtocolType) { case STRING -> StandardSQLTypeName.STRING; case NUMBER -> StandardSQLTypeName.NUMERIC; @@ -239,7 +223,7 @@ PARTITION BY (DATE_TRUNC(_airbyte_extracted_at, DAY)) """)); } - private List clusteringColumns(final StreamConfig stream) { + static List clusteringColumns(final StreamConfig stream) { final List clusterColumns = new ArrayList<>(); if (stream.destinationSyncMode() == DestinationSyncMode.APPEND_DEDUP) { // We're doing de-duping, therefore we have a primary key. 
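Before the next hunk removes the table-comparison logic from the SQL generator (it moves into `BigQueryDestinationHandler`, shown earlier in this commit), it is worth spelling out what `clusteringColumns` computes, since both classes now share it. A sketch in Kotlin (the shipped generator is Java; the first-three-primary-keys rule is documented in the test comments later in this patch):

```kotlin
// Final tables are partitioned by day on `_airbyte_extracted_at` and
// clustered on up to the first three primary-key columns plus
// `_airbyte_extracted_at` (BigQuery caps clustering at four columns).
fun clusteringColumns(stream: StreamConfig): List<String> {
    val clusterColumns = mutableListOf<String>()
    if (stream.destinationSyncMode() == DestinationSyncMode.APPEND_DEDUP) {
        // De-duping implies a primary key.
        stream.primaryKey().take(3).forEach { pk -> clusterColumns.add(pk.name()) }
    }
    clusterColumns.add("_airbyte_extracted_at")
    return clusterColumns
}
```

The handler's `clusteringMatches` and `partitioningMatches` checks earlier in this commit compare an existing table against exactly this scheme, which is part of what decides whether a soft reset is required.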
@@ -259,108 +243,6 @@ private String columnsAndTypes(final StreamConfig stream) { .collect(joining(",\n")); } - @Override - public boolean existingSchemaMatchesStreamConfig(final StreamConfig stream, - final TableDefinition existingTable) - throws TableNotMigratedException { - final var alterTableReport = buildAlterTableReport(stream, existingTable); - boolean tableClusteringMatches = false; - boolean tablePartitioningMatches = false; - if (existingTable instanceof final StandardTableDefinition standardExistingTable) { - tableClusteringMatches = clusteringMatches(stream, standardExistingTable); - tablePartitioningMatches = partitioningMatches(standardExistingTable); - } - LOGGER.info("Alter Table Report {} {} {}; Clustering {}; Partitioning {}", - alterTableReport.columnsToAdd(), - alterTableReport.columnsToRemove(), - alterTableReport.columnsToChangeType(), - tableClusteringMatches, - tablePartitioningMatches); - - return alterTableReport.isNoOp() && tableClusteringMatches && tablePartitioningMatches; - } - - @VisibleForTesting - public boolean clusteringMatches(final StreamConfig stream, final StandardTableDefinition existingTable) { - return existingTable.getClustering() != null - && containsAllIgnoreCase( - new HashSet<>(existingTable.getClustering().getFields()), - clusteringColumns(stream)); - } - - @VisibleForTesting - public boolean partitioningMatches(final StandardTableDefinition existingTable) { - return existingTable.getTimePartitioning() != null - && existingTable.getTimePartitioning() - .getField() - .equalsIgnoreCase("_airbyte_extracted_at") - && TimePartitioning.Type.DAY.equals(existingTable.getTimePartitioning().getType()); - } - - public AlterTableReport buildAlterTableReport(final StreamConfig stream, final TableDefinition existingTable) { - final Set pks = getPks(stream); - - final Map streamSchema = stream.columns().entrySet().stream() - .collect(Collectors.toMap( - entry -> entry.getKey().name(), - entry -> toDialectType(entry.getValue()))); - - final Map existingSchema = existingTable.getSchema().getFields().stream() - .collect(Collectors.toMap( - field -> field.getName(), - field -> field.getType().getStandardType())); - - // Columns in the StreamConfig that don't exist in the TableDefinition - final Set columnsToAdd = streamSchema.keySet().stream() - .filter(name -> !containsIgnoreCase(existingSchema.keySet(), name)) - .collect(Collectors.toSet()); - - // Columns in the current schema that are no longer in the StreamConfig - final Set columnsToRemove = existingSchema.keySet().stream() - .filter(name -> !containsIgnoreCase(streamSchema.keySet(), name) && !containsIgnoreCase( - JavaBaseConstants.V2_FINAL_TABLE_METADATA_COLUMNS, name)) - .collect(Collectors.toSet()); - - // Columns that are typed differently than the StreamConfig - final Set columnsToChangeType = Stream.concat( - streamSchema.keySet().stream() - // If it's not in the existing schema, it should already be in the columnsToAdd Set - .filter(name -> { - // Big Query Columns are case-insensitive, first find the correctly cased key if it exists - return matchingKey(existingSchema.keySet(), name) - // if it does exist, only include it in this set if the type (the value in each respective map) - // is different between the stream and existing schemas - .map(key -> !existingSchema.get(key).equals(streamSchema.get(name))) - // if there is no matching key, then don't include it because it is probably already in columnsToAdd - .orElse(false); - }), - - // OR columns that used to have a non-null constraint and 
shouldn't - // (https://github.com/airbytehq/airbyte/pull/31082) - existingTable.getSchema().getFields().stream() - .filter(field -> pks.contains(field.getName())) - .filter(field -> field.getMode() == Mode.REQUIRED) - .map(Field::getName)) - .collect(Collectors.toSet()); - - final boolean isDestinationV2Format = schemaContainAllFinalTableV2AirbyteColumns(existingSchema.keySet()); - - return new AlterTableReport(columnsToAdd, columnsToRemove, columnsToChangeType, isDestinationV2Format); - } - - /** - * Checks the schema to determine whether the table contains all expected final table airbyte - * columns - * - * @param columnNames the column names of the schema to check - * @return whether all the {@link JavaBaseConstants#V2_FINAL_TABLE_METADATA_COLUMNS} are present - */ - @VisibleForTesting - public static boolean schemaContainAllFinalTableV2AirbyteColumns(final Collection columnNames) { - return JavaBaseConstants.V2_FINAL_TABLE_METADATA_COLUMNS.stream() - .allMatch(column -> containsIgnoreCase(columnNames, column)); - } - @Override public Sql prepareTablesForSoftReset(final StreamConfig stream) { // Bigquery can't run DDL in a transaction, so these are separate transactions. @@ -765,10 +647,6 @@ private static String cast(final String content, final String asType, final bool return wrap(open, content + " as " + asType, ")"); } - private static Set getPks(final StreamConfig stream) { - return stream.primaryKey() != null ? stream.primaryKey().stream().map(ColumnId::name).collect(Collectors.toSet()) : Collections.emptySet(); - } - private static String wrap(final String open, final String content, final String close) { return open + content + close; } diff --git a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/typing_deduping/AbstractBigQueryTypingDedupingTest.java b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/typing_deduping/AbstractBigQueryTypingDedupingTest.java index cc9f499abdfe..3d78ed982294 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/typing_deduping/AbstractBigQueryTypingDedupingTest.java +++ b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/typing_deduping/AbstractBigQueryTypingDedupingTest.java @@ -87,7 +87,7 @@ protected void teardownStreamAndNamespace(String streamNamespace, final String s } @Override - protected SqlGenerator getSqlGenerator() { + protected SqlGenerator getSqlGenerator() { return new BigQuerySqlGenerator(getConfig().get(BigQueryConsts.CONFIG_PROJECT_ID).asText(), null); } diff --git a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/typing_deduping/BigQuerySqlGeneratorIntegrationTest.java b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/typing_deduping/BigQuerySqlGeneratorIntegrationTest.java index 99ac8a8e75dd..a303a176d38c 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/typing_deduping/BigQuerySqlGeneratorIntegrationTest.java +++ 
b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/typing_deduping/BigQuerySqlGeneratorIntegrationTest.java @@ -25,7 +25,6 @@ import com.google.cloud.bigquery.Schema; import com.google.cloud.bigquery.StandardSQLTypeName; import com.google.cloud.bigquery.Table; -import com.google.cloud.bigquery.TableDefinition; import com.google.cloud.bigquery.TableResult; import io.airbyte.cdk.integrations.base.JavaBaseConstants; import io.airbyte.commons.json.Jsons; @@ -34,6 +33,7 @@ import io.airbyte.integrations.base.destination.typing_deduping.Sql; import io.airbyte.integrations.base.destination.typing_deduping.StreamConfig; import io.airbyte.integrations.base.destination.typing_deduping.StreamId; +import io.airbyte.integrations.base.destination.typing_deduping.migrators.MinimumDestinationState; import io.airbyte.integrations.destination.bigquery.BigQueryConsts; import io.airbyte.integrations.destination.bigquery.BigQueryDestination; import io.airbyte.protocol.models.v0.DestinationSyncMode; @@ -57,7 +57,7 @@ import org.slf4j.LoggerFactory; @Execution(ExecutionMode.CONCURRENT) -public class BigQuerySqlGeneratorIntegrationTest extends BaseSqlGeneratorIntegrationTest { +public class BigQuerySqlGeneratorIntegrationTest extends BaseSqlGeneratorIntegrationTest { private static final Logger LOGGER = LoggerFactory.getLogger(BigQuerySqlGeneratorIntegrationTest.class); @@ -429,6 +429,17 @@ public void noCrashOnSpecialCharacters(final String specialChars) throws Excepti super.noCrashOnSpecialCharacters(specialChars); } + /** + * Bigquery doesn't handle frequent INSERT/DELETE statements on a single table very well. So we + * don't have real state handling. Disable this test. + */ + @Override + @Disabled + @Test + public void testStateHandling() throws Exception { + super.testStateHandling(); + } + /** * TableResult contains records in a somewhat nonintuitive format (and it avoids loading them all * into memory). 
That's annoying for us since we're working with small test data, so just pull diff --git a/airbyte-integrations/connectors/destination-bigquery/src/test/java/io/airbyte/integrations/destination/bigquery/typing_deduping/BigQuerySqlGeneratorTest.java b/airbyte-integrations/connectors/destination-bigquery/src/test/java/io/airbyte/integrations/destination/bigquery/typing_deduping/BigQuerySqlGeneratorTest.java index 1fac62e2d681..66c2147b1c3f 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/test/java/io/airbyte/integrations/destination/bigquery/typing_deduping/BigQuerySqlGeneratorTest.java +++ b/airbyte-integrations/connectors/destination-bigquery/src/test/java/io/airbyte/integrations/destination/bigquery/typing_deduping/BigQuerySqlGeneratorTest.java @@ -7,59 +7,24 @@ import static java.util.Collections.emptyList; import static org.junit.jupiter.api.Assertions.assertEquals; -import com.google.cloud.bigquery.Clustering; -import com.google.cloud.bigquery.StandardSQLTypeName; -import com.google.cloud.bigquery.StandardTableDefinition; -import com.google.cloud.bigquery.TimePartitioning; -import com.google.common.collect.ImmutableList; import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.base.destination.typing_deduping.AirbyteProtocolType; -import io.airbyte.integrations.base.destination.typing_deduping.AirbyteType; -import io.airbyte.integrations.base.destination.typing_deduping.Array; import io.airbyte.integrations.base.destination.typing_deduping.CatalogParser; import io.airbyte.integrations.base.destination.typing_deduping.ColumnId; import io.airbyte.integrations.base.destination.typing_deduping.StreamConfig; import io.airbyte.integrations.base.destination.typing_deduping.StreamId; -import io.airbyte.integrations.base.destination.typing_deduping.Struct; -import io.airbyte.integrations.base.destination.typing_deduping.Union; -import io.airbyte.integrations.base.destination.typing_deduping.UnsupportedOneOf; import io.airbyte.protocol.models.v0.AirbyteStream; import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; import io.airbyte.protocol.models.v0.DestinationSyncMode; import io.airbyte.protocol.models.v0.SyncMode; -import java.util.ArrayList; import java.util.LinkedHashMap; -import java.util.List; import java.util.Optional; -import java.util.Set; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.Mockito; public class BigQuerySqlGeneratorTest { private final BigQuerySqlGenerator generator = new BigQuerySqlGenerator("foo", "US"); - @Test - public void testToDialectType() { - final Struct s = new Struct(new LinkedHashMap<>()); - final Array a = new Array(AirbyteProtocolType.BOOLEAN); - - assertEquals(StandardSQLTypeName.INT64, generator.toDialectType((AirbyteType) AirbyteProtocolType.INTEGER)); - assertEquals(StandardSQLTypeName.JSON, generator.toDialectType(s)); - assertEquals(StandardSQLTypeName.JSON, generator.toDialectType(a)); - assertEquals(StandardSQLTypeName.JSON, generator.toDialectType(new UnsupportedOneOf(new ArrayList<>()))); - - Union u = new Union(ImmutableList.of(s)); - assertEquals(StandardSQLTypeName.JSON, generator.toDialectType(u)); - u = new Union(ImmutableList.of(a)); - assertEquals(StandardSQLTypeName.JSON, generator.toDialectType(u)); - u = new Union(ImmutableList.of(AirbyteProtocolType.BOOLEAN, AirbyteProtocolType.NUMBER)); - assertEquals(StandardSQLTypeName.NUMERIC, generator.toDialectType(u)); - } - @Test public 
void testBuildColumnId() { // Uninteresting names are unchanged @@ -68,85 +33,6 @@ public void testBuildColumnId() { generator.buildColumnId("foo")); } - @Test - public void testClusteringMatches() { - StreamConfig stream = new StreamConfig(null, - null, - DestinationSyncMode.APPEND_DEDUP, - List.of(new ColumnId("foo", "bar", "fizz")), - null, - null); - - // Clustering is null - final StandardTableDefinition existingTable = Mockito.mock(StandardTableDefinition.class); - Mockito.when(existingTable.getClustering()).thenReturn(null); - Assertions.assertFalse(generator.clusteringMatches(stream, existingTable)); - - // Clustering does not contain all fields - Mockito.when(existingTable.getClustering()) - .thenReturn(Clustering.newBuilder().setFields(List.of("_airbyte_extracted_at")).build()); - Assertions.assertFalse(generator.clusteringMatches(stream, existingTable)); - - // Clustering matches - stream = new StreamConfig(null, - null, - DestinationSyncMode.OVERWRITE, - null, - null, - null); - Assertions.assertTrue(generator.clusteringMatches(stream, existingTable)); - - // Clustering only the first 3 PK columns (See https://github.com/airbytehq/oncall/issues/2565) - final var expectedStreamColumnNames = List.of("a", "b", "c"); - Mockito.when(existingTable.getClustering()) - .thenReturn(Clustering.newBuilder().setFields( - Stream.concat(expectedStreamColumnNames.stream(), Stream.of("_airbyte_extracted_at")) - .collect(Collectors.toList())) - .build()); - stream = new StreamConfig(null, - null, - DestinationSyncMode.APPEND_DEDUP, - Stream.concat(expectedStreamColumnNames.stream(), Stream.of("d", "e")) - .map(name -> new ColumnId(name, "foo", "bar")) - .collect(Collectors.toList()), - null, - null); - Assertions.assertTrue(generator.clusteringMatches(stream, existingTable)); - } - - @Test - public void testPartitioningMatches() { - final StandardTableDefinition existingTable = Mockito.mock(StandardTableDefinition.class); - // Partitioning is null - Mockito.when(existingTable.getTimePartitioning()).thenReturn(null); - Assertions.assertFalse(generator.partitioningMatches(existingTable)); - // incorrect field - Mockito.when(existingTable.getTimePartitioning()) - .thenReturn(TimePartitioning.newBuilder(TimePartitioning.Type.DAY).setField("_foo").build()); - Assertions.assertFalse(generator.partitioningMatches(existingTable)); - // incorrect partitioning scheme - Mockito.when(existingTable.getTimePartitioning()) - .thenReturn(TimePartitioning.newBuilder(TimePartitioning.Type.YEAR).setField("_airbyte_extracted_at").build()); - Assertions.assertFalse(generator.partitioningMatches(existingTable)); - - // partitioning matches - Mockito.when(existingTable.getTimePartitioning()) - .thenReturn(TimePartitioning.newBuilder(TimePartitioning.Type.DAY).setField("_airbyte_extracted_at").build()); - Assertions.assertTrue(generator.partitioningMatches(existingTable)); - } - - @Test - public void testSchemaContainAllFinalTableV2AirbyteColumns() { - Assertions.assertTrue( - BigQuerySqlGenerator.schemaContainAllFinalTableV2AirbyteColumns(Set.of("_airbyte_meta", "_airbyte_extracted_at", "_airbyte_raw_id"))); - Assertions.assertFalse(BigQuerySqlGenerator.schemaContainAllFinalTableV2AirbyteColumns(Set.of("_airbyte_extracted_at", "_airbyte_raw_id"))); - Assertions.assertFalse(BigQuerySqlGenerator.schemaContainAllFinalTableV2AirbyteColumns(Set.of("_airbyte_meta", "_airbyte_raw_id"))); - Assertions.assertFalse(BigQuerySqlGenerator.schemaContainAllFinalTableV2AirbyteColumns(Set.of("_airbyte_meta", 
"_airbyte_extracted_at"))); - Assertions.assertFalse(BigQuerySqlGenerator.schemaContainAllFinalTableV2AirbyteColumns(Set.of())); - Assertions.assertTrue( - BigQuerySqlGenerator.schemaContainAllFinalTableV2AirbyteColumns(Set.of("_AIRBYTE_META", "_AIRBYTE_EXTRACTED_AT", "_AIRBYTE_RAW_ID"))); - } - @Test void columnCollision() { final CatalogParser parser = new CatalogParser(generator); diff --git a/airbyte-integrations/connectors/destination-bigquery/src/test/java/io/airbyte/integrations/destination/bigquery/typing_deduping/BigqueryDestinationHandlerTest.java b/airbyte-integrations/connectors/destination-bigquery/src/test/java/io/airbyte/integrations/destination/bigquery/typing_deduping/BigqueryDestinationHandlerTest.java new file mode 100644 index 000000000000..7a2d6184945d --- /dev/null +++ b/airbyte-integrations/connectors/destination-bigquery/src/test/java/io/airbyte/integrations/destination/bigquery/typing_deduping/BigqueryDestinationHandlerTest.java @@ -0,0 +1,132 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.bigquery.typing_deduping; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import com.google.cloud.bigquery.Clustering; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.StandardTableDefinition; +import com.google.cloud.bigquery.TimePartitioning; +import com.google.common.collect.ImmutableList; +import io.airbyte.integrations.base.destination.typing_deduping.AirbyteProtocolType; +import io.airbyte.integrations.base.destination.typing_deduping.AirbyteType; +import io.airbyte.integrations.base.destination.typing_deduping.Array; +import io.airbyte.integrations.base.destination.typing_deduping.ColumnId; +import io.airbyte.integrations.base.destination.typing_deduping.StreamConfig; +import io.airbyte.integrations.base.destination.typing_deduping.Struct; +import io.airbyte.integrations.base.destination.typing_deduping.Union; +import io.airbyte.integrations.base.destination.typing_deduping.UnsupportedOneOf; +import io.airbyte.protocol.models.v0.DestinationSyncMode; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +public class BigqueryDestinationHandlerTest { + + @Test + public void testToDialectType() { + final Struct s = new Struct(new LinkedHashMap<>()); + final Array a = new Array(AirbyteProtocolType.BOOLEAN); + + assertEquals(StandardSQLTypeName.INT64, BigQuerySqlGenerator.toDialectType((AirbyteType) AirbyteProtocolType.INTEGER)); + assertEquals(StandardSQLTypeName.JSON, BigQuerySqlGenerator.toDialectType(s)); + assertEquals(StandardSQLTypeName.JSON, BigQuerySqlGenerator.toDialectType(a)); + assertEquals(StandardSQLTypeName.JSON, BigQuerySqlGenerator.toDialectType(new UnsupportedOneOf(new ArrayList<>()))); + + Union u = new Union(ImmutableList.of(s)); + assertEquals(StandardSQLTypeName.JSON, BigQuerySqlGenerator.toDialectType(u)); + u = new Union(ImmutableList.of(a)); + assertEquals(StandardSQLTypeName.JSON, BigQuerySqlGenerator.toDialectType(u)); + u = new Union(ImmutableList.of(AirbyteProtocolType.BOOLEAN, AirbyteProtocolType.NUMBER)); + assertEquals(StandardSQLTypeName.NUMERIC, BigQuerySqlGenerator.toDialectType(u)); + } + + @Test + public void testClusteringMatches() { + StreamConfig stream = new StreamConfig(null, + 
null, + DestinationSyncMode.APPEND_DEDUP, + List.of(new ColumnId("foo", "bar", "fizz")), + null, + null); + + // Clustering is null + final StandardTableDefinition existingTable = Mockito.mock(StandardTableDefinition.class); + Mockito.when(existingTable.getClustering()).thenReturn(null); + Assertions.assertFalse(BigQueryDestinationHandler.clusteringMatches(stream, existingTable)); + + // Clustering does not contain all fields + Mockito.when(existingTable.getClustering()) + .thenReturn(Clustering.newBuilder().setFields(List.of("_airbyte_extracted_at")).build()); + Assertions.assertFalse(BigQueryDestinationHandler.clusteringMatches(stream, existingTable)); + + // Clustering matches + stream = new StreamConfig(null, + null, + DestinationSyncMode.OVERWRITE, + null, + null, + null); + Assertions.assertTrue(BigQueryDestinationHandler.clusteringMatches(stream, existingTable)); + + // Clustering only the first 3 PK columns (See https://github.com/airbytehq/oncall/issues/2565) + final var expectedStreamColumnNames = List.of("a", "b", "c"); + Mockito.when(existingTable.getClustering()) + .thenReturn(Clustering.newBuilder().setFields( + Stream.concat(expectedStreamColumnNames.stream(), Stream.of("_airbyte_extracted_at")) + .collect(Collectors.toList())) + .build()); + stream = new StreamConfig(null, + null, + DestinationSyncMode.APPEND_DEDUP, + Stream.concat(expectedStreamColumnNames.stream(), Stream.of("d", "e")) + .map(name -> new ColumnId(name, "foo", "bar")) + .collect(Collectors.toList()), + null, + null); + Assertions.assertTrue(BigQueryDestinationHandler.clusteringMatches(stream, existingTable)); + } + + @Test + public void testPartitioningMatches() { + final StandardTableDefinition existingTable = Mockito.mock(StandardTableDefinition.class); + // Partitioning is null + Mockito.when(existingTable.getTimePartitioning()).thenReturn(null); + Assertions.assertFalse(BigQueryDestinationHandler.partitioningMatches(existingTable)); + // incorrect field + Mockito.when(existingTable.getTimePartitioning()) + .thenReturn(TimePartitioning.newBuilder(TimePartitioning.Type.DAY).setField("_foo").build()); + Assertions.assertFalse(BigQueryDestinationHandler.partitioningMatches(existingTable)); + // incorrect partitioning scheme + Mockito.when(existingTable.getTimePartitioning()) + .thenReturn(TimePartitioning.newBuilder(TimePartitioning.Type.YEAR).setField("_airbyte_extracted_at").build()); + Assertions.assertFalse(BigQueryDestinationHandler.partitioningMatches(existingTable)); + + // partitioning matches + Mockito.when(existingTable.getTimePartitioning()) + .thenReturn(TimePartitioning.newBuilder(TimePartitioning.Type.DAY).setField("_airbyte_extracted_at").build()); + Assertions.assertTrue(BigQueryDestinationHandler.partitioningMatches(existingTable)); + } + + @Test + public void testSchemaContainAllFinalTableV2AirbyteColumns() { + Assertions.assertTrue( + BigQueryDestinationHandler.schemaContainAllFinalTableV2AirbyteColumns(Set.of("_airbyte_meta", "_airbyte_extracted_at", "_airbyte_raw_id"))); + Assertions.assertFalse(BigQueryDestinationHandler.schemaContainAllFinalTableV2AirbyteColumns(Set.of("_airbyte_extracted_at", "_airbyte_raw_id"))); + Assertions.assertFalse(BigQueryDestinationHandler.schemaContainAllFinalTableV2AirbyteColumns(Set.of("_airbyte_meta", "_airbyte_raw_id"))); + Assertions.assertFalse(BigQueryDestinationHandler.schemaContainAllFinalTableV2AirbyteColumns(Set.of("_airbyte_meta", "_airbyte_extracted_at"))); + 
Assertions.assertFalse(BigQueryDestinationHandler.schemaContainAllFinalTableV2AirbyteColumns(Set.of())); + Assertions.assertTrue( + BigQueryDestinationHandler.schemaContainAllFinalTableV2AirbyteColumns(Set.of("_AIRBYTE_META", "_AIRBYTE_EXTRACTED_AT", "_AIRBYTE_RAW_ID"))); + } + +} diff --git a/docs/integrations/destinations/bigquery.md b/docs/integrations/destinations/bigquery.md index 7f475376d592..e02e203bce07 100644 --- a/docs/integrations/destinations/bigquery.md +++ b/docs/integrations/destinations/bigquery.md @@ -210,13 +210,14 @@ tutorials: | Version | Date | Pull Request | Subject | |:--------|:-----------|:-----------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------| -| 2.4.11 | 2024-02-22 | [35569](https://github.com/airbytehq/airbyte/pull/35569) | Fix logging bug. | -| 2.4.10 | 2024-02-15 | [35240](https://github.com/airbytehq/airbyte/pull/35240) | Adopt CDK 0.20.9 | -| 2.4.9 | 2024-02-15 | [35285](https://github.com/airbytehq/airbyte/pull/35285) | Adopt CDK 0.20.8 | -| 2.4.8 | 2024-02-12 | [35144](https://github.com/airbytehq/airbyte/pull/35144) | Adopt CDK 0.20.2 | -| 2.4.7 | 2024-02-12 | [35111](https://github.com/airbytehq/airbyte/pull/35111) | Adopt CDK 0.20.1 | -| 2.4.6 | 2024-02-09 | [34575](https://github.com/airbytehq/airbyte/pull/34575) | Adopt CDK 0.20.0 | -| 2.4.5 | 2024-02-08 | [34745](https://github.com/airbytehq/airbyte/pull/34745) | Adopt CDK 0.19.0 | +| 2.4.12 | 2024-03-04 | [35315](https://github.com/airbytehq/airbyte/pull/35315) | Adopt CDK 0.23.11 | +| 2.4.11 | 2024-02-22 | [35569](https://github.com/airbytehq/airbyte/pull/35569) | Fix logging bug. | +| 2.4.10 | 2024-02-15 | [35240](https://github.com/airbytehq/airbyte/pull/35240) | Adopt CDK 0.20.9 | +| 2.4.9 | 2024-02-15 | [35285](https://github.com/airbytehq/airbyte/pull/35285) | Adopt CDK 0.20.8 | +| 2.4.8 | 2024-02-12 | [35144](https://github.com/airbytehq/airbyte/pull/35144) | Adopt CDK 0.20.2 | +| 2.4.7 | 2024-02-12 | [35111](https://github.com/airbytehq/airbyte/pull/35111) | Adopt CDK 0.20.1 | +| 2.4.6 | 2024-02-09 | [34575](https://github.com/airbytehq/airbyte/pull/34575) | Adopt CDK 0.20.0 | +| 2.4.5 | 2024-02-08 | [34745](https://github.com/airbytehq/airbyte/pull/34745) | Adopt CDK 0.19.0 | | 2.4.4 | 2024-02-08 | [35027](https://github.com/airbytehq/airbyte/pull/35027) | Upgrade CDK to 0.17.1 | | 2.4.3 | 2024-02-01 | [34728](https://github.com/airbytehq/airbyte/pull/34728) | Upgrade CDK to 0.16.4; Notable changes from 0.14.2, 0.15.1 and 0.16.3 | | 2.4.2 | 2024-01-24 | [34451](https://github.com/airbytehq/airbyte/pull/34451) | Improve logging for unparseable input | From e11f0fefc1aa953ae7b7d56087912926ecd252ed Mon Sep 17 00:00:00 2001 From: Stephane Geneix <147216312+stephane-airbyte@users.noreply.github.com> Date: Mon, 4 Mar 2024 15:34:47 -0800 Subject: [PATCH 069/172] minor changes to the java CDK test code (#35774) ---

Generated summary (powered by Graphite) > ## TL;DR > This pull request adds logging for non-public JUnit methods and updates test methods to be public. > > ## What changed > - Added logging for non-public JUnit methods in `LoggingInvocationInterceptor.java`. > - Updated test methods to be public in `CdcSourceTest.java`. > - Updated logging message in `DefaultAirbyteSource.java`. > > ## How to test > No specific testing instructions provided. > > ## Why make this change > - To improve visibility and debugging by logging non-public JUnit methods. > - To ensure consistency and best practices by making test methods public.
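
For illustration, a minimal, self-contained sketch of the visibility check described above. The class name, package, and per-class registration are assumptions made for this sketch; the actual change lands inside the CDK's existing LoggingInvocationInterceptor (see the diff below), which also handles timing, timeouts, and proxying of the other interceptor callbacks.

package io.example.junit; // hypothetical package, for the sketch only

import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.api.extension.InvocationInterceptor;
import org.junit.jupiter.api.extension.ReflectiveInvocationContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Warns when a JUnit 5 test method is not declared public, then runs it as usual.
public class VisibilityCheckingInterceptor implements InvocationInterceptor {

  private static final Logger LOGGER = LoggerFactory.getLogger(VisibilityCheckingInterceptor.class);

  @Override
  public void interceptTestMethod(final Invocation<Void> invocation,
                                  final ReflectiveInvocationContext<Method> invocationContext,
                                  final ExtensionContext extensionContext)
      throws Throwable {
    final Method method = invocationContext.getExecutable();
    if (!Modifier.isPublic(method.getModifiers())) {
      // JUnit 5 runs package-private test methods just fine, so this only warns;
      // the goal is consistency across shared test fixtures, not a hard failure.
      LOGGER.warn("Junit method {}.{} is not declared as public",
          method.getDeclaringClass().getCanonicalName(), method.getName());
    }
    // Always delegate so the test itself still executes exactly once.
    invocation.proceed();
  }

}

A test class could opt in with @ExtendWith(VisibilityCheckingInterceptor.class); the CDK instead wires its interceptor up for all tests, so connectors get the warning without per-class annotations. Logging a warning rather than failing keeps existing package-private tests running while nudging them toward the public convention.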
--- airbyte-cdk/java/airbyte-cdk/README.md | 1 + .../src/main/resources/version.properties | 2 +- .../LoggingInvocationInterceptor.java | 25 ++++-- .../airbyte/cdk/testutils/TestDatabase.java | 4 +- .../source/relationaldb/CdcStateManager.java | 2 +- .../integrations/debezium/CdcSourceTest.java | 22 +++-- .../AbstractSourceDatabaseTypeTest.java | 81 ++++++++++++++----- .../internal/DefaultAirbyteSource.java | 3 +- 8 files changed, 99 insertions(+), 41 deletions(-) diff --git a/airbyte-cdk/java/airbyte-cdk/README.md b/airbyte-cdk/java/airbyte-cdk/README.md index a32fa872ea2d..b2c128fc5036 100644 --- a/airbyte-cdk/java/airbyte-cdk/README.md +++ b/airbyte-cdk/java/airbyte-cdk/README.md @@ -166,6 +166,7 @@ MavenLocal debugging steps: | Version | Date | Pull Request | Subject | |:--------|:-----------|:-----------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 0.23.13 | 2024-03-04 | [\#35774](https://github.com/airbytehq/airbyte/pull/35774) | minor changes to the CDK test fixtures. | | 0.23.12 | 2024-03-01 | [\#35767](https://github.com/airbytehq/airbyte/pull/35767) | introducing a timeout for java tests. | | 0.23.11 | 2024-03-01 | [\#35313](https://github.com/airbytehq/airbyte/pull/35313) | Preserve timezone offset in CSV writer for destinations | | 0.23.10 | 2024-03-01 | [\#35303](https://github.com/airbytehq/airbyte/pull/35303) | Migration framework with DestinationState for softReset | diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties index f182eacffb68..ec1b63d34dad 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties @@ -1 +1 @@ -version=0.23.12 +version=0.23.13 diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/extensions/LoggingInvocationInterceptor.java b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/extensions/LoggingInvocationInterceptor.java index 07a2c526045b..68ed86ed6c58 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/extensions/LoggingInvocationInterceptor.java +++ b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/extensions/LoggingInvocationInterceptor.java @@ -7,15 +7,20 @@ import java.lang.reflect.Constructor; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; +import java.lang.reflect.Modifier; import java.lang.reflect.Proxy; import java.time.Duration; import java.time.Instant; import java.util.LinkedList; import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutorService; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.exception.ExceptionUtils; +import org.apache.commons.lang3.time.DurationFormatUtils; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Timeout; import org.junit.jupiter.api.Timeout.ThreadMode; @@ -37,10 +42,11 @@ public class LoggingInvocationInterceptor implements InvocationInterceptor { private static final Duration DEFAULT_TIMEOUT = Duration.ofMinutes(5); + private static final Logger LOGGER = LoggerFactory.getLogger(LoggingInvocationInterceptor.class); 
private static final class LoggingInvocationInterceptorHandler implements InvocationHandler { - private static final Logger LOGGER = LoggerFactory.getLogger(LoggingInvocationInterceptor.class); + private static final Map<Thread, ExecutorService> executorByThread = new ConcurrentHashMap<>(); private static final Pattern methodPattern = Pattern.compile("intercept(.*)Method"); @@ -70,22 +76,24 @@ public Object invoke(Object proxy, Method method, Object[] args) throws Throwabl } else { logLineSuffix = "execution of unknown intercepted call %s".formatted(methodName); } - LOGGER.info("Junit starting {}", logLineSuffix); + Instant start = Instant.now(); try { - Instant start = Instant.now(); final Object retVal; Duration timeout = getTimeout(invocationContext); if (timeout != null) { + LOGGER.info("Junit starting {} with timeout of {}", logLineSuffix, DurationFormatUtils.formatDurationWords(timeout.toMillis(), true, true)); retVal = Assertions.assertTimeoutPreemptively(timeout, invocation::proceed); } else { + LOGGER.warn("Junit starting {} with no timeout", logLineSuffix); retVal = invocation.proceed(); } long elapsedMs = Duration.between(start, Instant.now()).toMillis(); - LOGGER.info("Junit completed {} in {} ms", logLineSuffix, elapsedMs); + LOGGER.info("Junit completed {} in {}", logLineSuffix, DurationFormatUtils.formatDurationWords(elapsedMs, true, true)); return retVal; } catch (Throwable t) { + long elapsedMs = Duration.between(start, Instant.now()).toMillis(); boolean belowCurrentCall = false; - List<String> stackToDisplay = new LinkedList(); + List<String> stackToDisplay = new LinkedList<>(); for (String stackString : ExceptionUtils.getStackFrames(t)) { if (stackString.startsWith("\tat ")) { if (!belowCurrentCall && stackString.contains(LoggingInvocationInterceptor.class.getCanonicalName())) { @@ -99,7 +107,8 @@ public Object invoke(Object proxy, Method method, Object[] args) throws Throwabl } } String stackTrace = StringUtils.join(stackToDisplay, "\n "); - LOGGER.warn("Junit exception throw during {}:\n{}", logLineSuffix, stackTrace); + LOGGER.error("Junit exception throw during {} after {}:\n{}", logLineSuffix, DurationFormatUtils.formatDurationWords(elapsedMs, true, true), + stackTrace); throw t; } } @@ -176,6 +185,10 @@ public void interceptTestMethod(Invocation<Void> invocation, ReflectiveInvocationContext<Method> invocationContext, ExtensionContext extensionContext) throws Throwable { + if (!Modifier.isPublic(invocationContext.getExecutable().getModifiers())) { + LOGGER.warn("Junit method {}.{} is not declared as public", invocationContext.getExecutable().getDeclaringClass().getCanonicalName(), + invocationContext.getExecutable().getName()); + } proxy.interceptTestMethod(invocation, invocationContext, extensionContext); } diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/TestDatabase.java b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/TestDatabase.java index 3a253ccf978e..3ee1d0e9b0d1 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/TestDatabase.java +++ b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/TestDatabase.java @@ -118,7 +118,7 @@ public T with(String fmtSql, Object... fmtArgs) { * object. This typically entails at least a CREATE DATABASE and a CREATE USER. Also Initializes the * {@link DataSource} and {@link DSLContext} owned by this object.
*/ - final public T initialized() { + public T initialized() { inContainerBootstrapCmd().forEach(this::execInContainer); this.dataSource = DataSourceFactory.create( getUserName(), @@ -193,7 +193,7 @@ protected void execSQL(final Stream<String> sql) { try { getDatabase().query(ctx -> { sql.forEach(statement -> { - LOGGER.debug("{}", statement); + LOGGER.info("executing SQL statement {}", statement); ctx.execute(statement); }); return null; diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/CdcStateManager.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/CdcStateManager.java index 06a1587bbff5..c4532dcd0270 100644 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/CdcStateManager.java +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/CdcStateManager.java @@ -30,7 +30,7 @@ public CdcStateManager(final CdcState serialized, this.initialStreamsSynced = initialStreamsSynced; this.rawStateMessage = stateMessage; - LOGGER.info("Initialized CDC state with: {}", serialized); + LOGGER.info("Initialized CDC state"); } public void setCdcState(final CdcState state) { diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/debezium/CdcSourceTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/debezium/CdcSourceTest.java index 91638d2982f6..62ab77544d4f 100644 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/debezium/CdcSourceTest.java +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/debezium/CdcSourceTest.java @@ -144,7 +144,11 @@ protected ConfiguredAirbyteCatalog getConfiguredCatalog() { @BeforeEach protected void setup() { testdb = createTestDatabase(); + createTables(); + populateTables(); + } + protected void createTables() { // create and populate actual table final var actualColumns = ImmutableMap.of( COL_ID, "INTEGER", @@ -153,11 +157,8 @@ protected void setup() { testdb .with(createSchemaSqlFmt(), modelsSchema()) .with(createTableSqlFmt(), modelsSchema(), MODELS_STREAM_NAME, columnClause(actualColumns, Optional.of(COL_ID))); - for (final JsonNode recordJson : MODEL_RECORDS) { - writeModelRecord(recordJson); - } - // Create and populate random table. + // Create random table. // This table is not part of Airbyte sync. It is being created just to make sure the schemas not // being synced by Airbyte are not causing issues with our debezium logic. final var randomColumns = ImmutableMap.of( @@ -168,6 +169,13 @@ protected void setup() { testdb.with(createSchemaSqlFmt(), randomSchema()); } testdb.with(createTableSqlFmt(), randomSchema(), RANDOM_TABLE_NAME, columnClause(randomColumns, Optional.of(COL_ID + "_random"))); + } + + protected void populateTables() { + for (final JsonNode recordJson : MODEL_RECORDS) { + writeModelRecord(recordJson); + } + for (final JsonNode recordJson : MODEL_RECORDS_RANDOM) { writeRecords(recordJson, randomSchema(), RANDOM_TABLE_NAME, COL_ID + "_random", COL_MAKE_ID + "_random", COL_MODEL + "_random"); @@ -351,7 +359,7 @@ protected void compareTargetPositionFromTheRecordsWithTargetPostionGeneratedBefo @Test // When a record is deleted, produces a deletion record.
- void testDelete() throws Exception { + public void testDelete() throws Exception { final AutoCloseableIterator<AirbyteMessage> read1 = source() .read(config(), getConfiguredCatalog(), null); final List<AirbyteMessage> actualRecords1 = AutoCloseableIterators.toListAndClose(read1); @@ -380,7 +388,7 @@ protected void assertExpectedStateMessagesFromIncrementalSync(final List read1 = source() .read(config(), getConfiguredCatalog(), null); @@ -483,7 +491,7 @@ protected void assertExpectedStateMessagesForRecordsProducedDuringAndAfterSync(f @Test // When both incremental CDC and full refresh are configured for different streams in a sync, the // data is replicated as expected. - void testCdcAndFullRefreshInSameSync() throws Exception { + public void testCdcAndFullRefreshInSameSync() throws Exception { final ConfiguredAirbyteCatalog configuredCatalog = Jsons.clone(getConfiguredCatalog()); final List<JsonNode> MODEL_RECORDS_2 = ImmutableList.of( diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/AbstractSourceDatabaseTypeTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/AbstractSourceDatabaseTypeTest.java index c8a274208662..9dcb95773cdb 100644 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/AbstractSourceDatabaseTypeTest.java +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/standardtest/source/AbstractSourceDatabaseTypeTest.java @@ -24,8 +24,14 @@ import io.airbyte.protocol.models.v0.SyncMode; import java.io.IOException; import java.sql.SQLException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import java.util.stream.Collectors; +import org.apache.commons.lang3.StringUtils; import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -40,6 +46,7 @@ public abstract class AbstractSourceDatabaseTypeTest extends AbstractSourceConne private static final Logger LOGGER = LoggerFactory.getLogger(AbstractSourceDatabaseTypeTest.class); protected final List<TestDataHolder> testDataHolders = new ArrayList<>(); + protected Database database; /** * The column name
Override it if default name is @@ -76,7 +83,10 @@ protected String getTestColumnName() { @Override protected void setupEnvironment(final TestDestinationEnv environment) throws Exception { - setupDatabaseInternal(); + database = setupDatabase(); + initTests(); + createTables(); + populateTables(); } /** @@ -157,8 +167,8 @@ public UnexpectedRecord(String streamName, String unexpectedValue) { final List<AirbyteMessage> recordMessages = allMessages.stream().filter(m -> m.getType() == Type.RECORD).toList(); final Map<String, List<String>> expectedValues = new HashMap<>(); - final ArrayList<MissedRecords> missedValues = new ArrayList<>(); - final List<UnexpectedRecord> unexpectedValues = new ArrayList<>(); + final Map<String, List<MissedRecords>> missedValuesByStream = new HashMap<>(); + final Map<String, List<UnexpectedRecord>> unexpectedValuesByStream = new HashMap<>(); final Map<String, TestDataHolder> testByName = new HashMap<>(); // If there is no expected value in the test set we don't include it in the list to be asserted @@ -178,31 +188,51 @@ public UnexpectedRecord(String streamName, String unexpectedValue) { if (expectedValuesForStream != null) { final String value = getValueFromJsonNode(message.getRecord().getData().get(getTestColumnName())); if (!expectedValuesForStream.contains(value)) { - unexpectedValues.add(new UnexpectedRecord(streamName, value)); + unexpectedValuesByStream.putIfAbsent(streamName, new ArrayList<>()); + unexpectedValuesByStream.get(streamName).add(new UnexpectedRecord(streamName, value)); } else { expectedValuesForStream.remove(value); } } } - assertTrue(unexpectedValues.isEmpty(), - unexpectedValues.stream().map((entry) -> // stream each entry, map it to string value - "The stream '" + entry.streamName + "' checking type '" + testByName.get(entry.streamName).getSourceType() + "' initialized at " - + testByName.get(entry.streamName).getDeclarationLocation() + " got unexpected values: " + entry.unexpectedValue) - .collect(Collectors.joining("\n"))); // and join them - // Gather all the missing values, so we don't stop the test in the first missed one expectedValues.forEach((streamName, values) -> { if (!values.isEmpty()) { - missedValues.add(new MissedRecords(streamName, values)); + missedValuesByStream.putIfAbsent(streamName, new ArrayList<>()); + missedValuesByStream.get(streamName).add(new MissedRecords(streamName, values)); } }); - assertTrue(missedValues.isEmpty(), - missedValues.stream().map((entry) -> // stream each entry, map it to string value - "The stream '" + entry.streamName + "' checking type '" + testByName.get(entry.streamName).getSourceType() + "' initialized at " - + testByName.get(entry.streamName).getDeclarationLocation() + " is missing values: " + entry.missedValues) - .collect(Collectors.joining("\n"))); // and join them + Map<String, List<String>> errorsByStream = new HashMap<>(); + for (String streamName : unexpectedValuesByStream.keySet()) { + errorsByStream.putIfAbsent(streamName, new ArrayList<>()); + TestDataHolder test = testByName.get(streamName); + List<UnexpectedRecord> unexpectedValues = unexpectedValuesByStream.get(streamName); + for (UnexpectedRecord unexpectedValue : unexpectedValues) { + errorsByStream.get(streamName).add( + "The stream '%s' checking type '%s' initialized at %s got unexpected values: %s".formatted(streamName, test.getSourceType(), + test.getDeclarationLocation(), unexpectedValue)); + } + } + + for (String streamName : missedValuesByStream.keySet()) { + errorsByStream.putIfAbsent(streamName, new ArrayList<>()); + TestDataHolder test = testByName.get(streamName); + List<MissedRecords> missedValues = missedValuesByStream.get(streamName); + for (MissedRecords missedValue : missedValues) { +
errorsByStream.get(streamName).add( + "The stream '%s' checking type '%s' initialized at %s is missing values: %s".formatted(streamName, test.getSourceType(), + test.getDeclarationLocation(), missedValue)); + } + } + + List<String> errorStrings = new ArrayList<>(); + for (List<String> errors : errorsByStream.values()) { + errorStrings.add(StringUtils.join(errors, "\n")); + } + + assertTrue(errorsByStream.isEmpty(), StringUtils.join(errorStrings, "\n")); } protected String getValueFromJsonNode(final JsonNode jsonNode) throws IOExceptio @@ -224,16 +254,23 @@ protected String getValueFromJsonNode(final JsonNode jsonNode) throws IOExceptio * @throws Exception might raise exception if configuration goes wrong or tables creation/insert * scripts failed. */ - private void setupDatabaseInternal() throws Exception { - final Database database = setupDatabase(); - - initTests(); + protected void createTables() throws Exception { for (final TestDataHolder test : testDataHolders) { database.query(ctx -> { ctx.fetch(test.getCreateSqlQuery()); - LOGGER.debug("Table " + test.getNameWithTestPrefix() + " is created."); + LOGGER.info("Table {} is created.", test.getNameWithTestPrefix()); + return null; + }); + } + } + + protected void populateTables() throws Exception { + for (final TestDataHolder test : testDataHolders) { + database.query(ctx -> { test.getInsertSqlQueries().forEach(ctx::fetch); + LOGGER.info("Inserted {} rows in table {}", test.getInsertSqlQueries().size(), test.getNameWithTestPrefix()); + + return null; }); } diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/DefaultAirbyteSource.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/DefaultAirbyteSource.java index 8052255bfc55..269841619bfa 100644 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/DefaultAirbyteSource.java +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/testFixtures/java/io/airbyte/workers/internal/DefaultAirbyteSource.java @@ -19,7 +19,6 @@ import io.airbyte.protocol.models.AirbyteMessage.Type; import io.airbyte.workers.TestHarnessUtils; import io.airbyte.workers.WorkerConstants; -import io.airbyte.workers.exception.TestHarnessException; import io.airbyte.workers.process.IntegrationLauncher; import java.nio.file.Path; import java.time.Duration; @@ -150,7 +149,7 @@ public void close() throws Exception { if (sourceProcess.isAlive() || !IGNORED_EXIT_CODES.contains(getExitValue())) { final String message = sourceProcess.isAlive() ? "Source has not terminated " : "Source process exit with code " + getExitValue(); - throw new TestHarnessException(message + ". This warning is normal if the job was cancelled."); + LOGGER.warn(message + ". 
This warning is normal if the job was cancelled."); } } From ef981946738569f3fdbc208518d1f767a56acb24 Mon Sep 17 00:00:00 2001 From: Brian Lai <51336873+brianjlai@users.noreply.github.com> Date: Tue, 5 Mar 2024 01:05:06 -0500 Subject: [PATCH 070/172] Emit final state message for full refresh syncs and consolidate read flows (#35622) --- .../airbyte_cdk/sources/abstract_source.py | 99 ++--- .../sources/connector_state_manager.py | 38 +- .../file_based/stream/concurrent/adapters.py | 22 +- .../cursor/file_based_concurrent_cursor.py | 4 +- .../airbyte_cdk/sources/streams/__init__.py | 4 +- .../sources/streams/concurrent/adapters.py | 22 +- .../sources/streams/concurrent/cursor.py | 4 +- .../airbyte_cdk/sources/streams/core.py | 70 +-- .../test_concurrent_source_adapter.py | 3 +- .../concurrent_incremental_scenarios.py | 338 ++++++-------- .../scenarios/incremental_scenarios.py | 224 ++++------ .../stream/concurrent/test_adapters.py | 15 +- .../test_file_based_concurrent_cursor.py | 4 +- .../sources/file_based/test_scenarios.py | 4 +- .../mock_server_tests/mock_source_fixture.py | 325 ++++++++++++++ .../test_mock_server_abstract_source.py | 417 ++++++++++++++++++ .../scenarios/incremental_scenarios.py | 18 +- .../scenarios/stream_facade_scenarios.py | 10 +- .../streams/concurrent/test_adapters.py | 15 +- .../sources/streams/test_stream_read.py | 232 +++++++++- .../sources/test_abstract_source.py | 272 ++++++------ .../sources/test_connector_state_manager.py | 127 +----- .../python/unit_tests/sources/test_source.py | 32 +- 23 files changed, 1468 insertions(+), 831 deletions(-) create mode 100644 airbyte-cdk/python/unit_tests/sources/mock_server_tests/mock_source_fixture.py create mode 100644 airbyte-cdk/python/unit_tests/sources/mock_server_tests/test_mock_server_abstract_source.py diff --git a/airbyte-cdk/python/airbyte_cdk/sources/abstract_source.py b/airbyte-cdk/python/airbyte_cdk/sources/abstract_source.py index a5c4b847f182..208aee6b8a4a 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/abstract_source.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/abstract_source.py @@ -23,7 +23,7 @@ from airbyte_cdk.sources.connector_state_manager import ConnectorStateManager from airbyte_cdk.sources.message import InMemoryMessageRepository, MessageRepository from airbyte_cdk.sources.source import Source -from airbyte_cdk.sources.streams import Stream +from airbyte_cdk.sources.streams import FULL_REFRESH_SENTINEL_STATE_KEY, Stream from airbyte_cdk.sources.streams.core import StreamData from airbyte_cdk.sources.streams.http.http import HttpStream from airbyte_cdk.sources.utils.record_helper import stream_data_to_airbyte_message @@ -181,10 +181,6 @@ def read( def raise_exception_on_missing_stream(self) -> bool: return True - @property - def per_stream_state_enabled(self) -> bool: - return True - def _read_stream( self, logger: logging.Logger, @@ -206,22 +202,32 @@ def _read_stream( ) stream_instance.log_stream_sync_configuration() - use_incremental = configured_stream.sync_mode == SyncMode.incremental and stream_instance.supports_incremental - if use_incremental: - record_iterator = self._read_incremental( - logger, - stream_instance, - configured_stream, - state_manager, - internal_config, - ) - else: - record_iterator = self._read_full_refresh(logger, stream_instance, configured_stream, internal_config) + stream_name = configured_stream.stream.name + # The platform always passes stream state regardless of sync mode. 
We shouldn't need to consider this case within the + # connector, but right now we need to prevent accidental usage of the previous stream state + stream_state = ( + state_manager.get_stream_state(stream_name, stream_instance.namespace) + if configured_stream.sync_mode == SyncMode.incremental + else {} + ) + + if stream_state and "state" in dir(stream_instance) and not self._stream_state_is_full_refresh(stream_state): + stream_instance.state = stream_state # type: ignore # we check that state in the dir(stream_instance) + logger.info(f"Setting state of {self.name} stream to {stream_state}") + + record_iterator = stream_instance.read( + configured_stream, + logger, + self._slice_logger, + stream_state, + state_manager, + internal_config, + ) record_counter = 0 - stream_name = configured_stream.stream.name logger.info(f"Syncing stream: {stream_name} ") - for record in record_iterator: + for record_data_or_message in record_iterator: + record = self._get_message(record_data_or_message, stream_instance) if record.type == MessageType.RECORD: record_counter += 1 if record_counter == 1: @@ -233,62 +239,11 @@ def _read_stream( logger.info(f"Read {record_counter} records from {stream_name} stream") - def _read_incremental( - self, - logger: logging.Logger, - stream_instance: Stream, - configured_stream: ConfiguredAirbyteStream, - state_manager: ConnectorStateManager, - internal_config: InternalConfig, - ) -> Iterator[AirbyteMessage]: - """Read stream using incremental algorithm - - :param logger: - :param stream_instance: - :param configured_stream: - :param state_manager: - :param internal_config: - :return: - """ - stream_name = configured_stream.stream.name - stream_state = state_manager.get_stream_state(stream_name, stream_instance.namespace) - - if stream_state and "state" in dir(stream_instance): - stream_instance.state = stream_state # type: ignore # we check that state in the dir(stream_instance) - logger.info(f"Setting state of {self.name} stream to {stream_state}") - - for record_data_or_message in stream_instance.read_incremental( - configured_stream.cursor_field, - logger, - self._slice_logger, - stream_state, - state_manager, - self.per_stream_state_enabled, - internal_config, - ): - yield self._get_message(record_data_or_message, stream_instance) - def _emit_queued_messages(self) -> Iterable[AirbyteMessage]: if self.message_repository: yield from self.message_repository.consume_queue() return - def _read_full_refresh( - self, - logger: logging.Logger, - stream_instance: Stream, - configured_stream: ConfiguredAirbyteStream, - internal_config: InternalConfig, - ) -> Iterator[AirbyteMessage]: - total_records_counter = 0 - for record_data_or_message in stream_instance.read_full_refresh(configured_stream.cursor_field, logger, self._slice_logger): - message = self._get_message(record_data_or_message, stream_instance) - yield message - if message.type == MessageType.RECORD: - total_records_counter += 1 - if internal_config.is_limit_reached(total_records_counter): - return - def _get_message(self, record_data_or_message: Union[StreamData, AirbyteMessage], stream: Stream) -> AirbyteMessage: """ Converts the input to an AirbyteMessage if it is a StreamData. 
Returns the input as is if it is already an AirbyteMessage @@ -317,3 +272,9 @@ def stop_sync_on_stream_failure(self) -> bool: def _generate_failed_streams_error_message(stream_failures: Mapping[str, AirbyteTracedException]) -> str: failures = ", ".join([f"{stream}: {filter_secrets(exception.__repr__())}" for stream, exception in stream_failures.items()]) return f"During the sync, the following streams did not sync successfully: {failures}" + + @staticmethod + def _stream_state_is_full_refresh(stream_state: Mapping[str, Any]) -> bool: + # For full refresh syncs that don't have a suitable cursor value, we emit a state that contains a sentinel key. + # This key is never used by a connector and is needed during a read to skip assigning the incoming state. + return FULL_REFRESH_SENTINEL_STATE_KEY in stream_state diff --git a/airbyte-cdk/python/airbyte_cdk/sources/connector_state_manager.py b/airbyte-cdk/python/airbyte_cdk/sources/connector_state_manager.py index 62f979f58540..9a85529d29d3 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/connector_state_manager.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/connector_state_manager.py @@ -77,7 +77,7 @@ def update_state_for_stream(self, stream_name: str, namespace: Optional[str], va stream_descriptor = HashableStreamDescriptor(name=stream_name, namespace=namespace) self.per_stream_states[stream_descriptor] = AirbyteStateBlob.parse_obj(value) - def create_state_message(self, stream_name: str, namespace: Optional[str], send_per_stream_state: bool) -> AirbyteMessage: + def create_state_message(self, stream_name: str, namespace: Optional[str]) -> AirbyteMessage: """ Generates an AirbyteMessage using the current per-stream state of a specified stream in either the per-stream or legacy format :param stream_name: The name of the stream for the message that is being created @@ -85,25 +85,18 @@ def create_state_message(self, stream_name: str, namespace: Optional[str], send_ :param send_per_stream_state: Decides which state format the message should be generated as :return: The Airbyte state message to be emitted by the connector during a sync """ - if send_per_stream_state: - hashable_descriptor = HashableStreamDescriptor(name=stream_name, namespace=namespace) - stream_state = self.per_stream_states.get(hashable_descriptor) or AirbyteStateBlob() - - # According to the Airbyte protocol, the StreamDescriptor namespace field is not required. However, the platform will throw - # a validation error if it receives namespace=null. That is why if namespace is None, the field should be omitted instead. 
- stream_descriptor = ( - StreamDescriptor(name=stream_name) if namespace is None else StreamDescriptor(name=stream_name, namespace=namespace) - ) - - return AirbyteMessage( - type=MessageType.STATE, - state=AirbyteStateMessage( - type=AirbyteStateType.STREAM, - stream=AirbyteStreamState(stream_descriptor=stream_descriptor, stream_state=stream_state), - data=dict(self._get_legacy_state()), + hashable_descriptor = HashableStreamDescriptor(name=stream_name, namespace=namespace) + stream_state = self.per_stream_states.get(hashable_descriptor) or AirbyteStateBlob() + + return AirbyteMessage( + type=MessageType.STATE, + state=AirbyteStateMessage( + type=AirbyteStateType.STREAM, + stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name=stream_name, namespace=namespace), stream_state=stream_state ), - ) - return AirbyteMessage(type=MessageType.STATE, state=AirbyteStateMessage(data=dict(self._get_legacy_state()))) + ), + ) @classmethod def _extract_from_state_message( @@ -176,13 +169,6 @@ def _create_descriptor_to_stream_state_mapping( streams[stream_descriptor] = AirbyteStateBlob.parse_obj(state_value or {}) return streams - def _get_legacy_state(self) -> Mapping[str, Any]: - """ - Using the current per-stream state, creates a mapping of all the stream states for the connector being synced - :return: A deep copy of the mapping of stream name to stream state value - """ - return {descriptor.name: state.dict() if state else {} for descriptor, state in self.per_stream_states.items()} - @staticmethod def _is_legacy_dict_state(state: Union[List[AirbyteStateMessage], MutableMapping[str, Any]]) -> bool: return isinstance(state, dict) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/adapters.py b/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/adapters.py index abaa8f7d044f..4fc1a365b424 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/adapters.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/adapters.py @@ -7,7 +7,7 @@ from functools import lru_cache from typing import TYPE_CHECKING, Any, Iterable, List, Mapping, MutableMapping, Optional, Union -from airbyte_cdk.models import AirbyteLogMessage, AirbyteMessage, Level, SyncMode, Type +from airbyte_cdk.models import AirbyteLogMessage, AirbyteMessage, ConfiguredAirbyteStream, Level, SyncMode, Type from airbyte_cdk.sources import AbstractSource from airbyte_cdk.sources.connector_state_manager import ConnectorStateManager from airbyte_cdk.sources.file_based.availability_strategy import ( @@ -156,29 +156,13 @@ def infer_schema(self, files: List[RemoteFile]) -> Mapping[str, Any]: def get_underlying_stream(self) -> DefaultStream: return self._abstract_stream - def read_full_refresh( + def read( self, - cursor_field: Optional[List[str]], - logger: logging.Logger, - slice_logger: SliceLogger, - ) -> Iterable[StreamData]: - """ - Read full refresh. 
Delegate to the underlying AbstractStream, ignoring all the parameters - :param cursor_field: (ignored) - :param logger: (ignored) - :param slice_logger: (ignored) - :return: Iterable of StreamData - """ - yield from self._read_records() - - def read_incremental( - self, - cursor_field: Optional[List[str]], + configured_stream: ConfiguredAirbyteStream, logger: logging.Logger, slice_logger: SliceLogger, stream_state: MutableMapping[str, Any], state_manager: ConnectorStateManager, - per_stream_state_enabled: bool, internal_config: InternalConfig, ) -> Iterable[StreamData]: yield from self._read_records() diff --git a/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/cursor/file_based_concurrent_cursor.py b/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/cursor/file_based_concurrent_cursor.py index 4019dfd17b9e..0e3acaf85366 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/cursor/file_based_concurrent_cursor.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/cursor/file_based_concurrent_cursor.py @@ -155,9 +155,7 @@ def emit_state_message(self) -> None: self._stream_namespace, new_state, ) - state_message = self._connector_state_manager.create_state_message( - self._stream_name, self._stream_namespace, send_per_stream_state=True - ) + state_message = self._connector_state_manager.create_state_message(self._stream_name, self._stream_namespace) self._message_repository.emit_message(state_message) def _get_new_cursor_value(self) -> str: diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/__init__.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/__init__.py index 9326fd1bdca7..f2beaf0433c8 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/__init__.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/__init__.py @@ -3,6 +3,6 @@ # # Initialize Streams Package -from .core import IncrementalMixin, Stream +from .core import FULL_REFRESH_SENTINEL_STATE_KEY, IncrementalMixin, Stream -__all__ = ["IncrementalMixin", "Stream"] +__all__ = ["FULL_REFRESH_SENTINEL_STATE_KEY", "IncrementalMixin", "Stream"] diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/adapters.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/adapters.py index 86542618354f..8b762e63a7b1 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/adapters.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/adapters.py @@ -8,7 +8,7 @@ from functools import lru_cache from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple, Union -from airbyte_cdk.models import AirbyteLogMessage, AirbyteMessage, AirbyteStream, Level, SyncMode, Type +from airbyte_cdk.models import AirbyteLogMessage, AirbyteMessage, AirbyteStream, ConfiguredAirbyteStream, Level, SyncMode, Type from airbyte_cdk.sources import AbstractSource, Source from airbyte_cdk.sources.connector_state_manager import ConnectorStateManager from airbyte_cdk.sources.message import MessageRepository @@ -116,29 +116,13 @@ def __init__(self, stream: DefaultStream, legacy_stream: Stream, cursor: Cursor, self._slice_logger = slice_logger self._logger = logger - def read_full_refresh( + def read( self, - cursor_field: Optional[List[str]], - logger: logging.Logger, - slice_logger: SliceLogger, - ) -> Iterable[StreamData]: - """ - Read full refresh. 
Delegate to the underlying AbstractStream, ignoring all the parameters - :param cursor_field: (ignored) - :param logger: (ignored) - :param slice_logger: (ignored) - :return: Iterable of StreamData - """ - yield from self._read_records() - - def read_incremental( - self, - cursor_field: Optional[List[str]], + configured_stream: ConfiguredAirbyteStream, logger: logging.Logger, slice_logger: SliceLogger, stream_state: MutableMapping[str, Any], state_manager: ConnectorStateManager, - per_stream_state_enabled: bool, internal_config: InternalConfig, ) -> Iterable[StreamData]: yield from self._read_records() diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/cursor.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/cursor.py index 82d11318f5ea..d581e66a33d8 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/cursor.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/cursor.py @@ -184,9 +184,7 @@ def _emit_state_message(self) -> None: # TODO: if we migrate stored state to the concurrent state format # (aka stop calling self._connector_state_converter.convert_to_sequential_state`), we'll need to cast datetimes to string or # int before emitting state - state_message = self._connector_state_manager.create_state_message( - self._stream_name, self._stream_namespace, send_per_stream_state=True - ) + state_message = self._connector_state_manager.create_state_message(self._stream_name, self._stream_namespace) self._message_repository.emit_message(state_message) def _merge_partitions(self) -> None: diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/core.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/core.py index 8d6ba15fdcd1..b944072ee4a4 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/core.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/core.py @@ -11,7 +11,7 @@ from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple, Union import airbyte_cdk.sources.utils.casing as casing -from airbyte_cdk.models import AirbyteMessage, AirbyteStream, SyncMode +from airbyte_cdk.models import AirbyteMessage, AirbyteStream, ConfiguredAirbyteStream, SyncMode from airbyte_cdk.models import Type as MessageType # list of all possible HTTP methods which can be used for sending of request bodies @@ -31,6 +31,10 @@ JsonSchema = Mapping[str, Any] +# Streams that only support full refresh don't have a suitable cursor so this sentinel +# value is used to indicate that stream should not load the incoming state value +FULL_REFRESH_SENTINEL_STATE_KEY = "__ab_full_refresh_state_message" + def package_name_from_class(cls: object) -> str: """Find the package name given a class name""" @@ -107,39 +111,24 @@ def get_error_display_message(self, exception: BaseException) -> Optional[str]: """ return None - def read_full_refresh( - self, - cursor_field: Optional[List[str]], - logger: logging.Logger, - slice_logger: SliceLogger, - ) -> Iterable[StreamData]: - slices = self.stream_slices(sync_mode=SyncMode.full_refresh, cursor_field=cursor_field) - logger.debug(f"Processing stream slices for {self.name} (sync_mode: full_refresh)", extra={"stream_slices": slices}) - for _slice in slices: - if slice_logger.should_log_slice_message(logger): - yield slice_logger.create_slice_log_message(_slice) - yield from self.read_records( - stream_slice=_slice, - sync_mode=SyncMode.full_refresh, - cursor_field=cursor_field, - ) - - def read_incremental( # type: ignore # ignoring typing for ConnectorStateManager because of circular 
dependencies + def read( # type: ignore # ignoring typing for ConnectorStateManager because of circular dependencies self, - cursor_field: Optional[List[str]], + configured_stream: ConfiguredAirbyteStream, logger: logging.Logger, slice_logger: SliceLogger, stream_state: MutableMapping[str, Any], state_manager, - per_stream_state_enabled: bool, internal_config: InternalConfig, ) -> Iterable[StreamData]: + sync_mode = configured_stream.sync_mode + cursor_field = configured_stream.cursor_field + slices = self.stream_slices( cursor_field=cursor_field, - sync_mode=SyncMode.incremental, + sync_mode=sync_mode, # todo: change this interface to no longer rely on sync_mode for behavior stream_state=stream_state, ) - logger.debug(f"Processing stream slices for {self.name} (sync_mode: incremental)", extra={"stream_slices": slices}) + logger.debug(f"Processing stream slices for {self.name} (sync_mode: {sync_mode.name})", extra={"stream_slices": slices}) has_slices = False record_counter = 0 @@ -148,7 +137,7 @@ def read_incremental( # type: ignore # ignoring typing for ConnectorStateManag if slice_logger.should_log_slice_message(logger): yield slice_logger.create_slice_log_message(_slice) records = self.read_records( - sync_mode=SyncMode.incremental, + sync_mode=sync_mode, # todo: change this interface to no longer rely on sync_mode for behavior stream_slice=_slice, stream_state=stream_state, cursor_field=cursor_field or None, @@ -160,20 +149,34 @@ def read_incremental( # type: ignore # ignoring typing for ConnectorStateManag ): record_data = record_data_or_message if isinstance(record_data_or_message, Mapping) else record_data_or_message.record stream_state = self.get_updated_state(stream_state, record_data) - checkpoint_interval = self.state_checkpoint_interval record_counter += 1 - if checkpoint_interval and record_counter % checkpoint_interval == 0: - yield self._checkpoint_state(stream_state, state_manager, per_stream_state_enabled) + + if sync_mode == SyncMode.incremental: + # Checkpoint intervals are a bit controversial, but see below comment about why we're gating it right now + checkpoint_interval = self.state_checkpoint_interval + if checkpoint_interval and record_counter % checkpoint_interval == 0: + airbyte_state_message = self._checkpoint_state(stream_state, state_manager) + yield airbyte_state_message if internal_config.is_limit_reached(record_counter): break - yield self._checkpoint_state(stream_state, state_manager, per_stream_state_enabled) + if sync_mode == SyncMode.incremental: + # Even though right now, only incremental streams running as incremental mode will emit periodic checkpoints. Rather than + # overhaul how refresh interacts with the platform, this positions the code so that once we want to start emitting + # periodic checkpoints in full refresh mode it can be done here + airbyte_state_message = self._checkpoint_state(stream_state, state_manager) + yield airbyte_state_message + + if not has_slices or sync_mode == SyncMode.full_refresh: + if sync_mode == SyncMode.full_refresh: + # We use a dummy state if there is no suitable value provided by full_refresh streams that do not have a valid cursor. 
+ # Incremental streams running full_refresh mode emit a meaningful state + stream_state = stream_state or {FULL_REFRESH_SENTINEL_STATE_KEY: True} - if not has_slices: - # Safety net to ensure we always emit at least one state message even if there are no slices - checkpoint = self._checkpoint_state(stream_state, state_manager, per_stream_state_enabled) - yield checkpoint + # We should always emit a final state message for full refresh sync or streams that do not have any slices + airbyte_state_message = self._checkpoint_state(stream_state, state_manager) + yield airbyte_state_message @abstractmethod def read_records( @@ -361,7 +364,6 @@ def _checkpoint_state( # type: ignore # ignoring typing for ConnectorStateMana self, stream_state: Mapping[str, Any], state_manager, - per_stream_state_enabled: bool, ) -> AirbyteMessage: # First attempt to retrieve the current state using the stream's state property. We receive an AttributeError if the state # property is not implemented by the stream instance and as a fallback, use the stream_state retrieved from the stream @@ -373,4 +375,4 @@ def _checkpoint_state( # type: ignore # ignoring typing for ConnectorStateMana except AttributeError: state_manager.update_state_for_stream(self.name, self.namespace, stream_state) - return state_manager.create_state_message(self.name, self.namespace, send_per_stream_state=per_stream_state_enabled) + return state_manager.create_state_message(self.name, self.namespace) diff --git a/airbyte-cdk/python/unit_tests/sources/concurrent_source/test_concurrent_source_adapter.py b/airbyte-cdk/python/unit_tests/sources/concurrent_source/test_concurrent_source_adapter.py index 96da2b383955..80cc7c4a9d9e 100644 --- a/airbyte-cdk/python/unit_tests/sources/concurrent_source/test_concurrent_source_adapter.py +++ b/airbyte-cdk/python/unit_tests/sources/concurrent_source/test_concurrent_source_adapter.py @@ -81,13 +81,14 @@ def test_concurrent_source_adapter(): def _mock_stream(name: str, data=[], available: bool = True): s = Mock() s.name = name + s.namespace = None s.as_airbyte_stream.return_value = AirbyteStream( name=name, json_schema={}, supported_sync_modes=[SyncMode.full_refresh], ) s.check_availability.return_value = (True, None) if available else (False, "not available") - s.read_full_refresh.return_value = iter(data) + s.read.return_value = iter(data) s.primary_key = None return s diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/concurrent_incremental_scenarios.py b/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/concurrent_incremental_scenarios.py index ccbcc1c7116a..3c93e046e87d 100644 --- a/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/concurrent_incremental_scenarios.py +++ b/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/concurrent_incremental_scenarios.py @@ -74,10 +74,8 @@ "stream": "stream1", }, { - "stream1": { - "history": {"some_old_file.csv": "2023-06-01T03:54:07.000000Z", "a.csv": "2023-06-05T03:54:07.000000Z"}, - "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_a.csv", - } + "history": {"some_old_file.csv": "2023-06-01T03:54:07.000000Z", "a.csv": "2023-06-05T03:54:07.000000Z"}, + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_a.csv", }, ] ) @@ -156,10 +154,8 @@ .set_expected_records( [ { - "stream1": { - "history": {"a.csv": "2023-06-05T03:54:07.000000Z"}, - "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_a.csv", - } + "history": {"a.csv": "2023-06-05T03:54:07.000000Z"}, + "_ab_source_file_last_modified": 
"2023-06-05T03:54:07.000000Z_a.csv", } ] ) @@ -256,10 +252,8 @@ "stream": "stream1", }, { - "stream1": { - "history": {"a.csv": "2023-06-05T03:54:07.000000Z"}, - "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_a.csv", - } + "history": {"a.csv": "2023-06-05T03:54:07.000000Z"}, + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_a.csv", }, ] ) @@ -368,10 +362,8 @@ "stream": "stream1", }, { - "stream1": { - "history": {"a.csv": "2023-06-05T03:54:07.000000Z"}, - "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_a.csv", - } + "history": {"a.csv": "2023-06-05T03:54:07.000000Z"}, + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_a.csv", }, ] ) @@ -491,10 +483,8 @@ "stream": "stream1", }, { - "stream1": { - "history": {"a.csv": "2023-06-05T03:54:07.000000Z", "b.csv": "2023-06-05T03:54:07.000000Z"}, - "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_b.csv", - } + "history": {"a.csv": "2023-06-05T03:54:07.000000Z", "b.csv": "2023-06-05T03:54:07.000000Z"}, + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_b.csv", }, ] ) @@ -583,13 +573,11 @@ "stream": "stream1", }, { - "stream1": { - "history": { - "recent_file.csv": "2023-07-15T23:59:59.000000Z", - "a.csv": "2023-06-05T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-07-15T23:59:59.000000Z_recent_file.csv", - } + "history": { + "recent_file.csv": "2023-07-15T23:59:59.000000Z", + "a.csv": "2023-06-05T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-07-15T23:59:59.000000Z_recent_file.csv", }, ] ) @@ -697,12 +685,10 @@ "stream": "stream1", }, { - "stream1": { - "history": { - "a.csv": "2023-06-04T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-04T03:54:07.000000Z_a.csv", - } + "history": { + "a.csv": "2023-06-04T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-06-04T03:54:07.000000Z_a.csv", }, { "data": { @@ -725,10 +711,8 @@ "stream": "stream1", }, { - "stream1": { - "history": {"a.csv": "2023-06-04T03:54:07.000000Z", "b.csv": "2023-06-05T03:54:07.000000Z"}, - "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_b.csv", - } + "history": {"a.csv": "2023-06-04T03:54:07.000000Z", "b.csv": "2023-06-05T03:54:07.000000Z"}, + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_b.csv", }, ] ) @@ -856,10 +840,8 @@ "stream": "stream1", }, { - "stream1": { - "history": {"a.csv": "2023-06-05T03:54:07.000000Z", "b.csv": "2023-06-05T03:54:07.000000Z"}, - "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_b.csv", - } + "history": {"a.csv": "2023-06-05T03:54:07.000000Z", "b.csv": "2023-06-05T03:54:07.000000Z"}, + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_b.csv", }, { "data": { @@ -882,14 +864,12 @@ "stream": "stream1", }, { - "stream1": { - "history": { - "a.csv": "2023-06-05T03:54:07.000000Z", - "b.csv": "2023-06-05T03:54:07.000000Z", - "c.csv": "2023-06-06T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-06T03:54:07.000000Z_c.csv", - } + "history": { + "a.csv": "2023-06-05T03:54:07.000000Z", + "b.csv": "2023-06-05T03:54:07.000000Z", + "c.csv": "2023-06-06T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-06-06T03:54:07.000000Z_c.csv", }, ] ) @@ -1001,10 +981,8 @@ "stream": "stream1", }, { - "stream1": { - "history": {"a.csv": "2023-06-05T03:54:07.000000Z", "b.csv": "2023-06-05T03:54:07.000000Z"}, - "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_b.csv", - } + "history": {"a.csv": "2023-06-05T03:54:07.000000Z", "b.csv": 
"2023-06-05T03:54:07.000000Z"}, + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_b.csv", }, { "data": { @@ -1027,14 +1005,12 @@ "stream": "stream1", }, { - "stream1": { - "history": { - "a.csv": "2023-06-05T03:54:07.000000Z", - "b.csv": "2023-06-05T03:54:07.000000Z", - "c.csv": "2023-06-06T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-06T03:54:07.000000Z_c.csv", - } + "history": { + "a.csv": "2023-06-05T03:54:07.000000Z", + "b.csv": "2023-06-05T03:54:07.000000Z", + "c.csv": "2023-06-06T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-06-06T03:54:07.000000Z_c.csv", }, ] ) @@ -1153,14 +1129,12 @@ # {"data": {"col1": "val11c", "col2": "val12c", "col3": "val13c"}, "stream": "stream1"}, # this file is skipped # {"data": {"col1": "val21c", "col2": "val22c", "col3": "val23c"}, "stream": "stream1"}, # this file is skipped { - "stream1": { - "history": { - "a.csv": "2023-06-05T03:54:07.000000Z", - "b.csv": "2023-06-05T03:54:07.000000Z", - "c.csv": "2023-06-06T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-06T03:54:07.000000Z_c.csv", - } + "history": { + "a.csv": "2023-06-05T03:54:07.000000Z", + "b.csv": "2023-06-05T03:54:07.000000Z", + "c.csv": "2023-06-06T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-06-06T03:54:07.000000Z_c.csv", }, ] ) @@ -1282,14 +1256,12 @@ # {"data": {"col1": "val11c", "col2": "val12c", "col3": "val13c"}, "stream": "stream1"}, # this file is skipped # {"data": {"col1": "val21c", "col2": "val22c", "col3": "val23c"}, "stream": "stream1"}, # this file is skipped { - "stream1": { - "history": { - "a.csv": "2023-06-05T03:54:07.000000Z", - "b.csv": "2023-06-05T03:54:07.000000Z", - "c.csv": "2023-06-06T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-06T03:54:07.000000Z_c.csv", - } + "history": { + "a.csv": "2023-06-05T03:54:07.000000Z", + "b.csv": "2023-06-05T03:54:07.000000Z", + "c.csv": "2023-06-06T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-06-06T03:54:07.000000Z_c.csv", }, ] ) @@ -1405,14 +1377,12 @@ "stream": "stream1", }, { - "stream1": { - "history": { - "very_old_file.csv": "2023-06-02T03:54:07.000000Z", - "old_file_same_timestamp_as_a.csv": "2023-06-06T03:54:07.000000Z", - "a.csv": "2023-06-06T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-06T03:54:07.000000Z_old_file_same_timestamp_as_a.csv", - } + "history": { + "very_old_file.csv": "2023-06-02T03:54:07.000000Z", + "old_file_same_timestamp_as_a.csv": "2023-06-06T03:54:07.000000Z", + "a.csv": "2023-06-06T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-06-06T03:54:07.000000Z_old_file_same_timestamp_as_a.csv", }, { "data": { @@ -1435,14 +1405,12 @@ "stream": "stream1", }, { - "stream1": { - "history": { - "old_file_same_timestamp_as_a.csv": "2023-06-06T03:54:07.000000Z", - "a.csv": "2023-06-06T03:54:07.000000Z", - "b.csv": "2023-06-07T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-07T03:54:07.000000Z_b.csv", - } + "history": { + "old_file_same_timestamp_as_a.csv": "2023-06-06T03:54:07.000000Z", + "a.csv": "2023-06-06T03:54:07.000000Z", + "b.csv": "2023-06-07T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-06-07T03:54:07.000000Z_b.csv", }, { "data": { @@ -1465,14 +1433,12 @@ "stream": "stream1", }, { - "stream1": { - "history": { - "old_file_same_timestamp_as_a.csv": "2023-06-06T03:54:07.000000Z", - "b.csv": "2023-06-07T03:54:07.000000Z", - "c.csv": "2023-06-10T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": 
"2023-06-10T03:54:07.000000Z_c.csv", - } + "history": { + "old_file_same_timestamp_as_a.csv": "2023-06-06T03:54:07.000000Z", + "b.csv": "2023-06-07T03:54:07.000000Z", + "c.csv": "2023-06-10T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-06-10T03:54:07.000000Z_c.csv", }, ] ) @@ -1592,14 +1558,12 @@ "stream": "stream1", }, { - "stream1": { - "history": { - "very_old_file.csv": "2023-06-02T03:54:07.000000Z", - "old_file_same_timestamp_as_a.csv": "2023-06-06T03:54:07.000000Z", - "a.csv": "2023-06-06T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-06T03:54:07.000000Z_old_file_same_timestamp_as_a.csv", - } + "history": { + "very_old_file.csv": "2023-06-02T03:54:07.000000Z", + "old_file_same_timestamp_as_a.csv": "2023-06-06T03:54:07.000000Z", + "a.csv": "2023-06-06T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-06-06T03:54:07.000000Z_old_file_same_timestamp_as_a.csv", }, { "data": { @@ -1622,14 +1586,12 @@ "stream": "stream1", }, { - "stream1": { - "history": { - "old_file_same_timestamp_as_a.csv": "2023-06-06T03:54:07.000000Z", - "a.csv": "2023-06-06T03:54:07.000000Z", - "b.csv": "2023-06-07T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-07T03:54:07.000000Z_b.csv", - } + "history": { + "old_file_same_timestamp_as_a.csv": "2023-06-06T03:54:07.000000Z", + "a.csv": "2023-06-06T03:54:07.000000Z", + "b.csv": "2023-06-07T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-06-07T03:54:07.000000Z_b.csv", }, { "data": { @@ -1652,14 +1614,12 @@ "stream": "stream1", }, { - "stream1": { - "history": { - "old_file_same_timestamp_as_a.csv": "2023-06-06T03:54:07.000000Z", - "b.csv": "2023-06-07T03:54:07.000000Z", - "c.csv": "2023-06-10T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-10T03:54:07.000000Z_c.csv", - } + "history": { + "old_file_same_timestamp_as_a.csv": "2023-06-06T03:54:07.000000Z", + "b.csv": "2023-06-07T03:54:07.000000Z", + "c.csv": "2023-06-10T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-06-10T03:54:07.000000Z_c.csv", }, ] ) @@ -1848,14 +1808,12 @@ "stream": "stream1", }, { - "stream1": { - "history": { - "b.csv": "2023-06-05T03:54:07.000000Z", - "c.csv": "2023-06-05T03:54:07.000000Z", - "d.csv": "2023-06-05T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_d.csv", - } + "history": { + "b.csv": "2023-06-05T03:54:07.000000Z", + "c.csv": "2023-06-05T03:54:07.000000Z", + "d.csv": "2023-06-05T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_d.csv", }, ] ) @@ -2032,14 +1990,12 @@ "stream": "stream1", }, { - "stream1": { - "history": { - "b.csv": "2023-06-05T03:54:07.000000Z", - "c.csv": "2023-06-05T03:54:07.000000Z", - "d.csv": "2023-06-05T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_d.csv", - } + "history": { + "b.csv": "2023-06-05T03:54:07.000000Z", + "c.csv": "2023-06-05T03:54:07.000000Z", + "d.csv": "2023-06-05T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_d.csv", }, ] ) @@ -2138,14 +2094,12 @@ .set_expected_records( [ { - "stream1": { - "history": { - "b.csv": "2023-06-05T03:54:07.000000Z", - "c.csv": "2023-06-05T03:54:07.000000Z", - "d.csv": "2023-06-05T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_d.csv", - } + "history": { + "b.csv": "2023-06-05T03:54:07.000000Z", + "c.csv": "2023-06-05T03:54:07.000000Z", + "d.csv": "2023-06-05T03:54:07.000000Z", + }, + 
"_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_d.csv", } ] ) @@ -2256,14 +2210,12 @@ .set_expected_records( [ { - "stream1": { "history": { "b.csv": "2023-06-05T03:54:07.000000Z", "c.csv": "2023-06-05T03:54:07.000000Z", "d.csv": "2023-06-05T03:54:07.000000Z", }, "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_d.csv", - } } ] ) @@ -2397,14 +2349,12 @@ "stream": "stream1", }, { - "stream1": { - "history": { - "c.csv": "2023-06-07T03:54:07.000000Z", - "d.csv": "2023-06-08T03:54:07.000000Z", - "e.csv": "2023-06-08T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-08T03:54:07.000000Z_e.csv", - } + "history": { + "c.csv": "2023-06-07T03:54:07.000000Z", + "d.csv": "2023-06-08T03:54:07.000000Z", + "e.csv": "2023-06-08T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-06-08T03:54:07.000000Z_e.csv", }, ] ) @@ -2537,14 +2487,12 @@ "stream": "stream1", }, { - "stream1": { - "history": { - "c.csv": "2023-06-07T03:54:07.000000Z", - "d.csv": "2023-06-08T03:54:07.000000Z", - "e.csv": "2023-06-08T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-08T03:54:07.000000Z_e.csv", - } + "history": { + "c.csv": "2023-06-07T03:54:07.000000Z", + "d.csv": "2023-06-08T03:54:07.000000Z", + "e.csv": "2023-06-08T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-06-08T03:54:07.000000Z_e.csv", }, ] ) @@ -2675,14 +2623,12 @@ "stream": "stream1", }, { - "stream1": { - "history": { - "a.csv": "2023-06-05T03:54:07.000000Z", - "c.csv": "2023-06-07T03:54:07.000000Z", - "d.csv": "2023-06-08T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-08T03:54:07.000000Z_d.csv", - } + "history": { + "a.csv": "2023-06-05T03:54:07.000000Z", + "c.csv": "2023-06-07T03:54:07.000000Z", + "d.csv": "2023-06-08T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-06-08T03:54:07.000000Z_d.csv", }, { "data": { @@ -2705,14 +2651,12 @@ "stream": "stream1", }, { - "stream1": { - "history": { - "b.csv": "2023-06-06T03:54:07.000000Z", - "c.csv": "2023-06-07T03:54:07.000000Z", - "d.csv": "2023-06-08T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-08T03:54:07.000000Z_d.csv", - } + "history": { + "b.csv": "2023-06-06T03:54:07.000000Z", + "c.csv": "2023-06-07T03:54:07.000000Z", + "d.csv": "2023-06-08T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-06-08T03:54:07.000000Z_d.csv", }, ] ) @@ -2843,14 +2787,12 @@ "stream": "stream1", }, { - "stream1": { - "history": { - "a.csv": "2023-06-05T03:54:07.000000Z", - "c.csv": "2023-06-07T03:54:07.000000Z", - "d.csv": "2023-06-08T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-08T03:54:07.000000Z_d.csv", - } + "history": { + "a.csv": "2023-06-05T03:54:07.000000Z", + "c.csv": "2023-06-07T03:54:07.000000Z", + "d.csv": "2023-06-08T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-06-08T03:54:07.000000Z_d.csv", }, { "data": { @@ -2873,14 +2815,12 @@ "stream": "stream1", }, { - "stream1": { - "history": { - "b.csv": "2023-06-06T03:54:07.000000Z", - "c.csv": "2023-06-07T03:54:07.000000Z", - "d.csv": "2023-06-08T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-08T03:54:07.000000Z_d.csv", - } + "history": { + "b.csv": "2023-06-06T03:54:07.000000Z", + "c.csv": "2023-06-07T03:54:07.000000Z", + "d.csv": "2023-06-08T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-06-08T03:54:07.000000Z_d.csv", }, ] ) diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/incremental_scenarios.py 
b/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/incremental_scenarios.py index 3b9785e11bfe..df3f76a497d5 100644 --- a/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/incremental_scenarios.py +++ b/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/incremental_scenarios.py @@ -73,10 +73,8 @@ "stream": "stream1", }, { - "stream1": { - "history": {"some_old_file.csv": "2023-06-01T03:54:07.000000Z", "a.csv": "2023-06-05T03:54:07.000000Z"}, - "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_a.csv", - } + "history": {"some_old_file.csv": "2023-06-01T03:54:07.000000Z", "a.csv": "2023-06-05T03:54:07.000000Z"}, + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_a.csv", }, ] ) @@ -154,10 +152,8 @@ .set_expected_records( [ { - "stream1": { - "history": {"a.csv": "2023-06-05T03:54:07.000000Z"}, - "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_a.csv", - } + "history": {"a.csv": "2023-06-05T03:54:07.000000Z"}, + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_a.csv", } ] ) @@ -253,10 +249,8 @@ "stream": "stream1", }, { - "stream1": { - "history": {"a.csv": "2023-06-05T03:54:07.000000Z"}, - "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_a.csv", - } + "history": {"a.csv": "2023-06-05T03:54:07.000000Z"}, + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_a.csv", }, ] ) @@ -365,10 +359,8 @@ "stream": "stream1", }, { - "stream1": { - "history": {"a.csv": "2023-06-05T03:54:07.000000Z"}, - "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_a.csv", - } + "history": {"a.csv": "2023-06-05T03:54:07.000000Z"}, + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_a.csv", }, ] ) @@ -488,10 +480,8 @@ "stream": "stream1", }, { - "stream1": { - "history": {"a.csv": "2023-06-05T03:54:07.000000Z", "b.csv": "2023-06-05T03:54:07.000000Z"}, - "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_b.csv", - } + "history": {"a.csv": "2023-06-05T03:54:07.000000Z", "b.csv": "2023-06-05T03:54:07.000000Z"}, + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_b.csv", }, ] ) @@ -580,13 +570,11 @@ "stream": "stream1", }, { - "stream1": { - "history": { - "recent_file.csv": "2023-07-15T23:59:59.000000Z", - "a.csv": "2023-06-05T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-07-15T23:59:59.000000Z_recent_file.csv", - } + "history": { + "recent_file.csv": "2023-07-15T23:59:59.000000Z", + "a.csv": "2023-06-05T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-07-15T23:59:59.000000Z_recent_file.csv", }, ] ) @@ -693,12 +681,10 @@ "stream": "stream1", }, { - "stream1": { - "history": { - "a.csv": "2023-06-04T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-04T03:54:07.000000Z_a.csv", - } + "history": { + "a.csv": "2023-06-04T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-06-04T03:54:07.000000Z_a.csv", }, { "data": { @@ -721,10 +707,8 @@ "stream": "stream1", }, { - "stream1": { - "history": {"a.csv": "2023-06-04T03:54:07.000000Z", "b.csv": "2023-06-05T03:54:07.000000Z"}, - "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_b.csv", - } + "history": {"a.csv": "2023-06-04T03:54:07.000000Z", "b.csv": "2023-06-05T03:54:07.000000Z"}, + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_b.csv", }, ] ) @@ -852,10 +836,8 @@ "stream": "stream1", }, { - "stream1": { - "history": {"a.csv": "2023-06-05T03:54:07.000000Z", "b.csv": "2023-06-05T03:54:07.000000Z"}, - "_ab_source_file_last_modified": 
"2023-06-05T03:54:07.000000Z_b.csv", - } + "history": {"a.csv": "2023-06-05T03:54:07.000000Z", "b.csv": "2023-06-05T03:54:07.000000Z"}, + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_b.csv", }, { "data": { @@ -878,14 +860,12 @@ "stream": "stream1", }, { - "stream1": { - "history": { - "a.csv": "2023-06-05T03:54:07.000000Z", - "b.csv": "2023-06-05T03:54:07.000000Z", - "c.csv": "2023-06-06T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-06T03:54:07.000000Z_c.csv", - } + "history": { + "a.csv": "2023-06-05T03:54:07.000000Z", + "b.csv": "2023-06-05T03:54:07.000000Z", + "c.csv": "2023-06-06T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-06-06T03:54:07.000000Z_c.csv", }, ] ) @@ -997,10 +977,8 @@ "stream": "stream1", }, { - "stream1": { - "history": {"a.csv": "2023-06-05T03:54:07.000000Z", "b.csv": "2023-06-05T03:54:07.000000Z"}, - "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_b.csv", - } + "history": {"a.csv": "2023-06-05T03:54:07.000000Z", "b.csv": "2023-06-05T03:54:07.000000Z"}, + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_b.csv", }, { "data": { @@ -1023,14 +1001,12 @@ "stream": "stream1", }, { - "stream1": { - "history": { - "a.csv": "2023-06-05T03:54:07.000000Z", - "b.csv": "2023-06-05T03:54:07.000000Z", - "c.csv": "2023-06-06T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-06T03:54:07.000000Z_c.csv", - } + "history": { + "a.csv": "2023-06-05T03:54:07.000000Z", + "b.csv": "2023-06-05T03:54:07.000000Z", + "c.csv": "2023-06-06T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-06-06T03:54:07.000000Z_c.csv", }, ] ) @@ -1151,14 +1127,12 @@ # {"data": {"col1": "val11c", "col2": "val12c", "col3": "val13c"}, "stream": "stream1"}, # this file is skipped # {"data": {"col1": "val21c", "col2": "val22c", "col3": "val23c"}, "stream": "stream1"}, # this file is skipped { - "stream1": { - "history": { - "a.csv": "2023-06-05T03:54:07.000000Z", - "b.csv": "2023-06-05T03:54:07.000000Z", - "c.csv": "2023-06-06T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-06T03:54:07.000000Z_c.csv", - } + "history": { + "a.csv": "2023-06-05T03:54:07.000000Z", + "b.csv": "2023-06-05T03:54:07.000000Z", + "c.csv": "2023-06-06T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-06-06T03:54:07.000000Z_c.csv", }, ] ) @@ -1273,14 +1247,12 @@ "stream": "stream1", }, { - "stream1": { - "history": { - "very_old_file.csv": "2023-06-02T03:54:07.000000Z", - "old_file_same_timestamp_as_a.csv": "2023-06-06T03:54:07.000000Z", - "a.csv": "2023-06-06T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-06T03:54:07.000000Z_old_file_same_timestamp_as_a.csv", - } + "history": { + "very_old_file.csv": "2023-06-02T03:54:07.000000Z", + "old_file_same_timestamp_as_a.csv": "2023-06-06T03:54:07.000000Z", + "a.csv": "2023-06-06T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-06-06T03:54:07.000000Z_old_file_same_timestamp_as_a.csv", }, { "data": { @@ -1303,14 +1275,12 @@ "stream": "stream1", }, { - "stream1": { - "history": { - "old_file_same_timestamp_as_a.csv": "2023-06-06T03:54:07.000000Z", - "a.csv": "2023-06-06T03:54:07.000000Z", - "b.csv": "2023-06-07T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-07T03:54:07.000000Z_b.csv", - } + "history": { + "old_file_same_timestamp_as_a.csv": "2023-06-06T03:54:07.000000Z", + "a.csv": "2023-06-06T03:54:07.000000Z", + "b.csv": "2023-06-07T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": 
"2023-06-07T03:54:07.000000Z_b.csv", }, { "data": { @@ -1333,14 +1303,12 @@ "stream": "stream1", }, { - "stream1": { - "history": { - "old_file_same_timestamp_as_a.csv": "2023-06-06T03:54:07.000000Z", - "b.csv": "2023-06-07T03:54:07.000000Z", - "c.csv": "2023-06-10T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-10T03:54:07.000000Z_c.csv", - } + "history": { + "old_file_same_timestamp_as_a.csv": "2023-06-06T03:54:07.000000Z", + "b.csv": "2023-06-07T03:54:07.000000Z", + "c.csv": "2023-06-10T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-06-10T03:54:07.000000Z_c.csv", }, ] ) @@ -1528,14 +1496,12 @@ "stream": "stream1", }, { - "stream1": { - "history": { - "b.csv": "2023-06-05T03:54:07.000000Z", - "c.csv": "2023-06-05T03:54:07.000000Z", - "d.csv": "2023-06-05T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_d.csv", - } + "history": { + "b.csv": "2023-06-05T03:54:07.000000Z", + "c.csv": "2023-06-05T03:54:07.000000Z", + "d.csv": "2023-06-05T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_d.csv", }, ] ) @@ -1634,14 +1600,12 @@ .set_expected_records( [ { - "stream1": { - "history": { - "b.csv": "2023-06-05T03:54:07.000000Z", - "c.csv": "2023-06-05T03:54:07.000000Z", - "d.csv": "2023-06-05T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_d.csv", - } + "history": { + "b.csv": "2023-06-05T03:54:07.000000Z", + "c.csv": "2023-06-05T03:54:07.000000Z", + "d.csv": "2023-06-05T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z_d.csv", } ] ) @@ -1773,14 +1737,12 @@ "stream": "stream1", }, { - "stream1": { - "history": { - "c.csv": "2023-06-07T03:54:07.000000Z", - "d.csv": "2023-06-08T03:54:07.000000Z", - "e.csv": "2023-06-08T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-08T03:54:07.000000Z_e.csv", - } + "history": { + "c.csv": "2023-06-07T03:54:07.000000Z", + "d.csv": "2023-06-08T03:54:07.000000Z", + "e.csv": "2023-06-08T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-06-08T03:54:07.000000Z_e.csv", }, ] ) @@ -1908,14 +1870,12 @@ "stream": "stream1", }, { - "stream1": { - "history": { - "a.csv": "2023-06-05T03:54:07.000000Z", - "c.csv": "2023-06-07T03:54:07.000000Z", - "d.csv": "2023-06-08T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-08T03:54:07.000000Z_d.csv", - } + "history": { + "a.csv": "2023-06-05T03:54:07.000000Z", + "c.csv": "2023-06-07T03:54:07.000000Z", + "d.csv": "2023-06-08T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-06-08T03:54:07.000000Z_d.csv", }, { "data": { @@ -1938,14 +1898,12 @@ "stream": "stream1", }, { - "stream1": { - "history": { - "b.csv": "2023-06-06T03:54:07.000000Z", - "c.csv": "2023-06-07T03:54:07.000000Z", - "d.csv": "2023-06-08T03:54:07.000000Z", - }, - "_ab_source_file_last_modified": "2023-06-08T03:54:07.000000Z_d.csv", - } + "history": { + "b.csv": "2023-06-06T03:54:07.000000Z", + "c.csv": "2023-06-07T03:54:07.000000Z", + "d.csv": "2023-06-08T03:54:07.000000Z", + }, + "_ab_source_file_last_modified": "2023-06-08T03:54:07.000000Z_d.csv", }, ] ) diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/stream/concurrent/test_adapters.py b/airbyte-cdk/python/unit_tests/sources/file_based/stream/concurrent/test_adapters.py index 2d93e73ced56..01ac6bcb78d3 100644 --- a/airbyte-cdk/python/unit_tests/sources/file_based/stream/concurrent/test_adapters.py +++ 
b/airbyte-cdk/python/unit_tests/sources/file_based/stream/concurrent/test_adapters.py @@ -226,25 +226,14 @@ def test_full_refresh(self): assert actual_stream_data == expected_stream_data - def test_read_records_full_refresh(self): + def test_read_records(self): expected_stream_data = [{"data": 1}, {"data": 2}] records = [Record(data, "stream") for data in expected_stream_data] partition = Mock() partition.read.return_value = records self._abstract_stream.generate_partitions.return_value = [partition] - actual_stream_data = list(self._facade.read_full_refresh(None, None, None)) - - assert actual_stream_data == expected_stream_data - - def test_read_records_incremental(self): - expected_stream_data = [{"data": 1}, {"data": 2}] - records = [Record(data, "stream") for data in expected_stream_data] - partition = Mock() - partition.read.return_value = records - self._abstract_stream.generate_partitions.return_value = [partition] - - actual_stream_data = list(self._facade.read_incremental(None, None, None, None, None, None, None)) + actual_stream_data = list(self._facade.read(None, None, None, None, None, None)) assert actual_stream_data == expected_stream_data diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/stream/concurrent/test_file_based_concurrent_cursor.py b/airbyte-cdk/python/unit_tests/sources/file_based/stream/concurrent/test_file_based_concurrent_cursor.py index 027038b2ae98..1b645f903dc2 100644 --- a/airbyte-cdk/python/unit_tests/sources/file_based/stream/concurrent/test_file_based_concurrent_cursor.py +++ b/airbyte-cdk/python/unit_tests/sources/file_based/stream/concurrent/test_file_based_concurrent_cursor.py @@ -182,7 +182,7 @@ def test_add_file( uri: RemoteFile(uri=uri, last_modified=datetime.strptime(timestamp, DATE_TIME_FORMAT)) for uri, timestamp in expected_pending_files } assert ( - mock_message_repository.emit_message.call_args_list[0].args[0].state.data["test"]["_ab_source_file_last_modified"] + mock_message_repository.emit_message.call_args_list[0].args[0].state.stream.stream_state._ab_source_file_last_modified == expected_cursor_value ) @@ -233,7 +233,7 @@ def test_add_file_invalid( } assert mock_message_repository.emit_message.call_args_list[0].args[0].log.level.value == "WARN" assert ( - mock_message_repository.emit_message.call_args_list[1].args[0].state.data["test"]["_ab_source_file_last_modified"] + mock_message_repository.emit_message.call_args_list[1].args[0].state.stream.stream_state._ab_source_file_last_modified == expected_cursor_value ) diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/test_scenarios.py b/airbyte-cdk/python/unit_tests/sources/file_based/test_scenarios.py index 47df14a09403..5785f13a65ef 100644 --- a/airbyte-cdk/python/unit_tests/sources/file_based/test_scenarios.py +++ b/airbyte-cdk/python/unit_tests/sources/file_based/test_scenarios.py @@ -109,10 +109,10 @@ def _verify_read_output(output: EntrypointOutput, scenario: TestScenario[Abstrac if hasattr(scenario.source, "cursor_cls") and issubclass(scenario.source.cursor_cls, AbstractConcurrentFileBasedCursor): # Only check the last state emitted because we don't know the order the others will be in. # This may be needed for non-file-based concurrent scenarios too. 
- assert states[-1].state.data == expected_states[-1] + assert states[-1].state.stream.stream_state.dict() == expected_states[-1] else: for actual, expected in zip(states, expected_states): # states should be emitted in sorted order - assert actual.state.data == expected + assert actual.state.stream.stream_state.dict() == expected if scenario.expected_logs: read_logs = scenario.expected_logs.get("read") diff --git a/airbyte-cdk/python/unit_tests/sources/mock_server_tests/mock_source_fixture.py b/airbyte-cdk/python/unit_tests/sources/mock_server_tests/mock_source_fixture.py new file mode 100644 index 000000000000..462196bbbc35 --- /dev/null +++ b/airbyte-cdk/python/unit_tests/sources/mock_server_tests/mock_source_fixture.py @@ -0,0 +1,325 @@ +# +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. +# + +import logging +from abc import ABC +from datetime import datetime, timezone +from typing import Any, Dict, Iterable, List, Mapping, MutableMapping, Optional, Tuple + +import pendulum +import requests +from airbyte_cdk.sources import AbstractSource, Source +from airbyte_cdk.sources.streams import IncrementalMixin, Stream +from airbyte_cdk.sources.streams.core import StreamData +from airbyte_cdk.sources.streams.http import HttpStream +from airbyte_cdk.sources.streams.http.availability_strategy import HttpAvailabilityStrategy +from airbyte_protocol.models import ConnectorSpecification, SyncMode +from requests import HTTPError + + +class FixtureAvailabilityStrategy(HttpAvailabilityStrategy): + """ + Inherit from HttpAvailabilityStrategy with slight modification to 403 error message. + """ + + def reasons_for_unavailable_status_codes(self, stream: Stream, logger: logging.Logger, source: Source, error: HTTPError) -> Dict[int, str]: + reasons_for_codes: Dict[int, str] = { + requests.codes.FORBIDDEN: "This is likely due to insufficient permissions for your Notion integration. 
" + "Please make sure your integration has read access for the resources you are trying to sync" + } + return reasons_for_codes + + +class IntegrationStream(HttpStream, ABC): + + url_base = "https://api.airbyte-test.com/v1/" + primary_key = "id" + page_size = 100 + raise_on_http_errors = True + current_page = 0 + + def __init__(self, config: Mapping[str, Any], **kwargs): + super().__init__(**kwargs) + self.start_date = config.get("start_date") + + @property + def availability_strategy(self) -> HttpAvailabilityStrategy: + return FixtureAvailabilityStrategy() + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + data = response.json().get("data", []) + yield from data + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + has_more = response.json().get("has_more") + if has_more: + self.current_page += 1 + return {"next_page": self.current_page} + + +class IncrementalIntegrationStream(IntegrationStream, IncrementalMixin, ABC): + cursor_field = "created_at" + _state = {} + + @property + def state(self) -> MutableMapping[str, Any]: + return self._state + + @state.setter + def state(self, value: MutableMapping[str, Any]) -> None: + self._state = value + + def read_records( + self, + sync_mode: SyncMode, + cursor_field: Optional[List[str]] = None, + stream_slice: Optional[Mapping[str, Any]] = None, + stream_state: Optional[Mapping[str, Any]] = None, + ) -> Iterable[StreamData]: + for record in super().read_records(sync_mode, cursor_field, stream_slice, stream_state): + self.state = {self.cursor_field: record.get(self.cursor_field)} + yield record + + +class Users(IntegrationStream): + def path(self, **kwargs) -> str: + return "users" + + def get_json_schema(self) -> Mapping[str, Any]: + return { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "additionalProperties": True, + "properties": { + "type": { + "type": "string" + }, + "id": { + "type": "string" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "first_name": { + "type": "string" + }, + "last_name": { + "type": "string" + } + } + } + + +class Planets(IncrementalIntegrationStream): + + def __init__(self, **kwargs): + super().__init__(**kwargs) + self._state: MutableMapping[str, Any] = {} + + def path(self, **kwargs) -> str: + return "planets" + + def get_json_schema(self) -> Mapping[str, Any]: + return { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "additionalProperties": True, + "properties": { + "type": { + "type": "string" + }, + "id": { + "type": "string" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "name": { + "type": "string" + } + } + } + + def request_params( + self, + stream_state: Optional[Mapping[str, Any]], + stream_slice: Optional[Mapping[str, Any]] = None, + next_page_token: Optional[Mapping[str, Any]] = None, + ) -> MutableMapping[str, Any]: + return { + "start_date": stream_slice.get("start_date"), + "end_date": stream_slice.get("end_date") + } + + def stream_slices( + self, *, sync_mode: SyncMode, cursor_field: Optional[List[str]] = None, stream_state: Optional[Mapping[str, Any]] = None + ) -> Iterable[Optional[Mapping[str, Any]]]: + start_date = pendulum.parse(self.start_date) + + if stream_state: + start_date = pendulum.parse(stream_state.get(self.cursor_field)) + + date_slices = [] + + end_date = datetime.now(timezone.utc).replace(microsecond=0) + while start_date < end_date: + end_date_slice = min(start_date.add(days=7), end_date) 
+ + date_slice = {"start_date": start_date.strftime("%Y-%m-%dT%H:%M:%SZ"), "end_date": end_date_slice.strftime("%Y-%m-%dT%H:%M:%SZ")} + + date_slices.append(date_slice) + start_date = end_date_slice + + return date_slices + + +class Legacies(IntegrationStream): + """ + Incremental stream that uses the legacy method get_updated_state() to manage stream state. New connectors use the state + property and setter methods. + """ + + cursor_field = "created_at" + + def path(self, **kwargs) -> str: + return "legacies" + + def get_json_schema(self) -> Mapping[str, Any]: + return { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "additionalProperties": True, + "properties": { + "type": { + "type": "string" + }, + "id": { + "type": "string" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "quote": { + "type": "string" + } + } + } + + def get_updated_state( + self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any] + ) -> MutableMapping[str, Any]: + latest_state = latest_record.get(self.cursor_field) + current_state = current_stream_state.get(self.cursor_field) or latest_state + if current_state: + return {self.cursor_field: max(latest_state, current_state)} + return {} + + def read_records( + self, + sync_mode: SyncMode, + cursor_field: Optional[List[str]] = None, + stream_slice: Optional[Mapping[str, Any]] = None, + stream_state: Optional[Mapping[str, Any]] = None, + ) -> Iterable[StreamData]: + yield from super().read_records(sync_mode, cursor_field, stream_slice, stream_state) + + def request_params( + self, + stream_state: Optional[Mapping[str, Any]], + stream_slice: Optional[Mapping[str, Any]] = None, + next_page_token: Optional[Mapping[str, Any]] = None, + ) -> MutableMapping[str, Any]: + return { + "start_date": stream_slice.get("start_date"), + "end_date": stream_slice.get("end_date") + } + + def stream_slices( + self, *, sync_mode: SyncMode, cursor_field: Optional[List[str]] = None, stream_state: Optional[Mapping[str, Any]] = None + ) -> Iterable[Optional[Mapping[str, Any]]]: + start_date = pendulum.parse(self.start_date) + + if stream_state: + start_date = pendulum.parse(stream_state.get(self.cursor_field)) + + date_slices = [] + + end_date = datetime.now(timezone.utc).replace(microsecond=0) + while start_date < end_date: + end_date_slice = min(start_date.add(days=7), end_date) + + date_slice = {"start_date": start_date.strftime("%Y-%m-%dT%H:%M:%SZ"), "end_date": end_date_slice.strftime("%Y-%m-%dT%H:%M:%SZ")} + + date_slices.append(date_slice) + start_date = end_date_slice + + return date_slices + + +class Dividers(IntegrationStream): + def path(self, **kwargs) -> str: + return "dividers" + + def get_json_schema(self) -> Mapping[str, Any]: + return { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "additionalProperties": True, + "properties": { + "type": { + "type": "string" + }, + "id": { + "type": "string" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "divide_category": { + "type": "string" + } + } + } + + def stream_slices( + self, *, sync_mode: SyncMode, cursor_field: Optional[List[str]] = None, stream_state: Optional[Mapping[str, Any]] = None + ) -> Iterable[Optional[Mapping[str, Any]]]: + return [{"divide_category": "dukes"}, {"divide_category": "mentats"}] + + def request_params( + self, + stream_state: Optional[Mapping[str, Any]], + stream_slice: Optional[Mapping[str, Any]] = None, + next_page_token: Optional[Mapping[str, Any]] = None, + ) -> 
MutableMapping[str, Any]: + return {"category": stream_slice.get("divide_category")} + + class SourceFixture(AbstractSource): + def check_connection(self, logger: logging.Logger, config: Mapping[str, Any]) -> Tuple[bool, Any]: + return True, None + + def streams(self, config: Mapping[str, Any]) -> List[Stream]: + return [Dividers(config=config), Legacies(config=config), Planets(config=config), Users(config=config)] + + def spec(self, logger: logging.Logger) -> ConnectorSpecification: + return ConnectorSpecification( + connectionSpecification={ + "properties": { + "start_date": { + "title": "Start Date", + "description": "UTC date and time in the format YYYY-MM-DDTHH:MM:SSZ. During incremental sync, any data generated before this date will not be replicated. If left blank, the start date will be set to 2 years before the present date.", + "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$", + "pattern_descriptor": "YYYY-MM-DDTHH:MM:SSZ", + "examples": ["2020-11-16T00:00:00Z"], + "type": "string", + "format": "date-time" + } + } + } + ) diff --git a/airbyte-cdk/python/unit_tests/sources/mock_server_tests/test_mock_server_abstract_source.py b/airbyte-cdk/python/unit_tests/sources/mock_server_tests/test_mock_server_abstract_source.py new file mode 100644 index 000000000000..4698f7ba8dad --- /dev/null +++ b/airbyte-cdk/python/unit_tests/sources/mock_server_tests/test_mock_server_abstract_source.py @@ -0,0 +1,417 @@ +# +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. +# + +from datetime import datetime, timedelta, timezone +from typing import List, Optional +from unittest import TestCase + +import freezegun +import pytest +from airbyte_cdk.models import AirbyteMessage, ConfiguredAirbyteCatalog, SyncMode, Type +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import read +from airbyte_cdk.test.mock_http import HttpMocker, HttpRequest +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + RecordBuilder, + create_record_builder, + create_response_builder, +) +from airbyte_protocol.models import AirbyteStreamStatus +from unit_tests.sources.mock_server_tests.mock_source_fixture import SourceFixture + +_NOW = datetime.now(timezone.utc) + + +class RequestBuilder: + @classmethod + def dividers_endpoint(cls) -> "RequestBuilder": + return cls("dividers") + + @classmethod + def legacies_endpoint(cls) -> "RequestBuilder": + return cls("legacies") + + @classmethod + def planets_endpoint(cls) -> "RequestBuilder": + return cls("planets") + + @classmethod + def users_endpoint(cls) -> "RequestBuilder": + return cls("users") + + def __init__(self, resource: str) -> None: + self._resource = resource + self._start_date: Optional[datetime] = None + self._end_date: Optional[datetime] = None + self._category: Optional[str] = None + + def with_start_date(self, start_date: datetime) -> "RequestBuilder": + self._start_date = start_date + return self + + def with_end_date(self, end_date: datetime) -> "RequestBuilder": + self._end_date = end_date + return self + + def with_category(self, category: str) -> "RequestBuilder": + self._category = category + return self + + def build(self) -> HttpRequest: + query_params = {} + if self._start_date: + query_params["start_date"] = self._start_date.strftime("%Y-%m-%dT%H:%M:%SZ") + if self._end_date: + query_params["end_date"] = self._end_date.strftime("%Y-%m-%dT%H:%M:%SZ") + if self._category: + query_params["category"] = self._category + + return 
HttpRequest( + url=f"https://api.airbyte-test.com/v1/{self._resource}", + query_params=query_params, + ) + + +def _create_catalog(names_and_sync_modes: List[tuple[str, SyncMode]]) -> ConfiguredAirbyteCatalog: + catalog_builder = CatalogBuilder() + for stream_name, sync_mode in names_and_sync_modes: + catalog_builder.with_stream(name=stream_name, sync_mode=sync_mode) + return catalog_builder.build() + + +def _create_dividers_request() -> RequestBuilder: + return RequestBuilder.dividers_endpoint() + + +def _create_legacies_request() -> RequestBuilder: + return RequestBuilder.legacies_endpoint() + + +def _create_planets_request() -> RequestBuilder: + return RequestBuilder.planets_endpoint() + + +def _create_users_request() -> RequestBuilder: + return RequestBuilder.users_endpoint() + + +RESPONSE_TEMPLATE = { + "object": "list", + "has_more": False, + "data": [ + { + "id": "123", + "created_at": "2024-01-01T07:04:28.000Z" + } + ] +} + +USER_TEMPLATE = { + "object": "list", + "has_more": False, + "data": [ + { + "id": "123", + "created_at": "2024-01-01T07:04:28", + "first_name": "Paul", + "last_name": "Atreides", + } + ] +} + +PLANET_TEMPLATE = { + "object": "list", + "has_more": False, + "data": [ + { + "id": "456", + "created_at": "2024-01-01T07:04:28.000Z", + "name": "Giedi Prime", + } + ] +} + +LEGACY_TEMPLATE = { + "object": "list", + "has_more": False, + "data": [ + { + "id": "l3g4cy", + "created_at": "2024-02-01T07:04:28.000Z", + "quote": "What do you leave behind?", + } + ] +} + +DIVIDER_TEMPLATE = { + "object": "list", + "has_more": False, + "data": [ + { + "id": "l3t0", + "created_at": "2024-02-01T07:04:28.000Z", + "divide_category": "dukes", + } + ] +} + + +RESOURCE_TO_TEMPLATE = { + "dividers": DIVIDER_TEMPLATE, + "legacies": LEGACY_TEMPLATE, + "planets": PLANET_TEMPLATE, + "users": USER_TEMPLATE, +} + + +def _create_response() -> HttpResponseBuilder: + return create_response_builder( + response_template=RESPONSE_TEMPLATE, + records_path=FieldPath("data"), + ) + + +def _create_record(resource: str) -> RecordBuilder: + return create_record_builder( + response_template=RESOURCE_TO_TEMPLATE.get(resource), + records_path=FieldPath("data"), + record_id_path=FieldPath("id"), + record_cursor_path=FieldPath("created_at"), + ) + + +class FullRefreshStreamTest(TestCase): + @HttpMocker() + def test_full_refresh_sync(self, http_mocker): + start_datetime = _NOW - timedelta(days=14) + config = { + "start_date": start_datetime.strftime("%Y-%m-%dT%H:%M:%SZ") + } + + http_mocker.get( + _create_users_request().build(), + _create_response().with_record(record=_create_record("users")).with_record(record=_create_record("users")).build(), + ) + + source = SourceFixture() + actual_messages = read(source, config=config, catalog=_create_catalog([("users", SyncMode.full_refresh)])) + + assert emits_successful_sync_status_messages(actual_messages.get_stream_statuses("users")) + assert len(actual_messages.records) == 2 + assert len(actual_messages.state_messages) == 1 + validate_message_order([Type.RECORD, Type.RECORD, Type.STATE], actual_messages.records_and_state_messages) + assert actual_messages.state_messages[0].state.stream.stream_descriptor.name == "users" + assert actual_messages.state_messages[0].state.stream.stream_state == {"__ab_full_refresh_state_message": True} + + @HttpMocker() + def test_full_refresh_with_slices(self, http_mocker): + start_datetime = _NOW - timedelta(days=14) + config = { + "start_date": start_datetime.strftime("%Y-%m-%dT%H:%M:%SZ") + } + + http_mocker.get( + 
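+ # Dividers.stream_slices() returns one slice per category, so each category needs its own mocked request/response pair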
_create_dividers_request().with_category("dukes").build(), + _create_response().with_record(record=_create_record("dividers")).with_record(record=_create_record("dividers")).build(), + ) + + http_mocker.get( + _create_dividers_request().with_category("mentats").build(), + _create_response().with_record(record=_create_record("dividers")).with_record(record=_create_record("dividers")).build(), + ) + + source = SourceFixture() + actual_messages = read(source, config=config, catalog=_create_catalog([("dividers", SyncMode.full_refresh)])) + + assert emits_successful_sync_status_messages(actual_messages.get_stream_statuses("dividers")) + assert len(actual_messages.records) == 4 + assert len(actual_messages.state_messages) == 1 + validate_message_order([Type.RECORD, Type.RECORD, Type.RECORD, Type.RECORD, Type.STATE], actual_messages.records_and_state_messages) + assert actual_messages.state_messages[0].state.stream.stream_descriptor.name == "dividers" + assert actual_messages.state_messages[0].state.stream.stream_state == {"__ab_full_refresh_state_message": True} + + +@freezegun.freeze_time(_NOW) +class IncrementalStreamTest(TestCase): + @HttpMocker() + def test_incremental_sync(self, http_mocker): + start_datetime = _NOW - timedelta(days=14) + config = { + "start_date": start_datetime.strftime("%Y-%m-%dT%H:%M:%SZ") + } + + last_record_date_0 = (start_datetime + timedelta(days=4)).strftime("%Y-%m-%dT%H:%M:%SZ") + http_mocker.get( + _create_planets_request().with_start_date(start_datetime).with_end_date(start_datetime + timedelta(days=7)).build(), + _create_response().with_record(record=_create_record("planets").with_cursor(last_record_date_0)).with_record(record=_create_record("planets").with_cursor(last_record_date_0)).with_record(record=_create_record("planets").with_cursor(last_record_date_0)).build(), + ) + + last_record_date_1 = (_NOW - timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%SZ") + http_mocker.get( + _create_planets_request().with_start_date(start_datetime + timedelta(days=7)).with_end_date(_NOW).build(), + _create_response().with_record(record=_create_record("planets").with_cursor(last_record_date_1)).with_record(record=_create_record("planets").with_cursor(last_record_date_1)).build(), + ) + + source = SourceFixture() + actual_messages = read(source, config=config, catalog=_create_catalog([("planets", SyncMode.incremental)])) + + assert emits_successful_sync_status_messages(actual_messages.get_stream_statuses("planets")) + assert len(actual_messages.records) == 5 + assert len(actual_messages.state_messages) == 2 + validate_message_order([Type.RECORD, Type.RECORD, Type.RECORD, Type.STATE, Type.RECORD, Type.RECORD, Type.STATE], actual_messages.records_and_state_messages) + assert actual_messages.state_messages[0].state.stream.stream_descriptor.name == "planets" + assert actual_messages.state_messages[0].state.stream.stream_state == {"created_at": last_record_date_0} + assert actual_messages.state_messages[1].state.stream.stream_descriptor.name == "planets" + assert actual_messages.state_messages[1].state.stream.stream_state == {"created_at": last_record_date_1} + + @HttpMocker() + def test_incremental_running_as_full_refresh(self, http_mocker): + start_datetime = _NOW - timedelta(days=14) + config = { + "start_date": start_datetime.strftime("%Y-%m-%dT%H:%M:%SZ") + } + + last_record_date_0 = (start_datetime + timedelta(days=4)).strftime("%Y-%m-%dT%H:%M:%SZ") + http_mocker.get( + _create_planets_request().with_start_date(start_datetime).with_end_date(start_datetime + 
timedelta(days=7)).build(), + _create_response().with_record(record=_create_record("planets").with_cursor(last_record_date_0)).with_record(record=_create_record("planets").with_cursor(last_record_date_0)).with_record(record=_create_record("planets").with_cursor(last_record_date_0)).build(), + ) + + last_record_date_1 = (_NOW - timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%SZ") + http_mocker.get( + _create_planets_request().with_start_date(start_datetime + timedelta(days=7)).with_end_date(_NOW).build(), + _create_response().with_record(record=_create_record("planets").with_cursor(last_record_date_1)).with_record(record=_create_record("planets").with_cursor(last_record_date_1)).build(), + ) + + source = SourceFixture() + actual_messages = read(source, config=config, catalog=_create_catalog([("planets", SyncMode.full_refresh)])) + + assert emits_successful_sync_status_messages(actual_messages.get_stream_statuses("planets")) + assert len(actual_messages.records) == 5 + assert len(actual_messages.state_messages) == 1 + validate_message_order([Type.RECORD, Type.RECORD, Type.RECORD, Type.RECORD, Type.RECORD, Type.STATE], actual_messages.records_and_state_messages) + assert actual_messages.state_messages[0].state.stream.stream_descriptor.name == "planets" + assert actual_messages.state_messages[0].state.stream.stream_state == {"created_at": last_record_date_1} + + @HttpMocker() + def test_legacy_incremental_sync(self, http_mocker): + start_datetime = _NOW - timedelta(days=14) + config = { + "start_date": start_datetime.strftime("%Y-%m-%dT%H:%M:%SZ") + } + + last_record_date_0 = (start_datetime + timedelta(days=4)).strftime("%Y-%m-%dT%H:%M:%SZ") + http_mocker.get( + _create_legacies_request().with_start_date(start_datetime).with_end_date(start_datetime + timedelta(days=7)).build(), + _create_response().with_record(record=_create_record("legacies").with_cursor(last_record_date_0)).with_record(record=_create_record("legacies").with_cursor(last_record_date_0)).with_record(record=_create_record("legacies").with_cursor(last_record_date_0)).build(), + ) + + last_record_date_1 = (_NOW - timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%SZ") + http_mocker.get( + _create_legacies_request().with_start_date(start_datetime + timedelta(days=7)).with_end_date(_NOW).build(), + _create_response().with_record(record=_create_record("legacies").with_cursor(last_record_date_1)).with_record(record=_create_record("legacies").with_cursor(last_record_date_1)).build(), + ) + + source = SourceFixture() + actual_messages = read(source, config=config, catalog=_create_catalog([("legacies", SyncMode.incremental)])) + + assert emits_successful_sync_status_messages(actual_messages.get_stream_statuses("legacies")) + assert len(actual_messages.records) == 5 + assert len(actual_messages.state_messages) == 2 + validate_message_order([Type.RECORD, Type.RECORD, Type.RECORD, Type.STATE, Type.RECORD, Type.RECORD, Type.STATE], actual_messages.records_and_state_messages) + assert actual_messages.state_messages[0].state.stream.stream_descriptor.name == "legacies" + assert actual_messages.state_messages[0].state.stream.stream_state == {"created_at": last_record_date_0} + assert actual_messages.state_messages[1].state.stream.stream_descriptor.name == "legacies" + assert actual_messages.state_messages[1].state.stream.stream_state == {"created_at": last_record_date_1} + + +@freezegun.freeze_time(_NOW) +class MultipleStreamTest(TestCase): + @HttpMocker() + def test_incremental_and_full_refresh_streams(self, http_mocker): + start_datetime = _NOW - 
timedelta(days=14) + config = { + "start_date": start_datetime.strftime("%Y-%m-%dT%H:%M:%SZ") + } + + # Mocks for users full refresh stream + http_mocker.get( + _create_users_request().build(), + _create_response().with_record(record=_create_record("users")).with_record(record=_create_record("users")).build(), + ) + + # Mocks for planets incremental stream + last_record_date_0 = (start_datetime + timedelta(days=4)).strftime("%Y-%m-%dT%H:%M:%SZ") + http_mocker.get( + _create_planets_request().with_start_date(start_datetime).with_end_date(start_datetime + timedelta(days=7)).build(), + _create_response().with_record(record=_create_record("planets").with_cursor(last_record_date_0)).with_record(record=_create_record("planets").with_cursor(last_record_date_0)).with_record(record=_create_record("planets").with_cursor(last_record_date_0)).build(), + ) + + last_record_date_1 = (_NOW - timedelta(days=1)).strftime("%Y-%m-%dT%H:%M:%SZ") + http_mocker.get( + _create_planets_request().with_start_date(start_datetime + timedelta(days=7)).with_end_date(_NOW).build(), + _create_response().with_record(record=_create_record("planets").with_cursor(last_record_date_1)).with_record(record=_create_record("planets").with_cursor(last_record_date_1)).build(), + ) + + # Mocks for dividers full refresh stream + http_mocker.get( + _create_dividers_request().with_category("dukes").build(), + _create_response().with_record(record=_create_record("dividers")).with_record(record=_create_record("dividers")).build(), + ) + + http_mocker.get( + _create_dividers_request().with_category("mentats").build(), + _create_response().with_record(record=_create_record("dividers")).with_record(record=_create_record("dividers")).build(), + ) + + source = SourceFixture() + actual_messages = read(source, config=config, catalog=_create_catalog([("users", SyncMode.full_refresh), ("planets", SyncMode.incremental), ("dividers", SyncMode.full_refresh)])) + + assert emits_successful_sync_status_messages(actual_messages.get_stream_statuses("users")) + assert emits_successful_sync_status_messages(actual_messages.get_stream_statuses("planets")) + assert emits_successful_sync_status_messages(actual_messages.get_stream_statuses("dividers")) + + assert len(actual_messages.records) == 11 + assert len(actual_messages.state_messages) == 4 + validate_message_order([ + Type.RECORD, + Type.RECORD, + Type.STATE, + Type.RECORD, + Type.RECORD, + Type.RECORD, + Type.STATE, + Type.RECORD, + Type.RECORD, + Type.STATE, + Type.RECORD, + Type.RECORD, + Type.RECORD, + Type.RECORD, + Type.STATE + ], actual_messages.records_and_state_messages) + assert actual_messages.state_messages[0].state.stream.stream_descriptor.name == "users" + assert actual_messages.state_messages[0].state.stream.stream_state == {"__ab_full_refresh_state_message": True} + assert actual_messages.state_messages[1].state.stream.stream_descriptor.name == "planets" + assert actual_messages.state_messages[1].state.stream.stream_state == {"created_at": last_record_date_0} + assert actual_messages.state_messages[2].state.stream.stream_descriptor.name == "planets" + assert actual_messages.state_messages[2].state.stream.stream_state == {"created_at": last_record_date_1} + assert actual_messages.state_messages[3].state.stream.stream_descriptor.name == "dividers" + assert actual_messages.state_messages[3].state.stream.stream_state == {"__ab_full_refresh_state_message": True} + + +def emits_successful_sync_status_messages(status_messages: List[AirbyteStreamStatus]) -> bool: + return (len(status_messages) == 3 
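+ # a successful sync emits exactly three statuses per stream: STARTED, RUNNING, COMPLETE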
and status_messages[0] == AirbyteStreamStatus.STARTED + and status_messages[1] == AirbyteStreamStatus.RUNNING and status_messages[2] == AirbyteStreamStatus.COMPLETE) + + +def validate_message_order(expected_message_order: List[Type], messages: List[AirbyteMessage]): + if len(expected_message_order) != len(messages): + pytest.fail(f"Expected message order count {len(expected_message_order)} did not match actual messages {len(messages)}") + + for i, message in enumerate(messages): + if message.type != expected_message_order[i]: + pytest.fail(f"At index {i} actual message type {message.type.name} did not match expected message type {expected_message_order[i].name}") diff --git a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/incremental_scenarios.py b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/incremental_scenarios.py index af3161e07199..1f0592846972 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/incremental_scenarios.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/incremental_scenarios.py @@ -73,11 +73,11 @@ [ {"data": {"id": "1", "cursor_field": 0}, "stream": "stream1"}, {"data": {"id": "2", "cursor_field": 1}, "stream": "stream1"}, - {"stream1": {"cursor_field": 1}}, + {"cursor_field": 1}, {"data": {"id": "3", "cursor_field": 2}, "stream": "stream1"}, {"data": {"id": "4", "cursor_field": 3}, "stream": "stream1"}, - {"stream1": {"cursor_field": 2}}, - {"stream1": {"cursor_field": 2}}, # see Cursor.ensure_at_least_one_state_emitted + {"cursor_field": 2}, + {"cursor_field": 2}, # see Cursor.ensure_at_least_one_state_emitted ] ) .set_log_levels({"ERROR", "WARN", "WARNING", "INFO", "DEBUG"}) @@ -150,11 +150,11 @@ [ {"data": {"id": "1", "cursor_field": 0}, "stream": "stream1"}, {"data": {"id": "2", "cursor_field": 1}, "stream": "stream1"}, - {"stream1": {"cursor_field": 1}}, + {"cursor_field": 1}, {"data": {"id": "3", "cursor_field": 2}, "stream": "stream1"}, {"data": {"id": "4", "cursor_field": 3}, "stream": "stream1"}, - {"stream1": {"cursor_field": 2}}, - {"stream1": {"cursor_field": 2}}, # see Cursor.ensure_at_least_one_state_emitted + {"cursor_field": 2}, + {"cursor_field": 2}, # see Cursor.ensure_at_least_one_state_emitted ] ) .set_log_levels({"ERROR", "WARN", "WARNING", "INFO", "DEBUG"}) @@ -237,11 +237,11 @@ [ {"data": {"id": "1", "cursor_field": 0}, "stream": "stream1"}, {"data": {"id": "2", "cursor_field": 1}, "stream": "stream1"}, - {"stream1": {"cursor_field": 1}}, + {"cursor_field": 1}, {"data": {"id": "3", "cursor_field": 2}, "stream": "stream1"}, {"data": {"id": "4", "cursor_field": 3}, "stream": "stream1"}, - {"stream1": {"cursor_field": 2}}, - {"stream1": {"cursor_field": 2}}, # see Cursor.ensure_at_least_one_state_emitted + {"cursor_field": 2}, + {"cursor_field": 2}, # see Cursor.ensure_at_least_one_state_emitted ] ) .set_log_levels({"ERROR", "WARN", "WARNING", "INFO", "DEBUG"}) diff --git a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/stream_facade_scenarios.py b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/stream_facade_scenarios.py index 8964024d2ca0..2090a4dd1c14 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/stream_facade_scenarios.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/stream_facade_scenarios.py @@ -357,11 +357,11 @@ [ {"data": {"id": "1", "cursor_field": 0}, "stream": "stream1"}, {"data": {"id": "2", "cursor_field": 1}, "stream": "stream1"}, - {"stream1": 
{"cursor_field": 1}}, + {"cursor_field": 1}, {"data": {"id": "3", "cursor_field": 2}, "stream": "stream1"}, {"data": {"id": "4", "cursor_field": 3}, "stream": "stream1"}, - {"stream1": {"cursor_field": 2}}, - {"stream1": {"cursor_field": 2}}, # see Cursor.ensure_at_least_one_state_emitted + {"cursor_field": 2}, + {"cursor_field": 2}, # see Cursor.ensure_at_least_one_state_emitted ] ) .set_log_levels({"ERROR", "WARN", "WARNING", "INFO", "DEBUG"}) @@ -403,8 +403,8 @@ [ {"data": {"id": "1", "cursor_field": 0}, "stream": "stream1"}, {"data": {"id": "2", "cursor_field": 3}, "stream": "stream1"}, - {"stream1": {"cursor_field": 3}}, - {"stream1": {"cursor_field": 3}}, # see Cursor.ensure_at_least_one_state_emitted + {"cursor_field": 3}, + {"cursor_field": 3}, # see Cursor.ensure_at_least_one_state_emitted ] ) .set_log_levels({"ERROR", "WARN", "WARNING", "INFO", "DEBUG"}) diff --git a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_adapters.py b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_adapters.py index 345d3c4b09cd..41553bd4622d 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_adapters.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_adapters.py @@ -244,25 +244,14 @@ def test_full_refresh(self): assert actual_stream_data == expected_stream_data - def test_read_records_full_refresh(self): + def test_read_records(self): expected_stream_data = [{"data": 1}, {"data": 2}] records = [Record(data, "stream") for data in expected_stream_data] partition = Mock() partition.read.return_value = records self._abstract_stream.generate_partitions.return_value = [partition] - actual_stream_data = list(self._facade.read_full_refresh(None, None, None)) - - assert actual_stream_data == expected_stream_data - - def test_read_records_incremental(self): - expected_stream_data = [{"data": 1}, {"data": 2}] - records = [Record(data, "stream") for data in expected_stream_data] - partition = Mock() - partition.read.return_value = records - self._abstract_stream.generate_partitions.return_value = [partition] - - actual_stream_data = list(self._facade.read_incremental(None, None, None, None, None, None, None)) + actual_stream_data = list(self._facade.read(None, None, None, None, None, None)) assert actual_stream_data == expected_stream_data diff --git a/airbyte-cdk/python/unit_tests/sources/streams/test_stream_read.py b/airbyte-cdk/python/unit_tests/sources/streams/test_stream_read.py index 02b9cec19bab..6f12585ca2b6 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/test_stream_read.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/test_stream_read.py @@ -1,17 +1,34 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
# + import logging -from typing import Any, Iterable, List, Mapping, Optional, Union +from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Union from unittest.mock import Mock import pytest -from airbyte_cdk.models import AirbyteLogMessage, AirbyteMessage, Level, SyncMode +from airbyte_cdk.models import ( + AirbyteLogMessage, + AirbyteMessage, + AirbyteStateBlob, + AirbyteStateMessage, + AirbyteStateType, + AirbyteStream, + AirbyteStreamState, + ConfiguredAirbyteStream, + DestinationSyncMode, + Level, + StreamDescriptor, + SyncMode, +) from airbyte_cdk.models import Type as MessageType -from airbyte_cdk.sources.message import InMemoryMessageRepository +from airbyte_cdk.sources.connector_state_manager import ConnectorStateManager +from airbyte_cdk.sources.message import InMemoryMessageRepository, MessageRepository from airbyte_cdk.sources.streams import Stream from airbyte_cdk.sources.streams.concurrent.adapters import StreamFacade -from airbyte_cdk.sources.streams.concurrent.cursor import NoopCursor +from airbyte_cdk.sources.streams.concurrent.cursor import Cursor, NoopCursor +from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition +from airbyte_cdk.sources.streams.concurrent.partitions.record import Record from airbyte_cdk.sources.streams.core import StreamData from airbyte_cdk.sources.utils.schema_helpers import InternalConfig from airbyte_cdk.sources.utils.slice_logger import DebugSliceLogger @@ -49,20 +66,66 @@ def get_json_schema(self) -> Mapping[str, Any]: return {} +class MockConcurrentCursor(Cursor): + _state: MutableMapping[str, Any] + _message_repository: MessageRepository + + def __init__(self, message_repository: MessageRepository): + self._message_repository = message_repository + self._state = {} + + @property + def state(self) -> MutableMapping[str, Any]: + return self._state + + def observe(self, record: Record) -> None: + partition = str(record.data.get("partition")) + timestamp = record.data.get("created_at") + self._state[partition] = {"created_at": timestamp} + + def close_partition(self, partition: Partition) -> None: + self._message_repository.emit_message( + AirbyteMessage( + type=MessageType.STATE, + state=AirbyteStateMessage( + type=AirbyteStateType.STREAM, + stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name='__mock_stream', namespace=None), + stream_state=AirbyteStateBlob(**self._state), + ) + ), + ) + ) + + def ensure_at_least_one_state_emitted(self) -> None: + pass + + def _stream(slice_to_partition_mapping, slice_logger, logger, message_repository): return _MockStream(slice_to_partition_mapping) -def _concurrent_stream(slice_to_partition_mapping, slice_logger, logger, message_repository): +def _concurrent_stream(slice_to_partition_mapping, slice_logger, logger, message_repository, cursor: Cursor = NoopCursor()): stream = _stream(slice_to_partition_mapping, slice_logger, logger, message_repository) source = Mock() source._slice_logger = slice_logger source.message_repository = message_repository - stream = StreamFacade.create_from_stream(stream, source, logger, _NO_STATE, NoopCursor()) + stream = StreamFacade.create_from_stream(stream, source, logger, _NO_STATE, cursor) stream.logger.setLevel(logger.level) return stream +def _incremental_stream(slice_to_partition_mapping, slice_logger, logger, message_repository, timestamp): + stream = _stream(slice_to_partition_mapping, slice_logger, logger, message_repository) + stream.state = {"created_at": timestamp} + return stream + + +def 
_incremental_concurrent_stream(slice_to_partition_mapping, slice_logger, logger, message_repository, cursor): + stream = _concurrent_stream(slice_to_partition_mapping, slice_logger, logger, message_repository, cursor) + return stream + + @pytest.mark.parametrize( "constructor", [ @@ -73,6 +136,8 @@ def _concurrent_stream(slice_to_partition_mapping, slice_logger, logger, message def test_full_refresh_read_a_single_slice_with_debug(constructor): # This test verifies that a concurrent stream adapted from a Stream behaves the same as the Stream object. # It is done by running the same test cases on both streams + configured_stream = ConfiguredAirbyteStream(stream=AirbyteStream(name="mock_stream", supported_sync_modes=[SyncMode.full_refresh], json_schema={}), sync_mode=SyncMode.full_refresh,destination_sync_mode=DestinationSyncMode.overwrite) + internal_config = InternalConfig() records = [ {"id": 1, "partition": 1}, {"id": 2, "partition": 1}, @@ -82,6 +147,7 @@ def test_full_refresh_read_a_single_slice_with_debug(constructor): logger = _mock_logger(True) message_repository = InMemoryMessageRepository(Level.DEBUG) stream = constructor(slice_to_partition, slice_logger, logger, message_repository) + state_manager = ConnectorStateManager(stream_instance_map={}) expected_records = [ AirbyteMessage( @@ -94,7 +160,22 @@ def test_full_refresh_read_a_single_slice_with_debug(constructor): *records, ] - actual_records = _read(stream, logger, slice_logger, message_repository) + # Temporary check to only validate the final state message for synchronous sources since it has not been implemented for concurrent yet + if constructor == _stream: + expected_records.append( + AirbyteMessage( + type=MessageType.STATE, + state=AirbyteStateMessage( + type=AirbyteStateType.STREAM, + stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name='__mock_stream', namespace=None), + stream_state=AirbyteStateBlob(__ab_full_refresh_state_message=True), + ) + ), + ), + ) + + actual_records = _read(stream, configured_stream, logger, slice_logger, message_repository, state_manager, internal_config) assert expected_records == actual_records @@ -109,9 +190,12 @@ def test_full_refresh_read_a_single_slice_with_debug(constructor): def test_full_refresh_read_a_single_slice(constructor): # This test verifies that a concurrent stream adapted from a Stream behaves the same as the Stream object. 
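A hedged usage sketch of the MockConcurrentCursor defined above may help here: the concurrent framework calls observe() once per record and close_partition() once per partition, and this mock emits a single per-stream STATE message built from the accumulated partition states. The sketch assumes MockConcurrentCursor is in scope and mirrors the flow asserted in test_concurrent_incremental_read_two_slices below; record values are illustrative.

from airbyte_cdk.models import Level
from airbyte_cdk.sources.message import InMemoryMessageRepository
from airbyte_cdk.sources.streams.concurrent.partitions.record import Record

message_repository = InMemoryMessageRepository(Level.INFO)
cursor = MockConcurrentCursor(message_repository)

# observe() keeps the most recent created_at seen for each partition key.
cursor.observe(Record({"id": 1, "partition": 1, "created_at": "1708800000"}, "stream"))
cursor.observe(Record({"id": 2, "partition": 1, "created_at": "1708850000"}, "stream"))

# close_partition() emits one STATE message; this mock ignores its argument.
cursor.close_partition(None)

state_messages = list(message_repository.consume_queue())
assert len(state_messages) == 1  # stream_state is {"1": {"created_at": "1708850000"}}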
# It is done by running the same test cases on both streams + configured_stream = ConfiguredAirbyteStream(stream=AirbyteStream(name="mock_stream", supported_sync_modes=[SyncMode.full_refresh], json_schema={}), sync_mode=SyncMode.full_refresh,destination_sync_mode=DestinationSyncMode.overwrite) + internal_config = InternalConfig() logger = _mock_logger() slice_logger = DebugSliceLogger() message_repository = InMemoryMessageRepository(Level.INFO) + state_manager = ConnectorStateManager(stream_instance_map={}) records = [ {"id": 1, "partition": 1}, @@ -122,7 +206,22 @@ def test_full_refresh_read_a_single_slice(constructor): expected_records = [*records] - actual_records = _read(stream, logger, slice_logger, message_repository) + # Temporary check to only validate the final state message for synchronous sources since it has not been implemented for concurrent yet + if constructor == _stream: + expected_records.append( + AirbyteMessage( + type=MessageType.STATE, + state=AirbyteStateMessage( + type=AirbyteStateType.STREAM, + stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name='__mock_stream', namespace=None), + stream_state=AirbyteStateBlob(__ab_full_refresh_state_message=True), + ) + ), + ), + ) + + actual_records = _read(stream, configured_stream, logger, slice_logger, message_repository, state_manager, internal_config) assert expected_records == actual_records @@ -137,9 +236,12 @@ def test_full_refresh_read_a_single_slice(constructor): def test_full_refresh_read_a_two_slices(constructor): # This test verifies that a concurrent stream adapted from a Stream behaves the same as the Stream object # It is done by running the same test cases on both streams + configured_stream = ConfiguredAirbyteStream(stream=AirbyteStream(name="mock_stream", supported_sync_modes=[SyncMode.full_refresh], json_schema={}), sync_mode=SyncMode.full_refresh,destination_sync_mode=DestinationSyncMode.overwrite) + internal_config = InternalConfig() logger = _mock_logger() slice_logger = DebugSliceLogger() message_repository = InMemoryMessageRepository(Level.INFO) + state_manager = ConnectorStateManager(stream_instance_map={}) records_partition_1 = [ {"id": 1, "partition": 1}, @@ -157,16 +259,111 @@ def test_full_refresh_read_a_two_slices(constructor): *records_partition_2, ] - actual_records = _read(stream, logger, slice_logger, message_repository) + # Temporary check to only validate the final state message for synchronous sources since it has not been implemented for concurrent yet + if constructor == _stream: + expected_records.append( + AirbyteMessage( + type=MessageType.STATE, + state=AirbyteStateMessage( + type=AirbyteStateType.STREAM, + stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name='__mock_stream', namespace=None), + stream_state=AirbyteStateBlob(__ab_full_refresh_state_message=True), + ) + ), + ), + ) + + actual_records = _read(stream, configured_stream, logger, slice_logger, message_repository, state_manager, internal_config) for record in expected_records: assert record in actual_records assert len(expected_records) == len(actual_records) -def _read(stream, logger, slice_logger, message_repository): +def test_incremental_read_two_slices(): + # This test verifies that a stream running in incremental mode emits state messages correctly + configured_stream = ConfiguredAirbyteStream(stream=AirbyteStream(name="mock_stream", supported_sync_modes=[SyncMode.full_refresh, SyncMode.incremental], json_schema={}), 
sync_mode=SyncMode.incremental,destination_sync_mode=DestinationSyncMode.overwrite) + internal_config = InternalConfig() + logger = _mock_logger() + slice_logger = DebugSliceLogger() + message_repository = InMemoryMessageRepository(Level.INFO) + state_manager = ConnectorStateManager(stream_instance_map={}) + timestamp = "1708899427" + + records_partition_1 = [ + {"id": 1, "partition": 1}, + {"id": 2, "partition": 1}, + ] + records_partition_2 = [ + {"id": 3, "partition": 2}, + {"id": 4, "partition": 2}, + ] + slice_to_partition = {1: records_partition_1, 2: records_partition_2} + stream = _incremental_stream(slice_to_partition, slice_logger, logger, message_repository, timestamp) + + expected_records = [ + *records_partition_1, + _create_state_message("__mock_stream", {"created_at": timestamp}), + *records_partition_2, + _create_state_message("__mock_stream", {"created_at": timestamp}) + ] + + actual_records = _read(stream, configured_stream, logger, slice_logger, message_repository, state_manager, internal_config) + + for record in expected_records: + assert record in actual_records + assert len(expected_records) == len(actual_records) + + +def test_concurrent_incremental_read_two_slices(): + # This test verifies that an incremental concurrent stream manages state correctly for multiple slices syncing concurrently + configured_stream = ConfiguredAirbyteStream(stream=AirbyteStream(name="mock_stream", supported_sync_modes=[SyncMode.full_refresh, SyncMode.incremental], json_schema={}), sync_mode=SyncMode.incremental,destination_sync_mode=DestinationSyncMode.overwrite) + internal_config = InternalConfig() + logger = _mock_logger() + slice_logger = DebugSliceLogger() + message_repository = InMemoryMessageRepository(Level.INFO) + state_manager = ConnectorStateManager(stream_instance_map={}) + slice_timestamp_1 = "1708850000" + slice_timestamp_2 = "1708950000" + cursor = MockConcurrentCursor(message_repository) + + records_partition_1 = [ + {"id": 1, "partition": 1, "created_at": "1708800000"}, + {"id": 2, "partition": 1, "created_at": slice_timestamp_1}, + ] + records_partition_2 = [ + {"id": 3, "partition": 2, "created_at": "1708900000"}, + {"id": 4, "partition": 2, "created_at": slice_timestamp_2}, + ] + slice_to_partition = {1: records_partition_1, 2: records_partition_2} + stream = _incremental_concurrent_stream(slice_to_partition, slice_logger, logger, message_repository, cursor) + + expected_records = [ + *records_partition_1, + *records_partition_2, + ] + + expected_state = _create_state_message("__mock_stream", {"1": {"created_at": slice_timestamp_1}, "2": {"created_at": slice_timestamp_2}}) + + actual_records = _read(stream, configured_stream, logger, slice_logger, message_repository, state_manager, internal_config) + + for record in expected_records: + assert record in actual_records + assert len(expected_records) == len(actual_records) + + # We don't have a real source that reads from the message_repository for state, so we read from the queue directly to verify + # the cursor observed records correctly and updated partition states + mock_partition = Mock() + cursor.close_partition(mock_partition) + actual_state = [state for state in message_repository.consume_queue()] + assert len(actual_state) == 1 + assert actual_state[0] == expected_state + + +def _read(stream, configured_stream, logger, slice_logger, message_repository, state_manager, internal_config): records = [] - for record in stream.read_full_refresh(_A_CURSOR_FIELD, logger, slice_logger): + for record in 
stream.read(configured_stream, logger, slice_logger, {}, state_manager, internal_config): for message in message_repository.consume_queue(): records.append(message) records.append(record) @@ -192,3 +389,16 @@ def _mock_logger(enabled_for_debug=False): logger.isEnabledFor.return_value = enabled_for_debug logger.level = logging.DEBUG if enabled_for_debug else logging.INFO return logger + + +def _create_state_message(stream: str, state: Mapping[str, Any]) -> AirbyteMessage: + return AirbyteMessage( + type=MessageType.STATE, + state=AirbyteStateMessage( + type=AirbyteStateType.STREAM, + stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name=stream, namespace=None), + stream_state=AirbyteStateBlob(**state), + ) + ), + ) diff --git a/airbyte-cdk/python/unit_tests/sources/test_abstract_source.py b/airbyte-cdk/python/unit_tests/sources/test_abstract_source.py index f1489c43860a..7bd56e9b2b49 100644 --- a/airbyte-cdk/python/unit_tests/sources/test_abstract_source.py +++ b/airbyte-cdk/python/unit_tests/sources/test_abstract_source.py @@ -54,14 +54,12 @@ def __init__( self, check_lambda: Callable[[], Tuple[bool, Optional[Any]]] = None, streams: List[Stream] = None, - per_stream: bool = True, message_repository: MessageRepository = None, exception_on_missing_stream: bool = True, stop_sync_on_stream_failure: bool = False, ): self._streams = streams self.check_lambda = check_lambda - self.per_stream = per_stream self.exception_on_missing_stream = exception_on_missing_stream self._message_repository = message_repository self._stop_sync_on_stream_failure = stop_sync_on_stream_failure @@ -286,7 +284,7 @@ def test_read_stream_emits_repository_message_before_record(mocker, message_repo stream = MockStream(name="my_stream") mocker.patch.object(MockStream, "get_json_schema", return_value={}) mocker.patch.object(MockStream, "read_records", side_effect=[[{"a record": "a value"}, {"another record": "another value"}]]) - message_repository.consume_queue.side_effect = [[message for message in [MESSAGE_FROM_REPOSITORY]], []] + message_repository.consume_queue.side_effect = [[message for message in [MESSAGE_FROM_REPOSITORY]], [], []] source = MockSource(streams=[stream], message_repository=message_repository) @@ -357,19 +355,16 @@ def _as_stream_status(stream: str, status: AirbyteStreamStatus) -> AirbyteMessag return AirbyteMessage(type=MessageType.TRACE, trace=trace_message) -def _as_state(state_data: Dict[str, Any], stream_name: str = "", per_stream_state: Dict[str, Any] = None): - if per_stream_state: - return AirbyteMessage( - type=Type.STATE, - state=AirbyteStateMessage( - type=AirbyteStateType.STREAM, - stream=AirbyteStreamState( - stream_descriptor=StreamDescriptor(name=stream_name), stream_state=AirbyteStateBlob.parse_obj(per_stream_state) - ), - data=state_data, +def _as_state(stream_name: str = "", per_stream_state: Dict[str, Any] = None): + return AirbyteMessage( + type=Type.STATE, + state=AirbyteStateMessage( + type=AirbyteStateType.STREAM, + stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name=stream_name), stream_state=AirbyteStateBlob.parse_obj(per_stream_state) ), - ) - return AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(data=state_data)) + ), + ) def _as_error_trace( @@ -410,8 +405,8 @@ def _fix_emitted_at(messages: List[AirbyteMessage]) -> List[AirbyteMessage]: def test_valid_full_refresh_read_no_slices(mocker): """Tests that running a full refresh sync on streams which don't specify slices produces the expected AirbyteMessages""" stream_output = [{"k1": 
"v1"}, {"k2": "v2"}] - s1 = MockStream([({"sync_mode": SyncMode.full_refresh}, stream_output)], name="s1") - s2 = MockStream([({"sync_mode": SyncMode.full_refresh}, stream_output)], name="s2") + s1 = MockStream([({"stream_state": {}, "sync_mode": SyncMode.full_refresh}, stream_output)], name="s1") + s2 = MockStream([({"stream_state": {}, "sync_mode": SyncMode.full_refresh}, stream_output)], name="s2") mocker.patch.object(MockStream, "get_json_schema", return_value={}) @@ -428,10 +423,12 @@ def test_valid_full_refresh_read_no_slices(mocker): _as_stream_status("s1", AirbyteStreamStatus.STARTED), _as_stream_status("s1", AirbyteStreamStatus.RUNNING), *_as_records("s1", stream_output), + _as_state("s1", {"__ab_full_refresh_state_message": True}), _as_stream_status("s1", AirbyteStreamStatus.COMPLETE), _as_stream_status("s2", AirbyteStreamStatus.STARTED), _as_stream_status("s2", AirbyteStreamStatus.RUNNING), *_as_records("s2", stream_output), + _as_state("s2", {"__ab_full_refresh_state_message": True}), _as_stream_status("s2", AirbyteStreamStatus.COMPLETE), ] ) @@ -445,11 +442,11 @@ def test_valid_full_refresh_read_with_slices(mocker): slices = [{"1": "1"}, {"2": "2"}] # When attempting to sync a slice, just output that slice as a record s1 = MockStream( - [({"sync_mode": SyncMode.full_refresh, "stream_slice": s}, [s]) for s in slices], + [({"stream_state": {}, "sync_mode": SyncMode.full_refresh, "stream_slice": s}, [s]) for s in slices], name="s1", ) s2 = MockStream( - [({"sync_mode": SyncMode.full_refresh, "stream_slice": s}, [s]) for s in slices], + [({"stream_state": {}, "sync_mode": SyncMode.full_refresh, "stream_slice": s}, [s]) for s in slices], name="s2", ) @@ -469,10 +466,12 @@ def test_valid_full_refresh_read_with_slices(mocker): _as_stream_status("s1", AirbyteStreamStatus.STARTED), _as_stream_status("s1", AirbyteStreamStatus.RUNNING), *_as_records("s1", slices), + _as_state("s1", {"__ab_full_refresh_state_message": True}), _as_stream_status("s1", AirbyteStreamStatus.COMPLETE), _as_stream_status("s2", AirbyteStreamStatus.STARTED), _as_stream_status("s2", AirbyteStreamStatus.RUNNING), *_as_records("s2", slices), + _as_state("s2", {"__ab_full_refresh_state_message": True}), _as_stream_status("s2", AirbyteStreamStatus.COMPLETE), ] ) @@ -482,6 +481,73 @@ def test_valid_full_refresh_read_with_slices(mocker): assert expected == messages +def test_full_refresh_does_not_use_incoming_state(mocker): + """Tests that running a full refresh sync does not use an incoming state message from the platform""" + slices = [{"1": "1"}, {"2": "2"}] + # When attempting to sync a slice, just output that slice as a record + s1 = MockStream( + [({"stream_state": {}, "sync_mode": SyncMode.full_refresh, "stream_slice": s}, [s]) for s in slices], + name="s1", + ) + s2 = MockStream( + [({"stream_state": {}, "sync_mode": SyncMode.full_refresh, "stream_slice": s}, [s]) for s in slices], + name="s2", + ) + + def stream_slices_side_effect(stream_state: Mapping[str, Any], **kwargs) -> List[Mapping[str, Any]]: + if stream_state: + return slices[1:] + else: + return slices + + mocker.patch.object(MockStream, "get_json_schema", return_value={}) + mocker.patch.object(MockStream, "stream_slices", side_effect=stream_slices_side_effect) + + state = [ + AirbyteStateMessage( + type=AirbyteStateType.STREAM, + stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="s1"), + stream_state=AirbyteStateBlob.parse_obj({"created_at": "2024-01-31"}), + ), + ), + AirbyteStateMessage( + type=AirbyteStateType.STREAM, + 
stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="s2"), + stream_state=AirbyteStateBlob.parse_obj({"__ab_full_refresh_state_message": True}), + ), + ), + ] + + src = MockSource(streams=[s1, s2]) + catalog = ConfiguredAirbyteCatalog( + streams=[ + _configured_stream(s1, SyncMode.full_refresh), + _configured_stream(s2, SyncMode.full_refresh), + ] + ) + + expected = _fix_emitted_at( + [ + _as_stream_status("s1", AirbyteStreamStatus.STARTED), + _as_stream_status("s1", AirbyteStreamStatus.RUNNING), + *_as_records("s1", slices), + _as_state("s1", {"__ab_full_refresh_state_message": True}), + _as_stream_status("s1", AirbyteStreamStatus.COMPLETE), + _as_stream_status("s2", AirbyteStreamStatus.STARTED), + _as_stream_status("s2", AirbyteStreamStatus.RUNNING), + *_as_records("s2", slices), + _as_state("s2", {"__ab_full_refresh_state_message": True}), + _as_stream_status("s2", AirbyteStreamStatus.COMPLETE), + ] + ) + + messages = _fix_emitted_at(list(src.read(logger, {}, catalog, state))) + + assert messages == expected + + @pytest.mark.parametrize( "slices", [[{"1": "1"}, {"2": "2"}], [{"date": datetime.date(year=2023, month=1, day=1)}, {"date": datetime.date(year=2023, month=1, day=1)}]], @@ -491,7 +557,7 @@ def test_read_full_refresh_with_slices_sends_slice_messages(mocker, slices): debug_logger = logging.getLogger("airbyte.debug") debug_logger.setLevel(logging.DEBUG) stream = MockStream( - [({"sync_mode": SyncMode.full_refresh, "stream_slice": s}, [s]) for s in slices], + [({"stream_state": {}, "sync_mode": SyncMode.full_refresh, "stream_slice": s}, [s]) for s in slices], name="s1", ) @@ -544,14 +610,7 @@ class TestIncrementalRead: pytest.param(False, id="test_incoming_stream_state_as_per_stream_format"), ], ) - @pytest.mark.parametrize( - "per_stream_enabled", - [ - pytest.param(True, id="test_source_emits_state_as_per_stream_format"), - pytest.param(False, id="test_source_emits_state_as_per_stream_format"), - ], - ) - def test_with_state_attribute(self, mocker, use_legacy, per_stream_enabled): + def test_with_state_attribute(self, mocker, use_legacy): """Test correct state passing for the streams that have a state attribute""" stream_output = [{"k1": "v1"}, {"k2": "v2"}] old_state = {"cursor": "old_value"} @@ -589,7 +648,7 @@ def test_with_state_attribute(self, mocker, use_legacy, per_stream_enabled): return_value=new_state_from_connector, ) mocker.patch.object(MockStreamWithState, "get_json_schema", return_value={}) - src = MockSource(streams=[stream_1, stream_2], per_stream=per_stream_enabled) + src = MockSource(streams=[stream_1, stream_2]) catalog = ConfiguredAirbyteCatalog( streams=[ _configured_stream(stream_1, SyncMode.incremental), @@ -603,17 +662,13 @@ def test_with_state_attribute(self, mocker, use_legacy, per_stream_enabled): _as_stream_status("s1", AirbyteStreamStatus.RUNNING), _as_record("s1", stream_output[0]), _as_record("s1", stream_output[1]), - _as_state({"s1": new_state_from_connector}, "s1", new_state_from_connector) - if per_stream_enabled - else _as_state({"s1": new_state_from_connector}), + _as_state("s1", new_state_from_connector), _as_stream_status("s1", AirbyteStreamStatus.COMPLETE), _as_stream_status("s2", AirbyteStreamStatus.STARTED), _as_stream_status("s2", AirbyteStreamStatus.RUNNING), _as_record("s2", stream_output[0]), _as_record("s2", stream_output[1]), - _as_state({"s1": new_state_from_connector, "s2": new_state_from_connector}, "s2", new_state_from_connector) - if per_stream_enabled - else _as_state({"s1": new_state_from_connector, "s2": 
new_state_from_connector}), + _as_state("s2", new_state_from_connector), _as_stream_status("s2", AirbyteStreamStatus.COMPLETE), ] ) @@ -633,14 +688,7 @@ def test_with_state_attribute(self, mocker, use_legacy, per_stream_enabled): pytest.param(False, id="test_incoming_stream_state_as_per_stream_format"), ], ) - @pytest.mark.parametrize( - "per_stream_enabled", - [ - pytest.param(True, id="test_source_emits_state_as_per_stream_format"), - pytest.param(False, id="test_source_emits_state_as_per_stream_format"), - ], - ) - def test_with_checkpoint_interval(self, mocker, use_legacy, per_stream_enabled): + def test_with_checkpoint_interval(self, mocker, use_legacy): """Tests that an incremental read which doesn't specify a checkpoint interval outputs a STATE message after reading N records within a stream. """ @@ -670,7 +718,7 @@ def test_with_checkpoint_interval(self, mocker, use_legacy, per_stream_enabled): return_value=1, ) - src = MockSource(streams=[stream_1, stream_2], per_stream=per_stream_enabled) + src = MockSource(streams=[stream_1, stream_2]) catalog = ConfiguredAirbyteCatalog( streams=[ _configured_stream(stream_1, SyncMode.incremental), @@ -683,18 +731,18 @@ def test_with_checkpoint_interval(self, mocker, use_legacy, per_stream_enabled): _as_stream_status("s1", AirbyteStreamStatus.STARTED), _as_stream_status("s1", AirbyteStreamStatus.RUNNING), _as_record("s1", stream_output[0]), - _as_state({"s1": state}, "s1", state) if per_stream_enabled else _as_state({"s1": state}), + _as_state("s1", state), _as_record("s1", stream_output[1]), - _as_state({"s1": state}, "s1", state) if per_stream_enabled else _as_state({"s1": state}), - _as_state({"s1": state}, "s1", state) if per_stream_enabled else _as_state({"s1": state}), + _as_state("s1", state), + _as_state("s1", state), _as_stream_status("s1", AirbyteStreamStatus.COMPLETE), _as_stream_status("s2", AirbyteStreamStatus.STARTED), _as_stream_status("s2", AirbyteStreamStatus.RUNNING), _as_record("s2", stream_output[0]), - _as_state({"s1": state, "s2": state}, "s2", state) if per_stream_enabled else _as_state({"s1": state, "s2": state}), + _as_state("s2", state), _as_record("s2", stream_output[1]), - _as_state({"s1": state, "s2": state}, "s2", state) if per_stream_enabled else _as_state({"s1": state, "s2": state}), - _as_state({"s1": state, "s2": state}, "s2", state) if per_stream_enabled else _as_state({"s1": state, "s2": state}), + _as_state("s2", state), + _as_state("s2", state), _as_stream_status("s2", AirbyteStreamStatus.COMPLETE), ] ) @@ -709,14 +757,7 @@ def test_with_checkpoint_interval(self, mocker, use_legacy, per_stream_enabled): pytest.param(False, id="test_incoming_stream_state_as_per_stream_format"), ], ) - @pytest.mark.parametrize( - "per_stream_enabled", - [ - pytest.param(True, id="test_source_emits_state_as_per_stream_format"), - pytest.param(False, id="test_source_emits_state_as_per_stream_format"), - ], - ) - def test_with_no_interval(self, mocker, use_legacy, per_stream_enabled): + def test_with_no_interval(self, mocker, use_legacy): """Tests that an incremental read which doesn't specify a checkpoint interval outputs a STATE message only after fully reading the stream and does not output any STATE messages during syncing the stream. 
""" @@ -739,7 +780,7 @@ def test_with_no_interval(self, mocker, use_legacy, per_stream_enabled): mocker.patch.object(MockStream, "supports_incremental", return_value=True) mocker.patch.object(MockStream, "get_json_schema", return_value={}) - src = MockSource(streams=[stream_1, stream_2], per_stream=per_stream_enabled) + src = MockSource(streams=[stream_1, stream_2]) catalog = ConfiguredAirbyteCatalog( streams=[ _configured_stream(stream_1, SyncMode.incremental), @@ -752,12 +793,12 @@ def test_with_no_interval(self, mocker, use_legacy, per_stream_enabled): _as_stream_status("s1", AirbyteStreamStatus.STARTED), _as_stream_status("s1", AirbyteStreamStatus.RUNNING), *_as_records("s1", stream_output), - _as_state({"s1": state}, "s1", state) if per_stream_enabled else _as_state({"s1": state}), + _as_state("s1", state), _as_stream_status("s1", AirbyteStreamStatus.COMPLETE), _as_stream_status("s2", AirbyteStreamStatus.STARTED), _as_stream_status("s2", AirbyteStreamStatus.RUNNING), *_as_records("s2", stream_output), - _as_state({"s1": state, "s2": state}, "s2", state) if per_stream_enabled else _as_state({"s1": state, "s2": state}), + _as_state("s2", state), _as_stream_status("s2", AirbyteStreamStatus.COMPLETE), ] ) @@ -773,14 +814,7 @@ def test_with_no_interval(self, mocker, use_legacy, per_stream_enabled): pytest.param(False, id="test_incoming_stream_state_as_per_stream_format"), ], ) - @pytest.mark.parametrize( - "per_stream_enabled", - [ - pytest.param(True, id="test_source_emits_state_as_per_stream_format"), - pytest.param(False, id="test_source_emits_state_as_per_stream_format"), - ], - ) - def test_with_slices(self, mocker, use_legacy, per_stream_enabled): + def test_with_slices(self, mocker, use_legacy): """Tests that an incremental read which uses slices outputs each record in the slice followed by a STATE message, for each slice""" if use_legacy: input_state = defaultdict(dict) @@ -823,7 +857,7 @@ def test_with_slices(self, mocker, use_legacy, per_stream_enabled): mocker.patch.object(MockStream, "get_json_schema", return_value={}) mocker.patch.object(MockStream, "stream_slices", return_value=slices) - src = MockSource(streams=[stream_1, stream_2], per_stream=per_stream_enabled) + src = MockSource(streams=[stream_1, stream_2]) catalog = ConfiguredAirbyteCatalog( streams=[ _configured_stream(stream_1, SyncMode.incremental), @@ -837,19 +871,19 @@ def test_with_slices(self, mocker, use_legacy, per_stream_enabled): _as_stream_status("s1", AirbyteStreamStatus.RUNNING), # stream 1 slice 1 *_as_records("s1", stream_output), - _as_state({"s1": state}, "s1", state) if per_stream_enabled else _as_state({"s1": state}), + _as_state("s1", state), # stream 1 slice 2 *_as_records("s1", stream_output), - _as_state({"s1": state}, "s1", state) if per_stream_enabled else _as_state({"s1": state}), + _as_state("s1", state), _as_stream_status("s1", AirbyteStreamStatus.COMPLETE), _as_stream_status("s2", AirbyteStreamStatus.STARTED), _as_stream_status("s2", AirbyteStreamStatus.RUNNING), # stream 2 slice 1 *_as_records("s2", stream_output), - _as_state({"s1": state, "s2": state}, "s2", state) if per_stream_enabled else _as_state({"s1": state, "s2": state}), + _as_state("s2", state), # stream 2 slice 2 *_as_records("s2", stream_output), - _as_state({"s1": state, "s2": state}, "s2", state) if per_stream_enabled else _as_state({"s1": state, "s2": state}), + _as_state("s2", state), _as_stream_status("s2", AirbyteStreamStatus.COMPLETE), ] ) @@ -865,15 +899,8 @@ def test_with_slices(self, mocker, use_legacy, 
per_stream_enabled): pytest.param(False, id="test_incoming_stream_state_as_per_stream_format"), ], ) - @pytest.mark.parametrize( - "per_stream_enabled", - [ - pytest.param(True, id="test_source_emits_state_as_per_stream_format"), - pytest.param(False, id="test_source_emits_state_as_per_stream_format"), - ], - ) @pytest.mark.parametrize("slices", [pytest.param([], id="test_slices_as_list"), pytest.param(iter([]), id="test_slices_as_iterator")]) - def test_no_slices(self, mocker, use_legacy, per_stream_enabled, slices): + def test_no_slices(self, mocker, use_legacy, slices): """ Tests that an incremental read returns at least one state messages even if no records were read: 1. outputs a state message after reading the entire stream @@ -926,7 +953,7 @@ def test_no_slices(self, mocker, use_legacy, per_stream_enabled, slices): return_value=2, ) - src = MockSource(streams=[stream_1, stream_2], per_stream=per_stream_enabled) + src = MockSource(streams=[stream_1, stream_2]) catalog = ConfiguredAirbyteCatalog( streams=[ _configured_stream(stream_1, SyncMode.incremental), @@ -937,10 +964,10 @@ def test_no_slices(self, mocker, use_legacy, per_stream_enabled, slices): expected = _fix_emitted_at( [ _as_stream_status("s1", AirbyteStreamStatus.STARTED), - _as_state({"s1": state}, "s1", state) if per_stream_enabled else _as_state({"s1": state}), + _as_state("s1", state), _as_stream_status("s1", AirbyteStreamStatus.COMPLETE), _as_stream_status("s2", AirbyteStreamStatus.STARTED), - _as_state({"s1": state, "s2": state}, "s2", state) if per_stream_enabled else _as_state({"s1": state, "s2": state}), + _as_state("s2", state), _as_stream_status("s2", AirbyteStreamStatus.COMPLETE), ] ) @@ -956,14 +983,7 @@ def test_no_slices(self, mocker, use_legacy, per_stream_enabled, slices): pytest.param(False, id="test_incoming_stream_state_as_per_stream_format"), ], ) - @pytest.mark.parametrize( - "per_stream_enabled", - [ - pytest.param(True, id="test_source_emits_state_as_per_stream_format"), - pytest.param(False, id="test_source_emits_state_as_per_stream_format"), - ], - ) - def test_with_slices_and_interval(self, mocker, use_legacy, per_stream_enabled): + def test_with_slices_and_interval(self, mocker, use_legacy): """ Tests that an incremental read which uses slices and a checkpoint interval: 1. 
outputs all records @@ -1016,7 +1036,7 @@ def test_with_slices_and_interval(self, mocker, use_legacy, per_stream_enabled): return_value=2, ) - src = MockSource(streams=[stream_1, stream_2], per_stream=per_stream_enabled) + src = MockSource(streams=[stream_1, stream_2]) catalog = ConfiguredAirbyteCatalog( streams=[ _configured_stream(stream_1, SyncMode.incremental), @@ -1031,32 +1051,32 @@ def test_with_slices_and_interval(self, mocker, use_legacy, per_stream_enabled): _as_stream_status("s1", AirbyteStreamStatus.RUNNING), _as_record("s1", stream_output[0]), _as_record("s1", stream_output[1]), - _as_state({"s1": state}, "s1", state) if per_stream_enabled else _as_state({"s1": state}), + _as_state("s1", state), _as_record("s1", stream_output[2]), - _as_state({"s1": state}, "s1", state) if per_stream_enabled else _as_state({"s1": state}), + _as_state("s1", state), # stream 1 slice 2 _as_record("s1", stream_output[0]), - _as_state({"s1": state}, "s1", state) if per_stream_enabled else _as_state({"s1": state}), + _as_state("s1", state), _as_record("s1", stream_output[1]), _as_record("s1", stream_output[2]), - _as_state({"s1": state}, "s1", state) if per_stream_enabled else _as_state({"s1": state}), - _as_state({"s1": state}, "s1", state) if per_stream_enabled else _as_state({"s1": state}), + _as_state("s1", state), + _as_state("s1", state), _as_stream_status("s1", AirbyteStreamStatus.COMPLETE), # stream 2 slice 1 _as_stream_status("s2", AirbyteStreamStatus.STARTED), _as_stream_status("s2", AirbyteStreamStatus.RUNNING), _as_record("s2", stream_output[0]), _as_record("s2", stream_output[1]), - _as_state({"s1": state, "s2": state}, "s2", state) if per_stream_enabled else _as_state({"s1": state, "s2": state}), + _as_state("s2", state), _as_record("s2", stream_output[2]), - _as_state({"s1": state, "s2": state}, "s2", state) if per_stream_enabled else _as_state({"s1": state, "s2": state}), + _as_state("s2", state), # stream 2 slice 2 _as_record("s2", stream_output[0]), - _as_state({"s1": state, "s2": state}, "s2", state) if per_stream_enabled else _as_state({"s1": state, "s2": state}), + _as_state("s2", state), _as_record("s2", stream_output[1]), _as_record("s2", stream_output[2]), - _as_state({"s1": state, "s2": state}, "s2", state) if per_stream_enabled else _as_state({"s1": state, "s2": state}), - _as_state({"s1": state, "s2": state}, "s2", state) if per_stream_enabled else _as_state({"s1": state, "s2": state}), + _as_state("s2", state), + _as_state("s2", state), _as_stream_status("s2", AirbyteStreamStatus.COMPLETE), ] ) @@ -1065,13 +1085,7 @@ def test_with_slices_and_interval(self, mocker, use_legacy, per_stream_enabled): assert messages == expected - @pytest.mark.parametrize( - "per_stream_enabled", - [ - pytest.param(False, id="test_source_emits_state_as_per_stream_format"), - ], - ) - def test_emit_non_records(self, mocker, per_stream_enabled): + def test_emit_non_records(self, mocker): """ Tests that an incremental read which uses slices and a checkpoint interval: 1. 
outputs all records @@ -1129,7 +1143,7 @@ def test_emit_non_records(self, mocker, per_stream_enabled): return_value=2, ) - src = MockSource(streams=[stream_1, stream_2], per_stream=per_stream_enabled) + src = MockSource(streams=[stream_1, stream_2]) catalog = ConfiguredAirbyteCatalog( streams=[ _configured_stream(stream_1, SyncMode.incremental), @@ -1145,17 +1159,17 @@ def test_emit_non_records(self, mocker, per_stream_enabled): stream_data_to_airbyte_message("s1", stream_output[0]), stream_data_to_airbyte_message("s1", stream_output[1]), stream_data_to_airbyte_message("s1", stream_output[2]), - _as_state({"s1": state}, "s1", state) if per_stream_enabled else _as_state({"s1": state}), + _as_state("s1", state), stream_data_to_airbyte_message("s1", stream_output[3]), - _as_state({"s1": state}, "s1", state) if per_stream_enabled else _as_state({"s1": state}), + _as_state("s1", state), # stream 1 slice 2 stream_data_to_airbyte_message("s1", stream_output[0]), - _as_state({"s1": state}, "s1", state) if per_stream_enabled else _as_state({"s1": state}), + _as_state("s1", state), stream_data_to_airbyte_message("s1", stream_output[1]), stream_data_to_airbyte_message("s1", stream_output[2]), stream_data_to_airbyte_message("s1", stream_output[3]), - _as_state({"s1": state}, "s1", state) if per_stream_enabled else _as_state({"s1": state}), - _as_state({"s1": state}, "s1", state) if per_stream_enabled else _as_state({"s1": state}), + _as_state("s1", state), + _as_state("s1", state), _as_stream_status("s1", AirbyteStreamStatus.COMPLETE), # stream 2 slice 1 _as_stream_status("s2", AirbyteStreamStatus.STARTED), @@ -1163,17 +1177,17 @@ def test_emit_non_records(self, mocker, per_stream_enabled): stream_data_to_airbyte_message("s2", stream_output[0]), stream_data_to_airbyte_message("s2", stream_output[1]), stream_data_to_airbyte_message("s2", stream_output[2]), - _as_state({"s1": state, "s2": state}, "s2", state) if per_stream_enabled else _as_state({"s1": state, "s2": state}), + _as_state("s2", state), stream_data_to_airbyte_message("s2", stream_output[3]), - _as_state({"s1": state, "s2": state}, "s2", state) if per_stream_enabled else _as_state({"s1": state, "s2": state}), + _as_state("s2", state), # stream 2 slice 2 stream_data_to_airbyte_message("s2", stream_output[0]), - _as_state({"s1": state, "s2": state}, "s2", state) if per_stream_enabled else _as_state({"s1": state, "s2": state}), + _as_state("s2", state), stream_data_to_airbyte_message("s2", stream_output[1]), stream_data_to_airbyte_message("s2", stream_output[2]), stream_data_to_airbyte_message("s2", stream_output[3]), - _as_state({"s1": state, "s2": state}, "s2", state) if per_stream_enabled else _as_state({"s1": state, "s2": state}), - _as_state({"s1": state, "s2": state}, "s2", state) if per_stream_enabled else _as_state({"s1": state, "s2": state}), + _as_state("s2", state), + _as_state("s2", state), _as_stream_status("s2", AirbyteStreamStatus.COMPLETE), ] ) @@ -1200,14 +1214,12 @@ def test_checkpoint_state_from_stream_instance(): # The stream_state passed to checkpoint_state() should be ignored since stream implements state function teams_stream.state = {"updated_at": "2022-09-11"} - actual_message = teams_stream._checkpoint_state({"ignored": "state"}, state_manager, True) - assert actual_message == _as_state({"teams": {"updated_at": "2022-09-11"}}, "teams", {"updated_at": "2022-09-11"}) + actual_message = teams_stream._checkpoint_state({"ignored": "state"}, state_manager) + assert actual_message == _as_state("teams", {"updated_at": 
"2022-09-11"}) # The stream_state passed to checkpoint_state() should be used since the stream does not implement state function - actual_message = managers_stream._checkpoint_state({"updated": "expected_here"}, state_manager, True) - assert actual_message == _as_state( - {"teams": {"updated_at": "2022-09-11"}, "managers": {"updated": "expected_here"}}, "managers", {"updated": "expected_here"} - ) + actual_message = managers_stream._checkpoint_state({"updated": "expected_here"}, state_manager) + assert actual_message == _as_state("managers", {"updated": "expected_here"}) @pytest.mark.parametrize( @@ -1382,9 +1394,9 @@ def test_continue_sync_with_failed_streams_with_override_false(mocker): the sync when one stream fails with an error. """ stream_output = [{"k1": "v1"}, {"k2": "v2"}] - s1 = MockStream([({"sync_mode": SyncMode.full_refresh}, stream_output)], name="s1") + s1 = MockStream([({"stream_state": {}, "sync_mode": SyncMode.full_refresh}, stream_output)], name="s1") s2 = StreamRaisesException(AirbyteTracedException(message="I was born only to crash like Icarus")) - s3 = MockStream([({"sync_mode": SyncMode.full_refresh}, stream_output)], name="s3") + s3 = MockStream([({"stream_state": {}, "sync_mode": SyncMode.full_refresh}, stream_output)], name="s3") mocker.patch.object(MockStream, "get_json_schema", return_value={}) mocker.patch.object(StreamRaisesException, "get_json_schema", return_value={}) diff --git a/airbyte-cdk/python/unit_tests/sources/test_connector_state_manager.py b/airbyte-cdk/python/unit_tests/sources/test_connector_state_manager.py index 9bee58eb69ee..d4d4d9dd2293 100644 --- a/airbyte-cdk/python/unit_tests/sources/test_connector_state_manager.py +++ b/airbyte-cdk/python/unit_tests/sources/test_connector_state_manager.py @@ -284,61 +284,6 @@ def test_get_stream_state(input_state, stream_name, namespace, expected_state): assert actual_state == expected_state -@pytest.mark.parametrize( - "input_state, expected_legacy_state, expected_error", - [ - pytest.param( - [AirbyteStateMessage(type=AirbyteStateType.LEGACY, data={"actresses": {"id": "seehorn_rhea"}})], - {"actresses": {"id": "seehorn_rhea"}}, - does_not_raise(), - id="test_get_legacy_legacy_state_message", - ), - pytest.param( - [ - AirbyteStateMessage( - type=AirbyteStateType.STREAM, - stream=AirbyteStreamState( - stream_descriptor=StreamDescriptor(name="actresses", namespace="public"), - stream_state=AirbyteStateBlob.parse_obj({"id": "seehorn_rhea"}), - ), - ) - ], - {"actresses": {"id": "seehorn_rhea"}}, - does_not_raise(), - id="test_get_legacy_from_stream_state", - ), - pytest.param( - { - "actors": {"created_at": "1962-10-22"}, - "actresses": {"id": "seehorn_rhea"}, - }, - {"actors": {"created_at": "1962-10-22"}, "actresses": {"id": "seehorn_rhea"}}, - does_not_raise(), - id="test_get_legacy_from_legacy_state_blob", - ), - pytest.param( - [ - AirbyteStateMessage( - type=AirbyteStateType.STREAM, - stream=AirbyteStreamState( - stream_descriptor=StreamDescriptor(name="actresses", namespace="public"), - stream_state=None, - ), - ) - ], - {"actresses": {}}, - does_not_raise(), - id="test_get_legacy_from_stream_state", - ), - ], -) -def test_get_legacy_state(input_state, expected_legacy_state, expected_error): - with expected_error: - state_manager = ConnectorStateManager({}, input_state) - actual_legacy_state = state_manager._get_legacy_state() - assert actual_legacy_state == expected_legacy_state - - def test_get_state_returns_deep_copy(): input_state = [ AirbyteStateMessage( @@ -422,11 +367,10 @@ def 
test_update_state_for_stream(start_state, update_name, update_namespace, upd assert state_manager.per_stream_states[ HashableStreamDescriptor(name=update_name, namespace=update_namespace) ] == AirbyteStateBlob.parse_obj(update_value) - assert state_manager._get_legacy_state() == expected_legacy_state @pytest.mark.parametrize( - "start_state, update_name, update_namespace, send_per_stream, expected_state_message", + "start_state, update_name, update_namespace, expected_state_message", [ pytest.param( [ @@ -447,7 +391,6 @@ def test_update_state_for_stream(start_state, update_name, update_namespace, upd ], "episodes", "public", - True, AirbyteMessage( type=MessageType.STATE, state=AirbyteStateMessage( @@ -456,7 +399,6 @@ def test_update_state_for_stream(start_state, update_name, update_namespace, upd stream_descriptor=StreamDescriptor(name="episodes", namespace="public"), stream_state=AirbyteStateBlob.parse_obj({"created_at": "2022_05_22"}), ), - data={"episodes": {"created_at": "2022_05_22"}, "seasons": {"id": 1}}, ), ), id="test_emit_state_message_with_stream_and_legacy", @@ -473,7 +415,6 @@ def test_update_state_for_stream(start_state, update_name, update_namespace, upd ], "episodes", "public", - True, AirbyteMessage( type=MessageType.STATE, state=AirbyteStateMessage( @@ -482,7 +423,6 @@ def test_update_state_for_stream(start_state, update_name, update_namespace, upd stream_descriptor=StreamDescriptor(name="episodes", namespace="public"), stream_state=AirbyteStateBlob(), ), - data={"episodes": {}}, ), ), id="test_always_emit_message_with_stream_state_blob", @@ -499,7 +439,6 @@ def test_update_state_for_stream(start_state, update_name, update_namespace, upd ], "missing", "public", - True, AirbyteMessage( type=MessageType.STATE, state=AirbyteStateMessage( @@ -507,7 +446,6 @@ def test_update_state_for_stream(start_state, update_name, update_namespace, upd stream=AirbyteStreamState( stream_descriptor=StreamDescriptor(name="missing", namespace="public"), stream_state=AirbyteStateBlob() ), - data={"episodes": {"id": 507}}, ), ), id="test_emit_state_nonexistent_stream_name", @@ -524,7 +462,6 @@ def test_update_state_for_stream(start_state, update_name, update_namespace, upd ], "episodes", "nonexistent", - True, AirbyteMessage( type=MessageType.STATE, state=AirbyteStateMessage( @@ -532,72 +469,14 @@ def test_update_state_for_stream(start_state, update_name, update_namespace, upd stream=AirbyteStreamState( stream_descriptor=StreamDescriptor(name="episodes", namespace="nonexistent"), stream_state=AirbyteStateBlob() ), - data={"episodes": {"id": 507}}, ), ), id="test_emit_state_wrong_namespace", ), - pytest.param( - [ - AirbyteStateMessage( - type=AirbyteStateType.STREAM, - stream=AirbyteStreamState( - stream_descriptor=StreamDescriptor(name="episodes", namespace=None), - stream_state=AirbyteStateBlob.parse_obj({"created_at": "2022_05_22"}), - ), - ), - AirbyteStateMessage( - type=AirbyteStateType.STREAM, - stream=AirbyteStreamState( - stream_descriptor=StreamDescriptor(name="seasons", namespace=None), - stream_state=AirbyteStateBlob.parse_obj({"id": 1}), - ), - ), - ], - "episodes", - "", - False, - AirbyteMessage( - type=MessageType.STATE, - state=AirbyteStateMessage( - data={"episodes": {"created_at": "2022_05_22"}, "seasons": {"id": 1}}, - ), - ), - id="test_emit_legacy_state_format", - ), ], ) -def test_create_state_message(start_state, update_name, update_namespace, send_per_stream, expected_state_message): +def test_create_state_message(start_state, update_name, update_namespace, 
expected_state_message): state_manager = ConnectorStateManager({}, start_state) - actual_state_message = state_manager.create_state_message( - stream_name=update_name, namespace=update_namespace, send_per_stream_state=send_per_stream - ) + actual_state_message = state_manager.create_state_message(stream_name=update_name, namespace=update_namespace) assert actual_state_message == expected_state_message - - -def test_do_not_set_stream_descriptor_namespace_when_none(): - """ - This is a very specific test to ensure that the None value is not set and emitted back to the platform for namespace. - The platform performs validation on the state message sent by the connector and namespace must be a string or not - included at all. The None value registers as null by the platform which is not valid input. We can verify that fields - on a pydantic model are not defined using exclude_unset parameter. - """ - expected_stream_state_descriptor = {"name": "episodes"} - - state_manager = ConnectorStateManager( - {}, - [ - AirbyteStateMessage( - type=AirbyteStateType.STREAM, - stream=AirbyteStreamState( - stream_descriptor=StreamDescriptor(name="episodes"), - stream_state=None, - ), - ), - ], - ) - - actual_state_message = state_manager.create_state_message(stream_name="episodes", namespace=None, send_per_stream_state=True) - - assert actual_state_message.state.stream.stream_descriptor.dict(exclude_unset=True) == expected_stream_state_descriptor diff --git a/airbyte-cdk/python/unit_tests/sources/test_source.py b/airbyte-cdk/python/unit_tests/sources/test_source.py index 3657a1c03c14..a1058b13740b 100644 --- a/airbyte-cdk/python/unit_tests/sources/test_source.py +++ b/airbyte-cdk/python/unit_tests/sources/test_source.py @@ -365,8 +365,8 @@ def test_internal_config(abstract_source, catalog): # Test with empty config logger = logging.getLogger(f"airbyte.{getattr(abstract_source, 'name', '')}") records = [r for r in abstract_source.read(logger=logger, config={}, catalog=catalog, state={})] - # 3 for http stream, 3 for non http stream and 3 for stream status messages for each stream (2x) - assert len(records) == 3 + 3 + 3 + 3 + # 3 for http stream, 3 for non http stream, 1 for state message for each stream (2x) and 3 for stream status messages for each stream (2x) + assert len(records) == 3 + 3 + 1 + 1 + 3 + 3 assert http_stream.read_records.called assert non_http_stream.read_records.called # Make sure page_size havent been set @@ -375,21 +375,21 @@ def test_internal_config(abstract_source, catalog): # Test with records limit set to 1 internal_config = {"some_config": 100, "_limit": 1} records = [r for r in abstract_source.read(logger=logger, config=internal_config, catalog=catalog, state={})] - # 1 from http stream + 1 from non http stream and 3 for stream status messages for each stream (2x) - assert len(records) == 1 + 1 + 3 + 3 + # 1 from http stream + 1 from non http stream, 1 for state message for each stream (2x) and 3 for stream status messages for each stream (2x) + assert len(records) == 1 + 1 + 1 + 1 + 3 + 3 assert "_limit" not in abstract_source.streams_config assert "some_config" in abstract_source.streams_config # Test with records limit set to number that exceeds expceted records internal_config = {"some_config": 100, "_limit": 20} records = [r for r in abstract_source.read(logger=logger, config=internal_config, catalog=catalog, state={})] - assert len(records) == 3 + 3 + 3 + 3 + assert len(records) == 3 + 3 + 1 + 1 + 3 + 3 # Check if page_size paramter is set to http instance only internal_config 
= {"some_config": 100, "_page_size": 2} records = [r for r in abstract_source.read(logger=logger, config=internal_config, catalog=catalog, state={})] assert "_page_size" not in abstract_source.streams_config assert "some_config" in abstract_source.streams_config - assert len(records) == 3 + 3 + 3 + 3 + assert len(records) == 3 + 3 + 1 + 1 + 3 + 3 assert http_stream.page_size == 2 # Make sure page_size havent been set for non http streams assert not non_http_stream.page_size @@ -403,6 +403,7 @@ def test_internal_config_limit(mocker, abstract_source, catalog): SLICE_DEBUG_LOG_COUNT = 1 FULL_RECORDS_NUMBER = 3 TRACE_STATUS_COUNT = 3 + STATE_COUNT = 1 streams = abstract_source.streams(None) http_stream = streams[0] http_stream.read_records.return_value = [{}] * FULL_RECORDS_NUMBER @@ -410,7 +411,7 @@ def test_internal_config_limit(mocker, abstract_source, catalog): catalog.streams[0].sync_mode = SyncMode.full_refresh records = [r for r in abstract_source.read(logger=logger_mock, config=internal_config, catalog=catalog, state={})] - assert len(records) == STREAM_LIMIT + SLICE_DEBUG_LOG_COUNT + TRACE_STATUS_COUNT + assert len(records) == STREAM_LIMIT + SLICE_DEBUG_LOG_COUNT + TRACE_STATUS_COUNT + STATE_COUNT logger_info_args = [call[0][0] for call in logger_mock.info.call_args_list] # Check if log line matches number of limit read_log_record = [_l for _l in logger_info_args if _l.startswith("Read")] @@ -440,6 +441,7 @@ def test_internal_config_limit(mocker, abstract_source, catalog): def test_source_config_no_transform(mocker, abstract_source, catalog): SLICE_DEBUG_LOG_COUNT = 1 TRACE_STATUS_COUNT = 3 + STATE_COUNT = 1 logger_mock = mocker.MagicMock() logger_mock.level = logging.DEBUG streams = abstract_source.streams(None) @@ -447,7 +449,7 @@ def test_source_config_no_transform(mocker, abstract_source, catalog): http_stream.get_json_schema.return_value = non_http_stream.get_json_schema.return_value = SCHEMA http_stream.read_records.return_value, non_http_stream.read_records.return_value = [[{"value": 23}] * 5] * 2 records = [r for r in abstract_source.read(logger=logger_mock, config={}, catalog=catalog, state={})] - assert len(records) == 2 * (5 + SLICE_DEBUG_LOG_COUNT + TRACE_STATUS_COUNT) + assert len(records) == 2 * (5 + SLICE_DEBUG_LOG_COUNT + TRACE_STATUS_COUNT + STATE_COUNT) assert [r.record.data for r in records if r.type == Type.RECORD] == [{"value": 23}] * 2 * 5 assert http_stream.get_json_schema.call_count == 5 assert non_http_stream.get_json_schema.call_count == 5 @@ -458,6 +460,7 @@ def test_source_config_transform(mocker, abstract_source, catalog): logger_mock.level = logging.DEBUG SLICE_DEBUG_LOG_COUNT = 2 TRACE_STATUS_COUNT = 6 + STATE_COUNT = 2 streams = abstract_source.streams(None) http_stream, non_http_stream = streams http_stream.transformer = TypeTransformer(TransformConfig.DefaultSchemaNormalization) @@ -465,7 +468,7 @@ def test_source_config_transform(mocker, abstract_source, catalog): http_stream.get_json_schema.return_value = non_http_stream.get_json_schema.return_value = SCHEMA http_stream.read_records.return_value, non_http_stream.read_records.return_value = [{"value": 23}], [{"value": 23}] records = [r for r in abstract_source.read(logger=logger_mock, config={}, catalog=catalog, state={})] - assert len(records) == 2 + SLICE_DEBUG_LOG_COUNT + TRACE_STATUS_COUNT + assert len(records) == 2 + SLICE_DEBUG_LOG_COUNT + TRACE_STATUS_COUNT + STATE_COUNT assert [r.record.data for r in records if r.type == Type.RECORD] == [{"value": "23"}] * 2 @@ -474,13 +477,14 @@ def 
test_source_config_transform_and_no_transform(mocker, abstract_source, catal logger_mock.level = logging.DEBUG SLICE_DEBUG_LOG_COUNT = 2 TRACE_STATUS_COUNT = 6 + STATE_COUNT = 2 streams = abstract_source.streams(None) http_stream, non_http_stream = streams http_stream.transformer = TypeTransformer(TransformConfig.DefaultSchemaNormalization) http_stream.get_json_schema.return_value = non_http_stream.get_json_schema.return_value = SCHEMA http_stream.read_records.return_value, non_http_stream.read_records.return_value = [{"value": 23}], [{"value": 23}] records = [r for r in abstract_source.read(logger=logger_mock, config={}, catalog=catalog, state={})] - assert len(records) == 2 + SLICE_DEBUG_LOG_COUNT + TRACE_STATUS_COUNT + assert len(records) == 2 + SLICE_DEBUG_LOG_COUNT + TRACE_STATUS_COUNT + STATE_COUNT assert [r.record.data for r in records if r.type == Type.RECORD] == [{"value": "23"}, {"value": 23}] @@ -526,8 +530,8 @@ def __init__(self, *args, **kvargs): source = MockAbstractSource(streams=streams) logger = logging.getLogger(f"airbyte.{getattr(abstract_source, 'name', '')}") records = [r for r in source.read(logger=logger, config={}, catalog=catalog, state={})] - # 3 for http stream, 3 for non http stream and 3 for stream status messages for each stream (2x) - assert len(records) == 3 + 3 + 3 + 3 + # 3 for http stream, 3 for non http stream, 1 for state message for each stream (2x) and 3 for stream status messages for each stream (2x) + assert len(records) == 3 + 3 + 1 + 1 + 3 + 3 assert http_stream.read_records.called assert non_http_stream.read_records.called @@ -584,8 +588,8 @@ def __init__(self, *args, **kvargs): with caplog.at_level(logging.WARNING): records = [r for r in source.read(logger=logger, config={}, catalog=catalog, state={})] - # 0 for http stream, 3 for non http stream and 3 status trace messages - assert len(records) == 0 + 3 + 3 + # 0 for http stream, 3 for non http stream, 1 for non http stream state message and 3 status trace messages + assert len(records) == 0 + 3 + 1 + 3 assert non_http_stream.read_records.called expected_logs = [ f"Skipped syncing stream '{http_stream.name}' because it was unavailable.", From 7a85c0cb81b7f4ac2bed9188fcf10b88d964d9de Mon Sep 17 00:00:00 2001 From: maxi297 Date: Tue, 5 Mar 2024 09:09:34 -0500 Subject: [PATCH 071/172] Fix expected records --- .../integration_tests/expected_records.jsonl | 1 - 1 file changed, 1 deletion(-) diff --git a/airbyte-integrations/connectors/source-google-search-console/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-google-search-console/integration_tests/expected_records.jsonl index e05006e04b63..457a58444b62 100644 --- a/airbyte-integrations/connectors/source-google-search-console/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-google-search-console/integration_tests/expected_records.jsonl @@ -8,7 +8,6 @@ {"stream": "search_analytics_by_device", "data": {"clicks": 203, "impressions": 6206, "ctr": 0.03271028037383177, "position": 23.797937479858202, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-23", "device": "DESKTOP"}, "emitted_at": 1709558104602} {"stream": "search_analytics_by_device", "data": {"clicks": 21, "impressions": 1084, "ctr": 0.01937269372693727, "position": 34.21678966789668, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-23", "device": "MOBILE"}, "emitted_at": 1709558104603} {"stream": "search_analytics_by_page", "data": {"clicks": 8, "impressions": 197, "ctr": 
0.04060913705583756, "position": 8.802030456852792, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-21", "page": "https://discuss.airbyte.io/t/kafka-connection-fails/723"}, "emitted_at": 1709558151837} -{"stream": "search_analytics_by_page", "data": {"clicks": 8, "impressions": 66, "ctr": 0.12121212121212122, "position": 6.96969696969697, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-21", "page": "https://discuss.airbyte.io/t/source-potsgres-connection-org-postgresql-jdbc-pgconnection-34b9fc7d-marked-as-broken-because-of-sqlstate-08006/1800"}, "emitted_at": 1709558151837} {"stream": "search_analytics_by_query", "data": {"clicks": 2, "impressions": 2, "ctr": 1, "position": 1, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-21", "query": "airbyte authentication"}, "emitted_at": 1709558202703} {"stream": "search_analytics_by_query", "data": {"clicks": 2, "impressions": 11, "ctr": 0.18181818181818182, "position": 2.090909090909091, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-21", "query": "airbyte cloud"}, "emitted_at": 1709558202703} {"stream": "search_analytics_all_fields", "data": {"clicks": 1, "impressions": 1, "ctr": 1, "position": 9, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-21", "country": "aut", "device": "DESKTOP", "page": "https://discuss.airbyte.io/t/cannot-build-docker-images-for-python-destination-connector/1454", "query": "fatal error: ffi.h: no such file or directory"}, "emitted_at": 1709558247944} From 8209dad58ee115bb72435d545f9e13bf068738b8 Mon Sep 17 00:00:00 2001 From: Christo Grabowski <108154848+ChristoGrab@users.noreply.github.com> Date: Tue, 5 Mar 2024 10:26:41 -0500 Subject: [PATCH 072/172] Source Recurly: update support level metadata (#35815) --- airbyte-integrations/connectors/source-recurly/metadata.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airbyte-integrations/connectors/source-recurly/metadata.yaml b/airbyte-integrations/connectors/source-recurly/metadata.yaml index f82b443f1c35..4c4be34837ba 100644 --- a/airbyte-integrations/connectors/source-recurly/metadata.yaml +++ b/airbyte-integrations/connectors/source-recurly/metadata.yaml @@ -31,7 +31,7 @@ data: pypi: enabled: true packageName: airbyte-source-recurly - supportLevel: archived + supportLevel: community tags: - language:python metadataSpecVersion: "1.0" From dcc3eb911e154e14a0630bcc9686db5931956638 Mon Sep 17 00:00:00 2001 From: Evan Tahler Date: Tue, 5 Mar 2024 08:35:28 -0800 Subject: [PATCH 073/172] Archive `destination-vertica`, restore `destination-vectara` (#35806) --- .../destination-vectara/.dockerignore | 5 + .../connectors/destination-vectara/Dockerfile | 38 ++++ .../connectors/destination-vectara/README.md | 123 +++++++++++ .../destination_vectara/__init__.py | 8 + .../destination_vectara/client.py | 199 ++++++++++++++++++ .../destination_vectara/config.py | 75 +++++++ .../destination_vectara/destination.py | 95 +++++++++ .../destination_vectara/writer.py | 128 +++++++++++ .../integration_tests/integration_test.py | 127 +++++++++++ .../connectors/destination-vectara/main.py | 11 + .../destination-vectara/requirements.txt | 1 + .../connectors/destination-vectara/setup.py | 25 +++ .../unit_tests/__init__.py | 0 .../connectors/destination-vertica/README.md | 72 ------- .../destination-vertica/build.gradle | 28 --- .../vertica/VerticaDestination.java | 148 ------------- .../vertica/VerticaNameTransformer.java | 11 - 
.../vertica/VerticaSqlOperations.java | 109 ---------- .../src/main/resources/spec.json | 63 ------ .../destination/vertica/VerticaContainer.java | 102 --------- .../VerticaDestinationAcceptanceTest.java | 132 ------------ 21 files changed, 835 insertions(+), 665 deletions(-) create mode 100644 airbyte-integrations/connectors/destination-vectara/.dockerignore create mode 100644 airbyte-integrations/connectors/destination-vectara/Dockerfile create mode 100644 airbyte-integrations/connectors/destination-vectara/README.md create mode 100644 airbyte-integrations/connectors/destination-vectara/destination_vectara/__init__.py create mode 100644 airbyte-integrations/connectors/destination-vectara/destination_vectara/client.py create mode 100644 airbyte-integrations/connectors/destination-vectara/destination_vectara/config.py create mode 100644 airbyte-integrations/connectors/destination-vectara/destination_vectara/destination.py create mode 100644 airbyte-integrations/connectors/destination-vectara/destination_vectara/writer.py create mode 100644 airbyte-integrations/connectors/destination-vectara/integration_tests/integration_test.py create mode 100644 airbyte-integrations/connectors/destination-vectara/main.py create mode 100644 airbyte-integrations/connectors/destination-vectara/requirements.txt create mode 100644 airbyte-integrations/connectors/destination-vectara/setup.py create mode 100644 airbyte-integrations/connectors/destination-vectara/unit_tests/__init__.py delete mode 100644 airbyte-integrations/connectors/destination-vertica/README.md delete mode 100644 airbyte-integrations/connectors/destination-vertica/build.gradle delete mode 100644 airbyte-integrations/connectors/destination-vertica/src/main/java/io/airbyte/integrations/destination/vertica/VerticaDestination.java delete mode 100644 airbyte-integrations/connectors/destination-vertica/src/main/java/io/airbyte/integrations/destination/vertica/VerticaNameTransformer.java delete mode 100644 airbyte-integrations/connectors/destination-vertica/src/main/java/io/airbyte/integrations/destination/vertica/VerticaSqlOperations.java delete mode 100644 airbyte-integrations/connectors/destination-vertica/src/main/resources/spec.json delete mode 100644 airbyte-integrations/connectors/destination-vertica/src/test-integration/java/io/airbyte/integrations/destination/vertica/VerticaContainer.java delete mode 100644 airbyte-integrations/connectors/destination-vertica/src/test-integration/java/io/airbyte/integrations/destination/vertica/VerticaDestinationAcceptanceTest.java diff --git a/airbyte-integrations/connectors/destination-vectara/.dockerignore b/airbyte-integrations/connectors/destination-vectara/.dockerignore new file mode 100644 index 000000000000..f784000e19e2 --- /dev/null +++ b/airbyte-integrations/connectors/destination-vectara/.dockerignore @@ -0,0 +1,5 @@ +* +!Dockerfile +!main.py +!destination_vectara +!setup.py diff --git a/airbyte-integrations/connectors/destination-vectara/Dockerfile b/airbyte-integrations/connectors/destination-vectara/Dockerfile new file mode 100644 index 000000000000..9afa4fa81a36 --- /dev/null +++ b/airbyte-integrations/connectors/destination-vectara/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a 
temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY destination_vectara ./destination_vectara + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.2.0 +LABEL io.airbyte.name=airbyte/destination-vectara diff --git a/airbyte-integrations/connectors/destination-vectara/README.md b/airbyte-integrations/connectors/destination-vectara/README.md new file mode 100644 index 000000000000..2c68229551bc --- /dev/null +++ b/airbyte-integrations/connectors/destination-vectara/README.md @@ -0,0 +1,123 @@ +# Vectara Destination + +This is the repository for the Vectara destination connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/destinations/vectara). + +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.7.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Building via Gradle +From the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:destination-vectara:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/destinations/vectara) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `destination_vectara/spec.json` file. +Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `destination vectara test creds` +and place them into `secrets/config.json`. 
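
For reference, a minimal sketch of what such a `secrets/config.json` could contain, based on the fields declared in `destination_vectara/config.py` later in this patch (all values below are placeholders, not working credentials):

```python
# Sketch: build a sample config, validate it against the connector's
# pydantic model, and write it to secrets/config.json.
import json

from destination_vectara.config import VectaraConfig

sample_config = {
    "oauth2": {"client_id": "<OAUTH_CLIENT_ID>", "client_secret": "<OAUTH_CLIENT_SECRET>"},
    "customer_id": "<VECTARA_CUSTOMER_ID>",
    "corpus_name": "airbyte-test-corpus",
    "parallelize": False,
    "text_fields": [],      # empty -> every record field is indexed as text
    "metadata_fields": [],  # empty -> every record field is stored as metadata
    "title_field": "",      # empty -> documents are titled "Untitled"
}

VectaraConfig.parse_obj(sample_config)  # raises pydantic.ValidationError if malformed

# assumes the secrets/ directory already exists
with open("secrets/config.json", "w") as f:
    json.dump(sample_config, f, indent=2)
```

Nested record fields can be selected in `text_fields`/`metadata_fields` with dot notation (`user.name`) or wildcards (`users.*.name`), per the field descriptions in `config.py`.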
+ +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +python main.py discover --config secrets/config.json +python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/destination-vectara:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:destination-vectara:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/destination-vectara:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-vectara:dev check --config /secrets/config.json +# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages +cat messages.jsonl | docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-vectara:dev write --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. +First install test dependencies into your virtual environment: +``` +pip install .[tests] +``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +python -m pytest unit_tests +``` + +### Integration Tests +There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all destination connectors) and custom integration tests (which are specific to this connector). +#### Custom Integration tests +Place custom tests inside `integration_tests/` folder, then, from the connector root, run +``` +python -m pytest integration_tests +``` +#### Acceptance Tests +Coming soon: + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:destination-vectara:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:destination-vectara:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. 
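
The `client.py` introduced below wraps its HTTP calls in exponential backoff but is meant to give up immediately on 4xx responses, since user errors will not succeed on retry. A self-contained sketch of that pattern (the `get_json` helper is illustrative, not part of the connector):

```python
# Sketch of the retry policy used by VectaraClient._request: retry transient
# request failures up to 5 times with exponential backoff, but give up at
# once when the server answered with a 4xx.
import backoff
import requests


def user_error(e: Exception) -> bool:
    """Return True if the exception was caused by a 4xx response."""
    if not isinstance(e, requests.exceptions.RequestException):
        return False
    # `e.response is not None` is required here: a requests.Response is falsy
    # for non-2xx statuses, so a bare `if e.response` would never trigger.
    return bool(e.response is not None and 400 <= e.response.status_code < 500)


@backoff.on_exception(backoff.expo, requests.exceptions.RequestException, max_tries=5, giveup=user_error)
def get_json(url: str) -> dict:
    response = requests.get(url, timeout=30)
    response.raise_for_status()  # raises requests.exceptions.HTTPError on 4xx/5xx
    return response.json()
```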
diff --git a/airbyte-integrations/connectors/destination-vectara/destination_vectara/__init__.py b/airbyte-integrations/connectors/destination-vectara/destination_vectara/__init__.py
new file mode 100644
index 000000000000..1bc53911e4ef
--- /dev/null
+++ b/airbyte-integrations/connectors/destination-vectara/destination_vectara/__init__.py
@@ -0,0 +1,8 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
+
+
+from .destination import DestinationVectara
+
+__all__ = ["DestinationVectara"]
diff --git a/airbyte-integrations/connectors/destination-vectara/destination_vectara/client.py b/airbyte-integrations/connectors/destination-vectara/destination_vectara/client.py
new file mode 100644
index 000000000000..755d30014780
--- /dev/null
+++ b/airbyte-integrations/connectors/destination-vectara/destination_vectara/client.py
@@ -0,0 +1,199 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
+
+import datetime
+import json
+import traceback
+from concurrent.futures import ThreadPoolExecutor
+from typing import Any, Mapping
+
+import backoff
+import requests
+from destination_vectara.config import VectaraConfig
+
+METADATA_STREAM_FIELD = "_ab_stream"
+
+
+def user_error(e: Exception) -> bool:
+    """
+    Return True if this exception is caused by user error, False otherwise.
+    """
+    if not isinstance(e, requests.exceptions.RequestException):
+        return False
+    # A requests.Response is falsy for non-2xx statuses, so the check must be
+    # explicit about None rather than relying on truthiness.
+    return bool(e.response is not None and 400 <= e.response.status_code < 500)
+
+
+class VectaraClient:
+
+    BASE_URL = "https://api.vectara.io/v1"
+
+    def __init__(self, config: VectaraConfig):
+        if isinstance(config, dict):
+            config = VectaraConfig.parse_obj(config)
+        self.customer_id = config.customer_id
+        self.corpus_name = config.corpus_name
+        self.client_id = config.oauth2.client_id
+        self.client_secret = config.oauth2.client_secret
+        self.parallelize = config.parallelize
+        self.check()
+
+    def check(self):
+        """
+        Check for an existing corpus in Vectara.
+        If more than one corpus exists with this name, return an error message.
+        If exactly one exists with this name - ensure that the corpus has the correct metadata fields, and use it.
+        If not, create it.
+        """
+        try:
+            jwt_token = self._get_jwt_token()
+            if not jwt_token:
+                return "Unable to get JWT Token. Confirm your Client ID and Client Secret."
+ + list_corpora_response = self._request(endpoint="list-corpora", data={"numResults": 100, "filter": self.corpus_name}) + possible_corpora_ids_names_map = { + corpus.get("id"): corpus.get("name") + for corpus in list_corpora_response.get("corpus") + if corpus.get("name") == self.corpus_name + } + if len(possible_corpora_ids_names_map) > 1: + return f"Multiple Corpora exist with name {self.corpus_name}" + if len(possible_corpora_ids_names_map) == 1: + self.corpus_id = list(possible_corpora_ids_names_map.keys())[0] + else: + data = { + "corpus": { + "name": self.corpus_name, + "filterAttributes": [ + { + "name": METADATA_STREAM_FIELD, + "indexed": True, + "type": "FILTER_ATTRIBUTE_TYPE__TEXT", + "level": "FILTER_ATTRIBUTE_LEVEL__DOCUMENT", + }, + ], + } + } + + create_corpus_response = self._request(endpoint="create-corpus", data=data) + self.corpus_id = create_corpus_response.get("corpusId") + + except Exception as e: + return str(e) + "\n" + "".join(traceback.TracebackException.from_exception(e).format()) + + def _get_jwt_token(self): + """Connect to the server and get a JWT token.""" + token_endpoint = f"https://vectara-prod-{self.customer_id}.auth.us-west-2.amazoncognito.com/oauth2/token" + headers = { + "Content-Type": "application/x-www-form-urlencoded", + } + data = {"grant_type": "client_credentials", "client_id": self.client_id, "client_secret": self.client_secret} + + request_time = datetime.datetime.now().timestamp() + response = requests.request(method="POST", url=token_endpoint, headers=headers, data=data) + response_json = response.json() + + self.jwt_token = response_json.get("access_token") + self.jwt_token_expires_ts = request_time + response_json.get("expires_in") + return self.jwt_token + + @backoff.on_exception(backoff.expo, requests.exceptions.RequestException, max_tries=5, giveup=user_error) + def _request(self, endpoint: str, http_method: str = "POST", params: Mapping[str, Any] = None, data: Mapping[str, Any] = None): + + url = f"{self.BASE_URL}/{endpoint}" + + current_ts = datetime.datetime.now().timestamp() + if self.jwt_token_expires_ts - current_ts <= 60: + self._get_jwt_token() + + headers = { + "Content-Type": "application/json", + "Accept": "application/json", + "Authorization": f"Bearer {self.jwt_token}", + "customer-id": self.customer_id, + "X-source": "airbyte", + } + + response = requests.request(method=http_method, url=url, headers=headers, params=params, data=json.dumps(data)) + response.raise_for_status() + return response.json() + + def delete_doc_by_metadata(self, metadata_field_name, metadata_field_values): + document_ids = [] + for value in metadata_field_values: + data = { + "query": [ + { + "query": "", + "numResults": 100, + "corpusKey": [ + { + "customerId": self.customer_id, + "corpusId": self.corpus_id, + "metadataFilter": f"doc.{metadata_field_name} = '{value}'", + } + ], + } + ] + } + query_documents_response = self._request(endpoint="query", data=data) + document_ids.extend([document.get("id") for document in query_documents_response.get("responseSet")[0].get("document")]) + self.delete_docs_by_id(document_ids=document_ids) + + def delete_docs_by_id(self, document_ids): + for document_id in document_ids: + self._request( + endpoint="delete-doc", data={"customerId": self.customer_id, "corpusId": self.corpus_id, "documentId": document_id} + ) + + def index_document(self, document): + document_section, document_metadata, document_title, document_id = document + if len(document_section) == 0: + return None # Document is empty, so skip it + 
document_metadata = self._normalize(document_metadata)
+        data = {
+            "customerId": self.customer_id,
+            "corpusId": self.corpus_id,
+            "document": {
+                "documentId": document_id,
+                "metadataJson": json.dumps(document_metadata),
+                "title": document_title,
+                "section": [
+                    {"text": f"{section_key}: {section_value}"}
+                    for section_key, section_value in document_section.items()
+                    if section_key != METADATA_STREAM_FIELD
+                ],
+            },
+        }
+        index_document_response = self._request(endpoint="index", data=data)
+        return index_document_response
+
+    def index_documents(self, documents):
+        if self.parallelize:
+            with ThreadPoolExecutor() as executor:
+                futures = [executor.submit(self.index_document, doc) for doc in documents]
+                for future in futures:
+                    try:
+                        response = future.result()
+                        if response is None:
+                            continue
+                        assert (
+                            response.get("status").get("code") == "OK"
+                            or response.get("status").get("statusDetail") == "Document should have at least one part."
+                        )
+                    except AssertionError as e:
+                        # Swallow per-document indexing failures so one bad document does not abort the batch
+                        pass
+        else:
+            for doc in documents:
+                self.index_document(doc)
+
+    def _normalize(self, metadata: dict) -> dict:
+        result = {}
+        for key, value in metadata.items():
+            if isinstance(value, (str, int, float, bool)):
+                result[key] = value
+            else:
+                # JSON encode all other types
+                result[key] = json.dumps(value)
+        return result
diff --git a/airbyte-integrations/connectors/destination-vectara/destination_vectara/config.py b/airbyte-integrations/connectors/destination-vectara/destination_vectara/config.py
new file mode 100644
index 000000000000..86ca2dba16f5
--- /dev/null
+++ b/airbyte-integrations/connectors/destination-vectara/destination_vectara/config.py
@@ -0,0 +1,75 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
+
+from typing import List, Optional
+
+from airbyte_cdk.utils.spec_schema_transformations import resolve_refs
+from pydantic import BaseModel, Field
+
+
+class OAuth2(BaseModel):
+    client_id: str = Field(..., title="OAuth Client ID", description="OAuth2.0 client id", order=0)
+    client_secret: str = Field(..., title="OAuth Client Secret", description="OAuth2.0 client secret", airbyte_secret=True, order=1)
+
+    class Config:
+        title = "OAuth2.0 Credentials"
+        schema_extra = {
+            "description": "OAuth2.0 credentials used to authenticate admin actions (creating/deleting corpora)",
+            "group": "auth",
+        }
+
+
+class VectaraConfig(BaseModel):
+    oauth2: OAuth2
+    customer_id: str = Field(
+        ..., title="Customer ID", description="Your customer ID as it appears in the authentication URL", order=2, group="account"
+    )
+    corpus_name: str = Field(..., title="Corpus Name", description="The name of the corpus to load data into", order=3, group="account")
+
+    parallelize: Optional[bool] = Field(
+        default=False,
+        title="Parallelize",
+        description="Parallelize indexing into Vectara with multiple threads",
+        always_show=True,
+        group="account",
+    )
+
+    text_fields: Optional[List[str]] = Field(
+        default=[],
+        title="Text fields to index with Vectara",
+        description="List of fields in the record that should be in the section of the document. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g.
`users.*.name` will access all `names` fields in all entries of the `users` array.", + always_show=True, + examples=["text", "user.name", "users.*.name"], + ) + title_field: Optional[str] = Field( + default="", + title="Text field to use as document title with Vectara", + description="A field that will be used to populate the `title` of each document. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array.", + always_show=True, + examples=["document_key"], + ) + metadata_fields: Optional[List[str]] = Field( + default=[], + title="Fields to store as metadata", + description="List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.", + always_show=True, + examples=["age", "user"], + ) + + class Config: + title = "Vectara Config" + schema_extra = { + "description": "Configuration to connect to the Vectara instance", + "groups": [ + {"id": "account", "title": "Account"}, + {"id": "auth", "title": "Authentication"}, + ], + } + + @classmethod + def schema(cls): + """we're overriding the schema classmethod to enable some post-processing""" + schema = super().schema() + schema = resolve_refs(schema) + return schema diff --git a/airbyte-integrations/connectors/destination-vectara/destination_vectara/destination.py b/airbyte-integrations/connectors/destination-vectara/destination_vectara/destination.py new file mode 100644 index 000000000000..6a580655ff91 --- /dev/null +++ b/airbyte-integrations/connectors/destination-vectara/destination_vectara/destination.py @@ -0,0 +1,95 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from typing import Any, Iterable, Mapping + +from airbyte_cdk import AirbyteLogger +from airbyte_cdk.destinations import Destination +from airbyte_cdk.models import ( + AirbyteConnectionStatus, + AirbyteMessage, + ConfiguredAirbyteCatalog, + ConnectorSpecification, + DestinationSyncMode, + Status, + Type, +) +from destination_vectara.client import VectaraClient +from destination_vectara.config import VectaraConfig +from destination_vectara.writer import VectaraWriter + + +class DestinationVectara(Destination): + def write( + self, config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog, input_messages: Iterable[AirbyteMessage] + ) -> Iterable[AirbyteMessage]: + + """ + Reads the input stream of messages, config, and catalog to write data to the destination. + + This method returns an iterable (typically a generator of AirbyteMessages via yield) containing state messages received + in the input message stream. 
Outputting a state message means that every AirbyteRecordMessage which came before it has been + successfully persisted to the destination. This is used to ensure fault tolerance in the case that a sync fails before fully completing, + then the source is given the last state message output from this method as the starting point of the next sync. + + :param config: dict of JSON configuration matching the configuration declared in spec.json + :param configured_catalog: The Configured Catalog describing the schema of the data being received and how it should be persisted in the + destination + :param input_messages: The stream of input messages received from the source + :return: Iterable of AirbyteStateMessages wrapped in AirbyteMessage structs + """ + + config_model = VectaraConfig.parse_obj(config) + writer = VectaraWriter( + client=VectaraClient(config_model), + text_fields=config_model.text_fields, + title_field=config_model.title_field, + metadata_fields=config_model.metadata_fields, + catalog=configured_catalog, + ) + + writer.delete_streams_to_overwrite(catalog=configured_catalog) + + for message in input_messages: + if message.type == Type.STATE: + # Emitting a state message indicates that all records which came before it have been written to the destination. So we flush + # the queue to ensure writes happen, then output the state message to indicate it's safe to checkpoint state + writer.flush() + yield message + elif message.type == Type.RECORD: + record = message.record + writer.queue_write_operation(record) + else: + # ignore other message types for now + continue + + # Make sure to flush any records still in the queue + writer.flush() + + def check(self, logger: AirbyteLogger, config: VectaraConfig) -> AirbyteConnectionStatus: + """ + Tests if the input configuration can be used to successfully connect to the destination with the needed permissions + e.g: if a provided API token or password can be used to connect and write to the destination. + + :param logger: Logging object to display debug/info/error to the logs + (logs will not be accessible via airbyte UI if they are not passed to this logger) + :param config: Json object containing the configuration of this destination, content of this json is as specified in + the properties of the spec.json file + + :return: AirbyteConnectionStatus indicating a Success or Failure + """ + client = VectaraClient(config=config) + client_error = client.check() + if client_error: + return AirbyteConnectionStatus(status=Status.FAILED, message="\n".join([client_error])) + else: + return AirbyteConnectionStatus(status=Status.SUCCEEDED) + + def spec(self, *args: Any, **kwargs: Any) -> ConnectorSpecification: + return ConnectorSpecification( + documentationUrl="https://docs.airbyte.com/integrations/destinations/vectara", + supportsIncremental=True, + supported_destination_sync_modes=[DestinationSyncMode.overwrite, DestinationSyncMode.append], + connectionSpecification=VectaraConfig.schema(), + ) diff --git a/airbyte-integrations/connectors/destination-vectara/destination_vectara/writer.py b/airbyte-integrations/connectors/destination-vectara/destination_vectara/writer.py new file mode 100644 index 000000000000..0794b0dc9410 --- /dev/null +++ b/airbyte-integrations/connectors/destination-vectara/destination_vectara/writer.py @@ -0,0 +1,128 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + +import uuid +from typing import Any, Dict, List, Mapping, Optional + +import dpath.util +from airbyte_cdk.models import AirbyteRecordMessage, ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, DestinationSyncMode +from airbyte_cdk.models.airbyte_protocol import DestinationSyncMode +from airbyte_cdk.utils.traced_exception import AirbyteTracedException, FailureType +from destination_vectara.client import VectaraClient + +METADATA_STREAM_FIELD = "_ab_stream" + + +class VectaraWriter: + + write_buffer: List[Mapping[str, Any]] = [] + flush_interval = 1000 + + def __init__( + self, + client: VectaraClient, + text_fields: Optional[List[str]], + title_field: Optional[str], + metadata_fields: Optional[List[str]], + catalog: ConfiguredAirbyteCatalog, + ): + self.client = client + self.text_fields = text_fields + self.title_field = title_field + self.metadata_fields = metadata_fields + self.streams = {f"{stream.stream.namespace}_{stream.stream.name}": stream for stream in catalog.streams} + self.ids_to_delete: List[str] = [] + + def delete_streams_to_overwrite(self, catalog: ConfiguredAirbyteCatalog) -> None: + streams_to_overwrite = [ + f"{stream.stream.namespace}_{stream.stream.name}" + for stream in catalog.streams + if stream.destination_sync_mode == DestinationSyncMode.overwrite + ] + if len(streams_to_overwrite): + self.client.delete_doc_by_metadata(metadata_field_name=METADATA_STREAM_FIELD, metadata_field_values=streams_to_overwrite) + + def _delete_documents_to_dedupe(self): + if len(self.ids_to_delete) > 0: + self.client.delete_docs_by_id(document_ids=self.ids_to_delete) + + def queue_write_operation(self, record: AirbyteRecordMessage) -> None: + """Adds messages to the write queue and flushes if the buffer is full""" + + stream_identifier = self._get_stream_id(record=record) + document_section = self._get_document_section(record=record) + document_metadata = self._get_document_metadata(record=record) + document_title = self._get_document_title(record=record) + primary_key = self._get_record_primary_key(record=record) + + if primary_key: + document_id = f"Stream_{stream_identifier}_Key_{primary_key}" + if self.streams[stream_identifier].destination_sync_mode == DestinationSyncMode.append_dedup: + self.ids_to_delete.append(document_id) + else: + document_id = str(uuid.uuid4().int) + + self.write_buffer.append((document_section, document_metadata, document_title, document_id)) + if len(self.write_buffer) == self.flush_interval: + self.flush() + + def flush(self) -> None: + """Flush all documents in Queue to Vectara""" + self._delete_documents_to_dedupe() + self.client.index_documents(self.write_buffer) + self.write_buffer.clear() + self.ids_to_delete.clear() + + def _get_document_section(self, record: AirbyteRecordMessage): + relevant_fields = self._extract_relevant_fields(record, self.text_fields) + if len(relevant_fields) == 0: + text_fields = ", ".join(self.text_fields) if self.text_fields else "all fields" + raise AirbyteTracedException( + internal_message="No text fields found in record", + message=f"Record {str(record.data)[:250]}... does not contain any of the configured text fields: {text_fields}. 
Please check your processing configuration, there has to be at least one text field set in each record.", + failure_type=FailureType.config_error, + ) + document_section = relevant_fields + return document_section + + def _extract_relevant_fields(self, record: AirbyteRecordMessage, fields: Optional[List[str]]) -> Dict[str, Any]: + relevant_fields = {} + if fields and len(fields) > 0: + for field in fields: + values = dpath.util.values(record.data, field, separator=".") + if values and len(values) > 0: + relevant_fields[field] = values if len(values) > 1 else values[0] + else: + relevant_fields = record.data + return relevant_fields + + def _get_document_metadata(self, record: AirbyteRecordMessage) -> Dict[str, Any]: + document_metadata = self._extract_relevant_fields(record, self.metadata_fields) + document_metadata[METADATA_STREAM_FIELD] = self._get_stream_id(record) + return document_metadata + + def _get_document_title(self, record: AirbyteRecordMessage) -> str: + title = "Untitled" + if self.title_field: + title = dpath.util.get(record.data, self.title_field) + return title + + def _get_stream_id(self, record: AirbyteRecordMessage) -> str: + return f"{record.namespace}_{record.stream}" + + def _get_record_primary_key(self, record: AirbyteRecordMessage) -> Optional[str]: + stream_identifier = self._get_stream_id(record) + current_stream: ConfiguredAirbyteStream = self.streams[stream_identifier] + + if not current_stream.primary_key: + return None + + primary_key = [] + for key in current_stream.primary_key: + try: + primary_key.append(str(dpath.util.get(record.data, key))) + except KeyError: + primary_key.append("__not_found__") + stringified_primary_key = "_".join(primary_key) + return f"{stream_identifier}_{stringified_primary_key}" diff --git a/airbyte-integrations/connectors/destination-vectara/integration_tests/integration_test.py b/airbyte-integrations/connectors/destination-vectara/integration_tests/integration_test.py new file mode 100644 index 000000000000..052006303d85 --- /dev/null +++ b/airbyte-integrations/connectors/destination-vectara/integration_tests/integration_test.py @@ -0,0 +1,127 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+#
+
+import json
+import logging
+import unittest
+from typing import Any, Dict
+
+from airbyte_cdk.models import (
+    AirbyteMessage,
+    AirbyteRecordMessage,
+    AirbyteStateMessage,
+    AirbyteStream,
+    ConfiguredAirbyteCatalog,
+    ConfiguredAirbyteStream,
+    DestinationSyncMode,
+    Status,
+    SyncMode,
+    Type,
+)
+from destination_vectara.client import VectaraClient
+from destination_vectara.destination import DestinationVectara
+
+
+class VectaraIntegrationTest(unittest.TestCase):
+    def _get_configured_catalog(self, destination_mode: DestinationSyncMode) -> ConfiguredAirbyteCatalog:
+        stream_schema = {"type": "object", "properties": {"str_col": {"type": "string"}, "int_col": {"type": "integer"}}}
+
+        overwrite_stream = ConfiguredAirbyteStream(
+            stream=AirbyteStream(
+                name="mystream", json_schema=stream_schema, supported_sync_modes=[SyncMode.incremental, SyncMode.full_refresh]
+            ),
+            primary_key=[["int_col"]],
+            sync_mode=SyncMode.incremental,
+            destination_sync_mode=destination_mode,
+        )
+
+        return ConfiguredAirbyteCatalog(streams=[overwrite_stream])
+
+    def _state(self, data: Dict[str, Any]) -> AirbyteMessage:
+        return AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(data=data))
+
+    def _record(self, stream: str, str_value: str, int_value: int) -> AirbyteMessage:
+        return AirbyteMessage(
+            type=Type.RECORD, record=AirbyteRecordMessage(stream=stream, data={"str_col": str_value, "int_col": int_value}, emitted_at=0)
+        )
+    def _clean(self):
+        self._client.delete_doc_by_metadata(metadata_field_name="_ab_stream", metadata_field_values=["None_mystream"])
+
+    def setUp(self):
+        with open("secrets/config.json", "r") as f:
+            self.config = json.loads(f.read())
+        self._client = VectaraClient(self.config)
+        self._clean()
+
+    def tearDown(self):
+        self._clean()
+
+    def test_check_valid_config(self):
+        outcome = DestinationVectara().check(logging.getLogger("airbyte"), self.config)
+        assert outcome.status == Status.SUCCEEDED
+
+    def test_check_invalid_config(self):
+        outcome = DestinationVectara().check(
+            logging.getLogger("airbyte"),
+            {
+                "oauth2": {"client_id": "myclientid", "client_secret": "myclientsecret"},
+                "corpus_name": "teststore",
+                "customer_id": "123456",
+                "text_fields": [],
+                "metadata_fields": [],
+                "title_field": "",
+            },
+        )
+        assert outcome.status == Status.FAILED
+
+    def _query_index(self, query="Everything", num_results=100):
+        return self._client._request(
+            "query",
+            data={
+                "query": [
+                    {
+                        "query": query,
+                        "numResults": num_results,
+                        "corpusKey": [
+                            {
+                                "customerId": self._client.customer_id,
+                                "corpusId": self._client.corpus_id,
+                            }
+                        ],
+                    }
+                ]
+            },
+        )["responseSet"][0]
+
+    def test_write(self):
+        # validate corpus starts empty
+        initial_result = self._query_index()["document"]
+        assert len(initial_result) == 0
+
+        catalog = self._get_configured_catalog(DestinationSyncMode.overwrite)
+        first_state_message = self._state({"state": "1"})
+        first_record_chunk = [self._record("mystream", f"Dogs are number {i}", i) for i in range(5)]
+
+        # initial sync
+        destination = DestinationVectara()
+        list(destination.write(self.config, catalog, [*first_record_chunk, first_state_message]))
+        assert len(self._query_index()["document"]) == 5
+
+        # incrementally update a doc
+        incremental_catalog = self._get_configured_catalog(DestinationSyncMode.append_dedup)
+        list(destination.write(self.config, incremental_catalog, [self._record("mystream", "Cats are nice", 2), first_state_message]))
+        assert len(self._query_index()["document"]) == 5
+
+        # use semantic search
+        result =
self._query_index("Feline animals", 1) + assert result["document"] == [ + { + "id": "Stream_None_mystream_Key_None_mystream_2", + "metadata": [ + {"name": "int_col", "value": "2"}, + {"name": "_ab_stream", "value": "None_mystream"}, + {"name": "title", "value": "Cats are nice"}, + ], + } + ] diff --git a/airbyte-integrations/connectors/destination-vectara/main.py b/airbyte-integrations/connectors/destination-vectara/main.py new file mode 100644 index 000000000000..289b411fb318 --- /dev/null +++ b/airbyte-integrations/connectors/destination-vectara/main.py @@ -0,0 +1,11 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import sys + +from destination_vectara import DestinationVectara + +if __name__ == "__main__": + DestinationVectara().run(sys.argv[1:]) diff --git a/airbyte-integrations/connectors/destination-vectara/requirements.txt b/airbyte-integrations/connectors/destination-vectara/requirements.txt new file mode 100644 index 000000000000..d6e1198b1ab1 --- /dev/null +++ b/airbyte-integrations/connectors/destination-vectara/requirements.txt @@ -0,0 +1 @@ +-e . diff --git a/airbyte-integrations/connectors/destination-vectara/setup.py b/airbyte-integrations/connectors/destination-vectara/setup.py new file mode 100644 index 000000000000..ab10a8c60fb9 --- /dev/null +++ b/airbyte-integrations/connectors/destination-vectara/setup.py @@ -0,0 +1,25 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk==0.57.8", +] + +TEST_REQUIREMENTS = ["pytest~=6.2"] + +setup( + name="destination_vectara", + description="Destination implementation for Vectara.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/destination-vectara/unit_tests/__init__.py b/airbyte-integrations/connectors/destination-vectara/unit_tests/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/airbyte-integrations/connectors/destination-vertica/README.md b/airbyte-integrations/connectors/destination-vertica/README.md deleted file mode 100644 index 18d51ba57fb0..000000000000 --- a/airbyte-integrations/connectors/destination-vertica/README.md +++ /dev/null @@ -1,72 +0,0 @@ -# Destination Vertica - -This is the repository for the Vertica destination connector in Java. -For information about how to use this connector within Airbyte, see [the User Documentation](https://docs.airbyte.com/integrations/destinations/vertica). - -## Local development - -#### Building via Gradle -From the Airbyte repository root, run: -``` -./gradlew :airbyte-integrations:connectors:destination-vertica:build -``` - -#### Create credentials -**If you are a community contributor**, generate the necessary credentials and place them in `secrets/config.json` conforming to the spec file in `src/main/resources/spec.json`. -Note that the `secrets` directory is git-ignored by default, so there is no danger of accidentally checking in sensitive information. - -**If you are an Airbyte core member**, follow the [instructions](https://docs.airbyte.com/connector-development#using-credentials-in-ci) to set up the credentials. 
- -### Locally running the connector docker image - -#### Build -Build the connector image via Gradle: - -``` -./gradlew :airbyte-integrations:connectors:destination-vertica:buildConnectorImage -``` -Once built, the docker image name and tag on your host will be `airbyte/destination-vertica:dev`. -the Dockerfile. - -#### Run -Then run any of the connector commands as follows: -``` -docker run --rm airbyte/destination-vertica:dev spec -docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-vertica:dev check --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-vertica:dev discover --config /secrets/config.json -docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-vertica:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json -``` - -## Testing -We use `JUnit` for Java tests. - -### Unit and Integration Tests -Place unit tests under `src/test/io/airbyte/integrations/destinations/vertica`. - -#### Acceptance Tests -Airbyte has a standard test suite that all destination connectors must pass. Implement the `TODO`s in -`src/test-integration/java/io/airbyte/integrations/destinations/verticaDestinationAcceptanceTest.java`. - -### Using gradle to run tests -All commands should be run from airbyte project root. -To run unit tests: -``` -./gradlew :airbyte-integrations:connectors:destination-vertica:unitTest -``` -To run acceptance and custom integration tests: -``` -./gradlew :airbyte-integrations:connectors:destination-vertica:integrationTest -``` - -## Dependency Management - -### Publishing a new version of the connector -You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? -1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-vertica test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). -3. Make sure the `metadata.yaml` content is up to date. -4. Make the connector documentation and its changelog is up to date (`docs/integrations/destinations/vertica.md`). -5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). -6. Pat yourself on the back for being an awesome contributor. -7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. 
- diff --git a/airbyte-integrations/connectors/destination-vertica/build.gradle b/airbyte-integrations/connectors/destination-vertica/build.gradle deleted file mode 100644 index d5392f6c238b..000000000000 --- a/airbyte-integrations/connectors/destination-vertica/build.gradle +++ /dev/null @@ -1,28 +0,0 @@ -plugins { - id 'application' - id 'airbyte-java-connector' -} - -airbyteJavaConnector { - cdkVersionRequired = '0.2.0' - features = ['db-destinations'] - useLocalCdk = false -} - -//remove once upgrading the CDK version to 0.4.x or later -java { - compileJava { - options.compilerArgs.remove("-Werror") - } -} - -airbyteJavaConnector.addCdkDependencies() - -application { - mainClass = 'io.airbyte.integrations.destination.vertica.VerticaDestination' -} - -dependencies { - implementation group: 'com.vertica.jdbc', name: 'vertica-jdbc', version: '12.0.3-0' - implementation group: 'org.testcontainers', name: 'jdbc', version: '1.18.0' -} diff --git a/airbyte-integrations/connectors/destination-vertica/src/main/java/io/airbyte/integrations/destination/vertica/VerticaDestination.java b/airbyte-integrations/connectors/destination-vertica/src/main/java/io/airbyte/integrations/destination/vertica/VerticaDestination.java deleted file mode 100644 index f9ec9851f449..000000000000 --- a/airbyte-integrations/connectors/destination-vertica/src/main/java/io/airbyte/integrations/destination/vertica/VerticaDestination.java +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.vertica; - -import static io.airbyte.cdk.integrations.base.errors.messages.ErrorMessage.getErrorMessage; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.cdk.db.factory.DataSourceFactory; -import io.airbyte.cdk.db.factory.DatabaseDriver; -import io.airbyte.cdk.db.jdbc.JdbcDatabase; -import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.cdk.integrations.base.*; -import io.airbyte.cdk.integrations.base.ssh.SshWrappedDestination; -import io.airbyte.cdk.integrations.destination.NamingConventionTransformer; -import io.airbyte.cdk.integrations.destination.jdbc.AbstractJdbcDestination; -import io.airbyte.cdk.integrations.destination.jdbc.JdbcBufferedConsumerFactory; -import io.airbyte.commons.exceptions.ConnectionErrorException; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.map.MoreMaps; -import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; -import java.util.Map; -import java.util.function.Consumer; -import javax.sql.DataSource; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class VerticaDestination extends AbstractJdbcDestination implements Destination { - - private static final Logger LOGGER = LoggerFactory.getLogger(VerticaDestination.class); - - public static final String DRIVER_CLASS = DatabaseDriver.VERTICA.getDriverClassName(); - - public static final String COLUMN_NAME_AB_ID = - "\"" + JavaBaseConstants.COLUMN_NAME_AB_ID + "\""; - public static final String COLUMN_NAME_DATA = - "\"" + JavaBaseConstants.COLUMN_NAME_DATA + "\""; - public static final String COLUMN_NAME_EMITTED_AT = - "\"" + JavaBaseConstants.COLUMN_NAME_EMITTED_AT + "\""; - private final NamingConventionTransformer namingResolver; - private final VerticaSqlOperations verticaSqlOperations; - - private final String driverClass; - - static 
final Map DEFAULT_JDBC_PARAMETERS = ImmutableMap.of( - "zeroDateTimeBehavior", "convertToNull", - "allowLoadLocalInfile", "true"); - - public VerticaDestination(final String driverClass, - final NamingConventionTransformer namingResolver, - final VerticaSqlOperations verticaSqlOperations) { - super(DRIVER_CLASS, namingResolver, verticaSqlOperations); - this.verticaSqlOperations = verticaSqlOperations; - this.namingResolver = namingResolver; - this.driverClass = driverClass; - - } - - public static Destination sshWrappedDestination() { - return new SshWrappedDestination(new VerticaDestination(DRIVER_CLASS, new VerticaNameTransformer(), new VerticaSqlOperations()), - JdbcUtils.HOST_LIST_KEY, JdbcUtils.PORT_LIST_KEY); - } - - public static void main(String[] args) throws Exception { - final Destination destination = VerticaDestination.sshWrappedDestination(); - LOGGER.info("starting destination: {}", VerticaDestination.class); - new IntegrationRunner(destination).run(args); - LOGGER.info("completed destination: {}", VerticaDestination.class); - } - - @Override - public AirbyteConnectionStatus check(JsonNode config) { - final DataSource dataSource = getDataSource(config); - try { - final JdbcDatabase database = getDatabase(dataSource); - final VerticaSqlOperations mySQLSqlOperations = (VerticaSqlOperations) getSqlOperations(); - final String outputSchema = getNamingResolver().getIdentifier(config.get(JdbcUtils.DATABASE_KEY).asText()); - attemptSQLCreateAndDropTableOperations(outputSchema, database, getNamingResolver(), - mySQLSqlOperations); - return new AirbyteConnectionStatus().withStatus(AirbyteConnectionStatus.Status.SUCCEEDED); - } catch (final ConnectionErrorException e) { - final String message = getErrorMessage(e.getStateCode(), e.getErrorCode(), e.getExceptionMessage(), e); - AirbyteTraceMessageUtility.emitConfigErrorTrace(e, message); - return new AirbyteConnectionStatus() - .withStatus(AirbyteConnectionStatus.Status.FAILED) - .withMessage(message); - } catch (final Exception e) { - LOGGER.error("Exception while checking connection: ", e); - return new AirbyteConnectionStatus() - .withStatus(AirbyteConnectionStatus.Status.FAILED) - .withMessage("Could not connect with provided configuration. 
\n" + e.getMessage()); - } finally { - try { - DataSourceFactory.close(dataSource); - } catch (final Exception e) { - LOGGER.warn("Unable to close data source.", e); - } - } - } - - static final Map DEFAULT_SSL_JDBC_PARAMETERS = MoreMaps.merge(ImmutableMap.of( - "useSSL", "false", - "requireSSL", "fase", - "verifyServerCertificate", "false"), - DEFAULT_JDBC_PARAMETERS); - - @Override - protected Map getDefaultConnectionProperties(JsonNode config) { - if (JdbcUtils.useSsl(config)) { - return DEFAULT_SSL_JDBC_PARAMETERS; - } else { - return DEFAULT_JDBC_PARAMETERS; - } - } - - @Override - public JsonNode toJdbcConfig(JsonNode config) { - final String jdbcUrl = String.format("jdbc:vertica://%s:%s/%s", - config.get(JdbcUtils.HOST_KEY).asText(), - config.get(JdbcUtils.PORT_KEY).asText(), - config.get(JdbcUtils.DATABASE_KEY).asText()); - - final ImmutableMap.Builder configBuilder = ImmutableMap.builder() - .put(JdbcUtils.USERNAME_KEY, config.get(JdbcUtils.USERNAME_KEY).asText()) - .put(JdbcUtils.JDBC_URL_KEY, jdbcUrl); - - if (config.has(JdbcUtils.PASSWORD_KEY)) { - configBuilder.put(JdbcUtils.PASSWORD_KEY, config.get(JdbcUtils.PASSWORD_KEY).asText()); - } - if (config.has(JdbcUtils.JDBC_URL_PARAMS_KEY)) { - configBuilder.put(JdbcUtils.JDBC_URL_PARAMS_KEY, config.get(JdbcUtils.JDBC_URL_PARAMS_KEY)); - } - return Jsons.jsonNode(configBuilder.build()); - } - - @Override - public AirbyteMessageConsumer getConsumer(final JsonNode config, - final ConfiguredAirbyteCatalog catalog, - final Consumer outputRecordCollector) { - return JdbcBufferedConsumerFactory.create(outputRecordCollector, getDatabase(getDataSource(config)), verticaSqlOperations, namingResolver, config, - catalog); - } - -} diff --git a/airbyte-integrations/connectors/destination-vertica/src/main/java/io/airbyte/integrations/destination/vertica/VerticaNameTransformer.java b/airbyte-integrations/connectors/destination-vertica/src/main/java/io/airbyte/integrations/destination/vertica/VerticaNameTransformer.java deleted file mode 100644 index 4e714d636021..000000000000 --- a/airbyte-integrations/connectors/destination-vertica/src/main/java/io/airbyte/integrations/destination/vertica/VerticaNameTransformer.java +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.vertica; - -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; - -public class VerticaNameTransformer extends StandardNameTransformer { - -} diff --git a/airbyte-integrations/connectors/destination-vertica/src/main/java/io/airbyte/integrations/destination/vertica/VerticaSqlOperations.java b/airbyte-integrations/connectors/destination-vertica/src/main/java/io/airbyte/integrations/destination/vertica/VerticaSqlOperations.java deleted file mode 100644 index 398d09731d6b..000000000000 --- a/airbyte-integrations/connectors/destination-vertica/src/main/java/io/airbyte/integrations/destination/vertica/VerticaSqlOperations.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.vertica; - -import io.airbyte.cdk.db.jdbc.JdbcDatabase; -import io.airbyte.cdk.integrations.destination.jdbc.JdbcSqlOperations; -import io.airbyte.commons.json.Jsons; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import java.io.File; -import java.io.FileWriter; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.sql.SQLException; -import java.sql.Statement; -import java.sql.Timestamp; -import java.time.Instant; -import java.util.List; -import java.util.UUID; - -public class VerticaSqlOperations extends JdbcSqlOperations { - - protected void writeBatchToFile(final File tmpFile, final List records) throws Exception { - try { - final StringBuffer bfr = new StringBuffer(); - FileWriter wr = new FileWriter(tmpFile, StandardCharsets.UTF_8); - for (AirbyteRecordMessage record : records) { - final var uuid = UUID.randomUUID().toString(); - final var jsonData = Jsons.serialize(formatData(record.getData())); - final var emittedAt = Timestamp.from(Instant.ofEpochMilli(record.getEmittedAt())); - wr.write(uuid.toString() + "|" + jsonData.toString() + "|" + emittedAt.toString() + "\n"); - } - wr.close(); - } catch (Exception e) {} - } - - @Override - public void insertRecordsInternal(final JdbcDatabase database, - final List records, - final String schemaName, - final String tmpTableName) - throws SQLException { - if (records.isEmpty()) { - return; - } - database.execute(connection -> { - File tmpFile = null; - try { - tmpFile = Files.createTempFile(tmpTableName + "-", ".csv").toFile(); - writeBatchToFile(tmpFile, records); - final String query = String.format("copy %s.%s from local '%s' delimiter '%s'", schemaName, tmpTableName, tmpFile, "|"); - Statement stmt = connection.createStatement(); - stmt.execute(query); - stmt.close(); - } catch (final Exception e) { - throw new RuntimeException(e); - } - }); - } - - @Override - public void createSchemaIfNotExists(final JdbcDatabase database, final String schemaName) throws Exception { - final String query = String.format("CREATE SCHEMA IF NOT EXISTS %s", schemaName); - database.execute(query); - } - - @Override - public String createTableQuery(final JdbcDatabase database, final String schemaName, final String tableName) { - final String query = String.format( - "CREATE TABLE IF NOT EXISTS %s.%s (%s VARCHAR(500) PRIMARY KEY,%s VARCHAR(1000),%s VARCHAR(1000));", schemaName, tableName, - VerticaDestination.COLUMN_NAME_AB_ID, VerticaDestination.COLUMN_NAME_DATA, VerticaDestination.COLUMN_NAME_EMITTED_AT); - return query; - } - - @Override - public void createTableIfNotExists(final JdbcDatabase database, final String schemaName, final String tableName) { - try { - database.execute(createTableQuery(database, schemaName, tableName)); - } catch (final Exception e) { - LOGGER.error("Error while creating table.", e); - } - } - - @Override - public void dropTableIfExists(final JdbcDatabase database, final String schemaName, final String tableName) { - try { - final String query = String.format("DROP TABLE IF EXISTS %s.%s", schemaName, tableName); - database.execute(query); - } catch (Exception e) { - LOGGER.error(String.format("Error dropping table %s.%s", schemaName, tableName), e); - throw new RuntimeException(e); - } - } - - @Override - public void executeTransaction(final JdbcDatabase database, final List queries) throws Exception { - database.executeWithinTransaction(queries); - } - - @Override - public String insertTableQuery(final JdbcDatabase database, - final 
String schemaName, - final String sourceTableName, - final String destinationTableName) { - return String.format("INSERT INTO %s.%s SELECT * FROM %s.%s;\n", schemaName, destinationTableName, schemaName, sourceTableName); - } - -} diff --git a/airbyte-integrations/connectors/destination-vertica/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-vertica/src/main/resources/spec.json deleted file mode 100644 index 682ccef2629c..000000000000 --- a/airbyte-integrations/connectors/destination-vertica/src/main/resources/spec.json +++ /dev/null @@ -1,63 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/destinations/vertica", - "supportsIncremental": false, - "supportsNormalization": false, - "supportsDBT": false, - "supported_destination_sync_modes": ["overwrite"], - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Vertica Destination Spec", - "type": "object", - "required": ["host", "port", "username", "database", "schema"], - "additionalProperties": true, - "properties": { - "host": { - "title": "Host", - "description": "Hostname of the database.", - "type": "string", - "order": 0 - }, - "port": { - "title": "Port", - "description": "Port of the database.", - "type": "integer", - "minimum": 0, - "maximum": 65536, - "default": 5433, - "examples": ["5433"], - "order": 1 - }, - "database": { - "title": "DB Name", - "description": "Name of the database.", - "type": "string", - "order": 2 - }, - "username": { - "title": "User", - "description": "Username to use to access the database.", - "type": "string", - "order": 3 - }, - "password": { - "title": "Password", - "description": "Password associated with the username.", - "type": "string", - "airbyte_secret": true, - "order": 4 - }, - "jdbc_url_params": { - "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).", - "title": "JDBC URL Params", - "type": "string", - "order": 6 - }, - "schema": { - "title": "Schema", - "description": "Schema for vertica destination", - "type": "string", - "order": 7 - } - } - } -} diff --git a/airbyte-integrations/connectors/destination-vertica/src/test-integration/java/io/airbyte/integrations/destination/vertica/VerticaContainer.java b/airbyte-integrations/connectors/destination-vertica/src/test-integration/java/io/airbyte/integrations/destination/vertica/VerticaContainer.java deleted file mode 100644 index 31b3d7117a1a..000000000000 --- a/airbyte-integrations/connectors/destination-vertica/src/test-integration/java/io/airbyte/integrations/destination/vertica/VerticaContainer.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.vertica; - -import static java.time.temporal.ChronoUnit.SECONDS; - -import java.time.Duration; -import java.util.concurrent.Future; -import org.testcontainers.containers.JdbcDatabaseContainer; -import org.testcontainers.containers.wait.strategy.LogMessageWaitStrategy; -import org.testcontainers.utility.DockerImageName; - -public class VerticaContainer extends JdbcDatabaseContainer { - - private static final DockerImageName DEFAULT_IMAGE_NAME = DockerImageName.parse("vertica/vertica-ce"); - - private static final int VERTICA_PORT = 5433; - // Container defaults - static final String DEFAULT_DATABASE_NAME = "airbyte"; - static final String DEFAULT_USER = "airbyte"; - static final String DEFAULT_PASSWORD = "airbyte123"; - - private String databaseName = DEFAULT_DATABASE_NAME; - private String username = DEFAULT_USER; - private String password = DEFAULT_PASSWORD; - private static final int DEFAULT_STARTUP_TIMEOUT_SECONDS = 600; - private static final int DEFAULT_CONNECT_TIMEOUT_SECONDS = 120; - static final String DEFAULT_TAG = "latest"; - - private int startupTimeoutSeconds = 480; - - public VerticaContainer() { - this(DEFAULT_IMAGE_NAME.withTag(DEFAULT_TAG)); - } - - public VerticaContainer(final String dockerImageName) { - this(DockerImageName.parse(dockerImageName)); - } - - public VerticaContainer(final DockerImageName dockerImageName) { - super(dockerImageName); - preconfigure(); - } - - public VerticaContainer(final Future dockerImageName) { - super(dockerImageName); - preconfigure(); - } - - private void preconfigure() { - this.waitStrategy = new LogMessageWaitStrategy() - .withRegEx(".*Vertica is now running.*\\s") - .withTimes(1) - .withStartupTimeout(Duration.of(DEFAULT_STARTUP_TIMEOUT_SECONDS, SECONDS)); - withConnectTimeoutSeconds(DEFAULT_CONNECT_TIMEOUT_SECONDS); - addExposedPorts(VERTICA_PORT); - } - - @Override - public String getDriverClassName() { - return "com.vertica.jdbc.Driver"; - } - - @Override - public String getJdbcUrl() { - String jdbcUrl = "jdbc:vertica://140.236.88.151:5433/PartPub80DB"; - return jdbcUrl; - } - - @Override - public String getUsername() { - return username; - } - - @Override - public String getPassword() { - return password; - } - - @Override - protected String getTestQueryString() { - return "select * from airbyte.employe"; - } - - public Integer getVerticaPort() { - return getMappedPort(VERTICA_PORT); - } - - @Override - public String getDatabaseName() { - return databaseName; - } - - @Override - public VerticaContainer withStartupTimeoutSeconds(int startupTimeoutSeconds) { - this.startupTimeoutSeconds = startupTimeoutSeconds; - return this; - } - -} diff --git a/airbyte-integrations/connectors/destination-vertica/src/test-integration/java/io/airbyte/integrations/destination/vertica/VerticaDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-vertica/src/test-integration/java/io/airbyte/integrations/destination/vertica/VerticaDestinationAcceptanceTest.java deleted file mode 100644 index f098723501cd..000000000000 --- a/airbyte-integrations/connectors/destination-vertica/src/test-integration/java/io/airbyte/integrations/destination/vertica/VerticaDestinationAcceptanceTest.java +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.integrations.destination.vertica; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.cdk.db.Database; -import io.airbyte.cdk.db.factory.DSLContextFactory; -import io.airbyte.cdk.db.factory.DatabaseDriver; -import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.cdk.integrations.base.JavaBaseConstants; -import io.airbyte.cdk.integrations.destination.StandardNameTransformer; -import io.airbyte.cdk.integrations.standardtest.destination.JdbcDestinationAcceptanceTest; -import io.airbyte.commons.json.Jsons; -import java.io.IOException; -import java.sql.SQLException; -import java.util.HashSet; -import java.util.List; -import java.util.stream.Collectors; -import org.jooq.DSLContext; -import org.jooq.SQLDialect; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class VerticaDestinationAcceptanceTest extends JdbcDestinationAcceptanceTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(VerticaDestinationAcceptanceTest.class); - private static VerticaContainer db; - private final StandardNameTransformer namingResolver = new StandardNameTransformer(); - private JsonNode configJson; - private DSLContext dslContext; - - @Override - protected String getImageName() { - return "airbyte/destination-vertica:dev"; - } - - @Override - protected JsonNode getConfig() { - // TODO: Generate the configuration JSON file to be used for running the destination during the test - return Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, "140.236.88.151") - .put(JdbcUtils.USERNAME_KEY, "airbyte") - .put(JdbcUtils.PASSWORD_KEY, "airbyte123") - .put(JdbcUtils.SCHEMA_KEY, "airbyte") - .put(JdbcUtils.PORT_KEY, 5433) - .put(JdbcUtils.DATABASE_KEY, "airbyte") - .build()); - } - - @Override - protected JsonNode getFailCheckConfig() { - // TODO return an invalid config which, when used to run the connector's check connection operation, - return Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, db.getContainerInfo().getNetworkSettings().getIpAddress()) - .put(JdbcUtils.USERNAME_KEY, db.getUsername()) - .put(JdbcUtils.PASSWORD_KEY, "wrong password") - .put(JdbcUtils.SCHEMA_KEY, "public") - .put(JdbcUtils.PORT_KEY, db.getVerticaPort()) - .put(JdbcUtils.DATABASE_KEY, db.getDatabaseName()) - .put(JdbcUtils.SSL_KEY, false) - .build()); - } - - private List retrieveRecordsFromTable(final String tableName, final String schemaName) throws SQLException { - try (final DSLContext dslContext = DSLContextFactory.create( - "airbyte", - "airbyte123", - db.getDriverClassName(), - String.format(DatabaseDriver.VERTICA.getUrlFormatString(), - "140.236.88.151", - 5433, - "airbyte"), - SQLDialect.DEFAULT)) { - final List recordsFromTable = new Database(dslContext).query( - ctx -> ctx - .fetch(String.format("SELECT * FROM %s.%s ORDER BY %s ASC;", schemaName, tableName, - JavaBaseConstants.COLUMN_NAME_EMITTED_AT)) - .stream() - .map(this::getJsonFromRecord) - .collect(Collectors.toList())); - return recordsFromTable; - } - } - - @Override - @Test - public void testLineBreakCharacters() { - // overrides test with a no-op until we handle full UTF-8 in the destination - } - - @Override - protected List retrieveRecords(TestDestinationEnv testEnv, - String streamName, - String namespace, - JsonNode streamSchema) - throws IOException, SQLException { - return 
retrieveRecordsFromTable(namingResolver.getRawTableName(streamName), namespace) - .stream() - .map(r -> r.get(JavaBaseConstants.COLUMN_NAME_DATA)) - .collect(Collectors.toList()); - } - - @BeforeAll - protected static void init() { - db = new VerticaContainer(); - db.start(); - } - - @AfterAll - static void cleanUp() { - db.stop(); - db.close(); - } - - @Override - protected void setup(TestDestinationEnv testEnv, HashSet TEST_SCHEMAS) { - // TODO Implement this method to run any setup actions needed before every test case - - } - - @Override - protected void tearDown(TestDestinationEnv testEnv) { - // TODO Implement this method to run any cleanup actions needed after every test case - } - -} From 08dd7de8f8f46863243456906839f30d5e2c9d21 Mon Sep 17 00:00:00 2001 From: Ella Rohm-Ensing Date: Tue, 5 Mar 2024 08:56:52 -0800 Subject: [PATCH 074/172] file cdk: fix typing, pull out non-scalar handling (#35687) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What * Fix typing and handling of different types in `_to_output_value` - we don't always get a `Scalar`. We already handle the different cases correctly, but the typing doesn't reflect this. * Splitting out the methods to do the scalar separately is a helpful precursor to https://github.com/airbytehq/airbyte/pull/35688, as the `DictionaryArray` object doesn't have an `as_py()` method. ## 🚨 User Impact 🚨 None ## Pre-merge Actions *Expand the relevant checklist and delete the others.*
New Connector ### Community member or Airbyter - **Community member?** Grant edit access to maintainers ([instructions](https://docs.github.com/en/github/collaborating-with-pull-requests/working-with-forks/allowing-changes-to-a-pull-request-branch-created-from-a-fork#enabling-repository-maintainer-permissions-on-existing-pull-requests)) - Unit & integration tests added and passing. Community members, please provide proof of success locally e.g: screenshot or copy-paste unit, integration, and acceptance test output. To run acceptance tests for a Python connector, follow instructions in the README. For java connectors run `./gradlew :airbyte-integrations:connectors::integrationTest`. - Connector version is set to `0.0.1` - `Dockerfile` has version `0.0.1` - Documentation updated - Connector's `README.md` - Connector's `bootstrap.md`. See [description and examples](https://docs.google.com/document/d/1ypdgmwmEHWv-TrO4_YOQ7pAJGVrMp5BOkEVh831N260/edit?usp=sharing) - `docs/integrations//.md` including changelog with an entry for the initial version. See changelog [example](https://docs.airbyte.io/integrations/sources/stripe#changelog) - `docs/integrations/README.md` ### Airbyter If this is a community PR, the Airbyte engineer reviewing this PR is responsible for the below items. - Create a non-forked branch based on this PR and test the below items on it - Build is successful - If new credentials are required for use in CI, add them to GSM. [Instructions](https://docs.airbyte.io/connector-development#using-credentials-in-ci).
Updating a connector ### Community member or Airbyter - Grant edit access to maintainers ([instructions](https://docs.github.com/en/github/collaborating-with-pull-requests/working-with-forks/allowing-changes-to-a-pull-request-branch-created-from-a-fork#enabling-repository-maintainer-permissions-on-existing-pull-requests)) - Unit & integration tests added ### Airbyter If this is a community PR, the Airbyte engineer reviewing this PR is responsible for the below items. - Create a non-forked branch based on this PR and test the below items on it - Build is successful - If new credentials are required for use in CI, add them to GSM. [Instructions](https://docs.airbyte.io/connector-development#using-credentials-in-ci).
Connector Generator - Issue acceptance criteria met - PR name follows [PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook) - If adding a new generator, add it to the [list of scaffold modules being tested](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connector-templates/generator/build.gradle#L41) - The generator test modules (all connectors with `-scaffold` in their name) have been updated with the latest scaffold by running `./gradlew :airbyte-integrations:connector-templates:generator:generateScaffolds` then checking in your changes - Documentation which references the generator is updated as needed
Updating the Python CDK ### Airbyter Before merging: - Pull Request description explains what problem it is solving - Code change is unit tested - Build and mypy checks pass - Smoke test the change on at least one affected connector - On Github: Run [this workflow](https://github.com/airbytehq/airbyte/actions/workflows/connectors_tests.yml), passing `--use-local-cdk --name=source-` as options - Locally: `airbyte-ci connectors --use-local-cdk --name=source- test` - PR is reviewed and approved After merging: - [Publish the CDK](https://github.com/airbytehq/airbyte/actions/workflows/publish-cdk-command-manually.yml) - The CDK does not follow proper semantic versioning. Choose minor if the change has significant user impact or is a breaking change. Choose patch otherwise. - Write a thoughtful changelog message so we know what was updated. - Merge the platform PR that was auto-created for updating the Connector Builder's CDK version - This step is optional if the change does not affect the connector builder or declarative connectors.
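For readers skimming the patch, here is a minimal standalone sketch of the dispatch this PR describes — illustrative only, not the connector code itself. It simplifies the real `_to_output_value` (the `ParquetFormat`-specific branches are dropped) and relies only on pyarrow's public API: `DictionaryArray` has no `as_py()`, so it is handled through its `indices`/`dictionary` attributes before falling back to `Scalar.as_py()`.

```python
# Hypothetical, trimmed-down version of the dispatch in _to_output_value;
# the real parser also threads a ParquetFormat through for type-specific rules.
import pyarrow as pa


def to_output_value(value):
    # DictionaryArray has no as_py(); expose its two underlying columns instead.
    if isinstance(value, pa.DictionaryArray):
        return {
            "indices": value.indices.tolist(),
            "values": value.dictionary.tolist(),
        }
    # Everything else arrives as a Scalar and converts via as_py().
    return value.as_py()


print(to_output_value(pa.scalar(42, type=pa.int64())))  # 42
print(to_output_value(pa.array(["a", "b", "a"]).dictionary_encode()))
# {'indices': [0, 1, 0], 'values': ['a', 'b']}
```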
--- .../file_based/file_types/parquet_parser.py | 37 ++++++++++++++----- 1 file changed, 27 insertions(+), 10 deletions(-) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_types/parquet_parser.py b/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_types/parquet_parser.py index 00b78c489801..b57e413c0247 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_types/parquet_parser.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_types/parquet_parser.py @@ -5,7 +5,7 @@ import json import logging import os -from typing import Any, Dict, Iterable, List, Mapping, Optional, Tuple +from typing import Any, Dict, Iterable, List, Mapping, Optional, Tuple, Union from urllib.parse import unquote import pyarrow as pa @@ -16,7 +16,7 @@ from airbyte_cdk.sources.file_based.file_types.file_type_parser import FileTypeParser from airbyte_cdk.sources.file_based.remote_file import RemoteFile from airbyte_cdk.sources.file_based.schema_helpers import SchemaType -from pyarrow import Scalar +from pyarrow import DictionaryArray, Scalar class ParquetParser(FileTypeParser): @@ -95,7 +95,17 @@ def file_read_mode(self) -> FileReadMode: return FileReadMode.READ_BINARY @staticmethod - def _to_output_value(parquet_value: Scalar, parquet_format: ParquetFormat) -> Any: + def _to_output_value(parquet_value: Union[Scalar, DictionaryArray], parquet_format: ParquetFormat) -> Any: + """ + Convert an entry in a pyarrow table to a value that can be output by the source. + """ + if isinstance(parquet_value, DictionaryArray): + return ParquetParser._dictionary_array_to_python_value(parquet_value) + else: + return ParquetParser._scalar_to_python_value(parquet_value, parquet_format) + + @staticmethod + def _scalar_to_python_value(parquet_value: Scalar, parquet_format: ParquetFormat) -> Any: """ Convert a pyarrow scalar to a value that can be output by the source. """ @@ -119,13 +129,6 @@ def _to_output_value(parquet_value: Scalar, parquet_format: ParquetFormat) -> An else: return str(parquet_value.as_py()) - # Dictionaries are stored as two columns: indices and values - # The indices column is an array of integers that maps to the values column - if pa.types.is_dictionary(parquet_value.type): - return { - "indices": parquet_value.indices.tolist(), - "values": parquet_value.dictionary.tolist(), - } if pa.types.is_map(parquet_value.type): return {k: v for k, v in parquet_value.as_py()} @@ -149,6 +152,20 @@ def _to_output_value(parquet_value: Scalar, parquet_format: ParquetFormat) -> An else: return parquet_value.as_py() + @staticmethod + def _dictionary_array_to_python_value(parquet_value: DictionaryArray) -> Dict[str, Any]: + """ + Convert a pyarrow dictionary array to a value that can be output by the source. 
+ + Dictionaries are stored as two columns: indices and values + The indices column is an array of integers that maps to the values column + """ + + return { + "indices": parquet_value.indices.tolist(), + "values": parquet_value.dictionary.tolist(), + } + @staticmethod def parquet_type_to_schema_type(parquet_type: pa.DataType, parquet_format: ParquetFormat) -> Mapping[str, str]: """ From a0900885942af0f73f5792470ee24c7bc1139e02 Mon Sep 17 00:00:00 2001 From: Ella Rohm-Ensing Date: Tue, 5 Mar 2024 09:07:02 -0800 Subject: [PATCH 075/172] file cdk: handle scalar values that resolve to None (#35688) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What * Closes https://github.com/airbytehq/airbyte/issues/34151 * Closes https://github.com/airbytehq/oncall/issues/4386 ## How Handle cases where the python value of a pyarrow scalar is None. This can be due to null values in data, as well as null-like values like `NaT` (similar to `NaN`). We previously handled this for `None` binary types, but now handle this for `None` of any type. ## 🚨 User Impact 🚨 No breaking changes. After this CDK version is released we should update the CDK dependency in S3 and any other file sources that parse parquet ## Pre-merge Actions *Expand the relevant checklist and delete the others.*
New Connector ### Community member or Airbyter - **Community member?** Grant edit access to maintainers ([instructions](https://docs.github.com/en/github/collaborating-with-pull-requests/working-with-forks/allowing-changes-to-a-pull-request-branch-created-from-a-fork#enabling-repository-maintainer-permissions-on-existing-pull-requests)) - Unit & integration tests added and passing. Community members, please provide proof of success locally e.g: screenshot or copy-paste unit, integration, and acceptance test output. To run acceptance tests for a Python connector, follow instructions in the README. For java connectors run `./gradlew :airbyte-integrations:connectors::integrationTest`. - Connector version is set to `0.0.1` - `Dockerfile` has version `0.0.1` - Documentation updated - Connector's `README.md` - Connector's `bootstrap.md`. See [description and examples](https://docs.google.com/document/d/1ypdgmwmEHWv-TrO4_YOQ7pAJGVrMp5BOkEVh831N260/edit?usp=sharing) - `docs/integrations//.md` including changelog with an entry for the initial version. See changelog [example](https://docs.airbyte.io/integrations/sources/stripe#changelog) - `docs/integrations/README.md` ### Airbyter If this is a community PR, the Airbyte engineer reviewing this PR is responsible for the below items. - Create a non-forked branch based on this PR and test the below items on it - Build is successful - If new credentials are required for use in CI, add them to GSM. [Instructions](https://docs.airbyte.io/connector-development#using-credentials-in-ci).
Updating a connector ### Community member or Airbyter - Grant edit access to maintainers ([instructions](https://docs.github.com/en/github/collaborating-with-pull-requests/working-with-forks/allowing-changes-to-a-pull-request-branch-created-from-a-fork#enabling-repository-maintainer-permissions-on-existing-pull-requests)) - Unit & integration tests added ### Airbyter If this is a community PR, the Airbyte engineer reviewing this PR is responsible for the below items. - Create a non-forked branch based on this PR and test the below items on it - Build is successful - If new credentials are required for use in CI, add them to GSM. [Instructions](https://docs.airbyte.io/connector-development#using-credentials-in-ci).
Connector Generator - Issue acceptance criteria met - PR name follows [PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook) - If adding a new generator, add it to the [list of scaffold modules being tested](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connector-templates/generator/build.gradle#L41) - The generator test modules (all connectors with `-scaffold` in their name) have been updated with the latest scaffold by running `./gradlew :airbyte-integrations:connector-templates:generator:generateScaffolds` then checking in your changes - Documentation which references the generator is updated as needed
Updating the Python CDK ### Airbyter Before merging: - Pull Request description explains what problem it is solving - Code change is unit tested - Build and mypy checks pass - Smoke test the change on at least one affected connector - On Github: Run [this workflow](https://github.com/airbytehq/airbyte/actions/workflows/connectors_tests.yml), passing `--use-local-cdk --name=source-` as options - Locally: `airbyte-ci connectors --use-local-cdk --name=source- test` - PR is reviewed and approved After merging: - [Publish the CDK](https://github.com/airbytehq/airbyte/actions/workflows/publish-cdk-command-manually.yml) - The CDK does not follow proper semantic versioning. Choose minor if the change has significant user impact or is a breaking change. Choose patch otherwise. - Write a thoughtful changelog message so we know what was updated. - Merge the platform PR that was auto-created for updating the Connector Builder's CDK version - This step is optional if the change does not affect the connector builder or declarative connectors.
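Before the diff, a quick illustrative sketch of the guard this PR adds — a hypothetical, trimmed-down `_scalar_to_python_value` assuming only pyarrow's public scalar API. A null scalar of any arrow type (including `NaT`-style timestamp nulls) converts to Python `None`, so returning early avoids calling `.isoformat()` or `.decode()` on `None` further down:

```python
# Illustrative only: the early-None guard generalizes the old binary-only check.
import pyarrow as pa


def scalar_to_python_value(scalar: pa.Scalar):
    if scalar.as_py() is None:  # a null of any type round-trips to None
        return None
    if pa.types.is_timestamp(scalar.type):
        return scalar.as_py().isoformat()  # would raise on None without the guard
    if pa.types.is_binary(scalar.type):
        return scalar.as_py().decode("utf-8")  # likewise
    return scalar.as_py()


for t in (pa.timestamp("s"), pa.binary(), pa.string()):
    assert scalar_to_python_value(pa.scalar(None, type=t)) is None
print(scalar_to_python_value(pa.scalar(b"ok", type=pa.binary())))  # ok
```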
--- .../file_based/file_types/parquet_parser.py | 9 +-- .../file_types/test_parquet_parser.py | 57 +++++++++++++++++-- 2 files changed, 56 insertions(+), 10 deletions(-) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_types/parquet_parser.py b/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_types/parquet_parser.py index b57e413c0247..11560aed887a 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_types/parquet_parser.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_types/parquet_parser.py @@ -109,6 +109,9 @@ def _scalar_to_python_value(parquet_value: Scalar, parquet_format: ParquetFormat """ Convert a pyarrow scalar to a value that can be output by the source. """ + if parquet_value.as_py() is None: + return None + # Convert date and datetime objects to isoformat strings if pa.types.is_time(parquet_value.type) or pa.types.is_timestamp(parquet_value.type) or pa.types.is_date(parquet_value.type): return parquet_value.as_py().isoformat() @@ -119,10 +122,8 @@ def _scalar_to_python_value(parquet_value: Scalar, parquet_format: ParquetFormat # Decode binary strings to utf-8 if ParquetParser._is_binary(parquet_value.type): - py_value = parquet_value.as_py() - if py_value is None: - return py_value - return py_value.decode("utf-8") + return parquet_value.as_py().decode("utf-8") + if pa.types.is_decimal(parquet_value.type): if parquet_format.decimal_as_float: return parquet_value.as_py() diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/file_types/test_parquet_parser.py b/airbyte-cdk/python/unit_tests/sources/file_based/file_types/test_parquet_parser.py index 984a782c5925..1fa2dcbf66fc 100644 --- a/airbyte-cdk/python/unit_tests/sources/file_based/file_types/test_parquet_parser.py +++ b/airbyte-cdk/python/unit_tests/sources/file_based/file_types/test_parquet_parser.py @@ -191,12 +191,57 @@ def test_value_dictionary() -> None: assert py_value == {"indices": [0, 1, 2, 0, 1], "values": ["apple", "banana", "cherry"]} -def test_value_none_binary() -> None: - none_binary_scalar = pa.scalar(None, type=pa.binary()) - try: - ParquetParser._to_output_value(none_binary_scalar, _default_parquet_format) - except AttributeError: - assert False, "`None` type binary should be handled properly" +@pytest.mark.parametrize( + "parquet_type, parquet_format", + [ + pytest.param(pa.bool_(), _default_parquet_format, id="test_parquet_bool"), + pytest.param(pa.int8(), _default_parquet_format, id="test_parquet_int8"), + pytest.param(pa.int16(), _default_parquet_format, id="test_parquet_int16"), + pytest.param(pa.int32(), _default_parquet_format, id="test_parquet_int32"), + pytest.param(pa.int64(), _default_parquet_format, id="test_parquet_int64"), + pytest.param(pa.uint8(), _default_parquet_format, id="test_parquet_uint8"), + pytest.param(pa.uint16(), _default_parquet_format, id="test_parquet_uint16"), + pytest.param(pa.uint32(), _default_parquet_format, id="test_parquet_uint32"), + pytest.param(pa.uint64(), _default_parquet_format, id="test_parquet_uint64"), + pytest.param(pa.float16(), _default_parquet_format, id="test_parquet_float16"), + pytest.param(pa.float32(), _default_parquet_format, id="test_parquet_float32"), + pytest.param(pa.float64(), _default_parquet_format, id="test_parquet_float64"), + pytest.param(pa.time32("s"), _default_parquet_format, id="test_parquet_time32s"), + pytest.param(pa.time32("ms"), _default_parquet_format, id="test_parquet_time32ms"), + pytest.param(pa.time64("us"), _default_parquet_format, id="test_parquet_time64us"), + 
pytest.param(pa.time64("ns"), _default_parquet_format, id="test_parquet_time64ns"), + pytest.param(pa.timestamp("s"), _default_parquet_format, id="test_parquet_timestamps_s"), + pytest.param(pa.timestamp("ms"), _default_parquet_format, id="test_parquet_timestamp_ms"), + pytest.param(pa.timestamp("s", "utc"), _default_parquet_format, id="test_parquet_timestamps_s_with_tz"), + pytest.param(pa.timestamp("ms", "est"), _default_parquet_format, id="test_parquet_timestamps_ms_with_tz"), + pytest.param(pa.date32(), _default_parquet_format, id="test_parquet_date32"), + pytest.param(pa.date64(), _default_parquet_format, id="test_parquet_date64"), + pytest.param(pa.duration("s"), _default_parquet_format, id="test_duration_s"), + pytest.param(pa.duration("ms"), _default_parquet_format, id="test_duration_ms"), + pytest.param(pa.duration("us"), _default_parquet_format, id="test_duration_us"), + pytest.param(pa.duration("ns"), _default_parquet_format, id="test_duration_ns"), + pytest.param(pa.month_day_nano_interval(), _default_parquet_format, id="test_parquet_month_day_nano_interval"), + pytest.param(pa.binary(), _default_parquet_format, id="test_binary"), + pytest.param(pa.binary(2), _default_parquet_format, id="test_fixed_size_binary"), + pytest.param(pa.string(), _default_parquet_format, id="test_parquet_string"), + pytest.param(pa.utf8(), _default_parquet_format, id="test_utf8"), + pytest.param(pa.large_binary(), _default_parquet_format, id="test_large_binary"), + pytest.param(pa.large_string(), _default_parquet_format, id="test_large_string"), + pytest.param(pa.large_utf8(), _default_parquet_format, id="test_large_utf8"), + pytest.param(pa.dictionary(pa.int32(), pa.string()), _default_parquet_format, id="test_dictionary"), + pytest.param(pa.struct([pa.field("field", pa.int32())]), _default_parquet_format, id="test_struct"), + pytest.param(pa.list_(pa.int32()), _default_parquet_format, id="test_list"), + pytest.param(pa.large_list(pa.int32()), _default_parquet_format, id="test_large_list"), + pytest.param(pa.decimal128(2), _default_parquet_format, id="test_decimal128"), + pytest.param(pa.decimal256(2), _default_parquet_format, id="test_decimal256"), + pytest.param(pa.decimal128(2), _decimal_as_float_parquet_format, id="test_decimal128_as_float"), + pytest.param(pa.decimal256(2), _decimal_as_float_parquet_format, id="test_decimal256_as_float"), + pytest.param(pa.map_(pa.int32(), pa.int32()), _default_parquet_format, id="test_map"), + pytest.param(pa.null(), _default_parquet_format, id="test_null"), + ]) +def test_null_value_does_not_throw(parquet_type, parquet_format) -> None: + pyarrow_value = pa.scalar(None, type=parquet_type) + assert ParquetParser._to_output_value(pyarrow_value, parquet_format) is None @pytest.mark.parametrize( From b254a6409cbe7a51b029863aa166cb8dd419cc4f Mon Sep 17 00:00:00 2001 From: Edward Gao Date: Tue, 5 Mar 2024 09:15:29 -0800 Subject: [PATCH 076/172] Destination redshift: Upgrade cdk (#35316) Signed-off-by: Gireesh Sreepathi Co-authored-by: Gireesh Sreepathi --- .../destination-redshift/build.gradle | 3 ++- .../destination-redshift/metadata.yaml | 2 +- .../redshift/RedshiftInsertDestination.java | 7 ++++-- .../RedshiftStagingS3Destination.java | 23 +++++++++++++------ .../RedshiftDestinationHandler.java | 17 ++++++++++---- .../redshift/typing_deduping/RedshiftState.kt | 17 ++++++++++++++ .../RedshiftSqlGeneratorIntegrationTest.java | 18 +++++++-------- docs/integrations/destinations/redshift.md | 1 + 8 files changed, 63 insertions(+), 25 deletions(-) create mode 100644 
airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftState.kt diff --git a/airbyte-integrations/connectors/destination-redshift/build.gradle b/airbyte-integrations/connectors/destination-redshift/build.gradle index 298b24ec4012..c55b76b11eb9 100644 --- a/airbyte-integrations/connectors/destination-redshift/build.gradle +++ b/airbyte-integrations/connectors/destination-redshift/build.gradle @@ -1,10 +1,11 @@ plugins { id 'application' id 'airbyte-java-connector' + id 'org.jetbrains.kotlin.jvm' version '1.9.22' } airbyteJavaConnector { - cdkVersionRequired = '0.23.2' + cdkVersionRequired = '0.23.11' features = ['db-destinations', 's3-destinations', 'typing-deduping'] useLocalCdk = false } diff --git a/airbyte-integrations/connectors/destination-redshift/metadata.yaml b/airbyte-integrations/connectors/destination-redshift/metadata.yaml index 368369cfe5d2..909685693541 100644 --- a/airbyte-integrations/connectors/destination-redshift/metadata.yaml +++ b/airbyte-integrations/connectors/destination-redshift/metadata.yaml @@ -5,7 +5,7 @@ data: connectorSubtype: database connectorType: destination definitionId: f7a7d195-377f-cf5b-70a5-be6b819019dc - dockerImageTag: 2.1.8 + dockerImageTag: 2.1.9 dockerRepository: airbyte/destination-redshift documentationUrl: https://docs.airbyte.com/integrations/destinations/redshift githubIssueLabel: destination-redshift diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftInsertDestination.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftInsertDestination.java index a4ba7a669557..81521b03b9fa 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftInsertDestination.java +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftInsertDestination.java @@ -22,6 +22,7 @@ import io.airbyte.integrations.destination.redshift.operations.RedshiftSqlOperations; import io.airbyte.integrations.destination.redshift.typing_deduping.RedshiftDestinationHandler; import io.airbyte.integrations.destination.redshift.typing_deduping.RedshiftSqlGenerator; +import io.airbyte.integrations.destination.redshift.typing_deduping.RedshiftState; import io.airbyte.integrations.destination.redshift.util.RedshiftUtil; import java.time.Duration; import java.util.HashMap; @@ -115,8 +116,10 @@ protected JdbcSqlGenerator getSqlGenerator() { } @Override - protected JdbcDestinationHandler getDestinationHandler(final String databaseName, final JdbcDatabase database) { - return new RedshiftDestinationHandler(databaseName, database); + protected JdbcDestinationHandler getDestinationHandler(final String databaseName, + final JdbcDatabase database, + String rawTableSchema) { + return new RedshiftDestinationHandler(databaseName, database, rawTableSchema); } } diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStagingS3Destination.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStagingS3Destination.java index 16189ce2004b..97e8b4393890 100644 --- 
a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStagingS3Destination.java +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStagingS3Destination.java @@ -20,6 +20,7 @@ import io.airbyte.cdk.integrations.base.AirbyteMessageConsumer; import io.airbyte.cdk.integrations.base.AirbyteTraceMessageUtility; import io.airbyte.cdk.integrations.base.Destination; +import io.airbyte.cdk.integrations.base.JavaBaseConstants; import io.airbyte.cdk.integrations.base.SerializedAirbyteMessageConsumer; import io.airbyte.cdk.integrations.base.TypingAndDedupingFlag; import io.airbyte.cdk.integrations.base.ssh.SshWrappedDestination; @@ -50,6 +51,7 @@ import io.airbyte.integrations.destination.redshift.operations.RedshiftSqlOperations; import io.airbyte.integrations.destination.redshift.typing_deduping.RedshiftDestinationHandler; import io.airbyte.integrations.destination.redshift.typing_deduping.RedshiftSqlGenerator; +import io.airbyte.integrations.destination.redshift.typing_deduping.RedshiftState; import io.airbyte.integrations.destination.redshift.util.RedshiftUtil; import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; import io.airbyte.protocol.models.v0.AirbyteConnectionStatus.Status; @@ -58,6 +60,7 @@ import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; import java.time.Duration; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.function.Consumer; import javax.sql.DataSource; @@ -176,8 +179,10 @@ protected JdbcSqlGenerator getSqlGenerator() { } @Override - protected JdbcDestinationHandler getDestinationHandler(final String databaseName, final JdbcDatabase database) { - return new RedshiftDestinationHandler(databaseName, database); + protected JdbcDestinationHandler getDestinationHandler(final String databaseName, + final JdbcDatabase database, + String rawTableSchema) { + return new RedshiftDestinationHandler(databaseName, database, rawTableSchema); } @Override @@ -217,22 +222,26 @@ public SerializedAirbyteMessageConsumer getSerializedMessageConsumer(final JsonN final TyperDeduper typerDeduper; final JdbcDatabase database = getDatabase(getDataSource(config)); final String databaseName = config.get(JdbcUtils.DATABASE_KEY).asText(); - final RedshiftDestinationHandler redshiftDestinationHandler = new RedshiftDestinationHandler(databaseName, database); final CatalogParser catalogParser; + final String rawNamespace; if (TypingAndDedupingFlag.getRawNamespaceOverride(RAW_SCHEMA_OVERRIDE).isPresent()) { - catalogParser = new CatalogParser(sqlGenerator, TypingAndDedupingFlag.getRawNamespaceOverride(RAW_SCHEMA_OVERRIDE).get()); + rawNamespace = TypingAndDedupingFlag.getRawNamespaceOverride(RAW_SCHEMA_OVERRIDE).get(); + catalogParser = new CatalogParser(sqlGenerator, rawNamespace); } else { + rawNamespace = JavaBaseConstants.DEFAULT_AIRBYTE_INTERNAL_NAMESPACE; catalogParser = new CatalogParser(sqlGenerator); } + final RedshiftDestinationHandler redshiftDestinationHandler = new RedshiftDestinationHandler(databaseName, database, rawNamespace); parsedCatalog = catalogParser.parseCatalog(catalog); final JdbcV1V2Migrator migrator = new JdbcV1V2Migrator(getNamingResolver(), database, databaseName); final NoopV2TableMigrator v2TableMigrator = new NoopV2TableMigrator(); final boolean disableTypeDedupe = config.has(DISABLE_TYPE_DEDUPE) && config.get(DISABLE_TYPE_DEDUPE).asBoolean(false); if (disableTypeDedupe) { - 
typerDeduper = new NoOpTyperDeduperWithV1V2Migrations(sqlGenerator, redshiftDestinationHandler, parsedCatalog, migrator, v2TableMigrator); + typerDeduper = + new NoOpTyperDeduperWithV1V2Migrations<>(sqlGenerator, redshiftDestinationHandler, parsedCatalog, migrator, v2TableMigrator, List.of()); } else { typerDeduper = - new DefaultTyperDeduper(sqlGenerator, redshiftDestinationHandler, parsedCatalog, migrator, v2TableMigrator); + new DefaultTyperDeduper<>(sqlGenerator, redshiftDestinationHandler, parsedCatalog, migrator, v2TableMigrator, List.of()); } return StagingConsumerFactory.builder( outputRecordCollector, @@ -252,7 +261,7 @@ public SerializedAirbyteMessageConsumer getSerializedMessageConsumer(final JsonN /** * Retrieves user configured file buffer amount so as long it doesn't exceed the maximum number of * file buffers and sets the minimum number to the default - * + *

* NOTE: If Out Of Memory Exceptions (OOME) occur, this can be a likely cause as this hard limit has * not been thoroughly load tested across all instance sizes * diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftDestinationHandler.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftDestinationHandler.java index 5a47c2436d00..497d6469cd05 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftDestinationHandler.java +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftDestinationHandler.java @@ -4,8 +4,7 @@ package io.airbyte.integrations.destination.redshift.typing_deduping; -import static io.airbyte.cdk.integrations.base.JavaBaseConstants.*; - +import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.cdk.db.jdbc.JdbcDatabase; import io.airbyte.cdk.integrations.destination.jdbc.typing_deduping.JdbcDestinationHandler; import io.airbyte.integrations.base.destination.typing_deduping.AirbyteProtocolType; @@ -20,12 +19,14 @@ import java.util.List; import java.util.UUID; import lombok.extern.slf4j.Slf4j; +import org.jooq.SQLDialect; @Slf4j -public class RedshiftDestinationHandler extends JdbcDestinationHandler { +public class RedshiftDestinationHandler extends JdbcDestinationHandler { - public RedshiftDestinationHandler(final String databaseName, final JdbcDatabase jdbcDatabase) { - super(databaseName, jdbcDatabase); + public RedshiftDestinationHandler(final String databaseName, final JdbcDatabase jdbcDatabase, String rawNamespace) { + // :shrug: apparently this works better than using POSTGRES + super(databaseName, jdbcDatabase, rawNamespace, SQLDialect.DEFAULT); } @Override @@ -69,6 +70,12 @@ protected String toJdbcTypeName(AirbyteType airbyteType) { }; } + @Override + protected RedshiftState toDestinationState(JsonNode json) { + return new RedshiftState( + json.hasNonNull("needsSoftReset") && json.get("needsSoftReset").asBoolean()); + } + private String toJdbcTypeName(final AirbyteProtocolType airbyteProtocolType) { return switch (airbyteProtocolType) { case STRING -> "varchar"; diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftState.kt b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftState.kt new file mode 100644 index 000000000000..d2200ea9a60c --- /dev/null +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftState.kt @@ -0,0 +1,17 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.destination.redshift.typing_deduping + +import io.airbyte.integrations.base.destination.typing_deduping.migrators.MinimumDestinationState + +data class RedshiftState(val needsSoftReset: Boolean) : MinimumDestinationState { + override fun needsSoftReset(): Boolean { + return needsSoftReset + } + + override fun withSoftReset(needsSoftReset: Boolean): T { + return copy(needsSoftReset = needsSoftReset) as T + } +} diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftSqlGeneratorIntegrationTest.java b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftSqlGeneratorIntegrationTest.java index 854fe35cfff6..75515b5130b7 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftSqlGeneratorIntegrationTest.java +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/typing_deduping/RedshiftSqlGeneratorIntegrationTest.java @@ -21,7 +21,7 @@ import io.airbyte.cdk.integrations.standardtest.destination.typing_deduping.JdbcSqlGeneratorIntegrationTest; import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.base.destination.typing_deduping.DestinationHandler; -import io.airbyte.integrations.base.destination.typing_deduping.DestinationInitialState; +import io.airbyte.integrations.base.destination.typing_deduping.DestinationInitialStatus; import io.airbyte.integrations.base.destination.typing_deduping.Sql; import io.airbyte.integrations.destination.redshift.RedshiftInsertDestination; import io.airbyte.integrations.destination.redshift.RedshiftSQLNameTransformer; @@ -46,7 +46,7 @@ import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; -public class RedshiftSqlGeneratorIntegrationTest extends JdbcSqlGeneratorIntegrationTest { +public class RedshiftSqlGeneratorIntegrationTest extends JdbcSqlGeneratorIntegrationTest { /** * Redshift's JDBC driver doesn't map certain data types onto {@link java.sql.JDBCType} usefully. @@ -151,8 +151,8 @@ protected DSLContext getDslContext() { } @Override - protected DestinationHandler getDestinationHandler() { - return new RedshiftDestinationHandler(databaseName, database); + protected DestinationHandler getDestinationHandler() { + return new RedshiftDestinationHandler(databaseName, database, namespace); } @Override @@ -180,11 +180,11 @@ protected Field toJsonValue(final String valueAsString) { public void testCreateTableIncremental() throws Exception { final Sql sql = generator.createTable(incrementalDedupStream, "", false); destinationHandler.execute(sql); - List initialStates = destinationHandler.gatherInitialState(List.of(incrementalDedupStream)); - assertEquals(1, initialStates.size()); - final DestinationInitialState initialState = initialStates.getFirst(); - assertTrue(initialState.isFinalTablePresent()); - assertFalse(initialState.isSchemaMismatch()); + List> initialStatuses = destinationHandler.gatherInitialState(List.of(incrementalDedupStream)); + assertEquals(1, initialStatuses.size()); + final DestinationInitialStatus initialStatus = initialStatuses.getFirst(); + assertTrue(initialStatus.isFinalTablePresent()); + assertFalse(initialStatus.isSchemaMismatch()); // TODO assert on table clustering, etc. 
} diff --git a/docs/integrations/destinations/redshift.md b/docs/integrations/destinations/redshift.md index c37e01dcbb67..0df49130d9bf 100644 --- a/docs/integrations/destinations/redshift.md +++ b/docs/integrations/destinations/redshift.md @@ -237,6 +237,7 @@ Each stream will be output into its own raw table in Redshift. Each table will c | Version | Date | Pull Request | Subject | |:--------|:-----------|:-----------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 2.1.9 | 2024-03-04 | [\#35316](https://github.com/airbytehq/airbyte/pull/35316) | Update to CDK 0.23.11; Adopt migration framework | | 2.1.8 | 2024-02-09 | [\#35354](https://github.com/airbytehq/airbyte/pull/35354) | Update to CDK 0.23.0; Gather required initial state upfront, remove dependency on svv_table_info for table empty check | | 2.1.7 | 2024-02-09 | [\#34562](https://github.com/airbytehq/airbyte/pull/34562) | Switch back to jooq-based sql execution for standard insert | | 2.1.6 | 2024-02-08 | [\#34502](https://github.com/airbytehq/airbyte/pull/34502) | Update to CDK version 0.17.0 | From bf7e1581699eb5fea7477ee157fa535d50098901 Mon Sep 17 00:00:00 2001 From: erohmensing Date: Tue, 5 Mar 2024 17:58:59 +0000 Subject: [PATCH 077/172] =?UTF-8?q?=F0=9F=A4=96=20Bump=20patch=20version?= =?UTF-8?q?=20of=20Python=20CDK?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- airbyte-cdk/python/.bumpversion.cfg | 2 +- airbyte-cdk/python/CHANGELOG.md | 3 +++ airbyte-cdk/python/Dockerfile | 4 ++-- airbyte-cdk/python/setup.py | 2 +- 4 files changed, 7 insertions(+), 4 deletions(-) diff --git a/airbyte-cdk/python/.bumpversion.cfg b/airbyte-cdk/python/.bumpversion.cfg index eb6f22c65e87..6c6370252a71 100644 --- a/airbyte-cdk/python/.bumpversion.cfg +++ b/airbyte-cdk/python/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.67.1 +current_version = 0.67.2 commit = False [bumpversion:file:setup.py] diff --git a/airbyte-cdk/python/CHANGELOG.md b/airbyte-cdk/python/CHANGELOG.md index d2698bdced66..455aadc3c05d 100644 --- a/airbyte-cdk/python/CHANGELOG.md +++ b/airbyte-cdk/python/CHANGELOG.md @@ -1,5 +1,8 @@ # Changelog +## 0.67.2 +File CDK: Update parquet parser to handle values that resolve to None + ## 0.67.1 Fix handling of tab-separated CSVs diff --git a/airbyte-cdk/python/Dockerfile b/airbyte-cdk/python/Dockerfile index 5ad0969929cf..ad1834705398 100644 --- a/airbyte-cdk/python/Dockerfile +++ b/airbyte-cdk/python/Dockerfile @@ -10,7 +10,7 @@ RUN apk --no-cache upgrade \ && apk --no-cache add tzdata build-base # install airbyte-cdk -RUN pip install --prefix=/install airbyte-cdk==0.67.1 +RUN pip install --prefix=/install airbyte-cdk==0.67.2 # build a clean environment FROM base @@ -32,5 +32,5 @@ ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] # needs to be the same as CDK -LABEL io.airbyte.version=0.67.1 +LABEL io.airbyte.version=0.67.2 LABEL io.airbyte.name=airbyte/source-declarative-manifest diff --git a/airbyte-cdk/python/setup.py b/airbyte-cdk/python/setup.py index 2e7ff9cc4583..4f4a4fd20e10 100644 --- a/airbyte-cdk/python/setup.py +++ b/airbyte-cdk/python/setup.py @@ -36,7 +36,7 @@ name="airbyte-cdk", # The version of the airbyte-cdk package is used at runtime to validate manifests. 
That validation must be # updated if our semver format changes such as using release candidate versions. - version="0.67.1", + version="0.67.2", description="A framework for writing Airbyte Connectors.", long_description=README, long_description_content_type="text/markdown", From 7e4649eb30d4eb70b38793d1c74dc4cffef36461 Mon Sep 17 00:00:00 2001 From: Catherine Noll Date: Tue, 5 Mar 2024 13:07:08 -0500 Subject: [PATCH 078/172] CDK: upgrade pyarrow (#35818) --- airbyte-cdk/python/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airbyte-cdk/python/setup.py b/airbyte-cdk/python/setup.py index 4f4a4fd20e10..0757568a7505 100644 --- a/airbyte-cdk/python/setup.py +++ b/airbyte-cdk/python/setup.py @@ -15,7 +15,7 @@ avro_dependency = "avro~=1.11.2" fastavro_dependency = "fastavro~=1.8.0" -pyarrow_dependency = "pyarrow==12.0.1" +pyarrow_dependency = "pyarrow~=15.0.0" langchain_dependency = "langchain==0.0.271" openai_dependency = "openai[embeddings]==0.27.9" From 6354733ed7de741a248da0ac7a9a37eef3ffeceb Mon Sep 17 00:00:00 2001 From: Christo Grabowski <108154848+ChristoGrab@users.noreply.github.com> Date: Tue, 5 Mar 2024 13:51:36 -0500 Subject: [PATCH 079/172] Source Recurly: bump version to unarchive in Cloud (#35828) --- airbyte-integrations/connectors/source-recurly/metadata.yaml | 2 +- airbyte-integrations/connectors/source-recurly/pyproject.toml | 2 +- docs/integrations/sources/recurly.md | 1 + 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/airbyte-integrations/connectors/source-recurly/metadata.yaml b/airbyte-integrations/connectors/source-recurly/metadata.yaml index 4c4be34837ba..ff6a08c27d33 100644 --- a/airbyte-integrations/connectors/source-recurly/metadata.yaml +++ b/airbyte-integrations/connectors/source-recurly/metadata.yaml @@ -7,7 +7,7 @@ data: connectorSubtype: api connectorType: source definitionId: cd42861b-01fc-4658-a8ab-5d11d0510f01 - dockerImageTag: 1.0.0 + dockerImageTag: 1.0.1 dockerRepository: airbyte/source-recurly documentationUrl: https://docs.airbyte.com/integrations/sources/recurly githubIssueLabel: source-recurly diff --git a/airbyte-integrations/connectors/source-recurly/pyproject.toml b/airbyte-integrations/connectors/source-recurly/pyproject.toml index fcf2625d1e89..5083827e29d2 100644 --- a/airbyte-integrations/connectors/source-recurly/pyproject.toml +++ b/airbyte-integrations/connectors/source-recurly/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "1.0.0" +version = "1.0.1" name = "source-recurly" description = "Source implementation for Recurly." 
authors = [ "Airbyte ",] diff --git a/docs/integrations/sources/recurly.md b/docs/integrations/sources/recurly.md index 66bc7eee0bb3..2e31f5d9c71b 100644 --- a/docs/integrations/sources/recurly.md +++ b/docs/integrations/sources/recurly.md @@ -64,6 +64,7 @@ We recommend creating a restricted, read-only key specifically for Airbyte acces | Version | Date | Pull Request | Subject | |:--------|:-----------| :--------------------------------------------------------| :--------------------------------------------------------------------------------------- | +| 1.0.1 | 2024-03-05 | [35828](https://github.com/airbytehq/airbyte/pull/35828) | Bump version to unarchive supportLevel in Cloud productionDB | | 1.0.0 | 2024-03-01 | [35763](https://github.com/airbytehq/airbyte/pull/35763) | Re-introduce updated connector to catalog from archival repo | | 0.5.0 | 2024-02-22 | [34622](https://github.com/airbytehq/airbyte/pull/34622) | Republish connector using base image/Poetry, update schemas | | 0.4.1 | 2022-06-10 | [13685](https://github.com/airbytehq/airbyte/pull/13685) | Add state_checkpoint_interval to Recurly stream | From 1caa79b3bf62ebf0a0c311c7f0e5488c6867f73b Mon Sep 17 00:00:00 2001 From: Evan Tahler Date: Tue, 5 Mar 2024 10:57:15 -0800 Subject: [PATCH 080/172] docs about how to unarchive a connector (#35825) --- docs/integrations/connector-support-levels.md | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/docs/integrations/connector-support-levels.md b/docs/integrations/connector-support-levels.md index f32a1619f663..a46ae6a9f23c 100644 --- a/docs/integrations/connector-support-levels.md +++ b/docs/integrations/connector-support-levels.md @@ -64,5 +64,10 @@ necessary to ensure that the Connector Catalog maintains a minimum level of qual Archived connectors will not receive any further updates or support from the Airbyte team. Archived connectors remain source-available in the [`airbytehq/connector-archive`](https://github.com/airbytehq/connector-archive) repository on -GitHub. If you wish to take over the maintenance of an archived connector, please open a Github -Discussion. +GitHub. + +If you wish to take over the maintenance of an archived connector, please open a Github Discussion. +For API Sources (python), updating the connector to the latest version of the +[CDK](/connector-development/cdk-python/) and ensuring that the connector successfully passes the +[Connector Acceptance Tests](/connector-development/testing-connectors/connector-acceptance-tests-reference) +is the start to the un-archiving process. 
From 16c00da3ee159be64b5c3a87fb2d5eac0fb93e11 Mon Sep 17 00:00:00 2001 From: clnoll Date: Tue, 5 Mar 2024 19:53:22 +0000 Subject: [PATCH 081/172] =?UTF-8?q?=F0=9F=A4=96=20Bump=20patch=20version?= =?UTF-8?q?=20of=20Python=20CDK?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- airbyte-cdk/python/.bumpversion.cfg | 2 +- airbyte-cdk/python/CHANGELOG.md | 3 +++ airbyte-cdk/python/Dockerfile | 4 ++-- airbyte-cdk/python/setup.py | 2 +- 4 files changed, 7 insertions(+), 4 deletions(-) diff --git a/airbyte-cdk/python/.bumpversion.cfg b/airbyte-cdk/python/.bumpversion.cfg index 6c6370252a71..21636b709b85 100644 --- a/airbyte-cdk/python/.bumpversion.cfg +++ b/airbyte-cdk/python/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.67.2 +current_version = 0.67.3 commit = False [bumpversion:file:setup.py] diff --git a/airbyte-cdk/python/CHANGELOG.md b/airbyte-cdk/python/CHANGELOG.md index 455aadc3c05d..33aea67b14fa 100644 --- a/airbyte-cdk/python/CHANGELOG.md +++ b/airbyte-cdk/python/CHANGELOG.md @@ -1,5 +1,8 @@ # Changelog +## 0.67.3 +CDK: upgrade pyarrow + ## 0.67.2 File CDK: Update parquet parser to handle values that resolve to None diff --git a/airbyte-cdk/python/Dockerfile b/airbyte-cdk/python/Dockerfile index ad1834705398..f500298752d1 100644 --- a/airbyte-cdk/python/Dockerfile +++ b/airbyte-cdk/python/Dockerfile @@ -10,7 +10,7 @@ RUN apk --no-cache upgrade \ && apk --no-cache add tzdata build-base # install airbyte-cdk -RUN pip install --prefix=/install airbyte-cdk==0.67.2 +RUN pip install --prefix=/install airbyte-cdk==0.67.3 # build a clean environment FROM base @@ -32,5 +32,5 @@ ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] # needs to be the same as CDK -LABEL io.airbyte.version=0.67.2 +LABEL io.airbyte.version=0.67.3 LABEL io.airbyte.name=airbyte/source-declarative-manifest diff --git a/airbyte-cdk/python/setup.py b/airbyte-cdk/python/setup.py index 0757568a7505..18eb1cd7b2eb 100644 --- a/airbyte-cdk/python/setup.py +++ b/airbyte-cdk/python/setup.py @@ -36,7 +36,7 @@ name="airbyte-cdk", # The version of the airbyte-cdk package is used at runtime to validate manifests. That validation must be # updated if our semver format changes such as using release candidate versions. 
- version="0.67.2", + version="0.67.3", description="A framework for writing Airbyte Connectors.", long_description=README, long_description_content_type="text/markdown", From 8b83f14283cff96bf5a331695c55937bd131b434 Mon Sep 17 00:00:00 2001 From: Edward Gao Date: Tue, 5 Mar 2024 11:54:06 -0800 Subject: [PATCH 082/172] Destination postgres: upgrade cdk (#35528) Signed-off-by: Gireesh Sreepathi Co-authored-by: Gireesh Sreepathi --- .../build.gradle | 3 +- .../metadata.yaml | 2 +- .../destination-postgres/build.gradle | 3 +- .../destination-postgres/metadata.yaml | 2 +- .../postgres/PostgresDestination.java | 14 ++-- .../PostgresDestinationHandler.java | 14 +++- .../postgres/typing_deduping/PostgresState.kt | 17 +++++ .../PostgresSqlGeneratorIntegrationTest.java | 18 ++--- .../postgres/PostgresContainerFactory.java | 17 ++--- .../postgres/PostgresTestDatabase.java | 29 +++++---- docs/integrations/destinations/postgres.md | 65 ++++++++++--------- 11 files changed, 103 insertions(+), 81 deletions(-) create mode 100644 airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresState.kt diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/build.gradle b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/build.gradle index 60e06e23de2d..9277c61ead5f 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/build.gradle @@ -1,9 +1,10 @@ plugins { id 'airbyte-java-connector' + id 'org.jetbrains.kotlin.jvm' version '1.9.22' } airbyteJavaConnector { - cdkVersionRequired = '0.23.2' + cdkVersionRequired = '0.23.11' features = ['db-destinations', 'typing-deduping', 'datastore-postgres'] useLocalCdk = false } diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/metadata.yaml b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/metadata.yaml index 29a9cd9b6dde..f748f77282b3 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/metadata.yaml +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/metadata.yaml @@ -2,7 +2,7 @@ data: connectorSubtype: database connectorType: destination definitionId: 25c5221d-dce2-4163-ade9-739ef790f503 - dockerImageTag: 2.0.2 + dockerImageTag: 2.0.3 dockerRepository: airbyte/destination-postgres-strict-encrypt documentationUrl: https://docs.airbyte.com/integrations/destinations/postgres githubIssueLabel: destination-postgres diff --git a/airbyte-integrations/connectors/destination-postgres/build.gradle b/airbyte-integrations/connectors/destination-postgres/build.gradle index ab746b991351..40d3cd9579f4 100644 --- a/airbyte-integrations/connectors/destination-postgres/build.gradle +++ b/airbyte-integrations/connectors/destination-postgres/build.gradle @@ -1,9 +1,10 @@ plugins { id 'airbyte-java-connector' + id 'org.jetbrains.kotlin.jvm' version '1.9.22' } airbyteJavaConnector { - cdkVersionRequired = '0.23.2' + cdkVersionRequired = '0.23.11' features = ['db-destinations', 'datastore-postgres', 'typing-deduping'] useLocalCdk = false } diff --git a/airbyte-integrations/connectors/destination-postgres/metadata.yaml b/airbyte-integrations/connectors/destination-postgres/metadata.yaml index 9be4fbc5643f..94a27c50b032 100644 --- a/airbyte-integrations/connectors/destination-postgres/metadata.yaml +++ 
b/airbyte-integrations/connectors/destination-postgres/metadata.yaml @@ -5,7 +5,7 @@ data: connectorSubtype: database connectorType: destination definitionId: 25c5221d-dce2-4163-ade9-739ef790f503 - dockerImageTag: 2.0.2 + dockerImageTag: 2.0.3 dockerRepository: airbyte/destination-postgres documentationUrl: https://docs.airbyte.com/integrations/destinations/postgres githubIssueLabel: destination-postgres diff --git a/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresDestination.java b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresDestination.java index 8c5f92aa3e7f..5e7cfa265968 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresDestination.java +++ b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresDestination.java @@ -26,8 +26,9 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.destination.postgres.typing_deduping.PostgresDestinationHandler; import io.airbyte.integrations.destination.postgres.typing_deduping.PostgresSqlGenerator; -import java.io.UnsupportedEncodingException; +import io.airbyte.integrations.destination.postgres.typing_deduping.PostgresState; import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; import java.time.Duration; import java.util.HashMap; import java.util.Map; @@ -99,12 +100,7 @@ public JsonNode toJdbcConfig(final JsonNode config) { String encodedDatabase = config.get(JdbcUtils.DATABASE_KEY).asText(); if (encodedDatabase != null) { - try { - encodedDatabase = URLEncoder.encode(encodedDatabase, "UTF-8"); - } catch (final UnsupportedEncodingException e) { - // Should never happen - e.printStackTrace(); - } + encodedDatabase = URLEncoder.encode(encodedDatabase, StandardCharsets.UTF_8); } final String jdbcUrl = String.format("jdbc:postgresql://%s:%s/%s?", config.get(JdbcUtils.HOST_KEY).asText(), @@ -133,8 +129,8 @@ protected JdbcSqlGenerator getSqlGenerator() { } @Override - protected JdbcDestinationHandler getDestinationHandler(String databaseName, JdbcDatabase database) { - return new PostgresDestinationHandler(databaseName, database); + protected JdbcDestinationHandler getDestinationHandler(String databaseName, JdbcDatabase database, String rawTableSchema) { + return new PostgresDestinationHandler(databaseName, database, rawTableSchema); } @Override diff --git a/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresDestinationHandler.java b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresDestinationHandler.java index 21cc549b3d38..ac235039aae1 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresDestinationHandler.java +++ b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresDestinationHandler.java @@ -4,6 +4,7 @@ package io.airbyte.integrations.destination.postgres.typing_deduping; +import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.cdk.db.jdbc.JdbcDatabase; import io.airbyte.cdk.integrations.destination.jdbc.typing_deduping.JdbcDestinationHandler; import 
io.airbyte.integrations.base.destination.typing_deduping.AirbyteProtocolType; @@ -12,11 +13,12 @@ import io.airbyte.integrations.base.destination.typing_deduping.Struct; import io.airbyte.integrations.base.destination.typing_deduping.Union; import io.airbyte.integrations.base.destination.typing_deduping.UnsupportedOneOf; +import org.jooq.SQLDialect; -public class PostgresDestinationHandler extends JdbcDestinationHandler { +public class PostgresDestinationHandler extends JdbcDestinationHandler<PostgresState> { - public PostgresDestinationHandler(String databaseName, JdbcDatabase jdbcDatabase) { - super(databaseName, jdbcDatabase); + public PostgresDestinationHandler(String databaseName, JdbcDatabase jdbcDatabase, String rawTableSchema) { + super(databaseName, jdbcDatabase, rawTableSchema, SQLDialect.POSTGRES); } @Override @@ -33,6 +35,12 @@ protected String toJdbcTypeName(AirbyteType airbyteType) { }; } + @Override + protected PostgresState toDestinationState(JsonNode json) { + return new PostgresState( + json.hasNonNull("needsSoftReset") && json.get("needsSoftReset").asBoolean()); + } + private String toJdbcTypeName(final AirbyteProtocolType airbyteProtocolType) { return switch (airbyteProtocolType) { case STRING -> "varchar"; diff --git a/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresState.kt b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresState.kt new file mode 100644 index 000000000000..b9380fe033d6 --- /dev/null +++ b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresState.kt @@ -0,0 +1,17 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved.
+ */ + +package io.airbyte.integrations.destination.postgres.typing_deduping + +import io.airbyte.integrations.base.destination.typing_deduping.migrators.MinimumDestinationState + +data class PostgresState(val needsSoftReset: Boolean) : MinimumDestinationState { + override fun needsSoftReset(): Boolean { + return needsSoftReset + } + + override fun <T : MinimumDestinationState> withSoftReset(needsSoftReset: Boolean): T { + return copy(needsSoftReset = needsSoftReset) as T + } +} diff --git a/airbyte-integrations/connectors/destination-postgres/src/test-integration/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresSqlGeneratorIntegrationTest.java b/airbyte-integrations/connectors/destination-postgres/src/test-integration/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresSqlGeneratorIntegrationTest.java index 6efac136e4c3..23cbda8a5b05 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/test-integration/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresSqlGeneratorIntegrationTest.java +++ b/airbyte-integrations/connectors/destination-postgres/src/test-integration/java/io/airbyte/integrations/destination/postgres/typing_deduping/PostgresSqlGeneratorIntegrationTest.java @@ -16,7 +16,7 @@ import io.airbyte.cdk.integrations.destination.jdbc.typing_deduping.JdbcSqlGenerator; import io.airbyte.cdk.integrations.standardtest.destination.typing_deduping.JdbcSqlGeneratorIntegrationTest; import io.airbyte.integrations.base.destination.typing_deduping.DestinationHandler; -import io.airbyte.integrations.base.destination.typing_deduping.DestinationInitialState; +import io.airbyte.integrations.base.destination.typing_deduping.DestinationInitialStatus; import io.airbyte.integrations.base.destination.typing_deduping.Sql; import io.airbyte.integrations.destination.postgres.PostgresDestination; import io.airbyte.integrations.destination.postgres.PostgresSQLNameTransformer; @@ -31,7 +31,7 @@ import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; -public class PostgresSqlGeneratorIntegrationTest extends JdbcSqlGeneratorIntegrationTest { +public class PostgresSqlGeneratorIntegrationTest extends JdbcSqlGeneratorIntegrationTest<PostgresState> { private static PostgresTestDatabase testContainer; private static String databaseName; @@ -75,8 +75,8 @@ protected JdbcSqlGenerator getSqlGenerator() { } @Override - protected DestinationHandler getDestinationHandler() { - return new PostgresDestinationHandler(databaseName, database); + protected DestinationHandler<PostgresState> getDestinationHandler() { + return new PostgresDestinationHandler(databaseName, database, namespace); } @Override @@ -95,11 +95,11 @@ public void testCreateTableIncremental() throws Exception { final Sql sql = generator.createTable(incrementalDedupStream, "", false); destinationHandler.execute(sql); - List<DestinationInitialState> initialStates = destinationHandler.gatherInitialState(List.of(incrementalDedupStream)); - assertEquals(1, initialStates.size()); - final DestinationInitialState initialState = initialStates.getFirst(); - assertTrue(initialState.isFinalTablePresent()); - assertFalse(initialState.isSchemaMismatch()); + List<DestinationInitialStatus<PostgresState>> initialStatuses = destinationHandler.gatherInitialState(List.of(incrementalDedupStream)); + assertEquals(1, initialStatuses.size()); + final DestinationInitialStatus<PostgresState> initialStatus = initialStatuses.getFirst(); + assertTrue(initialStatus.isFinalTablePresent()); + assertFalse(initialStatus.isSchemaMismatch()); } } diff --git
a/airbyte-integrations/connectors/destination-postgres/src/testFixtures/java/io/airbyte/integrations/destination/postgres/PostgresContainerFactory.java b/airbyte-integrations/connectors/destination-postgres/src/testFixtures/java/io/airbyte/integrations/destination/postgres/PostgresContainerFactory.java index 60e588214f72..f794ce20fc30 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/testFixtures/java/io/airbyte/integrations/destination/postgres/PostgresContainerFactory.java +++ b/airbyte-integrations/connectors/destination-postgres/src/testFixtures/java/io/airbyte/integrations/destination/postgres/PostgresContainerFactory.java @@ -26,7 +26,7 @@ protected PostgreSQLContainer<?> createNewContainer(DockerImageName imageName) { /** * Apply the postgresql.conf file that we've packaged as a resource. */ - public void withConf(PostgreSQLContainer<?> container) { + public static void withConf(PostgreSQLContainer<?> container) { container .withCopyFileToContainer( MountableFile.forClasspathResource("postgresql.conf"), @@ -37,21 +37,14 @@ public void withConf(PostgreSQLContainer<?> container) { /** * Create a new network and bind it to the container. */ - public void withNetwork(PostgreSQLContainer<?> container) { + public static void withNetwork(PostgreSQLContainer<?> container) { container.withNetwork(Network.newNetwork()); } - /** - * Configure postgres with wal_level=logical. - */ - public void withWalLevelLogical(PostgreSQLContainer<?> container) { - container.withCommand("postgres -c wal_level=logical"); - } - /** * Generate SSL certificates and tell postgres to enable SSL and use them. */ - public void withCert(PostgreSQLContainer<?> container) { + public static void withCert(PostgreSQLContainer<?> container) { container.start(); String[] commands = { "psql -U test -c \"CREATE USER postgres WITH PASSWORD 'postgres';\"", @@ -97,7 +90,7 @@ public void withCert(PostgreSQLContainer<?> container) { /** * Tell postgres to enable SSL. */ - public void withSSL(PostgreSQLContainer<?> container) { + public static void withSSL(PostgreSQLContainer<?> container) { container.withCommand("postgres " + "-c ssl=on " + "-c ssl_cert_file=/var/lib/postgresql/server.crt " + @@ -107,7 +100,7 @@ public void withSSL(PostgreSQLContainer<?> container) { /** * Configure postgres with client_encoding=sql_ascii.
*/ - public void withASCII(PostgreSQLContainer<?> container) { + public static void withASCII(PostgreSQLContainer<?> container) { container.withCommand("postgres -c client_encoding=sql_ascii"); } diff --git a/airbyte-integrations/connectors/destination-postgres/src/testFixtures/java/io/airbyte/integrations/destination/postgres/PostgresTestDatabase.java b/airbyte-integrations/connectors/destination-postgres/src/testFixtures/java/io/airbyte/integrations/destination/postgres/PostgresTestDatabase.java index 31fb23b9fa79..6849af062f50 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/testFixtures/java/io/airbyte/integrations/destination/postgres/PostgresTestDatabase.java +++ b/airbyte-integrations/connectors/destination-postgres/src/testFixtures/java/io/airbyte/integrations/destination/postgres/PostgresTestDatabase.java @@ -7,11 +7,13 @@ import com.google.common.collect.ImmutableMap; import io.airbyte.cdk.db.factory.DatabaseDriver; import io.airbyte.cdk.db.jdbc.JdbcUtils; +import io.airbyte.cdk.testutils.ContainerFactory.NamedContainerModifier; import io.airbyte.cdk.testutils.TestDatabase; import io.airbyte.commons.json.Jsons; import java.io.IOException; import java.io.UncheckedIOException; import java.util.List; +import java.util.function.Consumer; import java.util.stream.Stream; import org.jooq.SQLDialect; import org.testcontainers.containers.PostgreSQLContainer; @@ -39,27 +41,30 @@ private BaseImage(String reference) { } - public static enum ContainerModifier { + public enum ContainerModifier implements NamedContainerModifier<PostgreSQLContainer<?>> { - ASCII("withASCII"), - CONF("withConf"), - NETWORK("withNetwork"), - SSL("withSSL"), - WAL_LEVEL_LOGICAL("withWalLevelLogical"), - CERT("withCert"), + ASCII(PostgresContainerFactory::withASCII), + CONF(PostgresContainerFactory::withConf), + NETWORK(PostgresContainerFactory::withNetwork), + SSL(PostgresContainerFactory::withSSL), + CERT(PostgresContainerFactory::withCert), ; - private String methodName; + private Consumer<PostgreSQLContainer<?>> modifer; - private ContainerModifier(String methodName) { - this.methodName = methodName; + private ContainerModifier(final Consumer<PostgreSQLContainer<?>> modifer) { + this.modifer = modifer; + } + + @Override + public Consumer<PostgreSQLContainer<?>> modifier() { + return modifer; + } } static public PostgresTestDatabase in(BaseImage baseImage, ContainerModifier...
modifiers) { - String[] methodNames = Stream.of(modifiers).map(im -> im.methodName).toList().toArray(new String[0]); - final var container = new PostgresContainerFactory().shared(baseImage.reference, methodNames); + final var container = new PostgresContainerFactory().shared(baseImage.reference, modifiers); return new PostgresTestDatabase(container).initialized(); } diff --git a/docs/integrations/destinations/postgres.md b/docs/integrations/destinations/postgres.md index 638747a410c3..bc49991d756f 100644 --- a/docs/integrations/destinations/postgres.md +++ b/docs/integrations/destinations/postgres.md @@ -191,35 +191,36 @@ Now that you have set up the Postgres destination connector, check out the follo ## Changelog -| Version | Date | Pull Request | Subject | -|:--------|:-----------|:-------------------------------------------------------------|:----------------------------------------------------------------------------------------------------| -| 2.0.2 | 2024-03-01 | [\#35760](https://github.com/airbytehq/airbyte/pull/35760) | Mark as certified, add PSQL exception to deinterpolator | -| 2.0.1 | 2024-02-22 | [\#35385](https://github.com/airbytehq/airbyte/pull/35385) | Upgrade CDK to 0.23.0; Gathering required initial state upfront | -| 2.0.0 | 2024-02-09 | [\#35042](https://github.com/airbytehq/airbyte/pull/35042) | GA release V2 destinations format. | -| 0.6.3 | 2024-02-06 | [\#34891](https://github.com/airbytehq/airbyte/pull/34891) | Remove varchar limit, use system defaults | -| 0.6.2 | 2024-01-30 | [\#34683](https://github.com/airbytehq/airbyte/pull/34683) | CDK Upgrade 0.16.3; Fix dependency mismatches in slf4j lib | -| 0.6.1 | 2024-01-29 | [\#34630](https://github.com/airbytehq/airbyte/pull/34630) | CDK Upgrade; Use lowercase raw table in T+D queries. 
| -| 0.6.0 | 2024-01-19 | [\#34372](https://github.com/airbytehq/airbyte/pull/34372) | Add dv2 flag in spec | -| 0.5.5 | 2024-01-18 | [\#34236](https://github.com/airbytehq/airbyte/pull/34236) | Upgrade CDK to 0.13.1; Add indexes in raw table for query optimization | -| 0.5.4 | 2024-01-11 | [\#34177](https://github.com/airbytehq/airbyte/pull/34177) | Add code for DV2 beta (no user-visible changes) | -| 0.5.3 | 2024-01-10 | [\#34135](https://github.com/airbytehq/airbyte/pull/34135) | Use published CDK missed in previous release | -| 0.5.2 | 2024-01-08 | [\#33875](https://github.com/airbytehq/airbyte/pull/33875) | Update CDK to get Tunnel heartbeats feature | -| 0.5.1 | 2024-01-04 | [\#33873](https://github.com/airbytehq/airbyte/pull/33873) | Install normalization to enable DV2 beta | -| 0.5.0 | 2023-12-18 | [\#33507](https://github.com/airbytehq/airbyte/pull/33507) | Upgrade to latest CDK; Fix DATs and tests | -| 0.4.0 | 2023-06-27 | [\#27781](https://github.com/airbytehq/airbyte/pull/27781) | License Update: Elv2 | -| 0.3.27 | 2023-04-04 | [\#24604](https://github.com/airbytehq/airbyte/pull/24604) | Support for destination checkpointing | -| 0.3.26 | 2022-09-27 | [\#17299](https://github.com/airbytehq/airbyte/pull/17299) | Improve error handling for strict-encrypt postgres destination | -| 0.3.24 | 2022-09-08 | [\#16046](https://github.com/airbytehq/airbyte/pull/16046) | Fix missing database name URL Encoding | -| 0.3.23 | 2022-07-18 | [\#16260](https://github.com/airbytehq/airbyte/pull/16260) | Prevent traffic going on an unsecured channel in strict-encryption version of destination postgres | -| 0.3.22 | 2022-07-18 | [\#13840](https://github.com/airbytehq/airbyte/pull/13840) | Added the ability to connect using different SSL modes and SSL certificates | -| 0.3.21 | 2022-07-06 | [\#14479](https://github.com/airbytehq/airbyte/pull/14479) | Publish amd64 and arm64 versions of the connector | -| 0.3.20 | 2022-05-17 | [\#12820](https://github.com/airbytehq/airbyte/pull/12820) | Improved 'check' operation performance | -| 0.3.19 | 2022-04-25 | [\#12195](https://github.com/airbytehq/airbyte/pull/12195) | Add support for additional JDBC URL Params input | -| 0.3.18 | 2022-04-12 | [\#11729](https://github.com/airbytehq/airbyte/pull/11514) | Bump mina-sshd from 2.7.0 to 2.8.0 | -| 0.3.17 | 2022-04-05 | [\#11729](https://github.com/airbytehq/airbyte/pull/11729) | Fixed bug with dashes in schema name | -| 0.3.15 | 2022-02-25 | [\#10421](https://github.com/airbytehq/airbyte/pull/10421) | Refactor JDBC parameters handling | -| 0.3.14 | 2022-02-14 | [\#10256](https://github.com/airbytehq/airbyte/pull/10256) | (unpublished) Add `-XX:+ExitOnOutOfMemoryError` JVM option | -| 0.3.13 | 2021-12-01 | [\#8371](https://github.com/airbytehq/airbyte/pull/8371) | Fixed incorrect handling "\n" in ssh key | -| 0.3.12 | 2021-11-08 | [\#7719](https://github.com/airbytehq/airbyte/pull/7719) | Improve handling of wide rows by buffering records based on their byte size rather than their count | -| 0.3.11 | 2021-09-07 | [\#5743](https://github.com/airbytehq/airbyte/pull/5743) | Add SSH Tunnel support | -| 0.3.10 | 2021-08-11 | [\#5336](https://github.com/airbytehq/airbyte/pull/5336) | Destination Postgres: fix \u0000\(NULL\) value processing | +| Version | Date | Pull Request | Subject | +|:--------|:-----------|:-----------------------------------------------------------|:----------------------------------------------------------------------------------------------------| +| 2.0.3 | 2024-03-01 | 
[\#35528](https://github.com/airbytehq/airbyte/pull/35528) | Adopt CDK 0.23.11; Use Migration framework | +| 2.0.2 | 2024-03-01 | [\#35760](https://github.com/airbytehq/airbyte/pull/35760) | Mark as certified, add PSQL exception to deinterpolator | +| 2.0.1 | 2024-02-22 | [\#35385](https://github.com/airbytehq/airbyte/pull/35385) | Upgrade CDK to 0.23.0; Gathering required initial state upfront | +| 2.0.0 | 2024-02-09 | [\#35042](https://github.com/airbytehq/airbyte/pull/35042) | GA release V2 destinations format. | +| 0.6.3 | 2024-02-06 | [\#34891](https://github.com/airbytehq/airbyte/pull/34891) | Remove varchar limit, use system defaults | +| 0.6.2 | 2024-01-30 | [\#34683](https://github.com/airbytehq/airbyte/pull/34683) | CDK Upgrade 0.16.3; Fix dependency mismatches in slf4j lib | +| 0.6.1 | 2024-01-29 | [\#34630](https://github.com/airbytehq/airbyte/pull/34630) | CDK Upgrade; Use lowercase raw table in T+D queries. | +| 0.6.0 | 2024-01-19 | [\#34372](https://github.com/airbytehq/airbyte/pull/34372) | Add dv2 flag in spec | +| 0.5.5 | 2024-01-18 | [\#34236](https://github.com/airbytehq/airbyte/pull/34236) | Upgrade CDK to 0.13.1; Add indexes in raw table for query optimization | +| 0.5.4 | 2024-01-11 | [\#34177](https://github.com/airbytehq/airbyte/pull/34177) | Add code for DV2 beta (no user-visible changes) | +| 0.5.3 | 2024-01-10 | [\#34135](https://github.com/airbytehq/airbyte/pull/34135) | Use published CDK missed in previous release | +| 0.5.2 | 2024-01-08 | [\#33875](https://github.com/airbytehq/airbyte/pull/33875) | Update CDK to get Tunnel heartbeats feature | +| 0.5.1 | 2024-01-04 | [\#33873](https://github.com/airbytehq/airbyte/pull/33873) | Install normalization to enable DV2 beta | +| 0.5.0 | 2023-12-18 | [\#33507](https://github.com/airbytehq/airbyte/pull/33507) | Upgrade to latest CDK; Fix DATs and tests | +| 0.4.0 | 2023-06-27 | [\#27781](https://github.com/airbytehq/airbyte/pull/27781) | License Update: Elv2 | +| 0.3.27 | 2023-04-04 | [\#24604](https://github.com/airbytehq/airbyte/pull/24604) | Support for destination checkpointing | +| 0.3.26 | 2022-09-27 | [\#17299](https://github.com/airbytehq/airbyte/pull/17299) | Improve error handling for strict-encrypt postgres destination | +| 0.3.24 | 2022-09-08 | [\#16046](https://github.com/airbytehq/airbyte/pull/16046) | Fix missing database name URL Encoding | +| 0.3.23 | 2022-07-18 | [\#16260](https://github.com/airbytehq/airbyte/pull/16260) | Prevent traffic going on an unsecured channel in strict-encryption version of destination postgres | +| 0.3.22 | 2022-07-18 | [\#13840](https://github.com/airbytehq/airbyte/pull/13840) | Added the ability to connect using different SSL modes and SSL certificates | +| 0.3.21 | 2022-07-06 | [\#14479](https://github.com/airbytehq/airbyte/pull/14479) | Publish amd64 and arm64 versions of the connector | +| 0.3.20 | 2022-05-17 | [\#12820](https://github.com/airbytehq/airbyte/pull/12820) | Improved 'check' operation performance | +| 0.3.19 | 2022-04-25 | [\#12195](https://github.com/airbytehq/airbyte/pull/12195) | Add support for additional JDBC URL Params input | +| 0.3.18 | 2022-04-12 | [\#11729](https://github.com/airbytehq/airbyte/pull/11514) | Bump mina-sshd from 2.7.0 to 2.8.0 | +| 0.3.17 | 2022-04-05 | [\#11729](https://github.com/airbytehq/airbyte/pull/11729) | Fixed bug with dashes in schema name | +| 0.3.15 | 2022-02-25 | [\#10421](https://github.com/airbytehq/airbyte/pull/10421) | Refactor JDBC parameters handling | +| 0.3.14 | 2022-02-14 | 
[\#10256](https://github.com/airbytehq/airbyte/pull/10256) | (unpublished) Add `-XX:+ExitOnOutOfMemoryError` JVM option | +| 0.3.13 | 2021-12-01 | [\#8371](https://github.com/airbytehq/airbyte/pull/8371) | Fixed incorrect handling "\n" in ssh key | +| 0.3.12 | 2021-11-08 | [\#7719](https://github.com/airbytehq/airbyte/pull/7719) | Improve handling of wide rows by buffering records based on their byte size rather than their count | +| 0.3.11 | 2021-09-07 | [\#5743](https://github.com/airbytehq/airbyte/pull/5743) | Add SSH Tunnel support | +| 0.3.10 | 2021-08-11 | [\#5336](https://github.com/airbytehq/airbyte/pull/5336) | Destination Postgres: fix \u0000\(NULL\) value processing | From 58bde70e0f9b59d2446a10041470e425a8ed8e71 Mon Sep 17 00:00:00 2001 From: Alexandre Girard Date: Tue, 5 Mar 2024 13:32:45 -0800 Subject: [PATCH 083/172] Do not build java base image when publishing python cdk --- tools/integrations/manage.sh | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tools/integrations/manage.sh b/tools/integrations/manage.sh index 1133e356cccb..5f9b2cc2e747 100755 --- a/tools/integrations/manage.sh +++ b/tools/integrations/manage.sh @@ -235,7 +235,9 @@ cmd_publish() { do echo "building base images for $arch" docker buildx build -t airbyte/integration-base:dev --platform $arch --load airbyte-integrations/bases/base - docker buildx build -t airbyte/integration-base-java:dev --platform $arch --load airbyte-integrations/bases/base-java + if [ "$path" != "airbyte-cdk/python" ]; then + docker buildx build -t airbyte/integration-base-java:dev --platform $arch --load airbyte-integrations/bases/base-java + fi # For a short while (https://github.com/airbytehq/airbyte/pull/25034), destinations rely on the normalization image to build # Thanks to gradle, destinstaions which need normalization will already have built base-normalization's "build" artifacts From a871899540a4bfc53a57311b0805c950c6ed9ec7 Mon Sep 17 00:00:00 2001 From: Alexandre Girard Date: Tue, 5 Mar 2024 13:35:11 -0800 Subject: [PATCH 084/172] Revert "Do not build java base image when publishing python cdk" This reverts commit 58bde70e0f9b59d2446a10041470e425a8ed8e71. 
--- tools/integrations/manage.sh | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/tools/integrations/manage.sh b/tools/integrations/manage.sh index 5f9b2cc2e747..1133e356cccb 100755 --- a/tools/integrations/manage.sh +++ b/tools/integrations/manage.sh @@ -235,9 +235,7 @@ cmd_publish() { do echo "building base images for $arch" docker buildx build -t airbyte/integration-base:dev --platform $arch --load airbyte-integrations/bases/base - if [ "$path" != "airbyte-cdk/python" ]; then - docker buildx build -t airbyte/integration-base-java:dev --platform $arch --load airbyte-integrations/bases/base-java - fi + docker buildx build -t airbyte/integration-base-java:dev --platform $arch --load airbyte-integrations/bases/base-java # For a short while (https://github.com/airbytehq/airbyte/pull/25034), destinations rely on the normalization image to build # Thanks to gradle, destinstaions which need normalization will already have built base-normalization's "build" artifacts From 5f48da9a67edb6576b2aee692d98cdae9495bfe0 Mon Sep 17 00:00:00 2001 From: Alexandre Girard Date: Tue, 5 Mar 2024 16:03:16 -0800 Subject: [PATCH 085/172] [low-code] allow page size to be defined with string interpolation (#35735) Co-authored-by: Dan Lecocq --- .../declarative_component_schema.yaml | 7 +++++- .../models/declarative_component_schema.py | 4 ++-- .../parsers/model_to_component_factory.py | 1 + .../paginators/strategies/page_increment.py | 24 +++++++++++++------ .../test_model_to_component_factory.py | 22 ++++++++++++++++- .../paginators/test_page_increment.py | 18 ++++++++++++-- 6 files changed, 63 insertions(+), 13 deletions(-) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_component_schema.yaml b/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_component_schema.yaml index 69cf2f8d1bbb..55eee7d2fae1 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_component_schema.yaml +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_component_schema.yaml @@ -1719,10 +1719,15 @@ definitions: page_size: title: Page Size description: The number of records to include in each pages. - type: integer + interpolation_context: + - config + anyOf: + - type: integer + - type: string examples: - 100 - "100" + - "{{ config['page_size'] }}" start_from_page: title: Start From Page description: Index of the first page to request. 
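To make the new schema entry concrete, here is a minimal sketch of an interpolated page size, assuming airbyte-cdk 0.68.0 or later; the "page_size" config key is an illustrative name chosen for this example, not one the CDK mandates:

from airbyte_cdk.sources.declarative.requesters.paginators.strategies.page_increment import PageIncrement

# page_size may now be an interpolated string; it is resolved against the
# connector config once, in __post_init__, and must evaluate to an integer.
strategy = PageIncrement(
    config={"page_size": 25},  # user-supplied connector config
    page_size="{{ config['page_size'] }}",
    parameters={},
    start_from_page=1,
    inject_on_first_request=True,
)
assert strategy.get_page_size() == 25

# A value that does not resolve to an integer fails fast at construction
# rather than mid-sync, per the __post_init__ check added below:
#   PageIncrement(config={"page_size": "oops"},
#                 page_size="{{ config['page_size'] }}", parameters={})
#   -> Exception: oops is of type <class 'str'>. Expected <class 'int'>

Resolving the value once at construction time, rather than on every request, keeps get_page_size() cheap and surfaces misconfiguration before any records are read.
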
diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/models/declarative_component_schema.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/models/declarative_component_schema.py index cd79f70ce273..630a0cdd797a 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/models/declarative_component_schema.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/models/declarative_component_schema.py @@ -544,10 +544,10 @@ class OffsetIncrement(BaseModel): class PageIncrement(BaseModel): type: Literal['PageIncrement'] - page_size: Optional[int] = Field( + page_size: Optional[Union[int, str]] = Field( None, description='The number of records to include in each pages.', - examples=[100, '100'], + examples=[100, '100', "{{ config['page_size'] }}"], title='Page Size', ) start_from_page: Optional[int] = Field( diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py index 45a44a97b04b..8f60500d012b 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py @@ -857,6 +857,7 @@ def create_offset_increment(model: OffsetIncrementModel, config: Config, **kwarg def create_page_increment(model: PageIncrementModel, config: Config, **kwargs: Any) -> PageIncrement: return PageIncrement( page_size=model.page_size, + config=config, start_from_page=model.start_from_page or 0, inject_on_first_request=model.inject_on_first_request or False, parameters=model.parameters or {}, diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/strategies/page_increment.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/strategies/page_increment.py index 64216e016eea..d68f573ab1ea 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/strategies/page_increment.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/strategies/page_increment.py @@ -3,10 +3,12 @@ # from dataclasses import InitVar, dataclass -from typing import Any, List, Mapping, Optional +from typing import Any, List, Mapping, Optional, Union import requests +from airbyte_cdk.sources.declarative.interpolation import InterpolatedString from airbyte_cdk.sources.declarative.requesters.paginators.strategies.pagination_strategy import PaginationStrategy +from airbyte_cdk.sources.declarative.types import Config, Record @dataclass @@ -19,13 +21,21 @@ class PageIncrement(PaginationStrategy): start_from_page (int): number of the initial page """ - page_size: Optional[int] + config: Config + page_size: Optional[Union[str, int]] parameters: InitVar[Mapping[str, Any]] start_from_page: int = 0 inject_on_first_request: bool = False - def __post_init__(self, parameters: Mapping[str, Any]): + def __post_init__(self, parameters: Mapping[str, Any]) -> None: self._page = self.start_from_page + if isinstance(self.page_size, int) or (self.page_size is None): + self._page_size = self.page_size + else: + page_size = InterpolatedString(self.page_size, parameters=parameters).eval(self.config) + if not isinstance(page_size, int): + raise Exception(f"{page_size} is of type {type(page_size)}. 
Expected {int}") + self._page_size = page_size @property def initial_token(self) -> Optional[Any]: @@ -33,16 +43,16 @@ def initial_token(self) -> Optional[Any]: return self._page return None - def next_page_token(self, response: requests.Response, last_records: List[Mapping[str, Any]]) -> Optional[Any]: + def next_page_token(self, response: requests.Response, last_records: List[Record]) -> Optional[Any]: # Stop paginating when there are fewer records than the page size or the current page has no records - if (self.page_size and len(last_records) < self.page_size) or len(last_records) == 0: + if (self._page_size and len(last_records) < self._page_size) or len(last_records) == 0: return None else: self._page += 1 return self._page - def reset(self): + def reset(self) -> None: self._page = self.start_from_page def get_page_size(self) -> Optional[int]: - return self.page_size + return self._page_size diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py b/airbyte-cdk/python/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py index c96c19850578..c0eee22f471a 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py @@ -1764,7 +1764,7 @@ def test_create_page_increment(): start_from_page=1, inject_on_first_request=True, ) - expected_strategy = PageIncrement(page_size=10, start_from_page=1, inject_on_first_request=True, parameters={}) + expected_strategy = PageIncrement(page_size=10, start_from_page=1, inject_on_first_request=True, parameters={}, config=input_config) strategy = factory.create_page_increment(model, input_config) @@ -1773,6 +1773,26 @@ def test_create_page_increment(): assert strategy.inject_on_first_request == expected_strategy.inject_on_first_request +def test_create_page_increment_with_interpolated_page_size(): + model = PageIncrementModel( + type="PageIncrement", + page_size="{{ config['page_size'] }}", + start_from_page=1, + inject_on_first_request=True, + ) + config = { + **input_config, + "page_size": 5 + } + expected_strategy = PageIncrement(page_size=5, start_from_page=1, inject_on_first_request=True, parameters={}, config=config) + + strategy = factory.create_page_increment(model, config) + + assert strategy.get_page_size() == expected_strategy.get_page_size() + assert strategy.start_from_page == expected_strategy.start_from_page + assert strategy.inject_on_first_request == expected_strategy.inject_on_first_request + + def test_create_offset_increment(): model = OffsetIncrementModel( type="OffsetIncrement", diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_page_increment.py b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_page_increment.py index 52477fedc21e..42d7995388e7 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_page_increment.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_page_increment.py @@ -18,10 +18,12 @@ pytest.param(2, 0, [{"id": 0}, {"id": 1}], 1, 1, id="test_same_page_size_start_from_1"), pytest.param(3, 0, [{"id": 0}, {"id": 1}], None, 0, id="test_larger_page_size_start_from_0"), pytest.param(None, 0, [], None, 0, id="test_no_page_size"), + pytest.param("2", 0, [{"id": 0}, {"id": 1}], 1, 1, id="test_page_size_from_string"), + pytest.param("{{ config['value'] }}", 0, [{"id": 0}, {"id": 1}], 
1, 1, id="test_page_size_from_config"), ], ) def test_page_increment_paginator_strategy(page_size, start_from, last_records, expected_next_page_token, expected_offset): - paginator_strategy = PageIncrement(page_size, parameters={}, start_from_page=start_from) + paginator_strategy = PageIncrement(page_size=page_size, parameters={}, start_from_page=start_from, config={"value": 2}) assert paginator_strategy._page == start_from response = requests.Response() @@ -38,6 +40,18 @@ def test_page_increment_paginator_strategy(page_size, start_from, last_records, assert start_from == paginator_strategy._page +@pytest.mark.parametrize( + "page_size", + [ + pytest.param("{{ config['value'] }}"), + pytest.param("not-an-integer") + ] +) +def test_page_increment_paginator_strategy_malformed_page_size(page_size): + with pytest.raises(Exception, match=".* is of type . Expected "): + PageIncrement(page_size=page_size, parameters={}, start_from_page=0, config={"value": "not-an-integer"}) + + @pytest.mark.parametrize( "inject_on_first_request, start_from_page, expected_initial_token", [ @@ -50,7 +64,7 @@ def test_page_increment_paginator_strategy_initial_token( inject_on_first_request: bool, start_from_page: int, expected_initial_token: Optional[Any] ): paginator_strategy = PageIncrement( - page_size=20, parameters={}, start_from_page=start_from_page, inject_on_first_request=inject_on_first_request + page_size=20, parameters={}, start_from_page=start_from_page, inject_on_first_request=inject_on_first_request, config={} ) assert paginator_strategy.initial_token == expected_initial_token From 67c918b6fb630afbc25f622f48106cb76445d9ea Mon Sep 17 00:00:00 2001 From: girarda Date: Wed, 6 Mar 2024 00:09:26 +0000 Subject: [PATCH 086/172] =?UTF-8?q?=F0=9F=A4=96=20Bump=20minor=20version?= =?UTF-8?q?=20of=20Python=20CDK?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- airbyte-cdk/python/.bumpversion.cfg | 2 +- airbyte-cdk/python/CHANGELOG.md | 3 +++ airbyte-cdk/python/Dockerfile | 4 ++-- airbyte-cdk/python/setup.py | 2 +- 4 files changed, 7 insertions(+), 4 deletions(-) diff --git a/airbyte-cdk/python/.bumpversion.cfg b/airbyte-cdk/python/.bumpversion.cfg index 21636b709b85..ceea25770ed8 100644 --- a/airbyte-cdk/python/.bumpversion.cfg +++ b/airbyte-cdk/python/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.67.3 +current_version = 0.68.0 commit = False [bumpversion:file:setup.py] diff --git a/airbyte-cdk/python/CHANGELOG.md b/airbyte-cdk/python/CHANGELOG.md index 33aea67b14fa..46b3a2f10cf3 100644 --- a/airbyte-cdk/python/CHANGELOG.md +++ b/airbyte-cdk/python/CHANGELOG.md @@ -1,5 +1,8 @@ # Changelog +## 0.68.0 +low-code: Allow page size to be defined with string interpolation + ## 0.67.3 CDK: upgrade pyarrow diff --git a/airbyte-cdk/python/Dockerfile b/airbyte-cdk/python/Dockerfile index f500298752d1..f1b5c99369bc 100644 --- a/airbyte-cdk/python/Dockerfile +++ b/airbyte-cdk/python/Dockerfile @@ -10,7 +10,7 @@ RUN apk --no-cache upgrade \ && apk --no-cache add tzdata build-base # install airbyte-cdk -RUN pip install --prefix=/install airbyte-cdk==0.67.3 +RUN pip install --prefix=/install airbyte-cdk==0.68.0 # build a clean environment FROM base @@ -32,5 +32,5 @@ ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] # needs to be the same as CDK -LABEL io.airbyte.version=0.67.3 +LABEL io.airbyte.version=0.68.0 LABEL io.airbyte.name=airbyte/source-declarative-manifest diff --git 
a/airbyte-cdk/python/setup.py b/airbyte-cdk/python/setup.py index 18eb1cd7b2eb..3a7c0361d5c6 100644 --- a/airbyte-cdk/python/setup.py +++ b/airbyte-cdk/python/setup.py @@ -36,7 +36,7 @@ name="airbyte-cdk", # The version of the airbyte-cdk package is used at runtime to validate manifests. That validation must be # updated if our semver format changes such as using release candidate versions. - version="0.67.3", + version="0.68.0", description="A framework for writing Airbyte Connectors.", long_description=README, long_description_content_type="text/markdown", From 349faeda48c2afa02366d131b84a8f849f738e13 Mon Sep 17 00:00:00 2001 From: Alexandre Girard Date: Tue, 5 Mar 2024 16:19:04 -0800 Subject: [PATCH 087/172] =?UTF-8?q?=F0=9F=90=9B=20Install=20arm64=20emulat?= =?UTF-8?q?or=20when=20building=20source=5Fdeclarative=5Fmanifest=20(#3583?= =?UTF-8?q?9)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/publish-cdk-command-manually.yml | 4 +--- tools/integrations/manage.sh | 13 ++++++++++--- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/.github/workflows/publish-cdk-command-manually.yml b/.github/workflows/publish-cdk-command-manually.yml index 4f206f609bab..c9b1e36ecdc9 100644 --- a/.github/workflows/publish-cdk-command-manually.yml +++ b/.github/workflows/publish-cdk-command-manually.yml @@ -223,7 +223,7 @@ jobs: uses: actions/checkout@v3 with: repository: airbytehq/airbyte - ref: master + ref: ${{ github.event.inputs.gitref }} - name: Install Java uses: actions/setup-java@v3 with: @@ -245,8 +245,6 @@ jobs: command: | docker login -u ${DOCKER_HUB_USERNAME} -p ${DOCKER_HUB_PASSWORD} ./tools/integrations/manage.sh publish airbyte-cdk/python false - attempt_limit: 3 - attempt_delay: 5000 in # ms - name: Post failure to Slack channel dev-connectors-extensibility if: ${{ failure() }} uses: slackapi/slack-github-action@v1.23.0 diff --git a/tools/integrations/manage.sh b/tools/integrations/manage.sh index 1133e356cccb..7e27f3377148 100755 --- a/tools/integrations/manage.sh +++ b/tools/integrations/manage.sh @@ -231,11 +231,18 @@ cmd_publish() { # Alternative local approach @ https://github.com/docker/buildx/issues/301#issuecomment-755164475 # We need to use the regular docker buildx driver (not docker container) because we need this intermediate contaiers to be available for later build steps + + echo Installing arm64 docker emulation + docker run --privileged --rm tonistiigi/binfmt --install arm64 + for arch in $(echo $build_arch | sed "s/,/ /g") do - echo "building base images for $arch" - docker buildx build -t airbyte/integration-base:dev --platform $arch --load airbyte-integrations/bases/base - docker buildx build -t airbyte/integration-base-java:dev --platform $arch --load airbyte-integrations/bases/base-java + # These images aren't needed for the CDK + if [ "$path" != "airbyte-cdk/python" ]; then + echo "building base images for $arch" + docker buildx build -t airbyte/integration-base-java:dev --platform $arch --load airbyte-integrations/bases/base-java + docker buildx build -t airbyte/integration-base:dev --platform $arch --load airbyte-integrations/bases/base + fi # For a short while (https://github.com/airbytehq/airbyte/pull/25034), destinations rely on the normalization image to build # Thanks to gradle, destinstaions which need normalization will already have built base-normalization's "build" artifacts From 4b3a9bffc6c6eca39bb8c5965671335f2cfd4505 Mon Sep 17 00:00:00 2001 From: girarda Date: Wed, 6 Mar 2024 
00:24:53 +0000 Subject: [PATCH 088/172] =?UTF-8?q?=F0=9F=A4=96=20Bump=20patch=20version?= =?UTF-8?q?=20of=20Python=20CDK?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- airbyte-cdk/python/.bumpversion.cfg | 2 +- airbyte-cdk/python/CHANGELOG.md | 3 +++ airbyte-cdk/python/Dockerfile | 4 ++-- airbyte-cdk/python/setup.py | 2 +- 4 files changed, 7 insertions(+), 4 deletions(-) diff --git a/airbyte-cdk/python/.bumpversion.cfg b/airbyte-cdk/python/.bumpversion.cfg index ceea25770ed8..865bbfae53c1 100644 --- a/airbyte-cdk/python/.bumpversion.cfg +++ b/airbyte-cdk/python/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.68.0 +current_version = 0.68.1 commit = False [bumpversion:file:setup.py] diff --git a/airbyte-cdk/python/CHANGELOG.md b/airbyte-cdk/python/CHANGELOG.md index 46b3a2f10cf3..2c168c8ade77 100644 --- a/airbyte-cdk/python/CHANGELOG.md +++ b/airbyte-cdk/python/CHANGELOG.md @@ -1,5 +1,8 @@ # Changelog +## 0.68.1 +no-op republish of 0.68.0 + ## 0.68.0 low-code: Allow page size to be defined with string interpolation diff --git a/airbyte-cdk/python/Dockerfile b/airbyte-cdk/python/Dockerfile index f1b5c99369bc..dfdf5cd1c2d4 100644 --- a/airbyte-cdk/python/Dockerfile +++ b/airbyte-cdk/python/Dockerfile @@ -10,7 +10,7 @@ RUN apk --no-cache upgrade \ && apk --no-cache add tzdata build-base # install airbyte-cdk -RUN pip install --prefix=/install airbyte-cdk==0.68.0 +RUN pip install --prefix=/install airbyte-cdk==0.68.1 # build a clean environment FROM base @@ -32,5 +32,5 @@ ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] # needs to be the same as CDK -LABEL io.airbyte.version=0.68.0 +LABEL io.airbyte.version=0.68.1 LABEL io.airbyte.name=airbyte/source-declarative-manifest diff --git a/airbyte-cdk/python/setup.py b/airbyte-cdk/python/setup.py index 3a7c0361d5c6..e61fe0aa93b0 100644 --- a/airbyte-cdk/python/setup.py +++ b/airbyte-cdk/python/setup.py @@ -36,7 +36,7 @@ name="airbyte-cdk", # The version of the airbyte-cdk package is used at runtime to validate manifests. That validation must be # updated if our semver format changes such as using release candidate versions. 
- version="0.68.0", + version="0.68.1", description="A framework for writing Airbyte Connectors.", long_description=README, long_description_content_type="text/markdown", From f9e73cf62a87ef741390058efcba24d6953ebd10 Mon Sep 17 00:00:00 2001 From: Rodi Reich Zilberman <867491+rodireich@users.noreply.github.com> Date: Tue, 5 Mar 2024 16:45:09 -0800 Subject: [PATCH 089/172] add configuration for cdc queue size (#35739) --- airbyte-cdk/java/airbyte-cdk/README.md | 1 + .../src/main/resources/version.properties | 2 +- .../cdk/testutils/ContainerFactory.java | 5 +- .../debezium/AirbyteDebeziumHandler.java | 51 ++++++++++++++----- .../connectors/source-mssql/build.gradle | 2 +- .../connectors/source-mssql/metadata.yaml | 2 +- .../initialsync/MssqlInitialLoadHandler.java | 5 +- .../initialsync/MssqlInitialReadUtil.java | 43 ++++++++++++---- .../source-mssql/src/main/resources/spec.json | 9 ++++ .../resources/expected_spec.json | 9 ++++ .../source/mssql/MsSQLTestDatabase.java | 1 + docs/integrations/sources/mssql.md | 7 +-- 12 files changed, 100 insertions(+), 37 deletions(-) diff --git a/airbyte-cdk/java/airbyte-cdk/README.md b/airbyte-cdk/java/airbyte-cdk/README.md index b2c128fc5036..11b7a8941c02 100644 --- a/airbyte-cdk/java/airbyte-cdk/README.md +++ b/airbyte-cdk/java/airbyte-cdk/README.md @@ -166,6 +166,7 @@ MavenLocal debugging steps: | Version | Date | Pull Request | Subject | |:--------|:-----------|:-----------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 0.23.14 | 2024-03-05 | [\#35827](https://github.com/airbytehq/airbyte/pull/35827) | improving the Junit interceptor. | | 0.23.13 | 2024-03-04 | [\#35774](https://github.com/airbytehq/airbyte/pull/35774) | minor changes to the CDK test fixtures. | | 0.23.12 | 2024-03-01 | [\#35767](https://github.com/airbytehq/airbyte/pull/35767) | introducing a timeout for java tests. | | 0.23.11 | 2024-03-01 | [\#35313](https://github.com/airbytehq/airbyte/pull/35313) | Preserve timezone offset in CSV writer for destinations | diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties index ec1b63d34dad..ff6fa0e7b121 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties @@ -1 +1 @@ -version=0.23.13 +version=0.23.14 diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/ContainerFactory.java b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/ContainerFactory.java index d69b60dab21c..0cc697764106 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/ContainerFactory.java +++ b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/ContainerFactory.java @@ -127,8 +127,7 @@ public final C shared(String imageName, List @SuppressWarnings("unchecked") @Deprecated public final C exclusive(String imageName, String... 
methods) { - return exclusive(imageName, - (NamedContainerModifier) Stream.of(methods).map(n -> new NamedContainerModifierImpl(n, resolveModifierByName(n))).toList()); + return exclusive(imageName, Stream.of(methods).map(n -> new NamedContainerModifierImpl(n, resolveModifierByName(n))).toList()); } public final C exclusive(String imageName) { @@ -139,7 +138,7 @@ public final C exclusive(String imageName, NamedContainerModifier... namedCon return exclusive(imageName, List.of(namedContainerModifiers)); } - public final C exclusive(String imageName, List> namedContainerModifiers) { + public final C exclusive(String imageName, List<NamedContainerModifier<C>> namedContainerModifiers) { return (C) createAndStartContainer(DockerImageName.parse(imageName), namedContainerModifiers); } diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/debezium/AirbyteDebeziumHandler.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/debezium/AirbyteDebeziumHandler.java index d3c2cad31622..a0e9a0f7d9e4 100644 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/debezium/AirbyteDebeziumHandler.java +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/debezium/AirbyteDebeziumHandler.java @@ -4,22 +4,11 @@ package io.airbyte.cdk.integrations.debezium; -import static io.airbyte.cdk.integrations.debezium.DebeziumIteratorConstants.SYNC_CHECKPOINT_DURATION; -import static io.airbyte.cdk.integrations.debezium.DebeziumIteratorConstants.SYNC_CHECKPOINT_DURATION_PROPERTY; -import static io.airbyte.cdk.integrations.debezium.DebeziumIteratorConstants.SYNC_CHECKPOINT_RECORDS; -import static io.airbyte.cdk.integrations.debezium.DebeziumIteratorConstants.SYNC_CHECKPOINT_RECORDS_PROPERTY; +import static io.airbyte.cdk.integrations.debezium.DebeziumIteratorConstants.*; import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.cdk.integrations.debezium.internals.AirbyteFileOffsetBackingStore; -import io.airbyte.cdk.integrations.debezium.internals.AirbyteSchemaHistoryStorage; -import io.airbyte.cdk.integrations.debezium.internals.ChangeEventWithMetadata; -import io.airbyte.cdk.integrations.debezium.internals.DebeziumEventConverter; -import io.airbyte.cdk.integrations.debezium.internals.DebeziumPropertiesManager; -import io.airbyte.cdk.integrations.debezium.internals.DebeziumRecordIterator; -import io.airbyte.cdk.integrations.debezium.internals.DebeziumRecordPublisher; -import io.airbyte.cdk.integrations.debezium.internals.DebeziumShutdownProcedure; -import io.airbyte.cdk.integrations.debezium.internals.DebeziumStateDecoratingIterator; +import io.airbyte.cdk.integrations.debezium.internals.*; import io.airbyte.commons.util.AutoCloseableIterator; import io.airbyte.commons.util.AutoCloseableIterators; import io.airbyte.protocol.models.v0.AirbyteMessage; @@ -29,6 +18,8 @@ import io.debezium.engine.ChangeEvent; import io.debezium.engine.DebeziumEngine; import java.time.Duration; +import java.time.Instant; +import java.time.temporal.ChronoUnit; import java.util.Optional; import java.util.concurrent.LinkedBlockingQueue; import org.slf4j.Logger; @@ -71,6 +62,38 @@ public AirbyteDebeziumHandler(final JsonNode config, this.addDbNameToOffsetState = addDbNameToOffsetState; } + class CapacityReportingBlockingQueue<E> extends LinkedBlockingQueue<E> { + + private static Duration REPORT_DURATION = Duration.of(10, ChronoUnit.SECONDS); + private Instant lastReport; + +
CapacityReportingBlockingQueue(final int capacity) { + super(capacity); + } + + private void reportQueueUtilization() { + if (lastReport == null || Duration.between(lastReport, Instant.now()).compareTo(REPORT_DURATION) > 0) { + LOGGER.info("CDC events queue size: {}. remaining {}", this.size(), this.remainingCapacity()); + synchronized (this) { + lastReport = Instant.now(); + } + } + } + + @Override + public void put(final E e) throws InterruptedException { + reportQueueUtilization(); + super.put(e); + } + + @Override + public E poll() { + reportQueueUtilization(); + return super.poll(); + } + + } + public AutoCloseableIterator<AirbyteMessage> getIncrementalIterators(final DebeziumPropertiesManager debeziumPropertiesManager, final DebeziumEventConverter eventConverter, final CdcSavedInfoFetcher cdcSavedInfoFetcher, @@ -85,7 +108,7 @@ public AutoCloseableIterator<AirbyteMessage> getIncrementalIterators(final Debez cdcSavedInfoFetcher.getSavedSchemaHistory(), cdcStateHandler.compressSchemaHistoryForState())) : Optional.empty(); final var publisher = new DebeziumRecordPublisher(debeziumPropertiesManager); - final var queue = new LinkedBlockingQueue<ChangeEvent<String, String>>(queueSize); + final var queue = new CapacityReportingBlockingQueue<ChangeEvent<String, String>>(queueSize); publisher.start(queue, offsetManager, schemaHistoryManager); // handle state machine around pub/sub logic. final AutoCloseableIterator<ChangeEventWithMetadata> eventIterator = new DebeziumRecordIterator<>( diff --git a/airbyte-integrations/connectors/source-mssql/build.gradle b/airbyte-integrations/connectors/source-mssql/build.gradle index bb7f720685cf..f89ed694a0d9 100644 --- a/airbyte-integrations/connectors/source-mssql/build.gradle +++ b/airbyte-integrations/connectors/source-mssql/build.gradle @@ -3,7 +3,7 @@ plugins { } airbyteJavaConnector { - cdkVersionRequired = '0.21.4' + cdkVersionRequired = '0.23.14' features = ['db-sources'] useLocalCdk = false } diff --git a/airbyte-integrations/connectors/source-mssql/metadata.yaml b/airbyte-integrations/connectors/source-mssql/metadata.yaml index 65ebccd83038..430fca185fe0 100644 --- a/airbyte-integrations/connectors/source-mssql/metadata.yaml +++ b/airbyte-integrations/connectors/source-mssql/metadata.yaml @@ -9,7 +9,7 @@ data: connectorSubtype: database connectorType: source definitionId: b5ea17b1-f170-46dc-bc31-cc744ca984c1 - dockerImageTag: 3.7.4 + dockerImageTag: 3.7.5 dockerRepository: airbyte/source-mssql documentationUrl: https://docs.airbyte.com/integrations/sources/mssql githubIssueLabel: source-mssql diff --git a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialLoadHandler.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialLoadHandler.java index 2000e363e87c..2b7b01db0f9b 100644 --- a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialLoadHandler.java +++ b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialLoadHandler.java @@ -40,10 +40,7 @@ import java.sql.SQLException; import java.time.Duration; import java.time.Instant; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import java.util.*; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Function; import java.util.stream.Stream; diff --git
a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialReadUtil.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialReadUtil.java index 46bd00400a50..7aa60d045d68 100644 --- a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialReadUtil.java +++ b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialReadUtil.java @@ -56,15 +56,7 @@ import java.sql.JDBCType; import java.time.Duration; import java.time.Instant; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; +import java.util.*; import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -74,6 +66,8 @@ public class MssqlInitialReadUtil { private static final Logger LOGGER = LoggerFactory.getLogger(MssqlInitialReadUtil.class); + private static final int MIN_QUEUE_SIZE = 1000; + private static final int MAX_QUEUE_SIZE = 10000; public record InitialLoadStreams(List streamsForInitialLoad, Map pairToInitialLoadStatus) { @@ -97,6 +91,8 @@ public static List> getCdcReadIterators(fi final Duration firstRecordWaitTime = RecordWaitTimeUtil.getFirstRecordWaitTime(sourceConfig); final Duration subsequentRecordWaitTime = RecordWaitTimeUtil.getSubsequentRecordWaitTime(sourceConfig); LOGGER.info("First record waiting time: {} seconds", firstRecordWaitTime.getSeconds()); + final int queueSize = getQueueSize(sourceConfig); + LOGGER.info("Queue size: {}", queueSize); // Determine the streams that need to be loaded via primary key sync. final List> initialLoadIterator = new ArrayList<>(); // Construct the initial state for Mssql. 
If there is already existing state, we use that instead @@ -167,7 +163,7 @@ public static List> getCdcReadIterators(fi true, firstRecordWaitTime, subsequentRecordWaitTime, - AirbyteDebeziumHandler.QUEUE_CAPACITY, + queueSize, false); final var propertiesManager = new RelationalDbDebeziumPropertiesManager(getDebeziumProperties(database, catalog, false), sourceConfig, catalog); @@ -339,4 +335,31 @@ public static InitialLoadStreams streamsForInitialOrderedColumnLoad(final StateM pairToInitialLoadStatus); } + private static OptionalInt extractQueueSizeFromConfig(final JsonNode config) { + final JsonNode replicationMethod = config.get("replication_method"); + if (replicationMethod != null && replicationMethod.has("queue_size")) { + final int queueSize = config.get("replication_method").get("queue_size").asInt(); + return OptionalInt.of(queueSize); + } + return OptionalInt.empty(); + } + + public static int getQueueSize(final JsonNode config) { + final OptionalInt sizeFromConfig = extractQueueSizeFromConfig(config); + if (sizeFromConfig.isPresent()) { + final int size = sizeFromConfig.getAsInt(); + if (size < MIN_QUEUE_SIZE) { + LOGGER.warn("Queue size is overridden to {} , which is the min allowed for safety.", + MIN_QUEUE_SIZE); + return MIN_QUEUE_SIZE; + } else if (size > MAX_QUEUE_SIZE) { + LOGGER.warn("Queue size is overridden to {} , which is the max allowed for safety.", + MAX_QUEUE_SIZE); + return MAX_QUEUE_SIZE; + } + return size; + } + return MAX_QUEUE_SIZE; + } + } diff --git a/airbyte-integrations/connectors/source-mssql/src/main/resources/spec.json b/airbyte-integrations/connectors/source-mssql/src/main/resources/spec.json index 7a040718f402..a984211957cc 100644 --- a/airbyte-integrations/connectors/source-mssql/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/source-mssql/src/main/resources/spec.json @@ -148,6 +148,15 @@ "enum": ["Fail sync", "Re-sync data"], "default": "Fail sync", "order": 4 + }, + "queue_size": { + "type": "integer", + "title": "Size of the queue (Advanced)", + "description": "The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful.", + "default": 10000, + "order": 5, + "min": 1000, + "max": 10000 } } }, diff --git a/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/expected_spec.json index 78d7147d7e43..1289b14f7079 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/expected_spec.json +++ b/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/expected_spec.json @@ -148,6 +148,15 @@ "enum": ["Fail sync", "Re-sync data"], "default": "Fail sync", "order": 4 + }, + "queue_size": { + "type": "integer", + "title": "Size of the queue (Advanced)", + "description": "The size of the internal queue. 
diff --git a/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/expected_spec.json index 78d7147d7e43..1289b14f7079 --- a/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/expected_spec.json +++ b/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/expected_spec.json @@ -148,6 +148,15 @@ "enum": ["Fail sync", "Re-sync data"], "default": "Fail sync", "order": 4 + }, + "queue_size": { + "type": "integer", + "title": "Size of the queue (Advanced)", + "description": "The size of the internal queue. This may affect memory consumption and the efficiency of the connector; adjust with care.", + "default": 10000, + "order": 5, + "minimum": 1000, + "maximum": 10000 } } }, diff --git a/airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSQLTestDatabase.java b/airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSQLTestDatabase.java index 76483b0ef961..d2cef1e979c3 100644 --- a/airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSQLTestDatabase.java +++ b/airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSQLTestDatabase.java @@ -59,6 +59,7 @@ public enum ContainerModifier { } + @SuppressWarnings("deprecation") static public MsSQLTestDatabase in(final BaseImage imageName, final ContainerModifier... methods) { final String[] methodNames = Stream.of(methods).map(im -> im.methodName).toList().toArray(new String[0]); final var container = new MsSQLContainerFactory().shared(imageName.reference, methodNames); diff --git a/docs/integrations/sources/mssql.md b/docs/integrations/sources/mssql.md index a384947e78cb..4d377bfb1cf5 100644 --- a/docs/integrations/sources/mssql.md +++ b/docs/integrations/sources/mssql.md @@ -342,9 +342,10 @@ WHERE actor_definition_id ='b5ea17b1-f170-46dc-bc31-cc744ca984c1' AND (configura | Version | Date | Pull Request | Subject | |:--------|:-----------|:------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------| -| 3.7.4 | 2024-02-26 | [35566](https://github.com/airbytehq/airbyte/pull/35566) | Add config to throw an error on invalid CDC position.. | -| 3.7.3 | 2024-02-23 | [35596](https://github.com/airbytehq/airbyte/pull/35596) | Fix a logger issue | -| 3.7.2 | 2024-02-21 | [35368](https://github.com/airbytehq/airbyte/pull/35368) | Change query syntax to make it compatible with Azure SQL Managed Instance. | +| 3.7.5 | 2024-02-29 | [35739](https://github.com/airbytehq/airbyte/pull/35739) | Allow configuring the queue size used for CDC events. | +| 3.7.4 | 2024-02-26 | [35566](https://github.com/airbytehq/airbyte/pull/35566) | Add config to throw an error on invalid CDC position. | +| 3.7.3 | 2024-02-23 | [35596](https://github.com/airbytehq/airbyte/pull/35596) | Fix a logger issue | +| 3.7.2 | 2024-02-21 | [35368](https://github.com/airbytehq/airbyte/pull/35368) | Change query syntax to make it compatible with Azure SQL Managed Instance. | | 3.7.1 | 2024-02-20 | [35405](https://github.com/airbytehq/airbyte/pull/35405) | Change query syntax to make it compatible with Azure Synapse. | | 3.7.0 | 2024-01-30 | [33311](https://github.com/airbytehq/airbyte/pull/33311) | Source mssql with checkpointing initial sync. | | 3.6.1 | 2024-01-26 | [34573](https://github.com/airbytehq/airbyte/pull/34573) | Adopt CDK v0.16.0.
| From d5d340ea009b776704515f10d9791ec2657ad238 Mon Sep 17 00:00:00 2001 From: colesnodgrass Date: Wed, 6 Mar 2024 00:54:06 +0000 Subject: [PATCH 090/172] Bump Airbyte version from 0.50.54 to 0.51.0 --- .bumpversion.cfg | 2 +- gradle.properties | 2 +- run-ab-platform.sh | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 71f24208ea79..343dcac32e7e 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.50.54 +current_version = 0.51.0 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(\-[a-z]+)? diff --git a/gradle.properties b/gradle.properties index d0289ff76045..9fc47a66700d 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,4 +1,4 @@ -VERSION=0.50.54 +VERSION=0.51.0 # NOTE: some of these values are overwritten in CI! # NOTE: if you want to override this for your local machine, set overrides in ~/.gradle/gradle.properties diff --git a/run-ab-platform.sh b/run-ab-platform.sh index 04906fbbf383..172cd5862132 100755 --- a/run-ab-platform.sh +++ b/run-ab-platform.sh @@ -1,6 +1,6 @@ #!/bin/bash -VERSION=0.50.54 +VERSION=0.51.0 # Run away from anything even a little scary set -o nounset # -u exit if a variable is not set set -o errexit # -f exit for any command failure" From b67d16d730f44ded43165935c3a494ee357f2e4b Mon Sep 17 00:00:00 2001 From: Stephane Geneix <147216312+stephane-airbyte@users.noreply.github.com> Date: Tue, 5 Mar 2024 17:12:26 -0800 Subject: [PATCH 091/172] java-CDK fixes to the Junit interceptor (#35827) --- airbyte-cdk/java/airbyte-cdk/README.md | 5 +- .../src/main/resources/version.properties | 2 +- .../LoggingInvocationInterceptor.java | 102 +++++++++++++++--- .../destination/s3/csv/S3CsvWriterTest.java | 6 +- build.gradle | 7 ++ 5 files changed, 101 insertions(+), 21 deletions(-) diff --git a/airbyte-cdk/java/airbyte-cdk/README.md b/airbyte-cdk/java/airbyte-cdk/README.md index 11b7a8941c02..f1b80e5b4700 100644 --- a/airbyte-cdk/java/airbyte-cdk/README.md +++ b/airbyte-cdk/java/airbyte-cdk/README.md @@ -166,8 +166,9 @@ MavenLocal debugging steps: | Version | Date | Pull Request | Subject | |:--------|:-----------|:-----------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------| -| 0.23.14 | 2024-03-05 | [\#35827](https://github.com/airbytehq/airbyte/pull/35827) | improving the Junit interceptor. | -| 0.23.13 | 2024-03-04 | [\#35774](https://github.com/airbytehq/airbyte/pull/35774) | minor changes to the CDK test fixtures. | +| 0.23.15 | 2024-03-05 | [\#35827](https://github.com/airbytehq/airbyte/pull/35827) | improving the Junit interceptor. | +| 0.23.14 | 2024-03-05 | [\#35739](https://github.com/airbytehq/airbyte/pull/35739) | Add logging to the CDC queue size. Fix the ContainerFactory. | +| 0.23.13 | 2024-03-04 | [\#35774](https://github.com/airbytehq/airbyte/pull/35774) | minor changes to the CDK test fixtures. | | 0.23.12 | 2024-03-01 | [\#35767](https://github.com/airbytehq/airbyte/pull/35767) | introducing a timeout for java tests. 
| | 0.23.11 | 2024-03-01 | [\#35313](https://github.com/airbytehq/airbyte/pull/35313) | Preserve timezone offset in CSV writer for destinations | | 0.23.10 | 2024-03-01 | [\#35303](https://github.com/airbytehq/airbyte/pull/35303) | Migration framework with DestinationState for softReset | diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties index ff6fa0e7b121..e60bd207859a 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties @@ -1 +1 @@ -version=0.23.14 +version=0.23.15 diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/extensions/LoggingInvocationInterceptor.java b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/extensions/LoggingInvocationInterceptor.java index 68ed86ed6c58..a62788dc1ad4 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/extensions/LoggingInvocationInterceptor.java +++ b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/extensions/LoggingInvocationInterceptor.java @@ -4,6 +4,16 @@ package io.airbyte.cdk.extensions; +import static java.util.concurrent.TimeUnit.DAYS; +import static java.util.concurrent.TimeUnit.HOURS; +import static java.util.concurrent.TimeUnit.MICROSECONDS; +import static java.util.concurrent.TimeUnit.MILLISECONDS; +import static java.util.concurrent.TimeUnit.MINUTES; +import static java.util.concurrent.TimeUnit.NANOSECONDS; +import static java.util.concurrent.TimeUnit.SECONDS; +import static java.util.regex.Pattern.CASE_INSENSITIVE; +import static java.util.regex.Pattern.UNICODE_CASE; + import java.lang.reflect.Constructor; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; @@ -11,19 +21,23 @@ import java.lang.reflect.Proxy; import java.time.Duration; import java.time.Instant; +import java.time.format.DateTimeParseException; +import java.util.Collections; +import java.util.HashMap; import java.util.LinkedList; import java.util.List; +import java.util.Locale; import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ExecutorService; +import java.util.Timer; +import java.util.TimerTask; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.exception.ExceptionUtils; import org.apache.commons.lang3.time.DurationFormatUtils; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Timeout; -import org.junit.jupiter.api.Timeout.ThreadMode; import org.junit.jupiter.api.extension.DynamicTestInvocationContext; import org.junit.jupiter.api.extension.ExtensionContext; import org.junit.jupiter.api.extension.InvocationInterceptor; @@ -41,13 +55,11 @@ */ public class LoggingInvocationInterceptor implements InvocationInterceptor { - private static final Duration DEFAULT_TIMEOUT = Duration.ofMinutes(5); private static final Logger LOGGER = LoggerFactory.getLogger(LoggingInvocationInterceptor.class); + private static final String JUNIT_METHOD_EXECUTION_TIMEOUT_PROPERTY_NAME = "JunitMethodExecutionTimeout"; private static final class LoggingInvocationInterceptorHandler implements InvocationHandler { - private static final Map<Thread, ExecutorService> executorByThread = new ConcurrentHashMap<>(); - private static final Pattern
methodPattern = Pattern.compile("intercept(.*)Method"); @Override @@ -76,22 +88,33 @@ public Object invoke(Object proxy, Method method, Object[] args) throws Throwabl } else { logLineSuffix = "execution of unknown intercepted call %s".formatted(methodName); } + Thread currentThread = Thread.currentThread(); + TimeoutInterruptor timeoutTask = new TimeoutInterruptor(currentThread); Instant start = Instant.now(); try { final Object retVal; Duration timeout = getTimeout(invocationContext); if (timeout != null) { LOGGER.info("Junit starting {} with timeout of {}", logLineSuffix, DurationFormatUtils.formatDurationWords(timeout.toMillis(), true, true)); - retVal = Assertions.assertTimeoutPreemptively(timeout, invocation::proceed); + new Timer("TimeoutTimer-" + currentThread.getName(), true).schedule(timeoutTask, timeout.toMillis()); } else { LOGGER.warn("Junit starting {} with no timeout", logLineSuffix); - retVal = invocation.proceed(); } + retVal = invocation.proceed(); long elapsedMs = Duration.between(start, Instant.now()).toMillis(); LOGGER.info("Junit completed {} in {}", logLineSuffix, DurationFormatUtils.formatDurationWords(elapsedMs, true, true)); return retVal; } catch (Throwable t) { + timeoutTask.cancel(); long elapsedMs = Duration.between(start, Instant.now()).toMillis(); + if (timeoutTask.wasTriggered) { + Throwable t1 = t; + t = new TimeoutException( + "Execution was cancelled after %s. If you think your test should be given more time to complete, you can use the @Timeout annotation. If all the tests of a connector are slow, " + + " you can override the property 'JunitMethodExecutionTimeout' in your gradle.properties." + .formatted(DurationFormatUtils.formatDurationWords(elapsedMs, true, true))); + t.initCause(t1); + } boolean belowCurrentCall = false; List<String> stackToDisplay = new LinkedList<>(); for (String stackString : ExceptionUtils.getStackFrames(t)) { @@ -110,25 +133,74 @@ public Object invoke(Object proxy, Method method, Object[] args) throws Throwabl LOGGER.error("Junit exception thrown during {} after {}:\n{}", logLineSuffix, DurationFormatUtils.formatDurationWords(elapsedMs, true, true), stackTrace); throw t; + } finally { + timeoutTask.cancel(); + } + } + + private static class TimeoutInterruptor extends TimerTask { + + private final Thread parentThread; + volatile boolean wasTriggered = false; + + TimeoutInterruptor(Thread parentThread) { + this.parentThread = parentThread; + } + + @Override + public void run() { + wasTriggered = true; + parentThread.interrupt(); } + + public boolean cancel() { + return super.cancel(); + } + + } + + private static final Pattern PATTERN = Pattern.compile("([1-9]\\d*) *((?:[nμm]?s)|m|h|d)?", + CASE_INSENSITIVE | UNICODE_CASE); + private static final Map<String, TimeUnit> UNITS_BY_ABBREVIATION; + + static { + Map<String, TimeUnit> unitsByAbbreviation = new HashMap<>(); + unitsByAbbreviation.put("ns", NANOSECONDS); + unitsByAbbreviation.put("μs", MICROSECONDS); + unitsByAbbreviation.put("ms", MILLISECONDS); + unitsByAbbreviation.put("s", SECONDS); + unitsByAbbreviation.put("m", MINUTES); + unitsByAbbreviation.put("h", HOURS); + unitsByAbbreviation.put("d", DAYS); + UNITS_BY_ABBREVIATION = Collections.unmodifiableMap(unitsByAbbreviation); + } + + static Duration parseDuration(String text) throws DateTimeParseException { + Matcher matcher = PATTERN.matcher(text.trim()); + if (matcher.matches()) { + long value = Long.parseLong(matcher.group(1)); + String unitAbbreviation = matcher.group(2); + TimeUnit unit = unitAbbreviation == null ? SECONDS + : UNITS_BY_ABBREVIATION.get(unitAbbreviation.toLowerCase(Locale.ENGLISH)); + return Duration.ofNanos(unit.toNanos(value)); + } + throw new DateTimeParseException("Timeout duration is not in the expected format (<number> [ns|μs|ms|s|m|h|d])", + text, 0); }
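For reference, `parseDuration` above accepts a positive integer followed by an optional unit abbreviation (`ns`, `μs`, `ms`, `s`, `m`, `h`, `d`), defaulting to seconds when the unit is omitted. The sketch below restates that grammar in Python; it is illustrative only and not part of the patch (`parse_duration_seconds` is a name invented here):

```python
import re

# Mirrors the Java PATTERN: "<number> [ns|μs|ms|s|m|h|d]", unit optional.
_DURATION = re.compile(r"([1-9]\d*) *((?:[nμm]?s)|m|h|d)?", re.IGNORECASE)
_SECONDS_PER_UNIT = {"ns": 1e-9, "μs": 1e-6, "ms": 1e-3, "s": 1, "m": 60, "h": 3600, "d": 86400}

def parse_duration_seconds(text: str) -> float:
    match = _DURATION.fullmatch(text.strip())
    if not match:
        raise ValueError(f"Timeout duration is not in the expected format: {text!r}")
    value, unit = int(match.group(1)), (match.group(2) or "s").lower()
    return value * _SECONDS_PER_UNIT[unit]

assert parse_duration_seconds("1 m") == 60  # the default set in build.gradle below
assert parse_duration_seconds("90s") == 90
```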
private static Duration getTimeout(ReflectiveInvocationContext<Method> invocationContext) { - Duration timeout = DEFAULT_TIMEOUT; + Duration timeout = null; if (invocationContext.getExecutable() instanceof Method m) { Timeout timeoutAnnotation = m.getAnnotation(Timeout.class); if (timeoutAnnotation == null) { timeoutAnnotation = invocationContext.getTargetClass().getAnnotation(Timeout.class); } if (timeoutAnnotation != null) { - if (timeoutAnnotation.threadMode() == ThreadMode.SAME_THREAD) { - return null; - } timeout = Duration.ofMillis(timeoutAnnotation.unit().toMillis(timeoutAnnotation.value())); } } - if (timeout.compareTo(Duration.ofHours(1)) > 0) { - return DEFAULT_TIMEOUT; + if (timeout == null) { + timeout = parseDuration(System.getProperty(JUNIT_METHOD_EXECUTION_TIMEOUT_PROPERTY_NAME)); } return timeout; } diff --git a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvWriterTest.java b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvWriterTest.java index 747ecb44afd0..1ff6715d1723 100644 --- a/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvWriterTest.java +++ b/airbyte-cdk/java/airbyte-cdk/s3-destinations/src/test/java/io/airbyte/cdk/integrations/destination/s3/csv/S3CsvWriterTest.java @@ -41,16 +41,16 @@ import java.util.ArrayList; import java.util.List; import java.util.UUID; +import java.util.concurrent.TimeUnit; import org.apache.commons.csv.CSVFormat; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Timeout; -import org.junit.jupiter.api.Timeout.ThreadMode; import org.mockito.MockedConstruction; -@Timeout(value = 1, - threadMode = ThreadMode.SAME_THREAD) +@Timeout(value = 90, + unit = TimeUnit.SECONDS) class S3CsvWriterTest { public static final ConfiguredAirbyteStream CONFIGURED_STREAM = new ConfiguredAirbyteStream() diff --git a/build.gradle b/build.gradle index 3d13e5f7b2d5..02755ee877b0 100644 --- a/build.gradle +++ b/build.gradle @@ -125,6 +125,13 @@ allprojects { systemProperty 'junit.jupiter.execution.parallel.config.strategy', 'dynamic' } } + String junitMethodExecutionTimeout + if (project.hasProperty('JunitMethodExecutionTimeout')) { + junitMethodExecutionTimeout = project.property('JunitMethodExecutionTimeout').toString() + } else { + junitMethodExecutionTimeout = '1 m' + } + systemProperty 'JunitMethodExecutionTimeout', junitMethodExecutionTimeout } dependencies { From 95afe2806184fd864b0c5053fc0339ff82d83af1 Mon Sep 17 00:00:00 2001 From: Evan Tahler Date: Tue, 5 Mar 2024 17:29:05 -0800 Subject: [PATCH 092/172] Un-Archive python destinations (#35838) --- .../destination-amazon-sqs/.dockerignore | 5 + .../destination-amazon-sqs/Dockerfile | 38 + .../destination-amazon-sqs/README.md | 99 ++ .../destination-amazon-sqs/bootstrap.md | 59 + .../destination_amazon_sqs/__init__.py | 8 + .../destination_amazon_sqs/destination.py | 176 +++ .../destination_amazon_sqs/spec.json | 101 ++ .../integration_tests/input_records_json | 1 + .../integration_tests/integration_test.py | 46 + .../connectors/destination-amazon-sqs/main.py | 11 +
.../destination-amazon-sqs/metadata.yaml | 4 +- .../destination-amazon-sqs/requirements.txt | 1 + .../sample_files/configured_catalog.json | 27 + .../destination-amazon-sqs/setup.py | 23 + .../unit_tests/unit_test.py | 226 ++++ .../connectors/destination-cumulio/Dockerfile | 42 + .../connectors/destination-cumulio/README.md | 98 ++ .../destination_cumulio/__init__.py | 8 + .../destination_cumulio/client.py | 367 ++++++ .../destination_cumulio/destination.py | 101 ++ .../destination_cumulio/spec.json | 37 + .../destination_cumulio/writer.py | 205 +++ .../integration_tests/configured_catalog.json | 29 + .../integration_tests/integration_test.py | 276 ++++ .../integration_tests/sample_config.json | 5 + .../connectors/destination-cumulio/main.py | 11 + .../destination-cumulio/metadata.yaml | 4 +- .../destination-cumulio/requirements.txt | 1 + .../connectors/destination-cumulio/setup.py | 23 + .../unit_tests/test_client.py | 629 ++++++++++ .../unit_tests/test_destination.py | 155 +++ .../unit_tests/test_writer.py | 512 ++++++++ .../destination-databend/.dockerignore | 5 + .../destination-databend/Dockerfile | 38 + .../connectors/destination-databend/README.md | 99 ++ .../destination_databend/__init__.py | 8 + .../destination_databend/__init__.pyc | Bin 0 -> 307 bytes .../destination_databend/client.py | 20 + .../destination_databend/destination.py | 89 ++ .../destination_databend/spec.json | 57 + .../destination_databend/writer.py | 134 ++ .../integration_tests/integration_test.py | 159 +++ .../integration_tests/sample_config.json | 9 + .../connectors/destination-databend/main.py | 11 + .../destination-databend/metadata.yaml | 4 +- .../destination-databend/requirements.txt | 1 + .../connectors/destination-databend/setup.py | 22 + .../unit_tests/test_databend_destination.py | 161 +++ .../unit_tests/test_writer.py | 46 + .../destination-firebolt/Dockerfile | 29 + .../connectors/destination-firebolt/README.md | 99 ++ .../destination-firebolt/bootstrap.md | 22 + .../destination_firebolt/__init__.py | 8 + .../destination_firebolt/destination.py | 128 ++ .../destination_firebolt/spec.json | 109 ++ .../destination_firebolt/writer.py | 235 ++++ .../integration_tests/configured_catalog.json | 38 + .../integration_tests/integration_test.py | 147 +++ .../integration_tests/invalid_config.json | 9 + .../integration_tests/invalid_config_s3.json | 13 + .../integration_tests/messages.jsonl | 2 + .../connectors/destination-firebolt/main.py | 11 + .../destination-firebolt/metadata.yaml | 4 +- .../destination-firebolt/requirements.txt | 1 + .../connectors/destination-firebolt/setup.py | 23 + .../unit_tests/test_firebolt_destination.py | 241 ++++ .../unit_tests/test_writer.py | 156 +++ .../connectors/destination-kvdb/README.md | 118 ++ .../destination_kvdb/__init__.py | 26 + .../destination_kvdb/client.py | 78 ++ .../destination_kvdb/destination.py | 72 ++ .../destination_kvdb/spec.json | 26 + .../destination_kvdb/writer.py | 46 + .../connectors/destination-kvdb/main.py | 11 + .../connectors/destination-kvdb/metadata.yaml | 4 +- .../connectors/destination-kvdb/poetry.lock | 1108 +++++++++++++++++ .../destination-kvdb/pyproject.toml | 31 + .../destination-kvdb/requirements.txt | 1 + .../destination-kvdb/unit_tests/unit_test.py | 7 + .../destination-meilisearch/.dockerignore | 5 + .../destination-meilisearch/Dockerfile | 38 + .../destination-meilisearch/README.md | 99 ++ .../destination_meilisearch/__init__.py | 8 + .../destination_meilisearch/destination.py | 84 ++ .../destination_meilisearch/spec.json | 27 + 
.../destination_meilisearch/writer.py | 39 + .../integration_tests/integration_test.py | 103 ++ .../integration_tests/messages.jsonl | 2 + .../destination-meilisearch/main.py | 11 + .../destination-meilisearch/metadata.yaml | 4 +- .../destination-meilisearch/requirements.txt | 1 + .../sample_files/configured_catalog.json | 27 + .../destination-meilisearch/setup.py | 23 + .../unit_tests/unit_test.py | 29 + .../destination-rabbitmq/.dockerignore | 5 + .../destination-rabbitmq/Dockerfile | 38 + .../connectors/destination-rabbitmq/README.md | 99 ++ .../destination_rabbitmq/__init__.py | 8 + .../destination_rabbitmq/__init__.pyc | Bin 0 -> 300 bytes .../destination_rabbitmq/destination.py | 84 ++ .../destination_rabbitmq/spec.json | 49 + .../integration_tests/integration_test.py | 90 ++ .../integration_tests/invalid_config.json | 9 + .../connectors/destination-rabbitmq/main.py | 11 + .../destination-rabbitmq/metadata.yaml | 4 +- .../destination-rabbitmq/requirements.txt | 1 + .../connectors/destination-rabbitmq/setup.py | 23 + .../unit_tests/unit_test.py | 130 ++ .../destination-timeplus/.dockerignore | 5 + .../destination-timeplus/Dockerfile | 38 + .../connectors/destination-timeplus/README.md | 108 ++ .../destination_timeplus/__init__.py | 8 + .../destination_timeplus/destination.py | 160 +++ .../destination_timeplus/spec.json | 31 + .../integration_tests/configured_catalog.json | 263 ++++ .../integration_tests/integration_test.py | 74 ++ .../integration_tests/messages.jsonl | 5 + .../connectors/destination-timeplus/main.py | 11 + .../destination-timeplus/metadata.yaml | 4 +- .../destination-timeplus/requirements.txt | 1 + .../connectors/destination-timeplus/setup.py | 26 + .../unit_tests/unit_test.py | 17 + .../connectors/destination-xata/.dockerignore | 5 + .../connectors/destination-xata/Dockerfile | 38 + .../connectors/destination-xata/README.md | 99 ++ .../connectors/destination-xata/bootstrap.md | 1 + .../destination_xata/__init__.py | 8 + .../destination_xata/destination.py | 79 ++ .../destination_xata/spec.json | 28 + .../integration_tests/integration_test.py | 120 ++ .../integration_tests/invalid_config.json | 4 + .../connectors/destination-xata/main.py | 11 + .../connectors/destination-xata/metadata.yaml | 4 +- .../destination-xata/requirements.txt | 1 + .../sample_files/configured_catalog.json | 13 + .../connectors/destination-xata/setup.py | 23 + .../destination-xata/unit_tests/unit_test.py | 28 + docs/integrations/destinations/amazon-sqs.md | 3 +- docs/integrations/destinations/cumulio.md | 129 +- docs/integrations/destinations/databend.md | 67 +- docs/integrations/destinations/firebolt.md | 105 +- docs/integrations/destinations/kvdb.md | 13 +- docs/integrations/destinations/meilisearch.md | 36 +- docs/integrations/destinations/rabbitmq.md | 15 +- docs/integrations/destinations/timeplus.md | 35 +- docs/integrations/destinations/xata.md | 24 +- 146 files changed, 9518 insertions(+), 167 deletions(-) create mode 100644 airbyte-integrations/connectors/destination-amazon-sqs/.dockerignore create mode 100644 airbyte-integrations/connectors/destination-amazon-sqs/Dockerfile create mode 100644 airbyte-integrations/connectors/destination-amazon-sqs/README.md create mode 100644 airbyte-integrations/connectors/destination-amazon-sqs/bootstrap.md create mode 100644 airbyte-integrations/connectors/destination-amazon-sqs/destination_amazon_sqs/__init__.py create mode 100644 airbyte-integrations/connectors/destination-amazon-sqs/destination_amazon_sqs/destination.py create mode 100644 
airbyte-integrations/connectors/destination-amazon-sqs/destination_amazon_sqs/spec.json create mode 100644 airbyte-integrations/connectors/destination-amazon-sqs/integration_tests/input_records_json create mode 100644 airbyte-integrations/connectors/destination-amazon-sqs/integration_tests/integration_test.py create mode 100644 airbyte-integrations/connectors/destination-amazon-sqs/main.py create mode 100644 airbyte-integrations/connectors/destination-amazon-sqs/requirements.txt create mode 100644 airbyte-integrations/connectors/destination-amazon-sqs/sample_files/configured_catalog.json create mode 100644 airbyte-integrations/connectors/destination-amazon-sqs/setup.py create mode 100644 airbyte-integrations/connectors/destination-amazon-sqs/unit_tests/unit_test.py create mode 100644 airbyte-integrations/connectors/destination-cumulio/Dockerfile create mode 100644 airbyte-integrations/connectors/destination-cumulio/README.md create mode 100644 airbyte-integrations/connectors/destination-cumulio/destination_cumulio/__init__.py create mode 100644 airbyte-integrations/connectors/destination-cumulio/destination_cumulio/client.py create mode 100644 airbyte-integrations/connectors/destination-cumulio/destination_cumulio/destination.py create mode 100644 airbyte-integrations/connectors/destination-cumulio/destination_cumulio/spec.json create mode 100644 airbyte-integrations/connectors/destination-cumulio/destination_cumulio/writer.py create mode 100644 airbyte-integrations/connectors/destination-cumulio/integration_tests/configured_catalog.json create mode 100644 airbyte-integrations/connectors/destination-cumulio/integration_tests/integration_test.py create mode 100644 airbyte-integrations/connectors/destination-cumulio/integration_tests/sample_config.json create mode 100644 airbyte-integrations/connectors/destination-cumulio/main.py create mode 100644 airbyte-integrations/connectors/destination-cumulio/requirements.txt create mode 100644 airbyte-integrations/connectors/destination-cumulio/setup.py create mode 100644 airbyte-integrations/connectors/destination-cumulio/unit_tests/test_client.py create mode 100644 airbyte-integrations/connectors/destination-cumulio/unit_tests/test_destination.py create mode 100644 airbyte-integrations/connectors/destination-cumulio/unit_tests/test_writer.py create mode 100644 airbyte-integrations/connectors/destination-databend/.dockerignore create mode 100644 airbyte-integrations/connectors/destination-databend/Dockerfile create mode 100644 airbyte-integrations/connectors/destination-databend/README.md create mode 100644 airbyte-integrations/connectors/destination-databend/destination_databend/__init__.py create mode 100644 airbyte-integrations/connectors/destination-databend/destination_databend/__init__.pyc create mode 100644 airbyte-integrations/connectors/destination-databend/destination_databend/client.py create mode 100644 airbyte-integrations/connectors/destination-databend/destination_databend/destination.py create mode 100644 airbyte-integrations/connectors/destination-databend/destination_databend/spec.json create mode 100644 airbyte-integrations/connectors/destination-databend/destination_databend/writer.py create mode 100644 airbyte-integrations/connectors/destination-databend/integration_tests/integration_test.py create mode 100644 airbyte-integrations/connectors/destination-databend/integration_tests/sample_config.json create mode 100644 airbyte-integrations/connectors/destination-databend/main.py create mode 100644 
airbyte-integrations/connectors/destination-databend/requirements.txt create mode 100644 airbyte-integrations/connectors/destination-databend/setup.py create mode 100644 airbyte-integrations/connectors/destination-databend/unit_tests/test_databend_destination.py create mode 100644 airbyte-integrations/connectors/destination-databend/unit_tests/test_writer.py create mode 100644 airbyte-integrations/connectors/destination-firebolt/Dockerfile create mode 100644 airbyte-integrations/connectors/destination-firebolt/README.md create mode 100644 airbyte-integrations/connectors/destination-firebolt/bootstrap.md create mode 100644 airbyte-integrations/connectors/destination-firebolt/destination_firebolt/__init__.py create mode 100644 airbyte-integrations/connectors/destination-firebolt/destination_firebolt/destination.py create mode 100644 airbyte-integrations/connectors/destination-firebolt/destination_firebolt/spec.json create mode 100644 airbyte-integrations/connectors/destination-firebolt/destination_firebolt/writer.py create mode 100644 airbyte-integrations/connectors/destination-firebolt/integration_tests/configured_catalog.json create mode 100644 airbyte-integrations/connectors/destination-firebolt/integration_tests/integration_test.py create mode 100644 airbyte-integrations/connectors/destination-firebolt/integration_tests/invalid_config.json create mode 100644 airbyte-integrations/connectors/destination-firebolt/integration_tests/invalid_config_s3.json create mode 100644 airbyte-integrations/connectors/destination-firebolt/integration_tests/messages.jsonl create mode 100644 airbyte-integrations/connectors/destination-firebolt/main.py create mode 100644 airbyte-integrations/connectors/destination-firebolt/requirements.txt create mode 100644 airbyte-integrations/connectors/destination-firebolt/setup.py create mode 100644 airbyte-integrations/connectors/destination-firebolt/unit_tests/test_firebolt_destination.py create mode 100644 airbyte-integrations/connectors/destination-firebolt/unit_tests/test_writer.py create mode 100644 airbyte-integrations/connectors/destination-kvdb/README.md create mode 100644 airbyte-integrations/connectors/destination-kvdb/destination_kvdb/__init__.py create mode 100644 airbyte-integrations/connectors/destination-kvdb/destination_kvdb/client.py create mode 100644 airbyte-integrations/connectors/destination-kvdb/destination_kvdb/destination.py create mode 100644 airbyte-integrations/connectors/destination-kvdb/destination_kvdb/spec.json create mode 100644 airbyte-integrations/connectors/destination-kvdb/destination_kvdb/writer.py create mode 100644 airbyte-integrations/connectors/destination-kvdb/main.py create mode 100644 airbyte-integrations/connectors/destination-kvdb/poetry.lock create mode 100644 airbyte-integrations/connectors/destination-kvdb/pyproject.toml create mode 100644 airbyte-integrations/connectors/destination-kvdb/requirements.txt create mode 100644 airbyte-integrations/connectors/destination-kvdb/unit_tests/unit_test.py create mode 100644 airbyte-integrations/connectors/destination-meilisearch/.dockerignore create mode 100644 airbyte-integrations/connectors/destination-meilisearch/Dockerfile create mode 100644 airbyte-integrations/connectors/destination-meilisearch/README.md create mode 100644 airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/__init__.py create mode 100644 airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/destination.py create mode 100644 
airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/spec.json create mode 100644 airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/writer.py create mode 100644 airbyte-integrations/connectors/destination-meilisearch/integration_tests/integration_test.py create mode 100644 airbyte-integrations/connectors/destination-meilisearch/integration_tests/messages.jsonl create mode 100644 airbyte-integrations/connectors/destination-meilisearch/main.py create mode 100644 airbyte-integrations/connectors/destination-meilisearch/requirements.txt create mode 100644 airbyte-integrations/connectors/destination-meilisearch/sample_files/configured_catalog.json create mode 100644 airbyte-integrations/connectors/destination-meilisearch/setup.py create mode 100644 airbyte-integrations/connectors/destination-meilisearch/unit_tests/unit_test.py create mode 100644 airbyte-integrations/connectors/destination-rabbitmq/.dockerignore create mode 100644 airbyte-integrations/connectors/destination-rabbitmq/Dockerfile create mode 100644 airbyte-integrations/connectors/destination-rabbitmq/README.md create mode 100644 airbyte-integrations/connectors/destination-rabbitmq/destination_rabbitmq/__init__.py create mode 100644 airbyte-integrations/connectors/destination-rabbitmq/destination_rabbitmq/__init__.pyc create mode 100644 airbyte-integrations/connectors/destination-rabbitmq/destination_rabbitmq/destination.py create mode 100644 airbyte-integrations/connectors/destination-rabbitmq/destination_rabbitmq/spec.json create mode 100644 airbyte-integrations/connectors/destination-rabbitmq/integration_tests/integration_test.py create mode 100644 airbyte-integrations/connectors/destination-rabbitmq/integration_tests/invalid_config.json create mode 100644 airbyte-integrations/connectors/destination-rabbitmq/main.py create mode 100644 airbyte-integrations/connectors/destination-rabbitmq/requirements.txt create mode 100644 airbyte-integrations/connectors/destination-rabbitmq/setup.py create mode 100644 airbyte-integrations/connectors/destination-rabbitmq/unit_tests/unit_test.py create mode 100755 airbyte-integrations/connectors/destination-timeplus/.dockerignore create mode 100755 airbyte-integrations/connectors/destination-timeplus/Dockerfile create mode 100755 airbyte-integrations/connectors/destination-timeplus/README.md create mode 100755 airbyte-integrations/connectors/destination-timeplus/destination_timeplus/__init__.py create mode 100755 airbyte-integrations/connectors/destination-timeplus/destination_timeplus/destination.py create mode 100755 airbyte-integrations/connectors/destination-timeplus/destination_timeplus/spec.json create mode 100644 airbyte-integrations/connectors/destination-timeplus/integration_tests/configured_catalog.json create mode 100755 airbyte-integrations/connectors/destination-timeplus/integration_tests/integration_test.py create mode 100644 airbyte-integrations/connectors/destination-timeplus/integration_tests/messages.jsonl create mode 100755 airbyte-integrations/connectors/destination-timeplus/main.py create mode 100755 airbyte-integrations/connectors/destination-timeplus/requirements.txt create mode 100755 airbyte-integrations/connectors/destination-timeplus/setup.py create mode 100755 airbyte-integrations/connectors/destination-timeplus/unit_tests/unit_test.py create mode 100644 airbyte-integrations/connectors/destination-xata/.dockerignore create mode 100644 airbyte-integrations/connectors/destination-xata/Dockerfile create mode 100644 
airbyte-integrations/connectors/destination-xata/README.md create mode 100644 airbyte-integrations/connectors/destination-xata/bootstrap.md create mode 100644 airbyte-integrations/connectors/destination-xata/destination_xata/__init__.py create mode 100644 airbyte-integrations/connectors/destination-xata/destination_xata/destination.py create mode 100644 airbyte-integrations/connectors/destination-xata/destination_xata/spec.json create mode 100644 airbyte-integrations/connectors/destination-xata/integration_tests/integration_test.py create mode 100644 airbyte-integrations/connectors/destination-xata/integration_tests/invalid_config.json create mode 100644 airbyte-integrations/connectors/destination-xata/main.py create mode 100644 airbyte-integrations/connectors/destination-xata/requirements.txt create mode 100644 airbyte-integrations/connectors/destination-xata/sample_files/configured_catalog.json create mode 100644 airbyte-integrations/connectors/destination-xata/setup.py create mode 100644 airbyte-integrations/connectors/destination-xata/unit_tests/unit_test.py diff --git a/airbyte-integrations/connectors/destination-amazon-sqs/.dockerignore b/airbyte-integrations/connectors/destination-amazon-sqs/.dockerignore new file mode 100644 index 000000000000..efa69d407fd8 --- /dev/null +++ b/airbyte-integrations/connectors/destination-amazon-sqs/.dockerignore @@ -0,0 +1,5 @@ +* +!Dockerfile +!main.py +!destination_amazon_sqs +!setup.py diff --git a/airbyte-integrations/connectors/destination-amazon-sqs/Dockerfile b/airbyte-integrations/connectors/destination-amazon-sqs/Dockerfile new file mode 100644 index 000000000000..da7f9d82dd5d --- /dev/null +++ b/airbyte-integrations/connectors/destination-amazon-sqs/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY destination_amazon_sqs ./destination_amazon_sqs + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.2 +LABEL io.airbyte.name=airbyte/destination-amazon-sqs diff --git a/airbyte-integrations/connectors/destination-amazon-sqs/README.md b/airbyte-integrations/connectors/destination-amazon-sqs/README.md new file mode 100644 index 000000000000..2856f60b1ae7 --- /dev/null +++ b/airbyte-integrations/connectors/destination-amazon-sqs/README.md @@ -0,0 +1,99 @@ +# Amazon Sqs Destination + +This is the repository for the Amazon Sqs destination connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/destinations/amazon-sqs). 
+ +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.7.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/destinations/amazon-sqs) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `destination_amazon_sqs/spec.json` file. +Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `destination amazon-sqs test creds` +and place them into `secrets/config.json`. + +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +python main.py discover --config secrets/config.json +python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + + +#### Build +**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** +```bash +airbyte-ci connectors --name=destination-amazon-sqs build +``` + +An image will be built with the tag `airbyte/destination-amazon-sqs:dev`. + +**Via `docker build`:** +```bash +docker build -t airbyte/destination-amazon-sqs:dev . +``` + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/destination-amazon-sqs:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-amazon-sqs:dev check --config /secrets/config.json +# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages +cat messages.jsonl | docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-amazon-sqs:dev write --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` + +## Testing +You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): +```bash +airbyte-ci connectors --name=destination-amazon-sqs test +``` + +### Customizing acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. 
See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. +If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py. + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work, which go in the `MAIN_REQUIREMENTS` list. +* required for testing, which go in the `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-amazon-sqs test` +2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). +3. Make sure the `metadata.yaml` content is up to date. +4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/destinations/amazon-sqs.md`). +5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). +6. Pat yourself on the back for being an awesome contributor. +7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. + diff --git a/airbyte-integrations/connectors/destination-amazon-sqs/bootstrap.md b/airbyte-integrations/connectors/destination-amazon-sqs/bootstrap.md new file mode 100644 index 000000000000..ce91ec1ef142 --- /dev/null +++ b/airbyte-integrations/connectors/destination-amazon-sqs/bootstrap.md @@ -0,0 +1,59 @@ +# Amazon SQS Destination + +## What +This is a connector for producing messages to an [Amazon SQS Queue](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/welcome.html) + +## How +### Sending messages +Amazon SQS allows messages to be sent individually or in batches. Currently, this Destination only supports sending messages individually. This can +have performance implications if sending high volumes of messages. + +#### Message Body +By default, the SQS Message body is built using the AirbyteMessageRecord's 'data' property. + +If the **message_body_key** config item is set, we use the value as a key within the AirbyteMessageRecord's 'data' property. This could be +improved to handle nested keys by using JSONPath syntax to look up values. + +For example, given the input Record: +``` +{ + "data": + { + "parent_key": { + "nested_key": "nested_value" + }, + "top_key": "top_value" + } +} +``` + +With no **message_body_key** set, the output SQS Message body will be +``` +{ + "parent_key": { + "nested_key": "nested_value" + }, + "top_key": "top_value" +} +``` + +With **message_body_key** set to `parent_key`, the output SQS Message body will be +``` +{ + "nested_key": "nested_value" +} +``` + +#### Message attributes +The airbyte_emitted_at timestamp is added to every message as an Attribute by default.
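Concretely, the attributes dictionary the connector builds for each message (a sketch of `add_attributes_to_message` in `destination.py` below, shown with a sample timestamp) has this shape:

```python
# SQS MessageAttributes set on every outgoing message.
{"airbyte_emitted_at": {"StringValue": "1633881878000", "DataType": "String"}}
```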
This could be improved to allow the user to set Attributes through the UI, or to take keys from the Record as Attributes. + +#### FIFO Queues +A Queue URL that ends with '.fifo' **must** be a valid FIFO Queue. When the queue is FIFO, the *message_group_id* property is required. + +Currently, a unique uuid4 is generated as the dedupe ID for every message. This could be improved to allow the user to specify a path in the Record +to use as a dedupe ID. + +### Credentials +Requires an AWS IAM Access Key ID and Secret Key. + +This could be improved to add support for configured AWS profiles, env vars etc. diff --git a/airbyte-integrations/connectors/destination-amazon-sqs/destination_amazon_sqs/__init__.py b/airbyte-integrations/connectors/destination-amazon-sqs/destination_amazon_sqs/__init__.py new file mode 100644 index 000000000000..ff5ba7b7242c --- /dev/null +++ b/airbyte-integrations/connectors/destination-amazon-sqs/destination_amazon_sqs/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +from .destination import DestinationAmazonSqs + +__all__ = ["DestinationAmazonSqs"] diff --git a/airbyte-integrations/connectors/destination-amazon-sqs/destination_amazon_sqs/destination.py b/airbyte-integrations/connectors/destination-amazon-sqs/destination_amazon_sqs/destination.py new file mode 100644 index 000000000000..1eb0249bccc5 --- /dev/null +++ b/airbyte-integrations/connectors/destination-amazon-sqs/destination_amazon_sqs/destination.py @@ -0,0 +1,176 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import json +from typing import Any, Iterable, Mapping +from uuid import uuid4 + +import boto3 +from airbyte_cdk import AirbyteLogger +from airbyte_cdk.destinations import Destination +from airbyte_cdk.models import AirbyteConnectionStatus, AirbyteMessage, ConfiguredAirbyteCatalog, Status, Type +from botocore.exceptions import ClientError + + +class DestinationAmazonSqs(Destination): + def queue_is_fifo(self, url: str) -> bool: + return url.endswith(".fifo") + + def parse_queue_name(self, url: str) -> str: + return url.rsplit("/", 1)[-1] + + def send_single_message(self, queue, message) -> dict: + return queue.send_message(**message) + + def build_sqs_message(self, record, message_body_key=None): + data = None + if message_body_key: + data = record.data.get(message_body_key) + if data is None: + raise Exception("Message had no attribute of the configured Message Body Key: " + message_body_key) + else: + data = json.dumps(record.data) + + message = {"MessageBody": data} + + return message + + def add_attributes_to_message(self, record, message): + attributes = {"airbyte_emitted_at": {"StringValue": str(record.emitted_at), "DataType": "String"}} + message["MessageAttributes"] = attributes + return message + + def set_message_delay(self, message, message_delay): + message["DelaySeconds"] = message_delay + return message + + # MessageGroupID and MessageDeduplicationID are required properties for FIFO queues + # https://docs.aws.amazon.com/AWSSimpleQueueService/latest/APIReference/API_SendMessage.html + def set_message_fifo_properties(self, message, message_group_id, use_content_dedupe=False): + # https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/using-messagegroupid-property.html + if not message_group_id: + raise Exception("Failed to build message - Message Group ID is required for FIFO queues") + else: + message["MessageGroupId"] = message_group_id + # 
https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/using-messagededuplicationid-property.html + if not use_content_dedupe: + message["MessageDeduplicationId"] = str(uuid4()) + # TODO: Support getting MessageDeduplicationId from a key in the record + # if message_dedupe_id: + # message['MessageDeduplicationId'] = message_dedupe_id + return message + + # TODO: Support batch send + # def send_batch_messages(messages, queue): + # entry = { + # 'Id': "1", + # 'MessageBody': str(record.data), + # } + # response = queue.send_messages(Entries=messages) + # if 'Successful' in response: + # for status in response['Successful']: + # print("Message sent: " + status['MessageId']) + # if 'Failed' in response: + # for status in response['Failed']: + # print("Message sent: " + status['MessageId']) + + # https://docs.aws.amazon.com/AWSSimpleQueueService/latest/APIReference/API_SendMessage.html + def write( + self, config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog, input_messages: Iterable[AirbyteMessage] + ) -> Iterable[AirbyteMessage]: + + # Required properties + queue_url = config["queue_url"] + queue_region = config["region"] + + # TODO: Implement optional params for batch + # Optional Properties + # max_batch_size = config.get("max_batch_size", 10) + # send_as_batch = config.get("send_as_batch", False) + message_delay = config.get("message_delay") + message_body_key = config.get("message_body_key") + + # FIFO Properties + message_group_id = config.get("message_group_id") + + # Sensitive Properties + access_key = config["access_key"] + secret_key = config["secret_key"] + + session = boto3.Session(aws_access_key_id=access_key, aws_secret_access_key=secret_key, region_name=queue_region) + sqs = session.resource("sqs") + queue = sqs.Queue(url=queue_url) + + # TODO: Make access/secret key optional, support public access & profiles + # TODO: Support adding/setting attributes in the UI + # TODO: Support extracting a specific path as message attributes + + for message in input_messages: + if message.type == Type.RECORD: + sqs_message = self.build_sqs_message(message.record, message_body_key) + + if message_delay: + sqs_message = self.set_message_delay(sqs_message, message_delay) + + sqs_message = self.add_attributes_to_message(message.record, sqs_message) + + if self.queue_is_fifo(queue_url): + use_content_dedupe = queue.attributes.get("ContentBasedDeduplication") != "false" + self.set_message_fifo_properties(sqs_message, message_group_id, use_content_dedupe) + + self.send_single_message(queue, sqs_message) + if message.type == Type.STATE: + yield message + + def check(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> AirbyteConnectionStatus: + try: + # Required properties + queue_url = config["queue_url"] + logger.debug("Amazon SQS Destination Config Check - queue_url: " + queue_url) + queue_region = config["region"] + logger.debug("Amazon SQS Destination Config Check - region: " + queue_region) + + # Sensitive Properties + access_key = config["access_key"] + logger.debug("Amazon SQS Destination Config Check - access_key (ends with): " + access_key[-1]) + secret_key = config["secret_key"] + logger.debug("Amazon SQS Destination Config Check - secret_key (ends with): " + secret_key[-1]) + + logger.debug("Amazon SQS Destination Config Check - Starting connection test ---") + session = boto3.Session(aws_access_key_id=access_key, aws_secret_access_key=secret_key, region_name=queue_region) + sqs = session.resource("sqs") + queue =
sqs.Queue(url=queue_url) + if hasattr(queue, "attributes"): + logger.debug("Amazon SQS Destination Config Check - Connection test successful ---") + + if self.queue_is_fifo(queue_url): + fifo = queue.attributes.get("FifoQueue", False) + if not fifo: + raise Exception("FIFO Queue URL set but Queue is not FIFO") + + message_group_id = config.get("message_group_id") + if message_group_id is None: + raise Exception("Message Group ID is not set, but is required for FIFO Queues.") + + # TODO: Support referencing an ID inside the Record to use as de-dupe ID + # message_dedupe_key = config.get("message_dedupe_key") + # content_dedupe = queue.attributes.get('ContentBasedDeduplication') + # if content_dedupe == "false": + # if message_dedupe_id is None: + # raise Exception("You must provide a Message Deduplication ID when ContentBasedDeduplication is not used.") + + return AirbyteConnectionStatus(status=Status.SUCCEEDED) + else: + return AirbyteConnectionStatus( + status=Status.FAILED, message="Amazon SQS Destination Config Check - Could not connect to queue" + ) + except ClientError as e: + return AirbyteConnectionStatus( + status=Status.FAILED, message=f"Amazon SQS Destination Config Check - Error in AWS Client: {str(e)}" + ) + except Exception as e: + return AirbyteConnectionStatus( + status=Status.FAILED, message=f"Amazon SQS Destination Config Check - An exception occurred: {str(e)}" + ) diff --git a/airbyte-integrations/connectors/destination-amazon-sqs/destination_amazon_sqs/spec.json b/airbyte-integrations/connectors/destination-amazon-sqs/destination_amazon_sqs/spec.json new file mode 100644 index 000000000000..f94d7d023e81 --- /dev/null +++ b/airbyte-integrations/connectors/destination-amazon-sqs/destination_amazon_sqs/spec.json @@ -0,0 +1,101 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/destinations/amazon-sqs", + "supported_destination_sync_modes": ["append"], + "supportsIncremental": true, + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Destination Amazon Sqs", + "type": "object", + "required": ["queue_url", "region"], + "additionalProperties": false, + "properties": { + "queue_url": { + "title": "Queue URL", + "description": "URL of the SQS Queue", + "type": "string", + "examples": [ + "https://sqs.eu-west-1.amazonaws.com/1234567890/my-example-queue" + ], + "order": 0 + }, + "region": { + "title": "AWS Region", + "description": "AWS Region of the SQS Queue", + "type": "string", + "enum": [ + "af-south-1", + "ap-east-1", + "ap-northeast-1", + "ap-northeast-2", + "ap-northeast-3", + "ap-south-1", + "ap-south-2", + "ap-southeast-1", + "ap-southeast-2", + "ap-southeast-3", + "ap-southeast-4", + "ca-central-1", + "ca-west-1", + "cn-north-1", + "cn-northwest-1", + "eu-central-1", + "eu-central-2", + "eu-north-1", + "eu-south-1", + "eu-south-2", + "eu-west-1", + "eu-west-2", + "eu-west-3", + "il-central-1", + "me-central-1", + "me-south-1", + "sa-east-1", + "us-east-1", + "us-east-2", + "us-gov-east-1", + "us-gov-west-1", + "us-west-1", + "us-west-2" + ], + "order": 1 + }, + "message_delay": { + "title": "Message Delay", + "description": "Modify the Message Delay of the individual message from the Queue's default (seconds).", + "type": "integer", + "examples": ["15"], + "order": 2 + }, + "access_key": { + "title": "AWS IAM Access Key ID", + "description": "The Access Key ID of the AWS IAM Role to use for sending messages", + "type": "string", + "examples": ["xxxxxHRNxxx3TBxxxxxx"], + "order": 3, + "airbyte_secret": true 
+ }, + "secret_key": { + "title": "AWS IAM Secret Key", + "description": "The Secret Key of the AWS IAM Role to use for sending messages", + "type": "string", + "examples": ["hu+qE5exxxxT6o/ZrKsxxxxxxBhxxXLexxxxxVKz"], + "order": 4, + "airbyte_secret": true + }, + "message_body_key": { + "title": "Message Body Key", + "description": "Use this property to extract the contents of the named key in the input record to use as the SQS message body. If not set, the entire content of the input record data is used as the message body.", + "type": "string", + "examples": ["myDataPath"], + "order": 5 + }, + "message_group_id": { + "title": "Message Group Id", + "description": "The tag that specifies that a message belongs to a specific message group. This parameter applies only to, and is REQUIRED by, FIFO queues.", + "type": "string", + "examples": ["my-fifo-group"], + "order": 6 + } + } + } +} diff --git a/airbyte-integrations/connectors/destination-amazon-sqs/integration_tests/input_records_json b/airbyte-integrations/connectors/destination-amazon-sqs/integration_tests/input_records_json new file mode 100644 index 000000000000..b46977c1c13d --- /dev/null +++ b/airbyte-integrations/connectors/destination-amazon-sqs/integration_tests/input_records_json @@ -0,0 +1 @@ +{"type": "RECORD", "record": {"stream": "ab-airbyte-testing", "data": {"id": "ba0f237b-abf5-41ae-9d94-1dbd346f38dd", "body": "test 1", "attributes": null}, "emitted_at": 1633881878000}} \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-amazon-sqs/integration_tests/integration_test.py b/airbyte-integrations/connectors/destination-amazon-sqs/integration_tests/integration_test.py new file mode 100644 index 000000000000..5d1e7112b133 --- /dev/null +++ b/airbyte-integrations/connectors/destination-amazon-sqs/integration_tests/integration_test.py @@ -0,0 +1,46 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + +import json +from typing import Any, Mapping + +import pytest +from airbyte_cdk import AirbyteLogger +from airbyte_cdk.models import AirbyteStream, ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, DestinationSyncMode, Status, SyncMode +from destination_amazon_sqs import DestinationAmazonSqs + + +@pytest.fixture(name="config") +def config_fixture() -> Mapping[str, Any]: + with open("secrets/config.json", "r") as f: + return json.loads(f.read()) + + +@pytest.fixture(name="configured_catalog") +def configured_catalog_fixture() -> ConfiguredAirbyteCatalog: + stream_schema = {"type": "object", "properties": {"string_col": {"type": "string"}, "int_col": {"type": "integer"}}} + + append_stream = ConfiguredAirbyteStream( + stream=AirbyteStream(name="append_stream", json_schema=stream_schema, supported_sync_modes=[SyncMode.incremental]), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.append, + ) + + overwrite_stream = ConfiguredAirbyteStream( + stream=AirbyteStream(name="overwrite_stream", json_schema=stream_schema, supported_sync_modes=[SyncMode.incremental]), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.overwrite, + ) + + return ConfiguredAirbyteCatalog(streams=[append_stream, overwrite_stream]) + + +def test_check_valid_config(config: Mapping): + outcome = DestinationAmazonSqs().check(AirbyteLogger(), config) + assert outcome.status == Status.SUCCEEDED + + +def test_check_invalid_config(): + outcome = DestinationAmazonSqs().check(AirbyteLogger(), {"secret_key": "not_a_real_secret"}) + assert outcome.status == Status.FAILED diff --git a/airbyte-integrations/connectors/destination-amazon-sqs/main.py b/airbyte-integrations/connectors/destination-amazon-sqs/main.py new file mode 100644 index 000000000000..bc6076972a29 --- /dev/null +++ b/airbyte-integrations/connectors/destination-amazon-sqs/main.py @@ -0,0 +1,11 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import sys + +from destination_amazon_sqs import DestinationAmazonSqs + +if __name__ == "__main__": + DestinationAmazonSqs().run(sys.argv[1:]) diff --git a/airbyte-integrations/connectors/destination-amazon-sqs/metadata.yaml b/airbyte-integrations/connectors/destination-amazon-sqs/metadata.yaml index 3676f4f05113..1dcf91f8995d 100644 --- a/airbyte-integrations/connectors/destination-amazon-sqs/metadata.yaml +++ b/airbyte-integrations/connectors/destination-amazon-sqs/metadata.yaml @@ -2,7 +2,7 @@ data: connectorSubtype: api connectorType: destination definitionId: 0eeee7fb-518f-4045-bacc-9619e31c43ea - dockerImageTag: 0.1.1 + dockerImageTag: 0.1.2 dockerRepository: airbyte/destination-amazon-sqs githubIssueLabel: destination-amazon-sqs icon: awssqs.svg @@ -20,5 +20,5 @@ data: ab_internal: sl: 100 ql: 200 - supportLevel: archived + supportLevel: community metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-amazon-sqs/requirements.txt b/airbyte-integrations/connectors/destination-amazon-sqs/requirements.txt new file mode 100644 index 000000000000..d6e1198b1ab1 --- /dev/null +++ b/airbyte-integrations/connectors/destination-amazon-sqs/requirements.txt @@ -0,0 +1 @@ +-e .
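To make the interplay of the spec options above concrete, here is a minimal, hypothetical sketch of how a config and a record could be mapped onto a boto3 `send_message` call. The `send_record` helper is invented for illustration and is not the connector's actual implementation:

```python
# Illustrative only: maps the spec options above onto a boto3 send; not the connector's code.
import json

import boto3


def send_record(config: dict, record: dict) -> None:
    sqs = boto3.resource(
        "sqs",
        region_name=config["region"],
        aws_access_key_id=config.get("access_key"),
        aws_secret_access_key=config.get("secret_key"),
    )
    queue = sqs.Queue(config["queue_url"])

    # message_body_key: send only that key's value; otherwise serialize the whole record.
    body_key = config.get("message_body_key")
    body = record[body_key] if body_key else json.dumps(record)

    send_args = {"MessageBody": body}
    if config["queue_url"].endswith(".fifo"):
        # FIFO queues require a MessageGroupId and do not accept per-message delays.
        send_args["MessageGroupId"] = config["message_group_id"]
    elif config.get("message_delay") is not None:
        send_args["DelaySeconds"] = config["message_delay"]
    queue.send_message(**send_args)
```

The sketch only shows how the three optional fields interact; the connector's actual send path is not part of this patch section.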
diff --git a/airbyte-integrations/connectors/destination-amazon-sqs/sample_files/configured_catalog.json b/airbyte-integrations/connectors/destination-amazon-sqs/sample_files/configured_catalog.json new file mode 100644 index 000000000000..ee132a2e53a7 --- /dev/null +++ b/airbyte-integrations/connectors/destination-amazon-sqs/sample_files/configured_catalog.json @@ -0,0 +1,27 @@ +{ + "streams": [ + { + "sync_mode": "full_refresh", + "destination_sync_mode": "append", + "stream": { + "name": "ab-airbyte-testing", + "supported_sync_modes": ["full_refresh"], + "source_defined_cursor": false, + "json_schema": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "body": { + "type": "string" + }, + "attributes": { + "type": ["null", "object"] + } + } + } + } + } + ] +} diff --git a/airbyte-integrations/connectors/destination-amazon-sqs/setup.py b/airbyte-integrations/connectors/destination-amazon-sqs/setup.py new file mode 100644 index 000000000000..f1df0009ea38 --- /dev/null +++ b/airbyte-integrations/connectors/destination-amazon-sqs/setup.py @@ -0,0 +1,23 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = ["airbyte-cdk", "boto3"] + +TEST_REQUIREMENTS = ["pytest~=6.1", "moto"] + +setup( + name="destination_amazon_sqs", + description="Destination implementation for Amazon Sqs.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/destination-amazon-sqs/unit_tests/unit_test.py b/airbyte-integrations/connectors/destination-amazon-sqs/unit_tests/unit_test.py new file mode 100644 index 000000000000..719671fa281b --- /dev/null +++ b/airbyte-integrations/connectors/destination-amazon-sqs/unit_tests/unit_test.py @@ -0,0 +1,226 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + +import json +import time +from typing import Any, Mapping + +import boto3 +from airbyte_cdk.logger import AirbyteLogger +from airbyte_cdk.models import AirbyteMessage, ConfiguredAirbyteCatalog, Status +from destination_amazon_sqs import DestinationAmazonSqs + +# from airbyte_cdk.sources.source import Source +from moto import mock_iam, mock_sqs +from moto.core import set_initial_no_auth_action_count + + +@mock_iam +def create_user_with_all_permissions(): + client = boto3.client("iam", region_name="eu-west-1") + client.create_user(UserName="test_user1") + + policy_document = { + "Version": "2012-10-17", + "Statement": [{"Effect": "Allow", "Action": ["sqs:*"], "Resource": "*"}], + } + + client.put_user_policy( + UserName="test_user1", + PolicyName="policy1", + PolicyDocument=json.dumps(policy_document), + ) + + return client.create_access_key(UserName="test_user1")["AccessKey"] + + +def create_config(queue_url, queue_region, access_key, secret_key, message_delay): + return { + "queue_url": queue_url, + "region": queue_region, + "access_key": access_key, + "secret_key": secret_key, + "message_delay": message_delay, + } + + +def create_fifo_config(queue_url, queue_region, access_key, secret_key, message_group_id, message_delay): + return { + "queue_url": queue_url, + "region": queue_region, + "access_key": access_key, + "secret_key": secret_key, + "message_group_id": message_group_id, + "message_delay": message_delay, + } + + +def create_config_with_body_key(queue_url, queue_region, access_key, secret_key, message_body_key, message_delay): + return { + "queue_url": queue_url, + "region": queue_region, + "access_key": access_key, + "secret_key": secret_key, + "message_body_key": message_body_key, + "message_delay": message_delay, + } + + +def get_catalog() -> Mapping[str, Any]: + with open("sample_files/configured_catalog.json", "r") as f: + return json.load(f) + + +@set_initial_no_auth_action_count(3) +@mock_sqs +@mock_iam +def test_check(): + # Create User + user = create_user_with_all_permissions() + # Create Queue + queue_name = "amazon-sqs-mock-queue" + queue_region = "eu-west-1" + client = boto3.client( + "sqs", aws_access_key_id=user["AccessKeyId"], aws_secret_access_key=user["SecretAccessKey"], region_name=queue_region + ) + queue_url = client.create_queue(QueueName=queue_name)["QueueUrl"] + # Create config + config = create_config(queue_url, queue_region, user["AccessKeyId"], user["SecretAccessKey"], 10) + # Create AirbyteLogger + logger = AirbyteLogger() + # Create Destination + destination = DestinationAmazonSqs() + # Run check + status = destination.check(logger, config) + assert status.status == Status.SUCCEEDED + + # Create FIFO queue + fifo_queue_name = "amazon-sqs-mock-queue.fifo" + fifo_queue_url = client.create_queue(QueueName=fifo_queue_name, Attributes={"FifoQueue": "true"})["QueueUrl"] + # Create config for FIFO + fifo_config = create_fifo_config(fifo_queue_url, queue_region, user["AccessKeyId"], user["SecretAccessKey"], "fifo-group", 10) + # Run check + status = destination.check(logger, fifo_config) + assert status.status == Status.SUCCEEDED + + +@set_initial_no_auth_action_count(4) +@mock_sqs +@mock_iam +def test_write(): + # Create User + user = create_user_with_all_permissions() + + test_message = { + "type": "RECORD", + "record": { + "stream": "ab-airbyte-testing", + "data": {"id": "ba0f237b-abf5-41ae-9d94-1dbd346f38dd", "body": "test 1", "attributes": None}, + "emitted_at": 1633881878000, + }, + } + ab_message = AirbyteMessage(**test_message) + + # Common params +
message_delay = 1 + queue_region = "eu-west-1" + + # Standard Queue Test + print("## Starting standard queue test ##") + # Create Queue + queue_name = "amazon-sqs-mock-queue" + client = boto3.client( + "sqs", aws_access_key_id=user["AccessKeyId"], aws_secret_access_key=user["SecretAccessKey"], region_name=queue_region + ) + queue_url = client.create_queue(QueueName=queue_name)["QueueUrl"] + # Create config + config = create_config(queue_url, queue_region, user["AccessKeyId"], user["SecretAccessKey"], message_delay) + # Create ConfiguredAirbyteCatalog + catalog = ConfiguredAirbyteCatalog(streams=get_catalog()["streams"]) + # Create Destination + destination = DestinationAmazonSqs() + # Send messages using write() + for message in destination.write(config, catalog, [ab_message]): + print(f"Message Sent with delay of {message_delay} seconds") + # Listen for messages for max 20 seconds + timeout = time.time() + 20 + print("Listening for messages.") + while True: + message_received = client.receive_message(QueueUrl=queue_url) + if message_received.get("Messages"): + print("Message received.") + message_body = json.loads(message_received["Messages"][0]["Body"]) + # Compare the body of the received message, with the body of the message we sent + if message_body == test_message["record"]["data"]: + print("Received message matches for standard queue write.") + assert True + break + else: + continue + if time.time() > timeout: + print("Timed out waiting for message after 20 seconds.") + assert False + + # Standard Queue with a Message Key Test + print("## Starting body key queue test ##") + # Create Queue + key_queue_name = "amazon-sqs-mock-queue-key" + key_queue_url = client.create_queue(QueueName=key_queue_name)["QueueUrl"] + # Create config + message_body_key = "body" + key_config = create_config_with_body_key( + key_queue_url, queue_region, user["AccessKeyId"], user["SecretAccessKey"], message_body_key, message_delay + ) + # Send messages using write() + for message in destination.write(key_config, catalog, [ab_message]): + print(f"Message Sent with delay of {message_delay} seconds") + # Listen for messages for max 20 seconds + timeout = time.time() + 20 + print("Listening for messages.") + while True: + message_received = client.receive_message(QueueUrl=key_queue_url) + if message_received.get("Messages"): + print("Message received.") + message_body = message_received["Messages"][0]["Body"] + # Compare the body of the received message, with the body of the message we sent + if message_body == test_message["record"]["data"][message_body_key]: + print("Received message matches for body key queue write.") + assert True + break + else: + continue + if time.time() > timeout: + print("Timed out waiting for message after 20 seconds.") + assert False + + # FIFO Queue Test + print("## Starting FIFO queue test ##") + # Create Queue + fifo_queue_name = "amazon-sqs-mock-queue.fifo" + fifo_queue_url = client.create_queue(QueueName=fifo_queue_name, Attributes={"FifoQueue": "true"})["QueueUrl"] + # Create config + fifo_config = create_fifo_config( + fifo_queue_url, queue_region, user["AccessKeyId"], user["SecretAccessKey"], "fifo-group", message_delay + ) + # Send messages using write() + for message in destination.write(fifo_config, catalog, [ab_message]): + print(f"Message Sent with delay of {message_delay} seconds") + # Listen for messages for max 20 seconds + timeout = time.time() + 20 + print("Listening for messages.") + while True: + message_received = client.receive_message(QueueUrl=fifo_queue_url) + 
if message_received.get("Messages"): + print("Message received.") + message_body = json.loads(message_received["Messages"][0]["Body"]) + # Compare the body of the received message, with the body of the message we sent + if message_body == test_message["record"]["data"]: + print("Received message matches for FIFO queue write.") + assert True + break + else: + continue + if time.time() > timeout: + print("Timed out waiting for message after 20 seconds.") + assert False diff --git a/airbyte-integrations/connectors/destination-cumulio/Dockerfile b/airbyte-integrations/connectors/destination-cumulio/Dockerfile new file mode 100644 index 000000000000..55356d65ab7e --- /dev/null +++ b/airbyte-integrations/connectors/destination-cumulio/Dockerfile @@ -0,0 +1,42 @@ +FROM python:3.9.11 as base +# FROM python:3.9.11-alpine3.15 as base +# switched from alpine as there were tons of errors (in case you want to switch back to alpine) +# - https://stackoverflow.com/a/57485724/5246670 +# - numpy error: https://stackoverflow.com/a/22411624/5246670 +# - libstdc++ https://github.com/amancevice/docker-pandas/issues/12#issuecomment-717215043 +# - musl-dev linux-headers g++ because of: https://stackoverflow.com/a/40407099/5246670 + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apt-get update && apt-get -y upgrade \ + && pip install --upgrade pip + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . +# build a clean environment +FROM base +# RUN conda install -c conda-forge python-duckdb +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +#adding duckdb manually (outside of setup.py - lots of errors) +RUN pip install duckdb + +# copy payload code only +COPY main.py ./ +COPY destination_cumulio ./destination_cumulio + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.1 +LABEL io.airbyte.name=airbyte/destination-cumulio diff --git a/airbyte-integrations/connectors/destination-cumulio/README.md b/airbyte-integrations/connectors/destination-cumulio/README.md new file mode 100644 index 000000000000..62261106b05f --- /dev/null +++ b/airbyte-integrations/connectors/destination-cumulio/README.md @@ -0,0 +1,98 @@ +# Cumulio Destination + +This is the repository for the Cumulio destination connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/destinations/cumulio). + +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.7.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. 
+ + Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is + used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. + If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything + should work as you expect. + + #### Create credentials + **If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/destinations/cumulio) + to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `destination_cumulio/spec.json` file. + Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. + See `integration_tests/sample_config.json` for a sample config file. + + **If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `destination cumulio test creds` + and place them into `secrets/config.json`. + + ### Locally running the connector + ``` + python main.py spec + python main.py check --config secrets/config.json + python main.py write --config secrets/config.json --catalog integration_tests/configured_catalog.json + ``` + + ### Locally running the connector docker image + + + #### Build + **Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** + ```bash + airbyte-ci connectors --name=destination-cumulio build + ``` + + An image will be built with the tag `airbyte/destination-cumulio:dev`. + + **Via `docker build`:** + ```bash + docker build -t airbyte/destination-cumulio:dev . + ``` + + #### Run + Then run any of the connector commands as follows: + ``` + docker run --rm airbyte/destination-cumulio:dev spec + docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-cumulio:dev check --config /secrets/config.json + # messages.jsonl is a file containing line-separated JSON representing AirbyteMessages + cat messages.jsonl | docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-cumulio:dev write --config /secrets/config.json --catalog /integration_tests/configured_catalog.json + ``` + + ## Testing + You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): + ```bash + airbyte-ci connectors --name=destination-cumulio test + ``` + + ### Customizing acceptance tests + Customize the `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. + If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py. + + ## Dependency Management + All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. + We split dependencies into two groups: + * dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list. +* dependencies required for testing go in the `TEST_REQUIREMENTS` list + + ### Publishing a new version of the connector + You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? + 1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-cumulio test` + 2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). + 3. Make sure the `metadata.yaml` content is up to date. + 4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/destinations/cumulio.md`). + 5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). + 6. Pat yourself on the back for being an awesome contributor. + 7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. + diff --git a/airbyte-integrations/connectors/destination-cumulio/destination_cumulio/__init__.py b/airbyte-integrations/connectors/destination-cumulio/destination_cumulio/__init__.py new file mode 100644 index 000000000000..5dda7de9dfe7 --- /dev/null +++ b/airbyte-integrations/connectors/destination-cumulio/destination_cumulio/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from .destination import DestinationCumulio + +__all__ = ["DestinationCumulio"] diff --git a/airbyte-integrations/connectors/destination-cumulio/destination_cumulio/client.py b/airbyte-integrations/connectors/destination-cumulio/destination_cumulio/client.py new file mode 100644 index 000000000000..10728e374f54 --- /dev/null +++ b/airbyte-integrations/connectors/destination-cumulio/destination_cumulio/client.py @@ -0,0 +1,367 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import time +from logging import Logger +from typing import Any, Mapping + +from cumulio.cumulio import Cumulio # type: ignore + +# def _retry_with_backoff( +# fn: Callable, +# backoff_times_in_seconds: list[int] +# ): +# while True: +# try: +# return fn() + + +class CumulioClient: + # Cumul.io will auto-generate a UUID that is unique to the dataset created. + # To ensure a consistent flow to the same dataset, we'll add a tag to the dataset: + # the tag is a combination of the prefix below and the stream name. + # This allows us to retrieve the same dataset resource upon further sync schedules.
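+    # For example, a stream named "orders" is tagged "[AIRBYTE - DO NOT DELETE] - orders".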
+ TAG_PREFIX = "[AIRBYTE - DO NOT DELETE] - " + + REPLACE_TAG = "REPLACE DATA" + + INITIAL_DATASET_NAME_PREFIX = "Airbyte - " + + BACKOFF_TIMES_IN_SECONDS = [300, 600, 1200] + + def __init__(self, config: Mapping[str, Any], logger: Logger): + self.logger = logger + self.client = Cumulio(config["api_key"], config["api_token"], config["api_host"]) + + def batch_write( + self, + stream_name: str, + write_buffer: list, + column_headers: list, + is_in_overwrite_sync_mode: bool, + is_first_batch: bool, + update_metadata: bool, + ): + """Write a list of data (array of arrays) in a specific sync mode to Cumul.io.""" + if len(write_buffer) == 0 or (len(write_buffer) == 1 and len(write_buffer[0]) == 0): + return + + dataset_id = self._get_dataset_id_from_stream_name(stream_name) + if dataset_id is None: + dataset_id = self._push_batch_to_new_dataset(stream_name, write_buffer, column_headers) + else: + is_in_replace_mode = self._dataset_contains_replace_tag(dataset_id) + first_batch_replace = is_first_batch and (is_in_overwrite_sync_mode or is_in_replace_mode) + self._push_batch_to_existing_dataset( + dataset_id, + write_buffer, + column_headers, + first_batch_replace, + update_metadata, + ) + + self.logger.info(f"Successfully pushed {len(write_buffer)} rows to Cumul.io's data warehouse in a dataset with id {dataset_id}.") + + def test_api_token(self): + """Test an API key and token by retrieving it.""" + self.logger.info("Checking API host, key and token.") + data = self.client.get("authorization", {"where": {"type": "api"}}) + # if response contains a count 0, the API host, key and token combination is unknown to Cumul.io. + if data["count"] == 0: + raise Exception( + "Unknown combination of API host, key and token. Can you verify whether you've specified the correct combination of " + "Cumul.io API host, key, and token?" + ) + self.logger.info("API host, key and token combination is valid.") + + def test_data_push(self, stream_name: str, data: list[list[Any]], columns: list[str]): + """[DEPRECATED] This method is no longer in use as it results in a lot of overhead. + Test pushing dummy data into a dataset, and delete the dataset afterwards.""" + + self.logger.info("Starting data push of dummy data.") + self.batch_write(stream_name, data, columns, True, True, True) + self.logger.info("Finished data push of dummy data. Will delete dummy dataset.") + + self.delete_dataset(stream_name) + self.logger.info("Finished deleting dummy dataset.") + + def delete_dataset(self, stream_name: str): + """Delete a dataset in Cumul.io. + This should only be used for testing purposes. Currently used in: + - Integration tests + - When pushing dummy data to an example dataset during "check" of Airbyte destination connector (see destination.py check method) + """ + dataset_id = self._get_dataset_id_from_stream_name(stream_name) + if dataset_id is not None: + return self.client.delete("securable", dataset_id) + + self.logger.info(f"No dataset for stream {stream_name} found to delete.") + + def get_ordered_columns(self, stream_name: str): + """Return a list of ordered columns (based on their order in Cumul.io). + The dataset is retrieved based on a Cumul.io tag that includes the stream_name. + """ + dataset_and_columns = self.get_dataset_and_columns_from_stream_name(stream_name) + if dataset_and_columns is None: + # Dataset hasn't been created yet on Cumul.io's side. + return [] + # Sort columns based on the order property. 
+ order_sorted_columns = sorted(dataset_and_columns["columns"], key=lambda x: x["order"]) + # Return a list of column source names. + return [column["source_name"] for column in order_sorted_columns] + + def get_dataset_and_columns_from_stream_name(self, stream_name: str): + """Return a dataset and its columns based on a Cumul.io tag that includes the stream_name.""" + result = self.client.get( + "securable", + { + "where": {"type": "dataset"}, + "attributes": ["id", "name"], + "include": [ + { + "model": "Tag", + "where": {"tag": self.TAG_PREFIX + stream_name}, + "attributes": ["id", "tag"], + "jointype": "inner", + }, + { + "model": "Column", + "attributes": ["id", "source_name", "order"], + "jointype": "inner", + }, + ], + }, + ) + if result["count"] > 1: + raise Exception( + f"More than one dataset has been returned, could you verify whether the tag for stream {stream_name} is set up " + f"correctly in Cumul.io (expected a tag '{self.TAG_PREFIX}{stream_name}')?" + ) + # A count of zero means that the dataset has not been created on Cumul.io's side yet. + # We'll return None to indicate this. + elif result["count"] == 0: + return None + # return dataset and its columns. + return result["rows"][0] + + def set_replace_tag_on_dataset(self, stream_name: str): + """Add a "replace" tag to a specific dataset based on the stream_name. + The "replace" tag is used to ensure that the next sync will replace the existing data. + """ + dataset_id = self._get_dataset_id_from_stream_name(stream_name) + if dataset_id is not None: + self.logger.info( + f"A tag will be added to the dataset with id {dataset_id} to replace the existing data upon next sync. " + f"As a result, the existing data will not be replaced until the next sync has ran. " + f"This avoids empty datasets which cause 'No data' to be displayed upon querying them." + ) + return self._associate_tag_dataset_id(self.REPLACE_TAG, dataset_id) + self.logger.debug( + f"No dataset found to set Replace tag on (looking for stream name '{stream_name}'), " + f"this might be due to the dataset not existing yet on Cumul.io's side." + ) + + def _push_batch_to_new_dataset(self, stream_name: str, write_buffer: list[list[Any]], column_headers: list[str]): + properties = { + "type": "create", + "data": write_buffer, + "options": { + "header": column_headers, + "update_metadata": True, + "name": {"en": self.INITIAL_DATASET_NAME_PREFIX + stream_name}, + }, + } + result: Mapping[str, Any] = {} + data_is_pushed = False + try_count = 0 + while (not data_is_pushed) and try_count < len(self.BACKOFF_TIMES_IN_SECONDS): + try: + self.logger.info( + f"Pushing {len(write_buffer)} rows to Cumul.io's data warehouse in a new Cumul.io dataset " + f"with name {self.INITIAL_DATASET_NAME_PREFIX}{stream_name}." + ) + + result = self.client.create("data", properties) + data_is_pushed = True + + except Exception as e: + if "Unauthorized" in str(e): + raise Exception( + f"Not able to push a batch of data to a new dataset due to an 'Unauthorized' error. " + f"Please verify that your API key and token are still valid!" + f"Error: {e}" + ) + elif try_count + 1 >= len(self.BACKOFF_TIMES_IN_SECONDS): + raise Exception(f"Exception while creating new dataset after {len(self.BACKOFF_TIMES_IN_SECONDS)} retries: {e}") + + seconds_to_backoff = self.BACKOFF_TIMES_IN_SECONDS[try_count] + try_count += 1 + self.logger.info( + f"Error pushing data to a new dataset during try {try_count}, retrying in {seconds_to_backoff} seconds. 
Error: {e}" + ) + time.sleep(seconds_to_backoff) + + dataset_id = result["rows"][0]["id"] + try: + # Add a tag to the dataset to allow retrieving it upon further syncs / batch writes + self._associate_tag_dataset_id(stream_name, dataset_id) + except Exception as e: + raise Exception( + f"The data has been stored successfully, but an error occurred while associating a required tag to the " + f"dataset (id: {dataset_id}). This will likely cause issues upon further synchronizations. The following " + f"error occurred: ", + e, + ) + + return dataset_id + + def _push_batch_to_existing_dataset( + self, + dataset_id: str, + write_buffer: list[list[Any]], + column_headers: list[str], + first_batch_replace: bool, + update_metadata: bool, + ): + cumulio_sync_type = "replace" if first_batch_replace else "append" + + properties = { + "type": cumulio_sync_type, + "data": write_buffer, + "securable_id": dataset_id, + "options": { + "header": column_headers, + "update_metadata": update_metadata, + }, + } + data_is_pushed = False + try_count = 0 + while (not data_is_pushed) and try_count < len(self.BACKOFF_TIMES_IN_SECONDS): + try: + self.logger.info( + f"Pushing {len(write_buffer)} rows to Cumul.io dataset with id {dataset_id} in {cumulio_sync_type} mode, " + f"{'while' if update_metadata else 'not'} updating the columns of that dataset." + ) + self.client.create("data", properties) + + data_is_pushed = True + + if first_batch_replace: + # Try to remove replace tag to ensure next syncs do not replace existing data. + self._remove_replace_tag_dataset_id_association(dataset_id) + + except RuntimeError as e: + if "Unauthorized" in str(e): + raise Exception( + f"Not able to push a batch of data to dataset {dataset_id} due to an 'Unauthorized' error. " + f"Please verify that your API key and token are still valid!" + f"Error: {e}" + ) + elif try_count + 1 >= len(self.BACKOFF_TIMES_IN_SECONDS): + raise Exception( + f"Exception while pushing to existing dataset {dataset_id} after {len(self.BACKOFF_TIMES_IN_SECONDS)} retries: ", + e, + ) + + seconds_to_backoff = self.BACKOFF_TIMES_IN_SECONDS[try_count] + try_count += 1 + + self.logger.info( + f"Error pushing data to existing dataset {dataset_id} during try {try_count}, retrying in {seconds_to_backoff} seconds." + ) + + time.sleep(seconds_to_backoff) + + def _dataset_contains_replace_tag(self, dataset_id: str): + """Return a boolean to indicate whether a dataset contains the "replace" tag.""" + result = self.client.get( + "securable", + { + "where": {"type": "dataset", "id": dataset_id}, + "attributes": ["id", "name"], + "include": [ + { + "model": "Tag", + "where": {"tag": self.TAG_PREFIX + self.REPLACE_TAG}, + "attributes": ["id", "tag"], + "jointype": "inner", + } + ], + }, + ) + return False if result["count"] == 0 else True + + def _remove_replace_tag_dataset_id_association(self, dataset_id: str): + """Remove the "replace" tag from a specific dataset.""" + tag_id = self._get_tag_id(self.REPLACE_TAG) + if tag_id is not None: + return self._dissociate_tag_with_dataset_id(tag_id, dataset_id) + self.logger.debug( + f"No replace tag found, so could not remove for Cumul.io dataset with id {dataset_id}." + f"This could be expected as the stream might be configured in overwrite mode." 
+ ) + + def _get_dataset_id_from_stream_name(self, stream_name: str): + """Return a dataset ID based on a Cumul.io tag that includes the stream_name.""" + result = self.client.get( + "securable", + { + "where": {"type": "dataset"}, + "attributes": ["id", "name"], + "include": [ + { + "model": "Tag", + "where": {"tag": self.TAG_PREFIX + stream_name}, + "attributes": ["id", "tag"], + "jointype": "inner", + } + ], + }, + ) + if result["count"] > 1: + raise Exception( + f"More than one dataset has been found, could you verify whether the tag for stream {stream_name} is set up " + f"correctly in Cumul.io (expected a tag '{self.TAG_PREFIX}{stream_name}' on a single dataset)?" + ) + # A count of zero means that the dataset has not been created on Cumul.io's side yet. + # We'll return None to indicate this. + elif result["count"] == 0: + return None + # return dataset ID + return result["rows"][0]["id"] + + def _associate_tag_dataset_id(self, tag_name: str, dataset_id: str): + """Ensure that a specific stream name tag is associated to a dataset ID. + Optionally the Tag is created and associated if not existing yet. + """ + # A tag should be unique and cannot be created multiple times. + # In order to ensure that the association doesn't fail, + # we'll first try to retrieve the tag and then either + # associate it with the newly created securable, + # or create & associate it. + tag_id = self._get_tag_id(tag_name) + if tag_id is not None: + return self._associate_tag_with_dataset_id(tag_id, dataset_id) + return self._create_and_associate_stream_name_tag_with_dataset_id(tag_name, dataset_id) + + def _get_tag_id(self, tag_name: str): + """Return a Tag ID using the stream name.""" + result = self.client.get("tag", {"where": {"tag": self.TAG_PREFIX + tag_name}}) + if result["count"] == 0: + return None + return result["rows"][0]["id"] + + def _associate_tag_with_dataset_id(self, tag_id: str, dataset_id: str): + return self.client.associate("tag", tag_id, "Securables", dataset_id) + + def _dissociate_tag_with_dataset_id(self, tag_id: str, dataset_id: str): + return self.client.dissociate("tag", tag_id, "Securables", dataset_id) + + def _create_and_associate_stream_name_tag_with_dataset_id(self, tag_name: str, dataset_id: str): + return self.client.create( + "tag", + {"tag": self.TAG_PREFIX + tag_name}, + [{"role": "Securables", "id": dataset_id}], + ) diff --git a/airbyte-integrations/connectors/destination-cumulio/destination_cumulio/destination.py b/airbyte-integrations/connectors/destination-cumulio/destination_cumulio/destination.py new file mode 100644 index 000000000000..61c6c5ac4afb --- /dev/null +++ b/airbyte-integrations/connectors/destination-cumulio/destination_cumulio/destination.py @@ -0,0 +1,101 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +from logging import Logger, getLogger +from typing import Any, Iterable, Mapping + +from airbyte_cdk.destinations import Destination +from airbyte_cdk.models import AirbyteConnectionStatus, AirbyteMessage, ConfiguredAirbyteCatalog, DestinationSyncMode, Status, Type +from destination_cumulio.client import CumulioClient +from destination_cumulio.writer import CumulioWriter + +logger = getLogger("airbyte") + + +class DestinationCumulio(Destination): + def write( + self, + config: Mapping[str, Any], + configured_catalog: ConfiguredAirbyteCatalog, + input_messages: Iterable[AirbyteMessage], + ) -> Iterable[AirbyteMessage]: + """Reads the input stream of messages, config, and catalog to write data to the destination. 
+ + This method returns an iterable (typically a generator of AirbyteMessages via yield) containing state messages received in the + input message stream. Outputting a state message means that every AirbyteRecordMessage which came before it has been successfully + persisted to the destination. This is used to ensure fault tolerance in the case that a sync fails before fully completing; + the source is then given the last state message output from this method as the starting point of the next sync. + + :param config: dict of JSON configuration matching the configuration declared in spec.json. Current format: + { + 'api_host': '', + 'api_key': '', + 'api_token': '' + } + :param configured_catalog: schema of the data being received and how it should be persisted in the destination. + :param input_messages: stream of input messages received from the source. + + :return: Iterable of AirbyteStateMessages wrapped in AirbyteMessage structs. + """ + writer = CumulioWriter(config, configured_catalog, logger) + + for configured_stream in configured_catalog.streams: + # Cumul.io does not support removing all data from an existing dataset, and removing the dataset itself will break existing + # dashboards built on top of it. + # Instead, the connector will make sure to push the first batch of data as a "replace" action: this will cause all existing data + # to be replaced with the first batch of data. All next batches will be pushed as an "append" action. + if configured_stream.destination_sync_mode == DestinationSyncMode.overwrite: + writer.delete_stream_entries(configured_stream.stream.name) + + for message in input_messages: + if message.type == Type.STATE: + # Yielding a state message indicates that all records which came before it have been written to the destination. + # We flush all write buffers in the writer, and then output the state message itself. + writer.flush_all() + yield message + elif message.type == Type.RECORD: + record = message.record + assert record is not None + assert record.stream is not None + assert record.data is not None + writer.queue_write_operation(record.stream, record.data) + else: + # ignore other message types for now + continue + + # Make sure to flush any records still in the queue + writer.flush_all() + + def check(self, logger: Logger, config: Mapping[str, Any]) -> AirbyteConnectionStatus: + """Tests if the input configuration can be used to successfully connect to the destination with the needed permissions. + + This will test whether the combination of the Cumul.io API host, API key and API token is valid. + + :param logger: Logging object to display debug/info/error to the logs + (logs will not be accessible via airbyte UI if they are not passed to this logger) + :param config: Json object containing the configuration of this destination, content of this json is as specified in + the properties of the spec.json file + + :return: AirbyteConnectionStatus indicating a Success or Failure + """ + try: + client = CumulioClient(config, logger) + # Verify access by hitting Cumul.io authentication endpoint + client.test_api_token() + + # We no longer test a data push, as this might take some time. + # If the API host, key, and token are valid, we can assume data can be pushed using them. + + return AirbyteConnectionStatus(status=Status.SUCCEEDED) + except Exception as e: + # The Cumul.io Python SDK currently returns a generic error message when an issue occurs during the request, + # or when the request returns e.g. a 401 Unauthorized HTTP response code. + # We'll assume that either the API host is incorrect, or the API key and token are no longer valid. + if str(e) != "Something went wrong": + return AirbyteConnectionStatus(status=Status.FAILED, message=f"An exception occurred: {repr(e)}") + return AirbyteConnectionStatus( + status=Status.FAILED, + message="An exception occurred: could it be that the API host is incorrect, or the API key and token are no longer valid?", + ) diff --git a/airbyte-integrations/connectors/destination-cumulio/destination_cumulio/spec.json b/airbyte-integrations/connectors/destination-cumulio/destination_cumulio/spec.json new file mode 100644 index 000000000000..dff9ec31cb64 --- /dev/null +++ b/airbyte-integrations/connectors/destination-cumulio/destination_cumulio/spec.json @@ -0,0 +1,37 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/destinations/cumulio", + "supported_destination_sync_modes": ["overwrite", "append"], + "supportsIncremental": true, + "supportsDBT": false, + "supportsNormalization": false, + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Destination Cumulio", + "type": "object", + "required": ["api_host", "api_key", "api_token"], + "additionalProperties": true, + "properties": { + "api_host": { + "title": "Cumul.io API Host URL", + "description": "URL of the Cumul.io API (e.g. 'https://api.cumul.io', 'https://api.us.cumul.io', or VPC-specific API url). Defaults to 'https://api.cumul.io'.", + "default": "https://api.cumul.io", + "type": "string", + "order": 0 + }, + "api_key": { + "title": "Cumul.io API Key", + "description": "An API key generated in Cumul.io's platform (can be generated here: https://app.cumul.io/start/profile/integration).", + "type": "string", + "airbyte_secret": true, + "order": 1 + }, + "api_token": { + "title": "Cumul.io API Token", + "description": "The corresponding API token generated in Cumul.io's platform (can be generated here: https://app.cumul.io/start/profile/integration).", + "type": "string", + "airbyte_secret": true, + "order": 2 + } + } + } +} diff --git a/airbyte-integrations/connectors/destination-cumulio/destination_cumulio/writer.py b/airbyte-integrations/connectors/destination-cumulio/destination_cumulio/writer.py new file mode 100644 index 000000000000..93c8d05ee761 --- /dev/null +++ b/airbyte-integrations/connectors/destination-cumulio/destination_cumulio/writer.py @@ -0,0 +1,205 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import json +from logging import Logger +from typing import Any, Mapping + +from airbyte_cdk.models import ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, DestinationSyncMode +from destination_cumulio.client import CumulioClient + + +def _convert_airbyte_configured_stream_into_headers_dict( + configured_stream: ConfiguredAirbyteStream, +): + """Return a dict of column names and types based on the configured Airbyte stream. + Note that the Airbyte types are currently not used due to Cumul.io's Data API Service not supporting specifying column types.
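+    For example, a schema property {"id": {"type": "string"}} is returned as {"id": {"airbyte-type": "string"}}.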
+ """ + column_headers = {} + for column_header in configured_stream.stream.json_schema["properties"]: + if "airbyte-type" in configured_stream.stream.json_schema["properties"][column_header]: + column_headers[column_header] = { + "airbyte-type": configured_stream.stream.json_schema["properties"][column_header]["airbyte-type"] + } + else: + column_headers[column_header] = {"airbyte-type": configured_stream.stream.json_schema["properties"][column_header]["type"]} + return column_headers + + +class CumulioWriter: + # Cumul.io's Data API service has a limit of pushing 10 000 data points (i.e. rows) in a single request. + # (see note here: https://developer.cumul.io/?shell#data_create) + FLUSH_INTERVAL = 10000 + + def __init__( + self, + config: Mapping[str, Any], + configured_catalog: ConfiguredAirbyteCatalog, + logger: Logger, + ): + """Create a single Cumul.io Client and a dict of writers. + The Cumul.io Client will be used to send API requests to Cumul.io's API. + The writers dict will contain one element for each configured_stream in the connection. + Each of these dicts have a stream-specific configuration and write buffer. + """ + self.logger = logger + self.client = CumulioClient(config, logger) + self.writers = self._create_writers(configured_catalog) + + def queue_write_operation(self, stream_name: str, data: Mapping): + """Queue data in a specific writer buffer. + It flushes the buffer in case it has reached the flush interval. + """ + cumulio_data = self.transform_data(stream_name, data) + self.writers[stream_name]["write_buffer"].append(cumulio_data) + if len(self.writers[stream_name]["write_buffer"]) == self.FLUSH_INTERVAL: + self.flush(stream_name) + + def flush_all(self): + """Flush all writer buffers.""" + for stream_name in self.writers: + self.flush(stream_name) + + def flush(self, stream_name: str): + """Write a batch of data from the write buffer using the Cumul.io client.""" + self.client.batch_write( + stream_name, + self.writers[stream_name]["write_buffer"], + [column_header["name"] for column_header in self.writers[stream_name]["column_headers"]], + self.writers[stream_name]["is_in_overwrite_sync_mode"], + self.writers[stream_name]["is_first_batch"], + self.writers[stream_name]["update_metadata"], + ) + self.writers[stream_name]["write_buffer"].clear() + if self.writers[stream_name]["is_first_batch"]: + self.writers[stream_name]["is_first_batch"] = False + + def transform_data(self, stream_name: str, airbyte_data: Mapping) -> list[Any]: + """Transform Airbyte data (one row) into Cumul.io's expected data format (a list in the appropriate order). + If data for a specific column is not included in the Airbyte data, the value will be None. + If data for a specific column in the Airbyte data is not recognized, it will be ignored as extraneous. + (see here: https://docs.airbyte.com/understanding-airbyte/airbyte-protocol/#output-4) + """ + try: + self.writers[stream_name] + except KeyError: + raise Exception(f"The stream {stream_name} is not defined in the configured_catalog and won't thus be streamed.") + + data: list[Any] = [None for i in range(len(self.writers[stream_name]["column_headers"]))] + for column in airbyte_data: + unknown_data = True + index: int = 0 + for column_header in self.writers[stream_name]["column_headers"]: + if column_header["name"] == column: + unknown_data = False + # Cumul.io doesn't support storing or querying nested (list, dict) or boolean data. 
+ # we'll stringify this data via json.dumps + if ( + isinstance(airbyte_data[column], list) + or isinstance(airbyte_data[column], dict) + or isinstance(airbyte_data[column], bool) + ): + data[index] = json.dumps(airbyte_data[column]) + else: + data[index] = airbyte_data[column] + index += 1 + if unknown_data: + self.logger.debug( + f"The value with name {column} has not been defined in the ConfiguredAirbyteStream and will thus be " + f"ignored as extraneous." + ) + return data + + def delete_stream_entries(self, stream_name: str): + """Set a "replace" tag on a dataset to ensure all existing data will be replaced upon next synchronization.""" + return self.client.set_replace_tag_on_dataset(stream_name) + + def _create_writers(self, configured_catalog: ConfiguredAirbyteCatalog): + """Return a set of writers, one for each stream in the configured_catalog. + This method will also merge the Cumul.io columns for the stream's dataset, if existing.""" + writers = {} + for configured_stream in configured_catalog.streams: + result = self._merge_cumulio_and_airbyte_column_headers(configured_stream) + writers[configured_stream.stream.name] = { + "write_buffer": [], + "column_headers": result["sorted_column_headers"], + "is_in_overwrite_sync_mode": configured_stream.destination_sync_mode == DestinationSyncMode.overwrite, + "is_first_batch": True, + "update_metadata": result["update_metadata"], + } + return writers + + def _merge_cumulio_and_airbyte_column_headers(self, configured_stream: ConfiguredAirbyteStream): + """Merge columns known by Airbyte and Cumul.io. + - If the dataset does not yet exist in Cumul.io (i.e. the first sync), the columns order will be based on "for el in dict" order. + - Upon next synchronizations, the dataset exists in Cumul.io. Its column order will be used to send data in the corresponding order. + - If a new column is added to the source table (i.e. this column doesn't exist yet in Cumul.io), + it will be added at the end of the dataset's columns upon next synchronization. + - If an existing column is removed from the source: + 1. If the next synchronization for this stream runs in "overwrite" mode (or a "replace" tag is set), the Cumul.io dataset will + no longer contain the original column. + 2. If the next synchronization for this stream runs in "append" mode, the Cumul.io dataset will + contain empty values for the non-existing columns for all appended rows. + Note that Airbyte recommends a reset upon changes to source schema(s). In that case, the first batch will be synced + using the "overwrite" mode (due to setting a reset tag on the dataset, see delete_stream_entries implementation). 
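+        For example, if the Cumul.io dataset already contains columns [a, b] and the configured stream defines columns [b, c], the merged result is ordered [b, c] (the new column c is appended after the columns already known to Cumul.io) and update_metadata will be True because a new column was detected.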
+ """ + cumulio_column_headers = self.client.get_ordered_columns(configured_stream.stream.name) + airbyte_column_headers = _convert_airbyte_configured_stream_into_headers_dict(configured_stream) + + update_metadata = False + + merged_column_headers = [] + new_column_count = 0 + for airbyte_column_header in airbyte_column_headers: + merged_column_header = { + "name": airbyte_column_header, + "airbyte-type": airbyte_column_headers[airbyte_column_header]["airbyte-type"], + } + + try: + # Add an order based on the order of the column in the Cumul.io dataset + merged_column_header["order"] = cumulio_column_headers.index(airbyte_column_header) + except ValueError: + # Add an appropriate order to ensure the column appears at the end of the data + new_column_count += 1 + merged_column_header["order"] = len(cumulio_column_headers) + new_column_count + + merged_column_headers.append(merged_column_header) + + sorted_column_headers = sorted(merged_column_headers, key=lambda x: x["order"]) + if new_column_count > 0: + update_metadata = True + + if len(cumulio_column_headers) > 0: + self.logger.info( + f"One or more columns defined in stream {configured_stream.stream.name} are not yet present in Cumul.io, " + f"and will added upon next successful synchronization." + ) + else: + self.logger.info( + f"The dataset for stream {configured_stream.stream.name} doesn't seem to exist in Cumul.io. " + f"The next sync for this stream will create it." + ) + elif not update_metadata: + # Validate whether all columns in Cumul.io are still part of the configured airbyte catalog definition. + for cumulio_column_header in cumulio_column_headers: + try: + # Try to find the Cumul.io column header in the Airbyte columns + airbyte_column_headers[cumulio_column_header] + except KeyError: + # Cumul.io's column hasn't been found, so we'll need to update the dataset's metadata upon next sync. + if configured_stream.destination_sync_mode == DestinationSyncMode.overwrite: + self.logger.info( + f"The source column {cumulio_column_header} in Cumul.io is no longer present in the configured " + f"stream {configured_stream.stream.name} (i.e. in the source). As the stream synchronization is " + f"in overwrite mode, the existing column in Cumul.io will be deleted upon next sync. Check " + f"carefully whether this column is used in any existing Cumul.io dashboards!" 
+ ) + update_metadata = True + + return { + "sorted_column_headers": sorted_column_headers, + "update_metadata": update_metadata, + } diff --git a/airbyte-integrations/connectors/destination-cumulio/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/destination-cumulio/integration_tests/configured_catalog.json new file mode 100644 index 000000000000..844c37fea8f6 --- /dev/null +++ b/airbyte-integrations/connectors/destination-cumulio/integration_tests/configured_catalog.json @@ -0,0 +1,29 @@ +{ + "streams": [ + { + "stream": { + "name": "cumulio_example_table", + "supported_sync_modes": ["full_refresh"], + "source_defined_cursor": false, + "json_schema": { + "type": "object", + "properties": { + "hierarchy_column": { + "type": "string" + }, + "numeric_column": { + "type": "number" + }, + "datetime_column": { + "type": "string", + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" + } + } + } + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/destination-cumulio/integration_tests/integration_test.py b/airbyte-integrations/connectors/destination-cumulio/integration_tests/integration_test.py new file mode 100644 index 000000000000..545241d463e7 --- /dev/null +++ b/airbyte-integrations/connectors/destination-cumulio/integration_tests/integration_test.py @@ -0,0 +1,276 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +import json +import time +from logging import Logger, getLogger +from typing import Any, Dict, Mapping + +import pytest +from airbyte_cdk.models import ( + AirbyteMessage, + AirbyteRecordMessage, + AirbyteStateMessage, + AirbyteStream, + ConfiguredAirbyteCatalog, + ConfiguredAirbyteStream, + DestinationSyncMode, + Status, + SyncMode, + Type, +) +from destination_cumulio import DestinationCumulio +from destination_cumulio.client import CumulioClient + + +@pytest.fixture(name="logger") +def logger_fixture() -> Logger: + return getLogger("airbyte") + + +@pytest.fixture(name="config") +def config_fixture() -> Mapping[str, Any]: + with open("secrets/config.json", "r") as f: + return json.loads(f.read()) + + +@pytest.fixture(name="configured_catalog") +def configured_catalog_fixture() -> ConfiguredAirbyteCatalog: + stream_schema = { + "type": "object", + "properties": { + "string_col": {"type": "string"}, + "int_col": {"type": "integer"}, + "obj_col": {"type": "object"}, + "arr_col": {"type": "array"}, + }, + } + + append_stream = ConfiguredAirbyteStream( + stream=AirbyteStream( + name="append_integration_test_stream", + json_schema=stream_schema, + supported_sync_modes=[SyncMode.incremental], + ), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.append, + ) + + overwrite_stream = ConfiguredAirbyteStream( + stream=AirbyteStream( + name="overwrite_integration_test_stream", + json_schema=stream_schema, + supported_sync_modes=[SyncMode.incremental], + ), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.overwrite, + ) + + return ConfiguredAirbyteCatalog(streams=[append_stream, overwrite_stream]) + + +@pytest.fixture(autouse=True) +def delete_datasets(config: Mapping, configured_catalog: ConfiguredAirbyteCatalog, logger: Logger): + cumulio_client = CumulioClient(config, logger) + for stream in configured_catalog.streams: + dataset = cumulio_client.get_dataset_and_columns_from_stream_name(stream.stream.name) + if dataset: + logger.info( + f"Existing integration test dataset found.
Will delete Cumul.io dataset for integration test stream {stream.stream.name}." + ) + try: + cumulio_client.client.delete("securable", dataset["id"]) + except Exception as e: + logger.info( + f"The following exception occurred when trying to delete the dataset " + f"for integration test stream {stream.stream.name}: {e}" + ) + + +def test_check_valid_config(config: Mapping, logger: Logger): + outcome = DestinationCumulio().check(logger, config) + assert outcome.status == Status.SUCCEEDED + + +def test_check_incomplete_config(logger: Logger): + outcome = DestinationCumulio().check(logger, {"api_host": "https://api.cumul.io"}) + assert outcome.status == Status.FAILED + + +def test_check_invalid_config(logger: Logger): + outcome = DestinationCumulio().check( + logger, + { + "api_host": ".invalid.url", + "api_key": "invalid_key", + "api_token": "invalid_token", + }, + ) + assert outcome.status == Status.FAILED + + +def _state(data: Dict[str, Any]) -> AirbyteMessage: + return AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(data=data)) + + +def _record(stream_name: str, str_value: str, int_value: int, obj_value: dict, arr_value: list) -> AirbyteMessage: + return AirbyteMessage( + type=Type.RECORD, + record=AirbyteRecordMessage( + stream=stream_name, + data={ + "string_col": str_value, + "int_col": int_value, + "obj_col": obj_value, + "arr_col": arr_value, + }, + emitted_at=0, + ), + ) + + +def _retrieve_all_records(cumulio_client, stream_name): + dataset_and_columns = cumulio_client.get_dataset_and_columns_from_stream_name(stream_name) + # Wait 5 seconds before trying to retrieve the data to ensure it can be properly retrieved + time.sleep(5) + if dataset_and_columns is not None: + ordered_columns = cumulio_client.get_ordered_columns(stream_name) + dimension_columns = list( + map( + lambda x, y: { + "dataset_id": dataset_and_columns["id"], + "column_id": y["id"], + }, + ordered_columns, + dataset_and_columns["columns"], + ) + ) + int_col_ind = ordered_columns.index("int_col") + + raw_data_query = { + "dimensions": dimension_columns, + "options": {"rollup_data": False}, + "order": [ + { + "dataset_id": dataset_and_columns["id"], + "column_id": dataset_and_columns["columns"][int_col_ind]["id"], + "order": "asc", + } + ], + } + raw_data = cumulio_client.client.get("data", raw_data_query) + airbyte_data_to_return = [] + for row in raw_data["data"]: + airbyte_data_row = {} + for col_ind, column in enumerate(dataset_and_columns["columns"]): + if isinstance(row[col_ind], dict): + airbyte_data_row[column["source_name"]] = row[col_ind]["id"] + else: + airbyte_data_row[column["source_name"]] = row[col_ind] + airbyte_data_to_return.append( + AirbyteMessage( + type=Type.RECORD, + record=AirbyteRecordMessage(stream=stream_name, data=airbyte_data_row, emitted_at=0), + ) + ) + return airbyte_data_to_return + return None + + +def test_write_append( + config: Mapping, + configured_catalog: ConfiguredAirbyteCatalog, + logger: Logger, +): + """ + This test verifies that: + - Writing a stream in "append" mode appends new records while preserving existing data. + - The correct state message is output by the connector at the end of the sync. + - Object and Array data is appropriately stringified in Cumul.io. 
+ """ + stream_name = configured_catalog.streams[0].stream.name + destination = DestinationCumulio() + + state_message = _state({"state": "3"}) + record_chunk_1 = [_record(stream_name, "test-" + str(i), i, {"test": i}, ["test", i]) for i in range(1, 3)] + + output_states_1 = list(destination.write(config, configured_catalog, [*record_chunk_1, state_message])) + assert [state_message] == output_states_1 + + record_chunk_2 = [_record(stream_name, "test-" + str(i), i, {"test": i}, ["test", i]) for i in range(3, 5)] + + output_states_2 = list(destination.write(config, configured_catalog, [*record_chunk_2, state_message])) + assert [state_message] == output_states_2 + + cumulio_client = CumulioClient(config, logger) + + records_in_destination = _retrieve_all_records(cumulio_client, stream_name) + + expected_records = [ + AirbyteMessage( + type=Type.RECORD, + record=AirbyteRecordMessage( + stream=stream_name, + data={ + "string_col": "test-" + str(i), + "int_col": i, + "obj_col": json.dumps({"test": i}), + "arr_col": json.dumps(["test", i]), + }, + emitted_at=0, + ), + ) + for i in range(1, 5) + ] + + assert expected_records == records_in_destination + + +def test_write_overwrite( + config: Mapping[str, Any], + configured_catalog: ConfiguredAirbyteCatalog, + logger: Logger, +): + """ + This test verifies that: + - writing a stream in "append" mode overwrite all exiting data. + - the correct state message is output by the connector at the end of the sync. + - Object and Array data is appropriately stringified in Cumul.io. + """ + stream_name = configured_catalog.streams[1].stream.name + destination = DestinationCumulio() + + state_message = _state({"state": "3"}) + record_chunk_1 = [_record(stream_name, "oldtest-" + str(i), i, {"oldtest": i}, ["oldtest", i]) for i in range(1, 3)] + + output_states_1 = list(destination.write(config, configured_catalog, [*record_chunk_1, state_message])) + assert [state_message] == output_states_1 + + record_chunk_2 = [_record(stream_name, "newtest-" + str(i), i, {"newtest": i}, ["newtest", i]) for i in range(1, 3)] + + output_states_2 = list(destination.write(config, configured_catalog, [*record_chunk_2, state_message])) + assert [state_message] == output_states_2 + + cumulio_client = CumulioClient(config, logger) + + records_in_destination = _retrieve_all_records(cumulio_client, stream_name) + + expected_records = [ + AirbyteMessage( + type=Type.RECORD, + record=AirbyteRecordMessage( + stream=stream_name, + data={ + "string_col": "newtest-" + str(i), + "int_col": i, + "obj_col": json.dumps({"newtest": i}), + "arr_col": json.dumps(["newtest", i]), + }, + emitted_at=0, + ), + ) + for i in range(1, 3) + ] + + assert expected_records == records_in_destination diff --git a/airbyte-integrations/connectors/destination-cumulio/integration_tests/sample_config.json b/airbyte-integrations/connectors/destination-cumulio/integration_tests/sample_config.json new file mode 100644 index 000000000000..2a1ca74c862b --- /dev/null +++ b/airbyte-integrations/connectors/destination-cumulio/integration_tests/sample_config.json @@ -0,0 +1,5 @@ +{ + "api_host": "https://api.cumul.io", + "api_key": "CUMULIO_API_KEY", + "api_token": "CUMULIO_API_TOKEN" +} diff --git a/airbyte-integrations/connectors/destination-cumulio/main.py b/airbyte-integrations/connectors/destination-cumulio/main.py new file mode 100644 index 000000000000..3ad0d7112206 --- /dev/null +++ b/airbyte-integrations/connectors/destination-cumulio/main.py @@ -0,0 +1,11 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights 
reserved. +# + + +import sys + +from destination_cumulio import DestinationCumulio + +if __name__ == "__main__": + DestinationCumulio().run(sys.argv[1:]) diff --git a/airbyte-integrations/connectors/destination-cumulio/metadata.yaml b/airbyte-integrations/connectors/destination-cumulio/metadata.yaml index bef0baecb57e..0661440e0312 100644 --- a/airbyte-integrations/connectors/destination-cumulio/metadata.yaml +++ b/airbyte-integrations/connectors/destination-cumulio/metadata.yaml @@ -1,7 +1,7 @@ data: connectorType: destination definitionId: e088acb6-9780-4568-880c-54c2dd7f431b - dockerImageTag: 0.1.0 + dockerImageTag: 0.1.1 dockerRepository: airbyte/destination-cumulio githubIssueLabel: destination-cumulio connectorSubtype: api @@ -20,5 +20,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: archived + supportLevel: community metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-cumulio/requirements.txt b/airbyte-integrations/connectors/destination-cumulio/requirements.txt new file mode 100644 index 000000000000..d6e1198b1ab1 --- /dev/null +++ b/airbyte-integrations/connectors/destination-cumulio/requirements.txt @@ -0,0 +1 @@ +-e . diff --git a/airbyte-integrations/connectors/destination-cumulio/setup.py b/airbyte-integrations/connectors/destination-cumulio/setup.py new file mode 100644 index 000000000000..e613da7bbdb4 --- /dev/null +++ b/airbyte-integrations/connectors/destination-cumulio/setup.py @@ -0,0 +1,23 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = ["airbyte-cdk", "cumulio"] + +TEST_REQUIREMENTS = ["pytest~=6.2"] + +setup( + name="destination_cumulio", + description="Airbyte destination connector implementation for Cumul.io.", + author="Cumul.io", + author_email="support@cumul.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/destination-cumulio/unit_tests/test_client.py b/airbyte-integrations/connectors/destination-cumulio/unit_tests/test_client.py new file mode 100644 index 000000000000..258e8ff2a578 --- /dev/null +++ b/airbyte-integrations/connectors/destination-cumulio/unit_tests/test_client.py @@ -0,0 +1,629 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + + +from typing import Any, Mapping +from unittest.mock import ANY, MagicMock, patch + +import pytest +from destination_cumulio.client import CumulioClient + +# "# type: ignore" was added in several places to avoid mypy complaining about patching functions with MagicMock + + +@pytest.fixture(name="logger") +def logger_fixture() -> MagicMock: + return MagicMock() + + +@pytest.fixture(name="cumulio_client") +def cumulio_client_fixture(logger: MagicMock) -> CumulioClient: + # Create a mock configuration dictionary + config = { + "api_key": "123456", + "api_token": "abcdef", + "api_host": "https://api.cumul.io", + } + # Initialize a CumulioClient object with the mock configuration for the Cumulio class + with patch("destination_cumulio.client.Cumulio", MagicMock()): + return CumulioClient(config, logger) + + +@pytest.fixture(name="dummy_data") +def dummy_data_fixture() -> Mapping[str, Any]: + return { + "data": [ + [ + "Text value 1", + 1, + "2022-01-01T00:00:00.000Z", + ], + ["Text value 2", 2, "2022-02-01T00:00:00.000Z"], + ["Text value 3", 3, "2022-03-01T00:00:00.000Z"], + ], + "columns": ["Text column", "Numeric column", "Datetime column"], + } + + +# tests for batch_write method + + +def test_batch_write_append_empty_write_buffer(cumulio_client: CumulioClient): + cumulio_client._get_dataset_id_from_stream_name = MagicMock(return_value="dataset_id")  # type: ignore + cumulio_client._push_batch_to_new_dataset = MagicMock()  # type: ignore + cumulio_client._push_batch_to_existing_dataset = MagicMock()  # type: ignore + + cumulio_client.batch_write( + stream_name="test-stream", + write_buffer=[], + column_headers=["test-column"], + is_in_overwrite_sync_mode=False, + is_first_batch=True, + update_metadata=True, + ) + + cumulio_client._get_dataset_id_from_stream_name.assert_not_called() + cumulio_client._push_batch_to_new_dataset.assert_not_called() + cumulio_client._push_batch_to_existing_dataset.assert_not_called() + + cumulio_client.batch_write( + stream_name="test-stream", + write_buffer=[[]], + column_headers=["test-column"], + is_in_overwrite_sync_mode=False, + is_first_batch=True, + update_metadata=True, + ) + + cumulio_client._get_dataset_id_from_stream_name.assert_not_called() + cumulio_client._push_batch_to_new_dataset.assert_not_called() + cumulio_client._push_batch_to_existing_dataset.assert_not_called() + + +def test_batch_write_append_no_existing_dataset(cumulio_client: CumulioClient, dummy_data: Mapping[str, Any]): + cumulio_client.client.get = MagicMock(return_value={"count": 0, "rows": []}) + cumulio_client._push_batch_to_new_dataset = MagicMock()  # type: ignore + cumulio_client._push_batch_to_existing_dataset = MagicMock()  # type: ignore + + stream_name = "test-stream" + + cumulio_client.batch_write( + stream_name=stream_name, + write_buffer=dummy_data["data"], + column_headers=dummy_data["columns"], + is_in_overwrite_sync_mode=False, + is_first_batch=True, + update_metadata=True, + ) + + expected_properties = { + "where": {"type": "dataset"}, + "attributes": ["id", "name"], + "include": [ + { + "model": "Tag", + "where": {"tag": cumulio_client.TAG_PREFIX + stream_name}, + "attributes": ["id", "tag"], + "jointype": "inner", + } + ], + } + + cumulio_client.client.get.assert_called_once_with("securable", expected_properties) + + cumulio_client._push_batch_to_existing_dataset.assert_not_called() + + cumulio_client._push_batch_to_new_dataset.assert_called_once_with(stream_name, dummy_data["data"], dummy_data["columns"]) + + +def 
test_batch_write_existing_dataset_no_first_batch_replace(cumulio_client: CumulioClient, dummy_data: Mapping[str, Any]): + cumulio_client._get_dataset_id_from_stream_name = MagicMock(return_value="dataset_id") # type: ignore + cumulio_client._push_batch_to_new_dataset = MagicMock() # type: ignore + cumulio_client._push_batch_to_existing_dataset = MagicMock() # type: ignore + cumulio_client._dataset_contains_replace_tag = MagicMock(return_value=False) # type: ignore + + stream_name = "test-stream" + + cumulio_client.batch_write( + stream_name=stream_name, + write_buffer=dummy_data["data"], + column_headers=dummy_data["columns"], + is_in_overwrite_sync_mode=False, + is_first_batch=True, + update_metadata=True, + ) + cumulio_client._push_batch_to_new_dataset.assert_not_called() + cumulio_client._dataset_contains_replace_tag.assert_called_once_with("dataset_id") + cumulio_client._push_batch_to_existing_dataset.assert_called_once_with( + "dataset_id", dummy_data["data"], dummy_data["columns"], False, True + ) + + +def test_batch_write_existing_dataset_first_batch_replace_overwrite_mode(cumulio_client: CumulioClient, dummy_data: Mapping[str, Any]): + cumulio_client._get_dataset_id_from_stream_name = MagicMock(return_value="dataset_id") # type: ignore + cumulio_client._push_batch_to_new_dataset = MagicMock() # type: ignore + cumulio_client._push_batch_to_existing_dataset = MagicMock() # type: ignore + cumulio_client._dataset_contains_replace_tag = MagicMock(return_value=False) # type: ignore + + stream_name = "test-stream" + + cumulio_client.batch_write( + stream_name=stream_name, + write_buffer=dummy_data["data"], + column_headers=dummy_data["columns"], + is_in_overwrite_sync_mode=True, + is_first_batch=True, + update_metadata=True, + ) + cumulio_client._push_batch_to_new_dataset.assert_not_called() + cumulio_client._dataset_contains_replace_tag.assert_called_once_with("dataset_id") + cumulio_client._push_batch_to_existing_dataset.assert_called_once_with( + "dataset_id", dummy_data["data"], dummy_data["columns"], True, True + ) + + +def test_batch_write_existing_dataset_first_batch_replace_tag(cumulio_client: CumulioClient, dummy_data: Mapping[str, Any]): + cumulio_client._get_dataset_id_from_stream_name = MagicMock(return_value="dataset_id") # type: ignore + cumulio_client._push_batch_to_new_dataset = MagicMock() # type: ignore + cumulio_client._push_batch_to_existing_dataset = MagicMock() # type: ignore + cumulio_client._dataset_contains_replace_tag = MagicMock(return_value=True) # type: ignore + + stream_name = "test-stream" + + cumulio_client.batch_write( + stream_name=stream_name, + write_buffer=dummy_data["data"], + column_headers=dummy_data["columns"], + is_in_overwrite_sync_mode=False, + is_first_batch=True, + update_metadata=True, + ) + cumulio_client._push_batch_to_new_dataset.assert_not_called() + cumulio_client._dataset_contains_replace_tag.assert_called_once_with("dataset_id") + cumulio_client._push_batch_to_existing_dataset.assert_called_once_with( + "dataset_id", dummy_data["data"], dummy_data["columns"], True, True + ) + + +def test_batch_write_existing_dataset_non_first_batch(cumulio_client: CumulioClient, dummy_data: Mapping[str, Any]): + cumulio_client._get_dataset_id_from_stream_name = MagicMock(return_value="dataset_id") # type: ignore + cumulio_client._push_batch_to_new_dataset = MagicMock() # type: ignore + cumulio_client._push_batch_to_existing_dataset = MagicMock() # type: ignore + cumulio_client._dataset_contains_replace_tag = MagicMock(return_value=True) # type: ignore 
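+    # Note: even in overwrite sync mode, only the very first batch of a sync replaces the dataset contents; this non-first batch should therefore be appended (first_batch_replace=False in the assertion below).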
+ + stream_name = "test-stream" + + cumulio_client.batch_write( + stream_name=stream_name, + write_buffer=dummy_data["data"], + column_headers=dummy_data["columns"], + is_in_overwrite_sync_mode=True, + is_first_batch=False, + update_metadata=True, + ) + cumulio_client._push_batch_to_new_dataset.assert_not_called() + cumulio_client._dataset_contains_replace_tag.assert_called_once_with("dataset_id") + cumulio_client._push_batch_to_existing_dataset.assert_called_once_with( + "dataset_id", dummy_data["data"], dummy_data["columns"], False, True + ) + + +# tests for test_api_token method + + +def test_api_token_unknown_combination(cumulio_client: CumulioClient): + """Test that the test_api_token method raises an exception for an invalid key/token combination""" + cumulio_client.client.get = MagicMock(return_value={"count": 0}) + with pytest.raises(Exception): + cumulio_client.test_api_token() + + +def test_api_token_api_call(cumulio_client: CumulioClient): + """Test that the test_api_token method makes an API request to the authorization endpoint""" + cumulio_client.client.get = MagicMock(return_value={"count": 1}) + cumulio_client.test_api_token() + cumulio_client.client.get.assert_called_with("authorization", {"where": {"type": "api"}}) + + +def test_test_data_push_method(cumulio_client: CumulioClient, dummy_data: Mapping[str, Any]): + """Test that the test_data_push method deletes the dataset afterwards""" + cumulio_client.batch_write = MagicMock()  # type: ignore + cumulio_client.delete_dataset = MagicMock()  # type: ignore + + stream_name = "test-stream" + + cumulio_client.test_data_push(stream_name, dummy_data["data"], dummy_data["columns"]) + + cumulio_client.delete_dataset.assert_called_once_with("test-stream") + + +# tests for delete_dataset method + + +def test_delete_dataset_no_dataset_found(cumulio_client: CumulioClient): + cumulio_client.client.delete = MagicMock() + cumulio_client._get_dataset_id_from_stream_name = MagicMock(return_value=None)  # type: ignore + + cumulio_client.delete_dataset("stream_name") + + # assert that the _get_dataset_id_from_stream_name method was called once with the correct arguments + cumulio_client._get_dataset_id_from_stream_name.assert_called_once_with("stream_name") + + # assert that the client.delete method is not called as no dataset was found + cumulio_client.client.delete.assert_not_called() + + +def test_delete_dataset_dataset_found(cumulio_client: CumulioClient): + cumulio_client.client.delete = MagicMock() + cumulio_client._get_dataset_id_from_stream_name = MagicMock(  # type: ignore + return_value="dataset_id" + )  # type: ignore + + cumulio_client.delete_dataset("stream_name") + + # assert that the _get_dataset_id_from_stream_name method was called once with the correct arguments + cumulio_client._get_dataset_id_from_stream_name.assert_called_once_with("stream_name") + + # assert that the client.delete method was called once with the correct arguments + cumulio_client.client.delete.assert_called_once_with("securable", "dataset_id") + + +# tests for get_ordered_columns method + + +def test_get_ordered_columns_dataset_not_created(cumulio_client: CumulioClient): + cumulio_client.get_dataset_and_columns_from_stream_name = MagicMock(return_value=None)  # type: ignore + result = cumulio_client.get_ordered_columns("stream_name") + assert result == [] + + +def test_get_ordered_columns_same_order(cumulio_client: CumulioClient): + cumulio_dataset_and_columns = { + "id": "dataset_id", + "columns": [ + {"source_name": "column1", "order": 2}, + 
{"source_name": "column2", "order": 1}, + ], + } + cumulio_client.get_dataset_and_columns_from_stream_name = MagicMock(return_value=cumulio_dataset_and_columns) # type: ignore + result = cumulio_client.get_ordered_columns("stream_name") + assert result == ["column2", "column1"] + + +# tests for _push_batch_to_new_dataset method + + +def test_push_batch_to_new_dataset(cumulio_client: CumulioClient, dummy_data: Mapping[str, Any]): + cumulio_client.client.create = MagicMock(return_value={"rows": [{"id": "new_dataset_id"}]}) + cumulio_client._associate_tag_dataset_id = MagicMock() # type: ignore + + stream_name = "test_stream" + + expected_request_properties = { + "type": "create", + "data": dummy_data["data"], + "options": { + "header": dummy_data["columns"], + "update_metadata": True, + "name": {"en": cumulio_client.INITIAL_DATASET_NAME_PREFIX + stream_name}, + }, + } + cumulio_client._push_batch_to_new_dataset(stream_name, dummy_data["data"], dummy_data["columns"]) + cumulio_client.client.create.assert_called_once_with("data", expected_request_properties) + cumulio_client._associate_tag_dataset_id.assert_called_once_with(stream_name, "new_dataset_id") + + +def test_push_batch_to_new_dataset_all_retries_error(cumulio_client: CumulioClient, dummy_data: Mapping[str, Any]): + cumulio_client.client.create = MagicMock(side_effect=RuntimeError("Internal Server Error")) + stream_name = "test_stream" + + with patch("destination_cumulio.client.time", MagicMock()): + with pytest.raises(Exception): + cumulio_client._push_batch_to_new_dataset(stream_name, dummy_data["data"], dummy_data["columns"]) + + +def test_push_batch_to_new_dataset_first_try_fails(cumulio_client: CumulioClient, dummy_data: Mapping[str, Any]): + effects = iter([RuntimeError("Internal Server Error")]) + + def side_effect(*_): + try: + raise next(effects) + except StopIteration: + return {"rows": [{"id": "new_dataset_id"}]} + + cumulio_client.client.create = MagicMock(side_effect=side_effect) + cumulio_client._associate_tag_dataset_id = MagicMock() # type: ignore + + stream_name = "test_stream" + + expected_request_properties = { + "type": "create", + "data": dummy_data["data"], + "options": { + "header": dummy_data["columns"], + "update_metadata": True, + "name": {"en": cumulio_client.INITIAL_DATASET_NAME_PREFIX + stream_name}, + }, + } + + with patch("destination_cumulio.client.time", MagicMock()): + cumulio_client._push_batch_to_new_dataset(stream_name, dummy_data["data"], dummy_data["columns"]) + cumulio_client.client.create.assert_called_with("data", expected_request_properties) + + assert cumulio_client.client.create.call_count == 2 + + cumulio_client._associate_tag_dataset_id.assert_called_once_with(stream_name, "new_dataset_id") + + +# tests for _push_batch_to_existing_dataset method + + +def test_push_batch_to_existing_dataset_all_retries_error(cumulio_client: CumulioClient, dummy_data: Mapping[str, Any]): + cumulio_client.client.create = MagicMock(side_effect=RuntimeError("Internal Server Error")) + cumulio_client._remove_replace_tag_dataset_id_association = MagicMock() # type: ignore + + dataset_id = "dataset_id" + + with patch("destination_cumulio.client.time", MagicMock()): + with pytest.raises(Exception): + cumulio_client._push_batch_to_existing_dataset(dataset_id, dummy_data["data"], dummy_data["columns"], False, True) + + +def test_push_batch_to_existing_dataset_first_try_fails(cumulio_client: CumulioClient, dummy_data: Mapping[str, Any]): + effects = iter([RuntimeError("Internal Server Error")]) + + def 
side_effect(*_): + try: + raise next(effects) + except StopIteration: + return None + + cumulio_client.client.create = MagicMock(side_effect=side_effect) + cumulio_client._remove_replace_tag_dataset_id_association = MagicMock()  # type: ignore + + dataset_id = "dataset_id" + + expected_request_properties = { + "type": "append", + "data": dummy_data["data"], + "securable_id": dataset_id, + "options": { + "header": dummy_data["columns"], + "update_metadata": True, + }, + } + + with patch("destination_cumulio.client.time", MagicMock()): + cumulio_client._push_batch_to_existing_dataset(dataset_id, dummy_data["data"], dummy_data["columns"], False, True) + cumulio_client.client.create.assert_called_with("data", expected_request_properties) + + assert cumulio_client.client.create.call_count == 2 + + cumulio_client._remove_replace_tag_dataset_id_association.assert_not_called() + + +def test_push_batch_to_existing_dataset_no_first_batch_replace(cumulio_client: CumulioClient, dummy_data: Mapping[str, Any]): + cumulio_client.client.create = MagicMock() + cumulio_client._remove_replace_tag_dataset_id_association = MagicMock()  # type: ignore + + dataset_id = "dataset_id" + + expected_request_properties = { + "type": "append", + "data": dummy_data["data"], + "securable_id": dataset_id, + "options": { + "header": dummy_data["columns"], + "update_metadata": True, + }, + } + + cumulio_client._push_batch_to_existing_dataset(dataset_id, dummy_data["data"], dummy_data["columns"], False, True) + cumulio_client.client.create.assert_called_once_with("data", expected_request_properties) + cumulio_client._remove_replace_tag_dataset_id_association.assert_not_called() + + +def test_push_batch_to_existing_dataset_first_batch_replace(cumulio_client: CumulioClient, dummy_data: Mapping[str, Any]): + cumulio_client.client.create = MagicMock() + cumulio_client._remove_replace_tag_dataset_id_association = MagicMock()  # type: ignore + + dataset_id = "dataset_id" + + expected_request_properties = { + "type": "replace", + "data": dummy_data["data"], + "securable_id": dataset_id, + "options": { + "header": dummy_data["columns"], + "update_metadata": True, + }, + } + + cumulio_client._push_batch_to_existing_dataset(dataset_id, dummy_data["data"], dummy_data["columns"], True, True) + cumulio_client.client.create.assert_called_once_with("data", expected_request_properties) + cumulio_client._remove_replace_tag_dataset_id_association.assert_called_once_with(dataset_id) + + +# tests for get_dataset_and_columns_from_stream_name method + + +def test_get_dataset_and_columns_from_stream_name_no_dataset( + cumulio_client: CumulioClient, +): + cumulio_dataset_and_columns_result = {"count": 0, "rows": []} + + # Test when no dataset is found + cumulio_client.client.get = MagicMock(return_value=cumulio_dataset_and_columns_result) + result = cumulio_client.get_dataset_and_columns_from_stream_name("test_stream") + assert result is None + + +def test_get_dataset_and_columns_from_stream_name_single_existing_dataset( + cumulio_client: CumulioClient, +): + cumulio_dataset_and_columns_result: Mapping[str, Any] = { + "count": 1, + "rows": [ + { + "id": "dataset_id", + "columns": [ + {"source_name": "column1", "order": 2}, + {"source_name": "column2", "order": 1}, + ], + } + ], + } + # Test when dataset is found + cumulio_client.client.get = MagicMock(return_value=cumulio_dataset_and_columns_result) + result = cumulio_client.get_dataset_and_columns_from_stream_name("test_stream") + assert result["id"] == cumulio_dataset_and_columns_result["rows"][0]["id"] + 
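+    # The column metadata of the matching row should be passed through unchanged as well.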
assert result["columns"] == cumulio_dataset_and_columns_result["rows"][0]["columns"] + + +def test_get_dataset_and_columns_from_stream_name_multiple_existing_datasets( + cumulio_client: CumulioClient, +): + """Tests whether an exception is thrown when multiple datasets are returned for a stream name""" + cumulio_dataset_and_columns_result = { + "count": 2, + "rows": [ + { + "id": "dataset_id_1", + "columns": [ + {"source_name": "column1", "order": 2}, + {"source_name": "column2", "order": 1}, + ], + }, + { + "id": "dataset_id_2", + "columns": [ + {"source_name": "column1", "order": 1}, + {"source_name": "column2", "order": 2}, + ], + }, + ], + } + # Test when multiple datasets are found + cumulio_client.client.get = MagicMock(return_value=cumulio_dataset_and_columns_result) + with pytest.raises(Exception): + cumulio_client.get_dataset_and_columns_from_stream_name("test_stream") + + +# tests for the set_replace_tag_on_dataset method + + +def test_set_replace_tag_on_dataset_no_dataset_found(cumulio_client: CumulioClient): + cumulio_client._get_dataset_id_from_stream_name = MagicMock(return_value=None) # type: ignore + cumulio_client._associate_tag_dataset_id = MagicMock() # type: ignore + + cumulio_client.set_replace_tag_on_dataset("stream_name") + + cumulio_client._get_dataset_id_from_stream_name.assert_called_once_with("stream_name") + cumulio_client._associate_tag_dataset_id.assert_not_called() + + +def test_set_replace_tag_on_dataset_existing_dataset(cumulio_client: CumulioClient): + cumulio_client._get_dataset_id_from_stream_name = MagicMock(return_value="dataset_id") # type: ignore + cumulio_client._associate_tag_dataset_id = MagicMock() # type: ignore + + cumulio_client.set_replace_tag_on_dataset("stream_name") + + cumulio_client._get_dataset_id_from_stream_name.assert_called_once_with("stream_name") + cumulio_client._associate_tag_dataset_id.assert_called_once_with(cumulio_client.REPLACE_TAG, "dataset_id") + + +# tests for _dataset_contains_replace_tag method + + +def test_dataset_contains_replace_tag(cumulio_client: CumulioClient): + dataset_id = "123" + cumulio_client.client.get = MagicMock(return_value={"count": 1}) + assert cumulio_client._dataset_contains_replace_tag(dataset_id) is True + + +def test_dataset_does_not_contain_replace_tag(cumulio_client: CumulioClient): + dataset_id = "123" + cumulio_client.client.get = MagicMock(return_value={"count": 0}) + assert cumulio_client._dataset_contains_replace_tag(dataset_id) is False + + +# tests for _get_dataset_id_from_stream_name method + + +def test_get_dataset_id_from_stream_name_no_dataset(cumulio_client: CumulioClient): + cumulio_client.client.get.return_value = {"count": 0, "rows": []} + dataset_id = cumulio_client._get_dataset_id_from_stream_name("test_stream") + assert dataset_id is None + + +def test_get_dataset_id_from_stream_name_single_dataset(cumulio_client: CumulioClient): + cumulio_client.client.get.return_value = { + "count": 1, + "rows": [{"id": "dataset_id", "name": "Test dataset"}], + } + dataset_id = cumulio_client._get_dataset_id_from_stream_name("test_stream") + assert dataset_id == "dataset_id" + + +def test_get_dataset_id_from_stream_name_multiple_datasets( + cumulio_client: CumulioClient, +): + """Tests whether an exception is thrown when multiple datasets are returned for a stream name""" + cumulio_client.client.get.return_value = { + "count": 2, + "rows": [ + {"id": "dataset_id_1", "name": "Test dataset 1"}, + {"id": "dataset_id_2", "name": "Test dataset 2"}, + ], + } + with pytest.raises(Exception): + 
cumulio_client._get_dataset_id_from_stream_name("test_stream") + + +# tests for _associate_tag_dataset_id method + + +def test_associate_tag_dataset_id_no_tag_found(cumulio_client: CumulioClient): + cumulio_client._get_tag_id = MagicMock(return_value=None) # type: ignore + cumulio_client._create_and_associate_stream_name_tag_with_dataset_id = MagicMock() # type: ignore + cumulio_client._associate_tag_with_dataset_id = MagicMock() # type: ignore + + cumulio_client._associate_tag_dataset_id("test_stream", "test_dataset_id") + + cumulio_client._create_and_associate_stream_name_tag_with_dataset_id.assert_called_once_with("test_stream", "test_dataset_id") + cumulio_client._associate_tag_with_dataset_id.assert_not_called() + + +def test_associate_tag_dataset_id_tag_found(cumulio_client: CumulioClient): + cumulio_client._get_tag_id = MagicMock(return_value="tag_id") # type: ignore + cumulio_client._create_and_associate_stream_name_tag_with_dataset_id = MagicMock() # type: ignore + cumulio_client._associate_tag_with_dataset_id = MagicMock() # type: ignore + + cumulio_client._associate_tag_dataset_id("test_stream", "test_dataset_id") + + cumulio_client._associate_tag_with_dataset_id.assert_called_once_with("tag_id", "test_dataset_id") + cumulio_client._create_and_associate_stream_name_tag_with_dataset_id.assert_not_called() + + +# tests for _get_tag_id method + + +def test_get_tag_id_no_tag_found(cumulio_client: CumulioClient): + tag_api_response = {"count": 0, "rows": []} + cumulio_client.client.get = MagicMock(return_value=tag_api_response) + + result = cumulio_client._get_tag_id("test_stream") + + cumulio_client.client.get.assert_called_once_with("tag", ANY) + assert result is None + + +def test_get_tag_id_tag_found(cumulio_client: CumulioClient): + tag_api_response: Mapping[str, Any] = {"count": 1, "rows": [{"id": "test_tag_id"}]} + cumulio_client.client.get = MagicMock(return_value=tag_api_response) + + result = cumulio_client._get_tag_id("test_stream") + + cumulio_client.client.get.assert_called_once_with("tag", ANY) + assert result == tag_api_response["rows"][0]["id"] diff --git a/airbyte-integrations/connectors/destination-cumulio/unit_tests/test_destination.py b/airbyte-integrations/connectors/destination-cumulio/unit_tests/test_destination.py new file mode 100644 index 000000000000..4805fb51ecf5 --- /dev/null +++ b/airbyte-integrations/connectors/destination-cumulio/unit_tests/test_destination.py @@ -0,0 +1,155 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + + +from datetime import datetime +from logging import Logger, getLogger +from typing import Any, Mapping +from unittest.mock import MagicMock, call, patch + +import pytest +from airbyte_cdk.models import ( + AirbyteMessage, + AirbyteRecordMessage, + AirbyteStateMessage, + AirbyteStream, + ConfiguredAirbyteCatalog, + ConfiguredAirbyteStream, + DestinationSyncMode, + SyncMode, + Type, +) +from destination_cumulio.destination import DestinationCumulio + + +@pytest.fixture(name="logger") +def logger_fixture() -> Logger: + return getLogger("airbyte") + + +@pytest.fixture(name="config") +def config_fixture() -> Mapping[str, Any]: + return { + "api_key": "123abc", + "api_token": "456def", + "api_host": "https://api.cumul.io", + } + + +@pytest.fixture(name="configured_catalog") +def configured_catalog_fixture() -> ConfiguredAirbyteCatalog: + stream_schema = { + "type": "object", + "properties": { + "string_column": {"type": "string"}, + "int_column": {"type": "integer"}, + }, + } + + append_stream = ConfiguredAirbyteStream( + stream=AirbyteStream( + name="append_stream", + json_schema=stream_schema, + supported_sync_modes=[SyncMode.incremental], + ), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.append, + ) + + overwrite_stream = ConfiguredAirbyteStream( + stream=AirbyteStream( + name="overwrite_stream", + json_schema=stream_schema, + supported_sync_modes=[SyncMode.incremental], + ), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.overwrite, + ) + + return ConfiguredAirbyteCatalog(streams=[append_stream, overwrite_stream]) + + +@pytest.fixture(name="airbyte_message_1") +def airbyte_message_1_fixture() -> AirbyteMessage: + return AirbyteMessage( + type=Type.RECORD, + record=AirbyteRecordMessage( + stream="append_stream", + data={"string_column": "value_1", "int_column": 1}, + emitted_at=int(datetime.now().timestamp()) * 1000, + ), + ) + + +@pytest.fixture(name="airbyte_message_2") +def airbyte_message_2_fixture() -> AirbyteMessage: + return AirbyteMessage( + type=Type.RECORD, + record=AirbyteRecordMessage( + stream="overwrite_stream", + data={"string_column": "value_2", "int_column": 2}, + emitted_at=int(datetime.now().timestamp()) * 1000, + ), + ) + + +@pytest.fixture(name="airbyte_state_message") +def airbyte_state_message_fixture() -> AirbyteMessage: + return AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(data={})) + + +def test_check(config: Mapping[str, Any], logger: MagicMock): + with patch("destination_cumulio.destination.CumulioClient") as cumulio_client: + destination_cumulio = DestinationCumulio() + destination_cumulio.check(logger, config) + assert cumulio_client.mock_calls == [ + call(config, logger), + call().test_api_token(), + ] + + +def test_write_no_input_messages( + config: Mapping[str, Any], + configured_catalog: ConfiguredAirbyteCatalog, + airbyte_message_1: AirbyteMessage, + airbyte_message_2: AirbyteMessage, + airbyte_state_message: AirbyteMessage, + logger: MagicMock, +): + with patch("destination_cumulio.destination.CumulioWriter") as cumulio_writer: + destination_cumulio = DestinationCumulio() + + input_messages = [airbyte_state_message] + result = list(destination_cumulio.write(config, configured_catalog, input_messages)) + assert result == [airbyte_state_message] + + assert cumulio_writer.mock_calls == [ + call(config, configured_catalog, logger), + call().delete_stream_entries("overwrite_stream"), + call().flush_all(),  # The first flush_all is called before yielding the state message + 
call().flush_all(), # The second flush_all is called after going through all input messages + ] + + +def test_write( + config: Mapping[str, Any], + configured_catalog: ConfiguredAirbyteCatalog, + airbyte_message_1: AirbyteMessage, + airbyte_message_2: AirbyteMessage, + airbyte_state_message: AirbyteMessage, + logger: MagicMock, +): + with patch("destination_cumulio.destination.CumulioWriter") as cumulio_writer: + input_messages = [airbyte_message_1, airbyte_message_2, airbyte_state_message] + destination_cumulio = DestinationCumulio() + result = list(destination_cumulio.write(config, configured_catalog, input_messages)) + assert result == [airbyte_state_message] + assert cumulio_writer.mock_calls == [ + call(config, configured_catalog, logger), + call().delete_stream_entries("overwrite_stream"), + call().queue_write_operation("append_stream", {"string_column": "value_1", "int_column": 1}), + call().queue_write_operation("overwrite_stream", {"string_column": "value_2", "int_column": 2}), + call().flush_all(), # The first flush_all is called before yielding the state message + call().flush_all(), # The second flush_all is called after going through all input messages + ] diff --git a/airbyte-integrations/connectors/destination-cumulio/unit_tests/test_writer.py b/airbyte-integrations/connectors/destination-cumulio/unit_tests/test_writer.py new file mode 100644 index 000000000000..ac921c7ef5c4 --- /dev/null +++ b/airbyte-integrations/connectors/destination-cumulio/unit_tests/test_writer.py @@ -0,0 +1,512 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import unittest +from typing import Any, Mapping +from unittest.mock import MagicMock, patch + +import pytest +from airbyte_cdk.models import AirbyteStream, ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, DestinationSyncMode, SyncMode +from destination_cumulio.writer import CumulioWriter + + +@pytest.fixture(name="logger") +def logger_fixture() -> MagicMock: + return MagicMock() + + +@pytest.fixture(name="config") +def config_fixture() -> Mapping[str, Any]: + return { + "api_key": "123abc", + "api_token": "456def", + "api_host": "https://api.cumul.io", + } + + +@pytest.fixture(name="configured_catalog") +def configured_catalog_fixture() -> ConfiguredAirbyteCatalog: + orders_stream_schema = { + "type": "object", + "properties": { + "order_id": {"type": "integer"}, + "amount": {"type": "integer"}, + "customer_id": {"type": "string"}, + }, + } + products_stream_schema = { + "type": "object", + "properties": {"product_id": {"type": "integer"}}, + } + + orders_append_stream = ConfiguredAirbyteStream( + stream=AirbyteStream( + name="orders", + json_schema=orders_stream_schema, + supported_sync_modes=[SyncMode.incremental], + ), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.append, + ) + + products_overwrite_stream = ConfiguredAirbyteStream( + stream=AirbyteStream( + name="products", + json_schema=products_stream_schema, + supported_sync_modes=[SyncMode.incremental], + ), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.overwrite, + ) + + return ConfiguredAirbyteCatalog(streams=[orders_append_stream, products_overwrite_stream]) + + +@pytest.fixture(name="writer") +def writer_no_existing_cumulio_columns( + config: Mapping[str, Any], + configured_catalog: ConfiguredAirbyteCatalog, + logger: MagicMock, +) -> CumulioWriter: + """Returns a CumulioWriter using MagicMock, and mocking the return_value of all used CumulioClient methods.""" + with 
patch("destination_cumulio.writer.CumulioClient", MagicMock()) as cumulio_client_mock: + # Mock get_ordered_columns to return no existing Cumul.io columns (dataset hasn't been created yet --> first sync) + cumulio_client_mock.return_value.get_ordered_columns.return_value = [] + # cumulio_client_mock.return_value.batch_write.return_value = None + # cumulio_client_mock.return_value.set_replace_tag_on_dataset.return_value = None + return CumulioWriter(config, configured_catalog, logger) + + +def test_small_enough_data_point_limit(writer: CumulioWriter): + """Tests whether the FLUSH_INTERVAL variable is smaller than the maximum amount of data points Cumul.io supports.""" + assert writer.FLUSH_INTERVAL <= 10000 + + +def test_init(writer: CumulioWriter): + """Tests whether CumulioWriter is correctly initialized for streams with no known Cumulio dataset (i.e. first sync for each stream).""" + + # Assert each stream is correctly initializing writers + assert "orders" in writer.writers + assert "products" in writer.writers + + # Assert each stream is correctly initializing empty write buffer + assert len(writer.writers["orders"]["write_buffer"]) == 0 + assert len(writer.writers["products"]["write_buffer"]) == 0 + + # Assert each stream is correctly initializing is_in_overwrite_sync_mode + assert writer.writers["orders"]["is_in_overwrite_sync_mode"] is False + assert writer.writers["products"]["is_in_overwrite_sync_mode"] is True + + # Assert each stream is correctly initializing is_first_batch to True + assert writer.writers["orders"]["is_first_batch"] is True + assert writer.writers["products"]["is_first_batch"] is True + + # Assert each stream is correctly initializing update_metadata (due to no columns from Cumul.io in this writer, both are True) + assert writer.writers["orders"]["update_metadata"] is True + assert writer.writers["products"]["update_metadata"] is True + + +def test_transform_data(writer: CumulioWriter): + case = unittest.TestCase() + + data = {"order_id": 1, "amount": 100.0, "customer_id": "cust_1"} + transformed_data = writer.transform_data("orders", data) + case.assertCountEqual(transformed_data, ["cust_1", 1, 100.0]) + + +def test_transform_data_missing_data(writer: CumulioWriter): + case = unittest.TestCase() + + missing_data = {"order_id": 1, "customer_id": "cust_1"} + transformed_data = writer.transform_data("orders", missing_data) + case.assertCountEqual(transformed_data, ["cust_1", 1, None]) + + +def test_transform_data_additional_data(writer: CumulioWriter): + case = unittest.TestCase() + + additional_data = { + "order_id": 1, + "amount": 100.0, + "customer_id": "cust_1", + "custmer_name": "Customer 1", + } + transformed_data = writer.transform_data("orders", additional_data) + case.assertCountEqual(transformed_data, ["cust_1", 1, 100.0]) + + +def test_transform_data_bool_data(writer: CumulioWriter): + case = unittest.TestCase() + + bool_data = {"order_id": 1, "amount": 100.0, "customer_id": True} + transformed_data = writer.transform_data("orders", bool_data) + case.assertCountEqual(transformed_data, ["true", 1, 100.0]) + + +def test_transform_data_dict_data(writer: CumulioWriter): + case = unittest.TestCase() + + dict_data = {"order_id": 1, "amount": 100.0, "customer_id": {"key": "value"}} + transformed_data = writer.transform_data("orders", dict_data) + case.assertCountEqual(transformed_data, ['{"key": "value"}', 1, 100.0]) + + +def test_transform_data_arr_data(writer: CumulioWriter): + case = unittest.TestCase() + + arr_data = {"order_id": 1, "amount": 100.0, 
"customer_id": ["test1", "test2"]} + transformed_data = writer.transform_data("orders", arr_data) + case.assertCountEqual(transformed_data, ['["test1", "test2"]', 1, 100.0]) + + +def test_queue_write_operation(writer: CumulioWriter): + # Set flush interval to max value to avoid flushing data + writer.FLUSH_INTERVAL = 10000 + + writer.client.batch_write = MagicMock() # type: ignore + + case = unittest.TestCase() + + order_data = {"order_id": 1, "amount": 100.0, "customer_id": "customer_1"} + writer.queue_write_operation("orders", order_data) + + # Assert that write_buffer from the orders stream contains a single value + assert len(writer.writers["orders"]["write_buffer"]) == 1 + case.assertCountEqual(writer.writers["orders"]["write_buffer"][0], ["customer_1", 1, 100.0]) + + +def test_queue_write_operation_two_streams(writer: CumulioWriter): + # Set flush interval to max value to avoid flushing data + writer.FLUSH_INTERVAL = 10000 + + writer.client.batch_write = MagicMock() # type: ignore + + order_data = {"order_id": 1, "amount": 100.0, "customer_id": "customer_1"} + writer.queue_write_operation("orders", order_data) + + # Assert that write_buffer from the orders stream contains a single value + assert len(writer.writers["orders"]["write_buffer"]) == 1 + + product_data = {"product_id": 1} + writer.queue_write_operation("products", product_data) + + # Assert that the orders write_buffer isn't influenced by write operations from the products stream + assert len(writer.writers["orders"]["write_buffer"]) == 1 + + # Assert that write_buffer from the products stream contains a single value + assert len(writer.writers["products"]["write_buffer"]) == 1 + assert writer.writers["products"]["write_buffer"] == [[1]] + + product_data = {"product_id": 2} + writer.queue_write_operation("products", product_data) + # Assert that write_buffer from the orders stream contains two values + assert writer.writers["products"]["write_buffer"] == [[1], [2]] + + +def test_queue_write_operation_non_existing_stream(writer: CumulioWriter): + # Set flush interval to max value to avoid flushing data + writer.FLUSH_INTERVAL = 10000 + + writer.client.batch_write = MagicMock() # type: ignore + + with pytest.raises(Exception): + # Assert that an Exception is thrown upon trying to write to a non-existing stream + writer.queue_write_operation("non_existing_stream", {"column": "value"}) + + +def test_flush(writer: CumulioWriter): + writer.client.batch_write = MagicMock() # type: ignore + + writer.writers["orders"]["write_buffer"] = [["customer_1", 1, 100.0]] + writer.flush("orders") + assert writer.writers["orders"]["write_buffer"] == [] + + +def test_queue_write_flush_operation(writer: CumulioWriter): + # Set flush interval to 2 to cause flush after second row has been added to buffer + writer.FLUSH_INTERVAL = 2 + + writer.client.batch_write = MagicMock() # type: ignore + + product_data = {"product_id": 1} + writer.queue_write_operation("products", product_data) + assert writer.writers["products"]["write_buffer"] == [[1]] + + product_data = {"product_id": 2} + writer.queue_write_operation("products", product_data) + assert writer.writers["products"]["write_buffer"] == [] + assert writer.writers["products"]["is_first_batch"] is False + + product_data = {"product_id": 3} + writer.queue_write_operation("products", product_data) + assert writer.writers["products"]["write_buffer"] == [[3]] + + +def test_flush_all(writer: CumulioWriter): + writer.client.batch_write = MagicMock() # type: ignore + + 
writer.writers["orders"]["write_buffer"] = [["cust_1", 1, 100.0]] + writer.writers["products"]["write_buffer"] = [["cust_1", 1, 100.0]] + writer.flush_all() + assert writer.writers["orders"]["write_buffer"] == [] + assert writer.writers["products"]["write_buffer"] == [] + + +def test_delete_stream_entries(writer: CumulioWriter): + writer.client.set_replace_tag_on_dataset = MagicMock() # type: ignore + writer.delete_stream_entries("stream_name") + writer.client.set_replace_tag_on_dataset.assert_called_once_with("stream_name") + + +def _get_cumulio_and_merged_columns(writer: CumulioWriter) -> Mapping[str, Any]: + if len(writer.writers) < 0: + raise Exception("No streams defined for writer") + + result = {} + + for stream_name in writer.writers: + cumulio_columns = writer.client.get_ordered_columns(stream_name) + merged_columns = writer.writers[stream_name]["column_headers"] + result[stream_name] = { + "cumulio_columns": cumulio_columns, + "merged_columns": merged_columns, + } + return result + + +@pytest.fixture +def writer_existing_cumulio_columns( + config: Mapping[str, Any], + configured_catalog: ConfiguredAirbyteCatalog, + logger: MagicMock, +) -> CumulioWriter: + """This will return a CumulioWriter that mocks airbyte stream catalogs that contains the same columns as those existing in Cumul.io.""" + existing_cumulio_columns = {} + for configured_stream in configured_catalog.streams: + existing_cumulio_columns[configured_stream.stream.name] = [ + column_name for column_name in configured_stream.stream.json_schema["properties"] + ] + + def get_existing_cumulio_columns(stream_name): + return existing_cumulio_columns[stream_name] + + with patch("destination_cumulio.writer.CumulioClient", MagicMock()) as cumulio_client_mock: + # Mock get_ordered_columns to return existing_cumulio_columns + cumulio_client_mock.return_value.get_ordered_columns = MagicMock(side_effect=get_existing_cumulio_columns) + return CumulioWriter(config, configured_catalog, logger) + + +def test_init_existing_cumulio_columns(writer_existing_cumulio_columns: CumulioWriter): + """Tests whether each stream is correctly initializing update_metadata. + Due to identical columns in Cumul.io for this writer, both are False. + """ + assert writer_existing_cumulio_columns.writers["orders"]["update_metadata"] is False + assert writer_existing_cumulio_columns.writers["products"]["update_metadata"] is False + + +def test_equal_cumulio_and_merged_columns( + writer_existing_cumulio_columns: CumulioWriter, +): + result = _get_cumulio_and_merged_columns(writer_existing_cumulio_columns) + + for stream_name in result: + for index, column in enumerate(result[stream_name]["merged_columns"]): + # Assert that merged_columns are in same order as columns defined on Cumul.io's side. 
+ assert result[stream_name]["cumulio_columns"][index] == column["name"] + + +def test_queue_write_operation_with_correct_data_order( + writer_existing_cumulio_columns: CumulioWriter, +): + writer_existing_cumulio_columns.client.batch_write = MagicMock()  # type: ignore + + result = _get_cumulio_and_merged_columns(writer_existing_cumulio_columns) + # Set flush interval to max value to avoid flushing data + writer_existing_cumulio_columns.FLUSH_INTERVAL = 10000 + + order_data = {"order_id": 1, "amount": 100.0, "customer_id": "cust_1"} + writer_existing_cumulio_columns.queue_write_operation("orders", order_data) + expected_data = [] + for column in result["orders"]["merged_columns"]: + expected_data.append(order_data[column["name"]]) + assert writer_existing_cumulio_columns.writers["orders"]["write_buffer"][0] == expected_data + + +@pytest.fixture(name="configured_catalog_with_new_column") +def configured_catalog_with_new_column_fixture() -> ConfiguredAirbyteCatalog: + """Creates a ConfiguredAirbyteCatalog that will be used to mock a new column.""" + # The stream should have at least 2 schema properties (i.e. columns) defined. + orders_stream_schema = { + "type": "object", + "properties": { + "order_id": {"type": "integer"}, + "amount": {"type": "integer"}, + "customer_id": {"type": "string"}, + "customer_name": {"type": "string"}, + }, + } + + orders_append_stream = ConfiguredAirbyteStream( + stream=AirbyteStream( + name="orders_append", + json_schema=orders_stream_schema, + supported_sync_modes=[SyncMode.incremental], + ), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.append, + ) + + orders_overwrite_stream = ConfiguredAirbyteStream( + stream=AirbyteStream( + name="orders_overwrite", + json_schema=orders_stream_schema, + supported_sync_modes=[SyncMode.incremental], + ), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.overwrite, + ) + + return ConfiguredAirbyteCatalog(streams=[orders_append_stream, orders_overwrite_stream]) + + +@pytest.fixture +def writer_new_airbyte_column( + config: Mapping[str, Any], + configured_catalog_with_new_column: ConfiguredAirbyteCatalog, + logger: MagicMock, +) -> CumulioWriter: + """This will return a CumulioWriter that mocks airbyte stream catalogs containing one column that does not exist in Cumul.io.""" + existing_cumulio_columns = {} + for configured_stream in configured_catalog_with_new_column.streams: + columns = [column_name for column_name in configured_stream.stream.json_schema["properties"]] + # get rid of the second element to mimic a new column being defined in configured_stream + del columns[1] + existing_cumulio_columns[configured_stream.stream.name] = columns + + def get_existing_cumulio_columns(stream_name): + return existing_cumulio_columns[stream_name] + + with patch("destination_cumulio.writer.CumulioClient", MagicMock()) as cumulio_client_mock: + # Mock get_ordered_columns to return existing_cumulio_columns (which does not include one column defined in configured stream) + cumulio_client_mock.return_value.get_ordered_columns = MagicMock(side_effect=get_existing_cumulio_columns) + cumulio_client_mock.return_value.batch_write.return_value = None + cumulio_client_mock.return_value.set_replace_tag_on_dataset.return_value = None + return CumulioWriter(config, configured_catalog_with_new_column, logger) + + +def test_init_new_airbyte_column(writer_new_airbyte_column: CumulioWriter): + """Tests whether each stream is correctly initializing update_metadata (due to a new column in 
Airbyte for this writer, both are True)""" + assert writer_new_airbyte_column.writers["orders_append"]["update_metadata"] is True + assert writer_new_airbyte_column.writers["orders_overwrite"]["update_metadata"] is True + + +def test_new_column_update_metadata(writer_new_airbyte_column: CumulioWriter): + """Tests whether Airbyte streams with at least one new column defined result in update_metadata being set, + to inform Cumul.io about new column data being pushed.""" + for stream_name in writer_new_airbyte_column.writers: + assert writer_new_airbyte_column.writers[stream_name]["update_metadata"] is True + + +def test_new_column_appended(writer_new_airbyte_column: CumulioWriter): + """Tests whether an Airbyte stream with one new column appends it at the end of the column list""" + result = _get_cumulio_and_merged_columns(writer_new_airbyte_column) + for stream_name in result: + assert len(result[stream_name]["merged_columns"]) == len(result[stream_name]["cumulio_columns"]) + 1 + for index, column in enumerate(result[stream_name]["cumulio_columns"]): + # Assert that merged_columns are in same order as columns defined on Cumul.io's side. + assert result[stream_name]["merged_columns"][index]["name"] == column + with pytest.raises(Exception): + # Test whether last element of merged_columns is the column that is not defined on Cumul.io's end. + result[stream_name]["cumulio_columns"].index(result[stream_name]["merged_columns"][-1]["name"]) + + +@pytest.fixture(name="configured_catalog_with_deleted_column") +def configured_catalog_with_deleted_column_fixture() -> ConfiguredAirbyteCatalog: + """Creates a ConfiguredAirbyteCatalog that will be used to mock a deleted column.""" + orders_stream_schema = { + "type": "object", + "properties": {"order_id": {"type": "integer"}, "amount": {"type": "integer"}}, + } + + orders_append_stream = ConfiguredAirbyteStream( + stream=AirbyteStream( + name="orders_append", + json_schema=orders_stream_schema, + supported_sync_modes=[SyncMode.incremental], + ), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.append, + ) + + orders_overwrite_stream = ConfiguredAirbyteStream( + stream=AirbyteStream( + name="orders_overwrite", + json_schema=orders_stream_schema, + supported_sync_modes=[SyncMode.incremental], + ), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.overwrite, + ) + + return ConfiguredAirbyteCatalog(streams=[orders_append_stream, orders_overwrite_stream]) + + +@pytest.fixture +def writer_deleted_airbyte_column( + config: Mapping[str, Any], + configured_catalog_with_deleted_column: ConfiguredAirbyteCatalog, + logger: MagicMock, +) -> CumulioWriter: + """This will return a CumulioWriter that mocks airbyte stream catalogs that don't contain one column that does exist in Cumul.io.""" + existing_cumulio_columns = {} + for configured_stream in configured_catalog_with_deleted_column.streams: + columns = [column_name for column_name in configured_stream.stream.json_schema["properties"]] + # Add customer_name column as second element to mimic a deleted column being defined in configured_stream + columns.insert(1, "customer_name") + existing_cumulio_columns[configured_stream.stream.name] = columns + + def get_existing_cumulio_columns(stream_name): + return existing_cumulio_columns[stream_name] + + with patch("destination_cumulio.writer.CumulioClient", MagicMock()) as cumulio_client_mock: + # Mock get_ordered_columns to return existing_cumulio_columns (which here includes a customer_name column that is not defined in the configured stream) + 
cumulio_client_mock.return_value.get_ordered_columns = MagicMock(side_effect=get_existing_cumulio_columns) + cumulio_client_mock.return_value.batch_write.return_value = None + cumulio_client_mock.return_value.set_replace_tag_on_dataset.return_value = None + return CumulioWriter(config, configured_catalog_with_deleted_column, logger) + + +def test_init_deleted_airbyte_column(writer_deleted_airbyte_column: CumulioWriter): + """Assert each stream is correctly initializing update_metadata. + Due to a deleted column in Airbyte for this writer: + - the update_metadata property for the orders_append stream is set to False, as it's in append mode and thus should keep the existing structure + - the update_metadata property for the orders_overwrite stream is set to True, as it's in overwrite mode + """ + assert writer_deleted_airbyte_column.writers["orders_append"]["update_metadata"] is False + assert writer_deleted_airbyte_column.writers["orders_overwrite"]["update_metadata"] is True + + +def test_deleted_column_update_metadata(writer_deleted_airbyte_column: CumulioWriter): + """Tests whether Airbyte streams that do not contain a column defined on Cumul.io's side result in update_metadata being set only for + overwrite streams (to inform Cumul.io about new column data being pushed)""" + assert writer_deleted_airbyte_column.writers["orders_append"]["update_metadata"] is False + assert writer_deleted_airbyte_column.writers["orders_overwrite"]["update_metadata"] is True + + +def test_merged_columns_order_for_deleted_column( + writer_deleted_airbyte_column: CumulioWriter, +): + """Tests whether Airbyte streams that do not contain a column defined on Cumul.io's side still correctly put the other columns in + the right order""" + result = _get_cumulio_and_merged_columns(writer_deleted_airbyte_column) + for stream_name in result: + # Test whether merged_columns contains one less element + assert len(result[stream_name]["merged_columns"]) == len(result[stream_name]["cumulio_columns"]) - 1 + + cumulio_columns_without_deleted = [ + column_name for column_name in result[stream_name]["cumulio_columns"] if column_name != "customer_name" + ] + # Test whether elements, without deleted column, are equal and in the same position + assert cumulio_columns_without_deleted == [column["name"] for column in result[stream_name]["merged_columns"]] diff --git a/airbyte-integrations/connectors/destination-databend/.dockerignore b/airbyte-integrations/connectors/destination-databend/.dockerignore new file mode 100644 index 000000000000..57f4cf36c057 --- /dev/null +++ b/airbyte-integrations/connectors/destination-databend/.dockerignore @@ -0,0 +1,5 @@ +* +!Dockerfile +!main.py +!destination_databend +!setup.py diff --git a/airbyte-integrations/connectors/destination-databend/Dockerfile b/airbyte-integrations/connectors/destination-databend/Dockerfile new file mode 100644 index 000000000000..48bdb5ced052 --- /dev/null +++ b/airbyte-integrations/connectors/destination-databend/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . 
+ +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY destination_databend ./destination_databend + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.3 +LABEL io.airbyte.name=airbyte/destination-databend diff --git a/airbyte-integrations/connectors/destination-databend/README.md b/airbyte-integrations/connectors/destination-databend/README.md new file mode 100644 index 000000000000..9b50cd9ffbfe --- /dev/null +++ b/airbyte-integrations/connectors/destination-databend/README.md @@ -0,0 +1,99 @@ +# Databend Destination + +This is the repository for the Databend destination connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/destinations/databend). + +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.7.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/destinations/databend) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `destination_databend/spec.json` file. +Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `destination databend test creds` +and place them into `secrets/config.json`. 
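For a quick local sanity check outside the Airbyte runner, something like the following works (a minimal sketch; it assumes `secrets/config.json` exists and conforms to `destination_databend/spec.json`, as described above):

```python
import json
import logging

from destination_databend import DestinationDatabend

# Load the config created in the previous step (assumed local path).
with open("secrets/config.json") as f:
    config = json.load(f)

# check() creates, writes to, and drops a scratch table, then returns an
# AirbyteConnectionStatus with status SUCCEEDED or FAILED.
print(DestinationDatabend().check(logging.getLogger("airbyte"), config))
```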
+ +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages +cat messages.jsonl | python main.py write --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + + +#### Build +**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** +```bash +airbyte-ci connectors --name=destination-databend build +``` + +An image will be built with the tag `airbyte/destination-databend:dev`. + +**Via `docker build`:** +```bash +docker build -t airbyte/destination-databend:dev . +``` + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/destination-databend:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-databend:dev check --config /secrets/config.json +# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages +cat messages.jsonl | docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-databend:dev write --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` + +## Testing +You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): +```bash +airbyte-ci connectors --name=destination-databend test +``` + +### Customizing Acceptance Tests +Customize the `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. +If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`. + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work go in the `MAIN_REQUIREMENTS` list. +* required for testing go in the `TEST_REQUIREMENTS` list. + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-databend test` +2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). +3. Make sure the `metadata.yaml` content is up to date. +4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/destinations/databend.md`). +5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). +6. Pat yourself on the back for being an awesome contributor. +7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
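The `messages.jsonl` file piped into `write` above is plain line-delimited JSON, one serialized `AirbyteMessage` per line. A minimal sketch of producing one with the CDK models (the stream name and fields are illustrative and must match your configured catalog; `.json()` is the pydantic serializer the CDK models expose):

```python
from airbyte_cdk.models import AirbyteMessage, AirbyteRecordMessage, Type

# One RECORD message; in a real file you would write one of these per data row.
message = AirbyteMessage(
    type=Type.RECORD,
    record=AirbyteRecordMessage(stream="append_stream", data={"string_col": "a", "int_col": 1}, emitted_at=0),
)

with open("messages.jsonl", "w") as f:
    f.write(message.json(exclude_unset=True) + "\n")
```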
+ diff --git a/airbyte-integrations/connectors/destination-databend/destination_databend/__init__.py b/airbyte-integrations/connectors/destination-databend/destination_databend/__init__.py new file mode 100644 index 000000000000..5be40696a9e5 --- /dev/null +++ b/airbyte-integrations/connectors/destination-databend/destination_databend/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +from .destination import DestinationDatabend + +__all__ = ["DestinationDatabend"] diff --git a/airbyte-integrations/connectors/destination-databend/destination_databend/__init__.pyc b/airbyte-integrations/connectors/destination-databend/destination_databend/__init__.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4538a0add1a44b3d66bbe4f5a67b65c59b0c7072 GIT binary patch literal 307 zcmYjNK~BRk5L_n+P!SRmXYRQKf1pAfdgcJ)!e!;gR#=+aD7y&d#8>zuA7D}yTFW~- zo*io}*FTqw@6A`&(SB9v_b9yqL~KU`(bmz6BNr>`!77S1JvAqAhyye;*fn64)FcO! zl2^gPI_3BXnI4*Ywp;+>!Ll3K8ARqV49pEE9Y7}>{j;V~{cFgIdXwaPvG%i!8}&iP z^VEgPQi8lo2%*< D6Y@_X literal 0 HcmV?d00001 diff --git a/airbyte-integrations/connectors/destination-databend/destination_databend/client.py b/airbyte-integrations/connectors/destination-databend/destination_databend/client.py new file mode 100644 index 000000000000..1764093aa094 --- /dev/null +++ b/airbyte-integrations/connectors/destination-databend/destination_databend/client.py @@ -0,0 +1,20 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from databend_sqlalchemy import connector + + +class DatabendClient: + def __init__(self, host: str, port: int, database: str, table: str, username: str, password: str = None): + self.host = host + self.port = port + self.database = database + self.table = table + self.username = username + self.password = password + + def open(self): + handle = connector.connect(f"https://{self.username}:{self.password}@{self.host}:{self.port}/{self.database}").cursor() + + return handle diff --git a/airbyte-integrations/connectors/destination-databend/destination_databend/destination.py b/airbyte-integrations/connectors/destination-databend/destination_databend/destination.py new file mode 100644 index 000000000000..365575ec1e99 --- /dev/null +++ b/airbyte-integrations/connectors/destination-databend/destination_databend/destination.py @@ -0,0 +1,89 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import json +from datetime import datetime +from logging import getLogger +from typing import Any, Iterable, Mapping +from uuid import uuid4 + +from airbyte_cdk import AirbyteLogger +from airbyte_cdk.destinations import Destination +from airbyte_cdk.models import AirbyteConnectionStatus, AirbyteMessage, ConfiguredAirbyteCatalog, DestinationSyncMode, Status, Type +from destination_databend.client import DatabendClient + +from .writer import create_databend_wirter + +logger = getLogger("airbyte") + + +class DestinationDatabend(Destination): + def write( + self, config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog, input_messages: Iterable[AirbyteMessage] + ) -> Iterable[AirbyteMessage]: + + """ + TODO + Reads the input stream of messages, config, and catalog to write data to the destination. + + This method returns an iterable (typically a generator of AirbyteMessages via yield) containing state messages received + in the input message stream. Outputting a state message means that every AirbyteRecordMessage which came before it has been + successfully persisted to the destination. 
This is used to ensure fault tolerance in the case that a sync fails before fully completing, + then the source is given the last state message output from this method as the starting point of the next sync. + + :param config: dict of JSON configuration matching the configuration declared in spec.json + :param configured_catalog: The Configured Catalog describing the schema of the data being received and how it should be persisted in the + destination + :param input_messages: The stream of input messages received from the source + :return: Iterable of AirbyteStateMessages wrapped in AirbyteMessage structs + """ + streams = {s.stream.name for s in configured_catalog.streams} + client = DatabendClient(**config) + + writer = create_databend_wirter(client, logger) + + for configured_stream in configured_catalog.streams: + if configured_stream.destination_sync_mode == DestinationSyncMode.overwrite: + writer.delete_table(configured_stream.stream.name) + logger.info(f"Stream {configured_stream.stream.name} is wiped.") + writer.create_raw_table(configured_stream.stream.name) + + for message in input_messages: + if message.type == Type.STATE: + yield message + elif message.type == Type.RECORD: + data = message.record.data + stream = message.record.stream + # Skip unselected streams + if stream not in streams: + logger.debug(f"Stream {stream} was not present in configured streams, skipping") + continue + writer.queue_write_data(stream, str(uuid4()), datetime.now(), json.dumps(data)) + + # Flush any leftover messages + writer.flush() + + def check(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> AirbyteConnectionStatus: + """ + Tests if the input configuration can be used to successfully connect to the destination with the needed permissions + e.g: if a provided API token or password can be used to connect and write to the destination. 
+ + :param logger: Logging object to display debug/info/error to the logs + (logs will not be accessible via airbyte UI if they are not passed to this logger) + :param config: Json object containing the configuration of this destination, content of this json is as specified in + the properties of the spec.json file + + :return: AirbyteConnectionStatus indicating a Success or Failure + """ + try: + client = DatabendClient(**config) + cursor = client.open() + cursor.execute("DROP TABLE IF EXISTS test") + cursor.execute("CREATE TABLE if not exists test (x Int32,y VARCHAR)") + cursor.execute("INSERT INTO test (x,y) VALUES (%,%)", [1, "yy", 2, "xx"]) + cursor.execute("DROP TABLE IF EXISTS test") + return AirbyteConnectionStatus(status=Status.SUCCEEDED) + except Exception as e: + return AirbyteConnectionStatus(status=Status.FAILED, message=f"An exception occurred: {repr(e)}") diff --git a/airbyte-integrations/connectors/destination-databend/destination_databend/spec.json b/airbyte-integrations/connectors/destination-databend/destination_databend/spec.json new file mode 100644 index 000000000000..e77d3301152c --- /dev/null +++ b/airbyte-integrations/connectors/destination-databend/destination_databend/spec.json @@ -0,0 +1,57 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/destinations/databend", + "supported_destination_sync_modes": ["overwrite", "append"], + "supportsIncremental": true, + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Destination Databend", + "type": "object", + "required": ["host", "username", "database"], + "additionalProperties": true, + "properties": { + "host": { + "title": "Host", + "description": "Hostname of the database.", + "type": "string", + "order": 0 + }, + "port": { + "title": "Port", + "description": "Port of the database.", + "type": "integer", + "minimum": 0, + "maximum": 65535, + "default": 443, + "examples": ["443"], + "order": 2 + }, + "database": { + "title": "DB Name", + "description": "Name of the database.", + "type": "string", + "order": 3 + }, + "table": { + "title": "Default Table", + "description": "The default table to write to.", + "type": "string", + "examples": ["default"], + "default": "default", + "order": 4 + }, + "username": { + "title": "User", + "description": "Username to use to access the database.", + "type": "string", + "order": 5 + }, + "password": { + "title": "Password", + "description": "Password associated with the username.", + "type": "string", + "airbyte_secret": true, + "order": 6 + } + } + } +} diff --git a/airbyte-integrations/connectors/destination-databend/destination_databend/writer.py b/airbyte-integrations/connectors/destination-databend/destination_databend/writer.py new file mode 100644 index 000000000000..006ff960b14e --- /dev/null +++ b/airbyte-integrations/connectors/destination-databend/destination_databend/writer.py @@ -0,0 +1,134 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from collections import defaultdict +from datetime import datetime +from itertools import chain + +from airbyte_cdk import AirbyteLogger +from destination_databend.client import DatabendClient + + +class DatabendWriter: + """ + Base class for shared writer logic. + """ + + flush_interval = 1000 + + def __init__(self, client: DatabendClient) -> None: + """ + :param client: Databend SDK connection class with established connection + to the database.
+ """ + try: + # open a cursor and do some work with it + self.client = client + self.cursor = client.open() + self._buffer = defaultdict(list) + self._values = 0 + except Exception as e: + # handle the exception + raise AirbyteConnectionStatus(status=Status.FAILED, message=f"An exception occurred: {repr(e)}") + finally: + # close the cursor + self.cursor.close() + + def delete_table(self, name: str) -> None: + """ + Delete the resulting table. + Primarily used in Overwrite strategy to clean up previous data. + + :param name: table name to delete. + """ + self.cursor.execute(f"DROP TABLE IF EXISTS _airbyte_raw_{name}") + + def create_raw_table(self, name: str): + """ + Create the resulting _airbyte_raw table. + + :param name: table name to create. + """ + query = f""" + CREATE TABLE IF NOT EXISTS _airbyte_raw_{name} ( + _airbyte_ab_id TEXT, + _airbyte_emitted_at TIMESTAMP, + _airbyte_data TEXT + ) + """ + cursor = self.cursor + cursor.execute(query) + + def queue_write_data(self, stream_name: str, id: str, time: datetime, record: str) -> None: + """ + Queue up data in a buffer in memory before writing to the database. + When flush_interval is reached data is persisted. + + :param stream_name: name of the stream for which the data corresponds. + :param id: unique identifier of this data row. + :param time: time of writing. + :param record: string representation of the json data payload. + """ + self._buffer[stream_name].append((id, time, record)) + self._values += 1 + if self._values == self.flush_interval: + self._flush() + + def _flush(self): + """ + Stub for the intermediate data flush that's triggered during the + buffering operation. + """ + raise NotImplementedError() + + def flush(self): + """ + Stub for the data flush at the end of writing operation. + """ + raise NotImplementedError() + + +class DatabendSQLWriter(DatabendWriter): + """ + Data writer using the SQL writing strategy. Data is buffered in memory + and flushed using INSERT INTO SQL statement. + """ + + flush_interval = 1000 + + def __init__(self, client: DatabendClient) -> None: + """ + :param client: Databend SDK connection class with established connection + to the databse. + """ + super().__init__(client) + + def _flush(self) -> None: + """ + Intermediate data flush that's triggered during the + buffering operation. Writes data stored in memory via SQL commands. + databend connector insert into table using stage + """ + cursor = self.cursor + # id, written_at, data + for table, data in self._buffer.items(): + cursor.execute( + f"INSERT INTO _airbyte_raw_{table} (_airbyte_ab_id,_airbyte_emitted_at,_airbyte_data) VALUES (%, %, %)", + list(chain.from_iterable(data)), + ) + self._buffer.clear() + self._values = 0 + + def flush(self) -> None: + """ + Final data flush after all data has been written to memory. + """ + self._flush() + + +def create_databend_wirter(client: DatabendClient, logger: AirbyteLogger) -> DatabendWriter: + logger.info("Using the SQL writing strategy") + writer = DatabendSQLWriter(client) + return writer diff --git a/airbyte-integrations/connectors/destination-databend/integration_tests/integration_test.py b/airbyte-integrations/connectors/destination-databend/integration_tests/integration_test.py new file mode 100644 index 000000000000..a40494c4e048 --- /dev/null +++ b/airbyte-integrations/connectors/destination-databend/integration_tests/integration_test.py @@ -0,0 +1,159 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + +import json +import logging +from typing import Any, Dict, List, Mapping + +import pytest +from airbyte_cdk.models import ( + AirbyteMessage, + AirbyteRecordMessage, + AirbyteStateMessage, + AirbyteStream, + ConfiguredAirbyteCatalog, + ConfiguredAirbyteStream, + DestinationSyncMode, + Status, + SyncMode, + Type, +) +from destination_databend import DestinationDatabend +from destination_databend.client import DatabendClient + + +@pytest.fixture(name="databendConfig") +def config_fixture() -> Mapping[str, Any]: + with open("secrets/config.json", "r") as f: + return json.loads(f.read()) + + +@pytest.fixture(name="configured_catalog") +def configured_catalog_fixture() -> ConfiguredAirbyteCatalog: + stream_schema = {"type": "object", "properties": {"string_col": {"type": "str"}, "int_col": {"type": "integer"}}} + + append_stream = ConfiguredAirbyteStream( + stream=AirbyteStream(name="append_stream", json_schema=stream_schema, supported_sync_modes=[SyncMode.incremental]), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.append, + ) + + overwrite_stream = ConfiguredAirbyteStream( + stream=AirbyteStream(name="overwrite_stream", json_schema=stream_schema, supported_sync_modes=[SyncMode.incremental]), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.overwrite, + ) + + return ConfiguredAirbyteCatalog(streams=[append_stream, overwrite_stream]) + + +@pytest.fixture(autouse=True) +def teardown(databendConfig: Mapping): + yield + client = DatabendClient(**databendConfig) + cursor = client.open() + cursor.close() + + +@pytest.fixture(name="client") +def client_fixture(databendConfig) -> DatabendClient: + return DatabendClient(**databendConfig) + + +def test_check_valid_config(databendConfig: Mapping): + outcome = DestinationDatabend().check(logging.getLogger("airbyte"), databendConfig) + assert outcome.status == Status.SUCCEEDED + + +def test_check_invalid_config(): + outcome = DestinationDatabend().check(logging.getLogger("airbyte"), {"bucket_id": "not_a_real_id"}) + assert outcome.status == Status.FAILED + + +def _state(data: Dict[str, Any]) -> AirbyteMessage: + return AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(data=data)) + + +def _record(stream: str, str_value: str, int_value: int) -> AirbyteMessage: + return AirbyteMessage( + type=Type.RECORD, record=AirbyteRecordMessage(stream=stream, data={"str_col": str_value, "int_col": int_value}, emitted_at=0) + ) + + +def retrieve_records(stream_name: str, client: DatabendClient) -> List[AirbyteRecordMessage]: + cursor = client.open() + cursor.execute(f"select * from _airbyte_raw_{stream_name}") + all_records = cursor.fetchall() + out = [] + for record in all_records: + # key = record[0] + # stream = key.split("__ab__")[0] + value = json.loads(record[2]) + out.append(_record(stream_name, value["str_col"], value["int_col"])) + return out + + +def retrieve_all_records(client: DatabendClient) -> List[AirbyteRecordMessage]: + """retrieves and formats all records in databend as Airbyte messages""" + overwrite_stream = "overwrite_stream" + append_stream = "append_stream" + overwrite_out = retrieve_records(overwrite_stream, client) + append_out = retrieve_records(append_stream, client) + return overwrite_out + append_out + + +def test_write(databendConfig: Mapping, configured_catalog: ConfiguredAirbyteCatalog, client: DatabendClient): + """ + This test verifies that: + 1. writing a stream in "overwrite" mode overwrites any existing data for that stream + 2. 
writing a stream in "append" mode appends new records without deleting the old ones + 3. The correct state message is output by the connector at the end of the sync + """ + append_stream, overwrite_stream = configured_catalog.streams[0].stream.name, configured_catalog.streams[1].stream.name + first_state_message = _state({"state": "1"}) + first_record_chunk = [_record(append_stream, str(i), i) for i in range(5)] + [_record(overwrite_stream, str(i), i) for i in range(5)] + + second_state_message = _state({"state": "2"}) + second_record_chunk = [_record(append_stream, str(i), i) for i in range(5, 10)] + [ + _record(overwrite_stream, str(i), i) for i in range(5, 10) + ] + + destination = DestinationDatabend() + + expected_states = [first_state_message, second_state_message] + output_states = list( + destination.write( + databendConfig, configured_catalog, [*first_record_chunk, first_state_message, *second_record_chunk, second_state_message] + ) + ) + assert expected_states == output_states, "Checkpoint state messages were expected from the destination" + + expected_records = [_record(append_stream, str(i), i) for i in range(10)] + [_record(overwrite_stream, str(i), i) for i in range(10)] + records_in_destination = retrieve_all_records(client) + assert len(expected_records) == len(records_in_destination), "Records in destination should match records expected" + + # After this sync we expect the append stream to have 15 messages and the overwrite stream to have 5 + third_state_message = _state({"state": "3"}) + third_record_chunk = [_record(append_stream, str(i), i) for i in range(10, 15)] + [ + _record(overwrite_stream, str(i), i) for i in range(10, 15) + ] + + output_states = list(destination.write(databendConfig, configured_catalog, [*third_record_chunk, third_state_message])) + assert [third_state_message] == output_states + + records_in_destination = retrieve_all_records(client) + expected_records = [_record(append_stream, str(i), i) for i in range(15)] + [ + _record(overwrite_stream, str(i), i) for i in range(10, 15) + ] + assert len(expected_records) == len(records_in_destination) + + tear_down(client) + + +def tear_down(client: DatabendClient): + overwrite_stream = "overwrite_stream" + append_stream = "append_stream" + cursor = client.open() + cursor.execute(f"DROP table _airbyte_raw_{overwrite_stream}") + cursor.execute(f"DROP table _airbyte_raw_{append_stream}") diff --git a/airbyte-integrations/connectors/destination-databend/integration_tests/sample_config.json b/airbyte-integrations/connectors/destination-databend/integration_tests/sample_config.json new file mode 100644 index 000000000000..62c0cdb78b7f --- /dev/null +++ b/airbyte-integrations/connectors/destination-databend/integration_tests/sample_config.json @@ -0,0 +1,9 @@ +{ + "protocol": "https", + "host": "tnc7yee14--xxxx.ch.datafusecloud.com", + "port": 443, + "username": "username", + "password": "password", + "database": "default", + "table": "default" +} diff --git a/airbyte-integrations/connectors/destination-databend/main.py b/airbyte-integrations/connectors/destination-databend/main.py new file mode 100644 index 000000000000..7482c00577de --- /dev/null +++ b/airbyte-integrations/connectors/destination-databend/main.py @@ -0,0 +1,11 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + + +import sys + +from destination_databend import DestinationDatabend + +if __name__ == "__main__": + DestinationDatabend().run(sys.argv[1:]) diff --git a/airbyte-integrations/connectors/destination-databend/metadata.yaml b/airbyte-integrations/connectors/destination-databend/metadata.yaml index 59633494896d..76d0ed20fd07 100644 --- a/airbyte-integrations/connectors/destination-databend/metadata.yaml +++ b/airbyte-integrations/connectors/destination-databend/metadata.yaml @@ -2,7 +2,7 @@ data: connectorSubtype: database connectorType: destination definitionId: 302e4d8e-08d3-4098-acd4-ac67ca365b88 - dockerImageTag: 0.1.2 + dockerImageTag: 0.1.3 dockerRepository: airbyte/destination-databend githubIssueLabel: destination-databend icon: databend.svg @@ -20,5 +20,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: archived + supportLevel: community metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-databend/requirements.txt b/airbyte-integrations/connectors/destination-databend/requirements.txt new file mode 100644 index 000000000000..d6e1198b1ab1 --- /dev/null +++ b/airbyte-integrations/connectors/destination-databend/requirements.txt @@ -0,0 +1 @@ +-e . diff --git a/airbyte-integrations/connectors/destination-databend/setup.py b/airbyte-integrations/connectors/destination-databend/setup.py new file mode 100644 index 000000000000..49878e343bd6 --- /dev/null +++ b/airbyte-integrations/connectors/destination-databend/setup.py @@ -0,0 +1,22 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = ["airbyte-cdk", "requests", "databend-sqlalchemy==0.1.6"] + +TEST_REQUIREMENTS = ["pytest~=6.1"] +setup( + name="destination_databend", + description="Destination implementation for Databend.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/destination-databend/unit_tests/test_databend_destination.py b/airbyte-integrations/connectors/destination-databend/unit_tests/test_databend_destination.py new file mode 100644 index 000000000000..e5a7c7e6d7d6 --- /dev/null +++ b/airbyte-integrations/connectors/destination-databend/unit_tests/test_databend_destination.py @@ -0,0 +1,161 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + +from datetime import datetime +from typing import Dict +from unittest.mock import AsyncMock, MagicMock, call, patch + +from airbyte_cdk.models import ( + AirbyteMessage, + AirbyteRecordMessage, + AirbyteStream, + ConfiguredAirbyteCatalog, + ConfiguredAirbyteStream, + DestinationSyncMode, + SyncMode, + Type, +) +from destination_databend.destination import DatabendClient, DestinationDatabend +from pytest import fixture + + +@fixture +def logger() -> MagicMock: + return MagicMock() + + +@fixture +def config() -> Dict[str, str]: + args = { + "database": "default", + "username": "root", + "password": "root", + "host": "localhost", + "port": 8081, + "table": "default", + } + return args + + +@fixture(name="mock_connection") +def async_connection_cursor_mock(): + connection = MagicMock() + cursor = AsyncMock() + connection.cursor.return_value = cursor + return connection, cursor + + +@fixture +def configured_stream1() -> ConfiguredAirbyteStream: + return ConfiguredAirbyteStream( + stream=AirbyteStream( + name="table1", + json_schema={ + "type": "object", + "properties": {"col1": {"type": "string"}, "col2": {"type": "integer"}}, + }, + supported_sync_modes=[SyncMode.incremental], + ), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.append, + ) + + +@fixture +def configured_stream2() -> ConfiguredAirbyteStream: + return ConfiguredAirbyteStream( + stream=AirbyteStream( + name="table2", + json_schema={ + "type": "object", + "properties": {"col1": {"type": "string"}, "col2": {"type": "integer"}}, + }, + supported_sync_modes=[SyncMode.incremental], + ), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.append, + ) + + +@fixture +def airbyte_message1() -> AirbyteMessage: + return AirbyteMessage( + type=Type.RECORD, + record=AirbyteRecordMessage( + stream="table1", + data={"key1": "value1", "key2": 2}, + emitted_at=int(datetime.now().timestamp()) * 1000, + ), + ) + + +@fixture +def airbyte_message2() -> AirbyteMessage: + return AirbyteMessage( + type=Type.RECORD, + record=AirbyteRecordMessage( + stream="table2", + data={"key1": "value2", "key2": 3}, + emitted_at=int(datetime.now().timestamp()) * 1000, + ), + ) + + +@fixture +def airbyte_state_message() -> AirbyteMessage: + return AirbyteMessage(type=Type.STATE) + + +@patch("destination_databend.client.DatabendClient", MagicMock()) +def test_connection(config: Dict[str, str], logger: MagicMock) -> None: + # Check no log object + DatabendClient(**config) + + +@patch("destination_databend.writer.DatabendSQLWriter") +@patch("destination_databend.client.DatabendClient") +def test_sql_write_append( + mock_connection: MagicMock, + mock_writer: MagicMock, + config: Dict[str, str], + configured_stream1: ConfiguredAirbyteStream, + configured_stream2: ConfiguredAirbyteStream, + airbyte_message1: AirbyteMessage, + airbyte_message2: AirbyteMessage, + airbyte_state_message: AirbyteMessage, +) -> None: + catalog = ConfiguredAirbyteCatalog(streams=[configured_stream1, configured_stream2]) + + destination = DestinationDatabend() + result = destination.write(config, catalog, [airbyte_message1, airbyte_state_message, airbyte_message2]) + + assert list(result) == [airbyte_state_message] + mock_writer.return_value.delete_table.assert_not_called() + mock_writer.return_value.create_raw_table.mock_calls = [call(mock_connection, "table1"), call(mock_connection, "table2")] + assert len(mock_writer.return_value.queue_write_data.mock_calls) == 2 + mock_writer.return_value.flush.assert_called_once() + + 
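A note on the mock wiring in these tests: stacked `@patch` decorators are applied bottom-up, so the decorator closest to the function provides the first positional argument — here `mock_connection` receives the `DatabendClient` mock and `mock_writer` the `DatabendSQLWriter` mock. A self-contained sketch of the pattern (`demo` is an illustrative name; it assumes `destination_databend` is importable):

```python
from unittest.mock import MagicMock, patch


@patch("destination_databend.writer.DatabendSQLWriter")  # outer decorator -> second argument
@patch("destination_databend.client.DatabendClient")  # inner decorator -> first argument
def demo(mock_client: MagicMock, mock_writer: MagicMock) -> None:
    # The client mock arrives first because the decorator closest to the function is applied first.
    assert isinstance(mock_client, MagicMock)
    assert isinstance(mock_writer, MagicMock)


demo()
```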
+@patch("destination_databend.writer.DatabendSQLWriter") +@patch("destination_databend.client.DatabendClient") +def test_sql_write_overwrite( + mock_connection: MagicMock, + mock_writer: MagicMock, + config: Dict[str, str], + configured_stream1: ConfiguredAirbyteStream, + configured_stream2: ConfiguredAirbyteStream, + airbyte_message1: AirbyteMessage, + airbyte_message2: AirbyteMessage, + airbyte_state_message: AirbyteMessage, +): + # Overwrite triggers a delete + configured_stream1.destination_sync_mode = DestinationSyncMode.overwrite + catalog = ConfiguredAirbyteCatalog(streams=[configured_stream1, configured_stream2]) + + destination = DestinationDatabend() + result = destination.write(config, catalog, [airbyte_message1, airbyte_state_message, airbyte_message2]) + + assert list(result) == [airbyte_state_message] + mock_writer.return_value.delete_table.assert_called_once_with("table1") + mock_writer.return_value.create_raw_table.mock_calls = [call(mock_connection, "table1"), call(mock_connection, "table2")] diff --git a/airbyte-integrations/connectors/destination-databend/unit_tests/test_writer.py b/airbyte-integrations/connectors/destination-databend/unit_tests/test_writer.py new file mode 100644 index 000000000000..0b68b113c2ab --- /dev/null +++ b/airbyte-integrations/connectors/destination-databend/unit_tests/test_writer.py @@ -0,0 +1,46 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from typing import Any, Union +from unittest.mock import MagicMock + +from destination_databend.writer import DatabendSQLWriter +from pytest import fixture, mark + + +@fixture +def client() -> MagicMock: + return MagicMock() + + +@fixture +def sql_writer(client: MagicMock) -> DatabendSQLWriter: + return DatabendSQLWriter(client) + + +def test_sql_default(sql_writer: DatabendSQLWriter) -> None: + assert len(sql_writer._buffer) == 0 + assert sql_writer.flush_interval == 1000 + + +@mark.parametrize("writer", ["sql_writer"]) +def test_sql_create(client: MagicMock, writer: Union[DatabendSQLWriter], request: Any) -> None: + writer = request.getfixturevalue(writer) + writer.create_raw_table("dummy") + + +def test_data_buffering(sql_writer: DatabendSQLWriter) -> None: + sql_writer.queue_write_data("dummy", "id1", 20200101, '{"key": "value"}') + sql_writer._buffer["dummy"][0] == ("id1", 20200101, '{"key": "value"}') + assert len(sql_writer._buffer["dummy"]) == 1 + assert len(sql_writer._buffer.keys()) == 1 + sql_writer.queue_write_data("dummy", "id2", 20200102, '{"key2": "value2"}') + sql_writer._buffer["dummy"][0] == ("id2", 20200102, '{"key2": "value2"}') + assert len(sql_writer._buffer["dummy"]) == 2 + assert len(sql_writer._buffer.keys()) == 1 + sql_writer.queue_write_data("dummy2", "id3", 20200103, '{"key3": "value3"}') + sql_writer._buffer["dummy"][0] == ("id3", 20200103, '{"key3": "value3"}') + assert len(sql_writer._buffer["dummy"]) == 2 + assert len(sql_writer._buffer["dummy2"]) == 1 + assert len(sql_writer._buffer.keys()) == 2 diff --git a/airbyte-integrations/connectors/destination-firebolt/Dockerfile b/airbyte-integrations/connectors/destination-firebolt/Dockerfile new file mode 100644 index 000000000000..efe53cb5be69 --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/Dockerfile @@ -0,0 +1,29 @@ +FROM python:3.9-slim as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip3 install --prefix=/install --no-cache-dir . 
+ +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# copy payload code only +COPY main.py ./ +COPY destination_firebolt ./destination_firebolt + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python3", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.1 +LABEL io.airbyte.name=airbyte/destination-firebolt diff --git a/airbyte-integrations/connectors/destination-firebolt/README.md b/airbyte-integrations/connectors/destination-firebolt/README.md new file mode 100644 index 000000000000..d19fb11dc8a0 --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/README.md @@ -0,0 +1,99 @@ +# Firebolt Destination + +This is the repository for the Firebolt destination connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/destinations/firebolt). + +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.7.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/destinations/firebolt) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `destination_firebolt/spec.json` file. +Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `destination firebolt test creds` +and place them into `secrets/config.json`. 
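The Firebolt spec (see `destination_firebolt/spec.json` later in this patch) accepts two shapes for `loading_method`. A hedged sketch of both variants as Python dicts, with placeholder values:

```python
# SQL strategy: buffers rows in memory and INSERTs them; simplest, good for prototyping.
sql_config = {
    "username": "username@email.com",
    "password": "password",
    "database": "my_database",
    "engine": "my_engine",
    "loading_method": {"method": "SQL"},
}

# S3 strategy: stages parquet files on S3, then ingests via an external table; faster for big syncs.
s3_config = {
    **{k: v for k, v in sql_config.items() if k != "loading_method"},
    "loading_method": {
        "method": "S3",
        "s3_bucket": "my-bucket",
        "s3_region": "us-east-1",
        "aws_key_id": "AKIA...",
        "aws_key_secret": "...",
    },
}
```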
+ +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages +cat integration_tests/messages.jsonl | python main.py write --config secrets/config_sql.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + + +#### Build +**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** +```bash +airbyte-ci connectors --name=destination-firebolt build +``` + +An image will be built with the tag `airbyte/destination-firebolt:dev`. + +**Via `docker build`:** +```bash +docker build -t airbyte/destination-firebolt:dev . +``` + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/destination-firebolt:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-firebolt:dev check --config /secrets/config.json +# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages +cat integration_tests/messages.jsonl | docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-firebolt:dev write --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` + +## Testing +You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): +```bash +airbyte-ci connectors --name=destination-firebolt test +``` + +### Customizing Acceptance Tests +Customize the `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. +If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`. + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work go in the `MAIN_REQUIREMENTS` list. +* required for testing go in the `TEST_REQUIREMENTS` list. + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-firebolt test` +2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). +3. Make sure the `metadata.yaml` content is up to date. +4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/destinations/firebolt.md`). +5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). +6. Pat yourself on the back for being an awesome contributor. +7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
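One configuration detail worth knowing before running the connector: `parse_config` in `destination_firebolt/destination.py` (later in this patch) treats an `engine` value containing a dot as a full engine URL and anything else as an engine name. A small sketch mirroring that branching (`pick_engine_kwarg` is an illustrative helper, not part of the connector):

```python
def pick_engine_kwarg(config: dict) -> dict:
    """Mirror of the engine handling in parse_config: a dot means a full cluster URL."""
    connection_args: dict = {}
    engine = config.get("engine")
    if engine:
        if "." in engine:
            connection_args["engine_url"] = engine
        else:
            connection_args["engine_name"] = engine
    return connection_args


assert pick_engine_kwarg({"engine": "my_engine"}) == {"engine_name": "my_engine"}
assert pick_engine_kwarg({"engine": "my-engine.acme.firebolt.io"}) == {"engine_url": "my-engine.acme.firebolt.io"}
```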
+ diff --git a/airbyte-integrations/connectors/destination-firebolt/bootstrap.md b/airbyte-integrations/connectors/destination-firebolt/bootstrap.md new file mode 100644 index 000000000000..dade5200d2d5 --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/bootstrap.md @@ -0,0 +1,22 @@ +# Firebolt Destination + +## Overview + +Firebolt is a cloud data warehouse purpose-built to provide sub-second analytics performance on massive, terabyte-scale data sets. + +Firebolt has two main concepts: Databases, which denote the storage of data, and Engines, which describe the compute layer on top of a Database. + +Firebolt has three types of tables: External, Fact and Dimension. External tables represent a raw file structure in storage. Dimension tables are optimised for fetching and store data on each node in an Engine. Fact tables are similar to Dimension, but they shard the data across the nodes. The usual workload is to write source data into a set of files on S3, wrap them with an External table and write this data to a fetch-optimised Fact or Dimension table. + +## Connector + +Firebolt is a data warehouse, so the most efficient way to write data into it is in bulk. The Firebolt connector offers two ways of writing data: SQL and S3. SQL transfers data in small batches and is most useful for prototyping. S3 buffers data on Amazon S3 storage and persists the data to Firebolt at the end of execution. The latter is the most efficient way of loading data, but it requires AWS S3 access. + +This connector uses [firebolt-sdk](https://pypi.org/project/firebolt-sdk/), which is a [PEP-249](https://peps.python.org/pep-0249/) DB API implementation. +The `Connection` object is used to connect to a specified Engine, which runs subsequent queries against the data stored in the Database using the `Cursor` object. +[Pyarrow](https://pypi.org/project/pyarrow/) is used to efficiently store and upload data to S3. + +## Notes + +* Integration testing requires the user to have a running engine. Spinning up an engine can take a while, so reusing a running one ensures faster iteration on the connector. +* S3 is generally the faster writing strategy and should be preferred. \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/__init__.py b/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/__init__.py new file mode 100644 index 000000000000..90396b049287 --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# + + +from .destination import DestinationFirebolt + +__all__ = ["DestinationFirebolt"] diff --git a/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/destination.py b/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/destination.py new file mode 100644 index 000000000000..5b169f094237 --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/destination.py @@ -0,0 +1,128 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+# + +import json +from datetime import datetime +from logging import getLogger +from typing import Any, Dict, Iterable, Mapping, Optional +from uuid import uuid4 + +from airbyte_cdk import AirbyteLogger +from airbyte_cdk.destinations import Destination +from airbyte_cdk.models import AirbyteConnectionStatus, AirbyteMessage, ConfiguredAirbyteCatalog, DestinationSyncMode, Status, Type +from firebolt.client import DEFAULT_API_URL +from firebolt.client.auth import UsernamePassword +from firebolt.db import Connection, connect + +from .writer import create_firebolt_wirter + +logger = getLogger("airbyte") + + +def parse_config(config: json, logger: Optional[AirbyteLogger] = None) -> Dict[str, Any]: + """ + Convert dict of config values to firebolt.db.Connection arguments + :param config: json-compatible dict of settings + :param logger: AirbyteLogger instance to print logs. + :return: dictionary of firebolt.db.Connection-compatible kwargs + """ + connection_args = { + "database": config["database"], + "auth": UsernamePassword(config["username"], config["password"]), + "api_endpoint": config.get("host", DEFAULT_API_URL), + "account_name": config.get("account"), + } + # engine can be a name or a full URL of a cluster + engine = config.get("engine") + if engine: + if "." in engine: + connection_args["engine_url"] = engine + else: + connection_args["engine_name"] = engine + elif logger: + logger.info("Engine parameter was not provided. Connecting to the default engine.") + return connection_args + + +def establish_connection(config: json, logger: Optional[AirbyteLogger] = None) -> Connection: + """ + Creates a connection to Firebolt database using the parameters provided. + :param config: Json object containing db credentials. + :param logger: AirbyteLogger instance to print logs. + :return: PEP-249 compliant database Connection object. + """ + logger.debug("Connecting to Firebolt.") if logger else None + connection = connect(**parse_config(config, logger)) + logger.debug("Connection to Firebolt established.") if logger else None + return connection + + +class DestinationFirebolt(Destination): + def write( + self, config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog, input_messages: Iterable[AirbyteMessage] + ) -> Iterable[AirbyteMessage]: + + """ + Reads the input stream of messages, config, and catalog to write data to the destination. + + This method returns an iterable (typically a generator of AirbyteMessages via yield) containing state messages received + in the input message stream. Outputting a state message means that every AirbyteRecordMessage which came before it has been + successfully persisted to the destination. This is used to ensure fault tolerance in the case that a sync fails before fully completing, + then the source is given the last state message output from this method as the starting point of the next sync. 
+ + :param config: dict of JSON configuration matching the configuration declared in spec.json + :param configured_catalog: The Configured Catalog describing the schema of the data being received and how it should be persisted in the + destination + :param input_messages: The stream of input messages received from the source + :return: Iterable of AirbyteStateMessages wrapped in AirbyteMessage structs + """ + streams = {s.stream.name for s in configured_catalog.streams} + + with establish_connection(config) as connection: + writer = create_firebolt_wirter(connection, config, logger) + + for configured_stream in configured_catalog.streams: + if configured_stream.destination_sync_mode == DestinationSyncMode.overwrite: + writer.delete_table(configured_stream.stream.name) + logger.info(f"Stream {configured_stream.stream.name} is wiped.") + writer.create_raw_table(configured_stream.stream.name) + + for message in input_messages: + if message.type == Type.STATE: + yield message + elif message.type == Type.RECORD: + data = message.record.data + stream = message.record.stream + # Skip unselected streams + if stream not in streams: + logger.debug(f"Stream {stream} was not present in configured streams, skipping") + continue + writer.queue_write_data(stream, str(uuid4()), datetime.now(), json.dumps(data)) + + # Flush any leftover messages + writer.flush() + + def check(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> AirbyteConnectionStatus: + """ + Tests if the input configuration can be used to successfully connect to the destination with the needed permissions + e.g: if a provided API token or password can be used to connect and write to the destination. + + :param logger: Logging object to display debug/info/error to the logs + (logs will not be accessible via airbyte UI if they are not passed to this logger) + :param config: Json object containing the configuration of this destination, content of this json is as specified in + the properties of the spec.json file + + :return: AirbyteConnectionStatus indicating a Success or Failure + """ + try: + with establish_connection(config, logger) as connection: + # We can only verify correctness of connection parameters on execution + with connection.cursor() as cursor: + cursor.execute("SELECT 1") + # Test access to the bucket, if S3 strategy is used + create_firebolt_wirter(connection, config, logger) + + return AirbyteConnectionStatus(status=Status.SUCCEEDED) + except Exception as e: + return AirbyteConnectionStatus(status=Status.FAILED, message=f"An exception occurred: {repr(e)}") diff --git a/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/spec.json b/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/spec.json new file mode 100644 index 000000000000..a0263800bf39 --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/spec.json @@ -0,0 +1,109 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/destinations/firebolt", + "supported_destination_sync_modes": ["overwrite", "append"], + "supportsIncremental": true, + "supportsDBT": true, + "supportsNormalization": false, + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Firebolt Spec", + "type": "object", + "required": ["username", "password", "database"], + "additionalProperties": false, + "properties": { + "username": { + "type": "string", + "title": "Username", + "description": "Firebolt email address you use to login.", + "examples": 
["username@email.com"], + "order": 0 + }, + "password": { + "type": "string", + "title": "Password", + "description": "Firebolt password.", + "airbyte_secret": true, + "order": 1 + }, + "account": { + "type": "string", + "title": "Account", + "description": "Firebolt account to login." + }, + "host": { + "type": "string", + "title": "Host", + "description": "The host name of your Firebolt database.", + "examples": ["api.app.firebolt.io"] + }, + "database": { + "type": "string", + "title": "Database", + "description": "The database to connect to." + }, + "engine": { + "type": "string", + "title": "Engine", + "description": "Engine name or url to connect to." + }, + "loading_method": { + "type": "object", + "title": "Loading Method", + "description": "Loading method used to select the way data will be uploaded to Firebolt", + "oneOf": [ + { + "title": "SQL Inserts", + "additionalProperties": false, + "required": ["method"], + "properties": { + "method": { + "type": "string", + "const": "SQL" + } + } + }, + { + "title": "External Table via S3", + "additionalProperties": false, + "required": [ + "method", + "s3_bucket", + "s3_region", + "aws_key_id", + "aws_key_secret" + ], + "properties": { + "method": { + "type": "string", + "const": "S3" + }, + "s3_bucket": { + "type": "string", + "title": "S3 bucket name", + "description": "The name of the S3 bucket." + }, + "s3_region": { + "type": "string", + "title": "S3 region name", + "description": "Region name of the S3 bucket.", + "examples": ["us-east-1"] + }, + "aws_key_id": { + "type": "string", + "title": "AWS Key ID", + "airbyte_secret": true, + "description": "AWS access key granting read and write access to S3." + }, + "aws_key_secret": { + "type": "string", + "title": "AWS Key Secret", + "airbyte_secret": true, + "description": "Corresponding secret part of the AWS Key" + } + } + } + ] + } + } + } +} diff --git a/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/writer.py b/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/writer.py new file mode 100644 index 000000000000..6935fef35f0b --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/destination_firebolt/writer.py @@ -0,0 +1,235 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +import json +from collections import defaultdict +from datetime import datetime +from time import time +from uuid import uuid4 + +import pyarrow as pa +import pyarrow.parquet as pq +from airbyte_cdk import AirbyteLogger +from firebolt.db import Connection +from pyarrow import fs + + +class FireboltWriter: + """ + Base class for shared writer logic. + """ + + flush_interval = 1000 + + def __init__(self, connection: Connection) -> None: + """ + :param connection: Firebolt SDK connection class with established connection + to the databse. + """ + self.connection = connection + self._buffer = defaultdict(list) + self._values = 0 + + def delete_table(self, name: str) -> None: + """ + Delete the resulting table. + Primarily used in Overwrite strategy to clean up previous data. + + :param name: table name to delete. + """ + cursor = self.connection.cursor() + cursor.execute(f"DROP TABLE IF EXISTS _airbyte_raw_{name}") + + def create_raw_table(self, name: str): + """ + Create the resulting _airbyte_raw table. + + :param name: table name to create. 
+ """ + query = f""" + CREATE FACT TABLE IF NOT EXISTS _airbyte_raw_{name} ( + _airbyte_ab_id TEXT, + _airbyte_emitted_at TIMESTAMP, + _airbyte_data TEXT + ) + PRIMARY INDEX _airbyte_ab_id + """ + cursor = self.connection.cursor() + cursor.execute(query) + + def queue_write_data(self, stream_name: str, id: str, time: datetime, record: str) -> None: + """ + Queue up data in a buffer in memory before writing to the database. + When flush_interval is reached data is persisted. + + :param stream_name: name of the stream for which the data corresponds. + :param id: unique identifier of this data row. + :param time: time of writing. + :param record: string representation of the json data payload. + """ + self._buffer[stream_name].append((id, time, record)) + self._values += 1 + if self._values == self.flush_interval: + self._flush() + + def _flush(self): + """ + Stub for the intermediate data flush that's triggered during the + buffering operation. + """ + raise NotImplementedError() + + def flush(self): + """ + Stub for the data flush at the end of writing operation. + """ + raise NotImplementedError() + + +class FireboltS3Writer(FireboltWriter): + """ + Data writer using the S3 strategy. Data is buffered in memory + before being flushed to S3 in .parquet format. At the end of + the operation data is written to Firebolt databse from S3, allowing + greater ingestion speed. + """ + + flush_interval = 100000 + + def __init__(self, connection: Connection, s3_bucket: str, access_key: str, secret_key: str, s3_region: str) -> None: + """ + :param connection: Firebolt SDK connection class with established connection + to the databse. + :param s3_bucket: Intermediate bucket to store the data files before writing them to Firebolt. + Has to be created and accessible. + :param access_key: AWS Access Key ID that has read/write/delete permissions on the files in the bucket. + :param secret_key: Corresponding AWS Secret Key. + :param s3_region: S3 region. Best to keep this the same as Firebolt database region. Default us-east-1. + """ + super().__init__(connection) + self.key_id = access_key + self.secret_key = secret_key + self.s3_bucket = s3_bucket + self._updated_tables = set() + self.unique_dir = f"{int(time())}_{uuid4()}" + self.fs = fs.S3FileSystem(access_key=access_key, secret_key=secret_key, region=s3_region) + + def _flush(self) -> None: + """ + Intermediate data flush that's triggered during the + buffering operation. Uploads data stored in memory to the S3. + """ + for table, data in self._buffer.items(): + key_list, ts_list, payload = zip(*data) + upload_data = [pa.array(key_list), pa.array(ts_list), pa.array(payload)] + pa_table = pa.table(upload_data, names=["_airbyte_ab_id", "_airbyte_emitted_at", "_airbyte_data"]) + pq.write_to_dataset(table=pa_table, root_path=f"{self.s3_bucket}/airbyte_output/{self.unique_dir}/{table}", filesystem=self.fs) + # Update tables + self._updated_tables.update(self._buffer.keys()) + self._buffer.clear() + self._values = 0 + + def flush(self) -> None: + """ + Flush any leftover data after ingestion and write from S3 to Firebolt. + Intermediate data on S3 and External Table will be deleted after write is complete. + """ + self._flush() + for table in self._updated_tables: + self.create_raw_table(table) + self.create_external_table(table) + self.ingest_data(table) + self.cleanup(table) + + def create_external_table(self, name: str) -> None: + """ + Create Firebolt External Table to interface with the files on S3. 
+
+        :param name: Stream name from which the table name is derived.
+        """
+        query = f"""
+        CREATE EXTERNAL TABLE IF NOT EXISTS ex_airbyte_raw_{name} (
+            _airbyte_ab_id TEXT,
+            _airbyte_emitted_at TIMESTAMP,
+            _airbyte_data TEXT
+        )
+        URL = ?
+        CREDENTIALS = ( AWS_KEY_ID = ? AWS_SECRET_KEY = ? )
+        OBJECT_PATTERN = '*.parquet'
+        TYPE = (PARQUET);
+        """
+        cursor = self.connection.cursor()
+        cursor.execute(query, parameters=(f"s3://{self.s3_bucket}/airbyte_output/{self.unique_dir}/{name}", self.key_id, self.secret_key))
+
+    def ingest_data(self, name: str) -> None:
+        """
+        Write data from the External Table to the _airbyte_raw table,
+        effectively persisting the data in Firebolt.
+
+        :param name: Stream name from which the table name is derived.
+        """
+        query = f"INSERT INTO _airbyte_raw_{name} SELECT * FROM ex_airbyte_raw_{name}"
+        cursor = self.connection.cursor()
+        cursor.execute(query)
+
+    def cleanup(self, name: str) -> None:
+        """
+        Drop the intermediary External Table and wipe the S3 folder.
+
+        :param name: Stream name from which the table name is derived.
+        """
+        cursor = self.connection.cursor()
+        cursor.execute(f"DROP TABLE IF EXISTS ex_airbyte_raw_{name}")
+        self.fs.delete_dir_contents(f"{self.s3_bucket}/airbyte_output/{self.unique_dir}/{name}")
+
+
+class FireboltSQLWriter(FireboltWriter):
+    """
+    Data writer using the SQL writing strategy. Data is buffered in memory
+    and flushed using INSERT INTO SQL statements. This is a less efficient strategy,
+    better suited for testing and small data sets.
+    """
+
+    flush_interval = 1000
+
+    def __init__(self, connection: Connection) -> None:
+        """
+        :param connection: Firebolt SDK connection class with an established connection
+        to the database.
+        """
+        super().__init__(connection)
+
+    def _flush(self) -> None:
+        """
+        Intermediate data flush that's triggered during the
+        buffering operation. Writes the data stored in memory via SQL commands.
+        """
+        cursor = self.connection.cursor()
+        # Each buffered row is an (id, written_at, data) tuple
+        for table, data in self._buffer.items():
+            cursor.executemany(f"INSERT INTO _airbyte_raw_{table} VALUES (?, ?, ?)", parameters_seq=data)
+        self._buffer.clear()
+        self._values = 0
+
+    def flush(self) -> None:
+        """
+        Final data flush after all data has been written to memory.
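+        Delegates to _flush so that any rows still buffered in memory are written out.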
+ """ + self._flush() + + +def create_firebolt_wirter(connection: Connection, config: json, logger: AirbyteLogger) -> FireboltWriter: + if config["loading_method"]["method"] == "S3": + logger.info("Using the S3 writing strategy") + writer = FireboltS3Writer( + connection, + config["loading_method"]["s3_bucket"], + config["loading_method"]["aws_key_id"], + config["loading_method"]["aws_key_secret"], + config["loading_method"]["s3_region"], + ) + else: + logger.info("Using the SQL writing strategy") + writer = FireboltSQLWriter(connection) + return writer diff --git a/airbyte-integrations/connectors/destination-firebolt/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/destination-firebolt/integration_tests/configured_catalog.json new file mode 100644 index 000000000000..7715d5bb6ff0 --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/integration_tests/configured_catalog.json @@ -0,0 +1,38 @@ +{ + "streams": [ + { + "stream": { + "name": "airbyte_acceptance_table", + "supported_sync_modes": ["full_refresh"], + "source_defined_cursor": false, + "json_schema": { + "type": "object", + "properties": { + "column1": { + "type": "string" + }, + "column2": { + "type": "number" + }, + "column3": { + "type": "string", + "format": "date-time", + "airbyte_type": "timestamp_without_timezone" + }, + "column4": { + "type": "number" + }, + "column5": { + "type": "array", + "items": { + "type": "integer" + } + } + } + } + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/destination-firebolt/integration_tests/integration_test.py b/airbyte-integrations/connectors/destination-firebolt/integration_tests/integration_test.py new file mode 100644 index 000000000000..872db32c3821 --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/integration_tests/integration_test.py @@ -0,0 +1,147 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + +import random +import string +from datetime import datetime +from json import dumps, load +from typing import Dict +from unittest.mock import MagicMock + +from airbyte_cdk.models import AirbyteMessage, AirbyteRecordMessage, Status, Type +from airbyte_cdk.models.airbyte_protocol import ( + AirbyteStream, + ConfiguredAirbyteCatalog, + ConfiguredAirbyteStream, + DestinationSyncMode, + SyncMode, +) +from destination_firebolt.destination import DestinationFirebolt, establish_connection +from firebolt.common.exception import FireboltError +from pytest import fixture, mark, raises + + +@fixture(scope="module") +def config() -> Dict[str, str]: + with open( + "secrets/config.json", + ) as f: + yield load(f) + + +@fixture(scope="module") +def test_table_name() -> str: + letters = string.ascii_lowercase + rnd_string = "".join(random.choice(letters) for i in range(10)) + return f"airbyte_integration_{rnd_string}" + + +@fixture +def cleanup(config: Dict[str, str], test_table_name: str): + yield + with establish_connection(config, MagicMock()) as connection: + with connection.cursor() as cursor: + cursor.execute(f"DROP TABLE IF EXISTS _airbyte_raw_{test_table_name}") + cursor.execute(f"DROP TABLE IF EXISTS ex_airbyte_raw_{test_table_name}") + + +@fixture +def table_schema() -> str: + schema = { + "type": "object", + "properties": { + "column1": {"type": ["null", "string"]}, + }, + } + return schema + + +@fixture +def configured_catalogue(test_table_name: str, table_schema: str) -> ConfiguredAirbyteCatalog: + append_stream = ConfiguredAirbyteStream( + stream=AirbyteStream(name=test_table_name, json_schema=table_schema, supported_sync_modes=[SyncMode.incremental]), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.append, + ) + return ConfiguredAirbyteCatalog(streams=[append_stream]) + + +@fixture(scope="module") +def invalid_config() -> Dict[str, str]: + with open( + "integration_tests/invalid_config.json", + ) as f: + yield load(f) + + +@fixture(scope="module") +def invalid_config_s3() -> Dict[str, str]: + with open( + "integration_tests/invalid_config_s3.json", + ) as f: + yield load(f) + + +@fixture +def airbyte_message1(test_table_name: str): + return AirbyteMessage( + type=Type.RECORD, + record=AirbyteRecordMessage( + stream=test_table_name, + data={"key1": "value1", "key2": 2}, + emitted_at=int(datetime.now().timestamp()) * 1000, + ), + ) + + +@fixture +def airbyte_message2(test_table_name: str): + return AirbyteMessage( + type=Type.RECORD, + record=AirbyteRecordMessage( + stream=test_table_name, + data={"key1": "value2", "key2": 3}, + emitted_at=int(datetime.now().timestamp()) * 1000, + ), + ) + + +@mark.parametrize("config", ["invalid_config", "invalid_config_s3"]) +def test_check_fails(config, request): + destination = DestinationFirebolt() + status = destination.check(logger=MagicMock(), config=config) + assert status.status == Status.FAILED + + +def test_check_succeeds(config, request): + destination = DestinationFirebolt() + status = destination.check(logger=MagicMock(), config=config) + assert status.status == Status.SUCCEEDED + + +def test_write( + config: Dict[str, str], + configured_catalogue: ConfiguredAirbyteCatalog, + airbyte_message1: AirbyteMessage, + airbyte_message2: AirbyteMessage, + test_table_name: str, + cleanup, + request, +): + destination = DestinationFirebolt() + generator = destination.write(config, configured_catalogue, [airbyte_message1, airbyte_message2]) + result = list(generator) + assert len(result) == 0 + with 
establish_connection(config, MagicMock()) as connection: + with connection.cursor() as cursor: + cursor.execute( + f"SELECT _airbyte_ab_id, _airbyte_emitted_at, _airbyte_data FROM _airbyte_raw_{test_table_name} ORDER BY _airbyte_data" + ) + result = cursor.fetchall() + # Make sure no temporary tables present + with raises(FireboltError): + cursor.execute(f"SELECT TOP 0 * FROM ex_airbyte_raw_{test_table_name}") + assert len(result) == 2 + assert result[0][2] == dumps(airbyte_message1.record.data) + assert result[1][2] == dumps(airbyte_message2.record.data) diff --git a/airbyte-integrations/connectors/destination-firebolt/integration_tests/invalid_config.json b/airbyte-integrations/connectors/destination-firebolt/integration_tests/invalid_config.json new file mode 100644 index 000000000000..f8251d5271fb --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/integration_tests/invalid_config.json @@ -0,0 +1,9 @@ +{ + "username": "xxx", + "password": "xxx", + "database": "non_existing_database_name", + "engine": "database_name_Analytics", + "loading_method": { + "method": "SQL" + } +} diff --git a/airbyte-integrations/connectors/destination-firebolt/integration_tests/invalid_config_s3.json b/airbyte-integrations/connectors/destination-firebolt/integration_tests/invalid_config_s3.json new file mode 100644 index 000000000000..2ab29e87dfe5 --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/integration_tests/invalid_config_s3.json @@ -0,0 +1,13 @@ +{ + "username": "xxx", + "password": "xxx", + "database": "non_existing_database_name", + "engine": "database_name_Analytics", + "loading_method": { + "method": "S3", + "s3_bucket": "sample_bucket", + "s3_region": "us-east-1", + "aws_key_id": "yyy", + "aws_key_secret": "yyy" + } +} diff --git a/airbyte-integrations/connectors/destination-firebolt/integration_tests/messages.jsonl b/airbyte-integrations/connectors/destination-firebolt/integration_tests/messages.jsonl new file mode 100644 index 000000000000..ab871c15bb02 --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/integration_tests/messages.jsonl @@ -0,0 +1,2 @@ +{"type": "RECORD", "record": {"stream": "airbyte_acceptance_table", "data": {"column1": "my_value", "column2": 221, "column3": "2021-01-01T20:10:22", "column4": 1.214, "column5": [1,2,3]}, "emitted_at": 1626172757000}} +{"type": "RECORD", "record": {"stream": "airbyte_acceptance_table", "data": {"column1": "my_value2", "column2": 222, "column3": "2021-01-02T22:10:22", "column5": [1,2,null]}, "emitted_at": 1626172757000}} diff --git a/airbyte-integrations/connectors/destination-firebolt/main.py b/airbyte-integrations/connectors/destination-firebolt/main.py new file mode 100644 index 000000000000..1b173be0c2b3 --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/main.py @@ -0,0 +1,11 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + + +import sys + +from destination_firebolt import DestinationFirebolt + +if __name__ == "__main__": + DestinationFirebolt().run(sys.argv[1:]) diff --git a/airbyte-integrations/connectors/destination-firebolt/metadata.yaml b/airbyte-integrations/connectors/destination-firebolt/metadata.yaml index bc04b2e4a36c..0c5fd007f413 100644 --- a/airbyte-integrations/connectors/destination-firebolt/metadata.yaml +++ b/airbyte-integrations/connectors/destination-firebolt/metadata.yaml @@ -2,7 +2,7 @@ data: connectorSubtype: database connectorType: destination definitionId: 18081484-02a5-4662-8dba-b270b582f321 - dockerImageTag: 0.1.0 + dockerImageTag: 0.1.1 dockerRepository: airbyte/destination-firebolt githubIssueLabel: destination-firebolt icon: firebolt.svg @@ -21,5 +21,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: archived + supportLevel: community metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-firebolt/requirements.txt b/airbyte-integrations/connectors/destination-firebolt/requirements.txt new file mode 100644 index 000000000000..d6e1198b1ab1 --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/requirements.txt @@ -0,0 +1 @@ +-e . diff --git a/airbyte-integrations/connectors/destination-firebolt/setup.py b/airbyte-integrations/connectors/destination-firebolt/setup.py new file mode 100644 index 000000000000..a2597d9160af --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/setup.py @@ -0,0 +1,23 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = ["airbyte-cdk", "firebolt-sdk>=0.8.0", "pyarrow"] + +TEST_REQUIREMENTS = ["pytest~=6.1"] + +setup( + name="destination_firebolt", + description="Destination implementation for Firebolt.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/destination-firebolt/unit_tests/test_firebolt_destination.py b/airbyte-integrations/connectors/destination-firebolt/unit_tests/test_firebolt_destination.py new file mode 100644 index 000000000000..8d70a1060b5a --- /dev/null +++ b/airbyte-integrations/connectors/destination-firebolt/unit_tests/test_firebolt_destination.py @@ -0,0 +1,241 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + +from datetime import datetime +from typing import Any, Dict +from unittest.mock import MagicMock, call, patch + +from airbyte_cdk.models import ( + AirbyteMessage, + AirbyteRecordMessage, + AirbyteStream, + ConfiguredAirbyteCatalog, + ConfiguredAirbyteStream, + DestinationSyncMode, + Status, + SyncMode, + Type, +) +from destination_firebolt.destination import DestinationFirebolt, establish_connection, parse_config +from pytest import fixture + + +@fixture(params=["my_engine", "my_engine.api.firebolt.io"]) +def config(request: Any) -> Dict[str, str]: + args = { + "database": "my_database", + "username": "my_username", + "password": "my_password", + "engine": request.param, + "loading_method": { + "method": "SQL", + }, + } + return args + + +@fixture +def config_external_table() -> Dict[str, str]: + args = { + "database": "my_database", + "username": "my_username", + "password": "my_password", + "engine": "my_engine", + "loading_method": { + "method": "S3", + "s3_bucket": "my_bucket", + "s3_region": "us-east-1", + "aws_key_id": "aws_key", + "aws_key_secret": "aws_secret", + }, + } + return args + + +@fixture +def config_no_engine() -> Dict[str, str]: + args = { + "database": "my_database", + "username": "my_username", + "password": "my_password", + } + return args + + +@fixture +def logger() -> MagicMock: + return MagicMock() + + +@fixture +def configured_stream1() -> ConfiguredAirbyteStream: + return ConfiguredAirbyteStream( + stream=AirbyteStream( + name="table1", + json_schema={ + "type": "object", + "properties": {"col1": {"type": "string"}, "col2": {"type": "integer"}}, + }, + supported_sync_modes=[SyncMode.incremental], + ), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.append, + ) + + +@fixture +def configured_stream2() -> ConfiguredAirbyteStream: + return ConfiguredAirbyteStream( + stream=AirbyteStream( + name="table2", + json_schema={ + "type": "object", + "properties": {"col1": {"type": "string"}, "col2": {"type": "integer"}}, + }, + supported_sync_modes=[SyncMode.incremental], + ), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.append, + ) + + +@fixture +def airbyte_message1() -> AirbyteMessage: + return AirbyteMessage( + type=Type.RECORD, + record=AirbyteRecordMessage( + stream="table1", + data={"key1": "value1", "key2": 2}, + emitted_at=int(datetime.now().timestamp()) * 1000, + ), + ) + + +@fixture +def airbyte_message2() -> AirbyteMessage: + return AirbyteMessage( + type=Type.RECORD, + record=AirbyteRecordMessage( + stream="table2", + data={"key1": "value2", "key2": 3}, + emitted_at=int(datetime.now().timestamp()) * 1000, + ), + ) + + +@fixture +def airbyte_state_message() -> AirbyteMessage: + return AirbyteMessage(type=Type.STATE) + + +def test_parse_config(config: Dict[str, str]): + config["engine"] = "override_engine" + result = parse_config(config) + assert result["database"] == "my_database" + assert result["engine_name"] == "override_engine" + assert result["auth"].username == "my_username" + assert result["auth"].password == "my_password" + config["engine"] = "override_engine.api.firebolt.io" + result = parse_config(config) + assert result["engine_url"] == "override_engine.api.firebolt.io" + + +@patch("destination_firebolt.destination.connect", MagicMock()) +def test_connection(config: Dict[str, str], config_no_engine: Dict[str, str], logger: MagicMock) -> None: + establish_connection(config, logger) + logger.reset_mock() + establish_connection(config_no_engine, logger) + assert any(["default engine" in 
msg.args[0] for msg in logger.info.mock_calls]), "No message on using default engine" + # Check no log object + establish_connection(config) + + +@patch("destination_firebolt.writer.FireboltS3Writer") +@patch("destination_firebolt.destination.connect") +def test_check( + mock_connection: MagicMock, mock_writer: MagicMock, config: Dict[str, str], config_external_table: Dict[str, str], logger: MagicMock +): + destination = DestinationFirebolt() + status = destination.check(logger, config) + assert status.status == Status.SUCCEEDED + mock_writer.assert_not_called() + status = destination.check(logger, config_external_table) + assert status.status == Status.SUCCEEDED + mock_writer.assert_called_once() + mock_connection().__enter__().cursor().__enter__().execute.side_effect = Exception("my exception") + status = destination.check(logger, config) + assert status.status == Status.FAILED + + +@patch("destination_firebolt.writer.FireboltSQLWriter") +@patch("destination_firebolt.destination.establish_connection") +def test_sql_write_append( + mock_connection: MagicMock, + mock_writer: MagicMock, + config: Dict[str, str], + configured_stream1: ConfiguredAirbyteStream, + configured_stream2: ConfiguredAirbyteStream, + airbyte_message1: AirbyteMessage, + airbyte_message2: AirbyteMessage, + airbyte_state_message: AirbyteMessage, +) -> None: + catalog = ConfiguredAirbyteCatalog(streams=[configured_stream1, configured_stream2]) + + destination = DestinationFirebolt() + result = destination.write(config, catalog, [airbyte_message1, airbyte_state_message, airbyte_message2]) + + assert list(result) == [airbyte_state_message] + mock_writer.return_value.delete_table.assert_not_called() + mock_writer.return_value.create_raw_table.mock_calls = [call(mock_connection, "table1"), call(mock_connection, "table2")] + assert len(mock_writer.return_value.queue_write_data.mock_calls) == 2 + mock_writer.return_value.flush.assert_called_once() + + +@patch("destination_firebolt.writer.FireboltS3Writer") +@patch("destination_firebolt.writer.FireboltSQLWriter") +@patch("destination_firebolt.destination.establish_connection") +def test_sql_write_overwrite( + mock_connection: MagicMock, + mock_writer: MagicMock, + mock_s3_writer: MagicMock, + config: Dict[str, str], + configured_stream1: ConfiguredAirbyteStream, + configured_stream2: ConfiguredAirbyteStream, + airbyte_message1: AirbyteMessage, + airbyte_message2: AirbyteMessage, + airbyte_state_message: AirbyteMessage, +): + # Overwrite triggers a delete + configured_stream1.destination_sync_mode = DestinationSyncMode.overwrite + catalog = ConfiguredAirbyteCatalog(streams=[configured_stream1, configured_stream2]) + + destination = DestinationFirebolt() + result = destination.write(config, catalog, [airbyte_message1, airbyte_state_message, airbyte_message2]) + + mock_s3_writer.assert_not_called() + assert list(result) == [airbyte_state_message] + mock_writer.return_value.delete_table.assert_called_once_with("table1") + mock_writer.return_value.create_raw_table.mock_calls = [call(mock_connection, "table1"), call(mock_connection, "table2")] + + +@patch("destination_firebolt.writer.FireboltS3Writer") +@patch("destination_firebolt.writer.FireboltSQLWriter") +@patch("destination_firebolt.destination.establish_connection", MagicMock()) +def test_s3_write( + mock_sql_writer: MagicMock, + mock_s3_writer: MagicMock, + config_external_table: Dict[str, str], + configured_stream1: ConfiguredAirbyteStream, + configured_stream2: ConfiguredAirbyteStream, + airbyte_message1: AirbyteMessage, + 
airbyte_message2: AirbyteMessage,
+    airbyte_state_message: AirbyteMessage,
+):
+    catalog = ConfiguredAirbyteCatalog(streams=[configured_stream1, configured_stream2])
+
+    destination = DestinationFirebolt()
+    result = destination.write(config_external_table, catalog, [airbyte_message1, airbyte_state_message, airbyte_message2])
+    assert list(result) == [airbyte_state_message]
+    mock_sql_writer.assert_not_called()
+    mock_s3_writer.assert_called_once()
diff --git a/airbyte-integrations/connectors/destination-firebolt/unit_tests/test_writer.py b/airbyte-integrations/connectors/destination-firebolt/unit_tests/test_writer.py
new file mode 100644
index 000000000000..6ca5b69c7f24
--- /dev/null
+++ b/airbyte-integrations/connectors/destination-firebolt/unit_tests/test_writer.py
@@ -0,0 +1,156 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
+
+from typing import Any, Union
+from unittest.mock import ANY, MagicMock, call, patch
+
+from destination_firebolt.writer import FireboltS3Writer, FireboltSQLWriter
+from pytest import fixture, mark
+
+
+@fixture
+def connection() -> MagicMock:
+    return MagicMock()
+
+
+@fixture
+def sql_writer(connection: MagicMock) -> FireboltSQLWriter:
+    return FireboltSQLWriter(connection)
+
+
+@fixture
+@patch("destination_firebolt.writer.time", MagicMock(return_value=111))
+@patch("destination_firebolt.writer.uuid4", MagicMock(return_value="dummy-uuid"))
+def s3_writer(connection: MagicMock) -> FireboltS3Writer:
+    # Make sure the S3FileSystem mock is reset each time
+    with patch("destination_firebolt.writer.fs.S3FileSystem", MagicMock()):
+        return FireboltS3Writer(connection, "dummy_bucket", "access_key", "secret_key", "us-east-1")
+
+
+def test_sql_default(sql_writer: FireboltSQLWriter) -> None:
+    assert len(sql_writer._buffer) == 0
+    assert sql_writer.flush_interval == 1000
+
+
+@mark.parametrize("writer", ["sql_writer", "s3_writer"])
+def test_sql_create(connection: MagicMock, writer: Union[FireboltSQLWriter, FireboltS3Writer], request: Any) -> None:
+    writer = request.getfixturevalue(writer)
+    expected_query = """
+        CREATE FACT TABLE IF NOT EXISTS _airbyte_raw_dummy (
+            _airbyte_ab_id TEXT,
+            _airbyte_emitted_at TIMESTAMP,
+            _airbyte_data TEXT
+        )
+        PRIMARY INDEX _airbyte_ab_id
+        """
+    writer.create_raw_table("dummy")
+    connection.cursor.return_value.execute.assert_called_once_with(expected_query)
+
+
+def test_data_buffering(sql_writer: FireboltSQLWriter) -> None:
+    sql_writer.queue_write_data("dummy", "id1", 20200101, '{"key": "value"}')
+    assert sql_writer._buffer["dummy"][0] == ("id1", 20200101, '{"key": "value"}')
+    assert len(sql_writer._buffer["dummy"]) == 1
+    assert len(sql_writer._buffer.keys()) == 1
+    sql_writer.queue_write_data("dummy", "id2", 20200102, '{"key2": "value2"}')
+    assert sql_writer._buffer["dummy"][1] == ("id2", 20200102, '{"key2": "value2"}')
+    assert len(sql_writer._buffer["dummy"]) == 2
+    assert len(sql_writer._buffer.keys()) == 1
+    sql_writer.queue_write_data("dummy2", "id3", 20200103, '{"key3": "value3"}')
+    assert sql_writer._buffer["dummy2"][0] == ("id3", 20200103, '{"key3": "value3"}')
+    assert len(sql_writer._buffer["dummy"]) == 2
+    assert len(sql_writer._buffer["dummy2"]) == 1
+    assert len(sql_writer._buffer.keys()) == 2
+
+
+def test_data_auto_flush_one_table(connection: MagicMock, sql_writer: FireboltSQLWriter) -> None:
+    sql_writer.flush_interval = 2
+    sql_writer.queue_write_data("dummy", "id1", 20200101, '{"key": "value"}')
+    connection.cursor.return_value.executemany.assert_not_called()
+    assert sql_writer._values == 1
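+    # The second write reaches the lowered flush_interval of 2, so the buffer is
+    # flushed via executemany and both the buffer and the row counter are reset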
sql_writer.queue_write_data("dummy", "id1", 20200101, '{"key": "value"}') + connection.cursor.return_value.executemany.assert_called_once() + assert len(sql_writer._buffer.keys()) == 0 + assert sql_writer._values == 0 + sql_writer.queue_write_data("dummy", "id1", 20200101, '{"key": "value"}') + assert len(sql_writer._buffer.keys()) == 1 + + +def test_data_auto_flush_multi_tables(connection: MagicMock, sql_writer: FireboltSQLWriter) -> None: + sql_writer.flush_interval = 2 + sql_writer.queue_write_data("dummy", "id1", 20200101, '{"key": "value"}') + connection.cursor.return_value.executemany.assert_not_called() + assert sql_writer._values == 1 + sql_writer.queue_write_data("dummy2", "id1", 20200101, '{"key": "value"}') + assert len(connection.cursor.return_value.executemany.mock_calls) == 2 + assert len(sql_writer._buffer.keys()) == 0 + assert sql_writer._values == 0 + + +def test_s3_default(s3_writer: FireboltS3Writer) -> None: + assert s3_writer.flush_interval == 100000 + assert s3_writer._values == 0 + assert len(s3_writer._buffer.keys()) == 0 + + +def test_s3_delete_tables(connection: MagicMock, s3_writer: FireboltS3Writer) -> None: + expected_sql = "DROP TABLE IF EXISTS _airbyte_raw_dummy" + s3_writer.delete_table("dummy") + connection.cursor.return_value.execute.assert_called_once_with(expected_sql) + + +@patch("pyarrow.parquet.write_to_dataset") +def test_s3_data_auto_flush_one_table(mock_write: MagicMock, s3_writer: FireboltS3Writer) -> None: + s3_writer.flush_interval = 2 + s3_writer.queue_write_data("dummy", "id1", 20200101, '{"key": "value"}') + mock_write.assert_not_called() + assert s3_writer._values == 1 + s3_writer.queue_write_data("dummy", "id1", 20200101, '{"key": "value"}') + mock_write.assert_called_once_with(table=ANY, root_path="dummy_bucket/airbyte_output/111_dummy-uuid/dummy", filesystem=s3_writer.fs) + assert len(s3_writer._buffer.keys()) == 0 + assert s3_writer._values == 0 + assert s3_writer._updated_tables == set(["dummy"]) + mock_write.reset_mock() + s3_writer.queue_write_data("dummy", "id1", 20200101, '{"key": "value"}') + mock_write.assert_not_called() + assert len(s3_writer._buffer.keys()) == 1 + assert s3_writer._updated_tables == set(["dummy"]) + + +@patch("pyarrow.parquet.write_to_dataset") +def test_s3_data_auto_flush_multi_tables(mock_write: MagicMock, s3_writer: FireboltS3Writer) -> None: + s3_writer.flush_interval = 2 + s3_writer.queue_write_data("dummy", "id1", 20200101, '{"key": "value"}') + mock_write.assert_not_called() + assert s3_writer._values == 1 + s3_writer.queue_write_data("dummy2", "id1", 20200101, '{"key": "value"}') + assert mock_write.mock_calls == [ + call(table=ANY, root_path="dummy_bucket/airbyte_output/111_dummy-uuid/dummy", filesystem=s3_writer.fs), + call(table=ANY, root_path="dummy_bucket/airbyte_output/111_dummy-uuid/dummy2", filesystem=s3_writer.fs), + ] + assert len(s3_writer._buffer.keys()) == 0 + assert s3_writer._values == 0 + assert s3_writer._updated_tables == set(["dummy", "dummy2"]) + + +def test_s3_final_flush(connection: MagicMock, s3_writer: FireboltS3Writer) -> None: + s3_writer._updated_tables = set(["dummy", "dummy2"]) + s3_writer.flush() + assert len(connection.cursor.return_value.execute.mock_calls) == 8 + expected_url1 = "s3://dummy_bucket/airbyte_output/111_dummy-uuid/dummy" + expected_url2 = "s3://dummy_bucket/airbyte_output/111_dummy-uuid/dummy2" + connection.cursor.return_value.execute.assert_any_call(ANY, parameters=(expected_url1, "access_key", "secret_key")) + 
connection.cursor.return_value.execute.assert_any_call(ANY, parameters=(expected_url2, "access_key", "secret_key")) + expected_query1 = "INSERT INTO _airbyte_raw_dummy SELECT * FROM ex_airbyte_raw_dummy" + expected_query2 = "INSERT INTO _airbyte_raw_dummy2 SELECT * FROM ex_airbyte_raw_dummy2" + connection.cursor.return_value.execute.assert_any_call(expected_query1) + connection.cursor.return_value.execute.assert_any_call(expected_query2) + + +def test_s3_cleanup(connection: MagicMock, s3_writer: FireboltS3Writer) -> None: + expected_sql = "DROP TABLE IF EXISTS ex_airbyte_raw_my_table" + bucket_path = "dummy_bucket/airbyte_output/111_dummy-uuid/my_table" + s3_writer.cleanup("my_table") + connection.cursor.return_value.execute.assert_called_once_with(expected_sql) + s3_writer.fs.delete_dir_contents.assert_called_once_with(bucket_path) diff --git a/airbyte-integrations/connectors/destination-kvdb/README.md b/airbyte-integrations/connectors/destination-kvdb/README.md new file mode 100644 index 000000000000..b834894111b6 --- /dev/null +++ b/airbyte-integrations/connectors/destination-kvdb/README.md @@ -0,0 +1,118 @@ +# Kvdb Destination + +This is the repository for the [Kvdb](https://kvdb.io) destination connector, written in Python. It is intended to be an example for how to write a Python destination. KvDB is a very simple key value store, which makes it great for the purposes of illustrating how to write a Python destination connector. + +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.7.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Building via Gradle +From the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:destination-kvdb:build +``` + +#### Create credentials +**If you are a community contributor**, generate the necessary credentials from [Kvdb](https://kvdb.io/docs/api/), and then create a file `secrets/config.json` conforming to the `destination_kvdb/spec.json` file. +Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `destination kvdb test creds` +and place them into `secrets/config.json`. 
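+
+For reference, the connector needs only the two required fields declared in
+`destination_kvdb/spec.json`; a minimal `secrets/config.json` looks like this
+(the values below are placeholders, not real credentials):
+
+```json
+{
+  "bucket_id": "my_kvdb_bucket",
+  "secret_key": "my_kvdb_secret_key"
+}
+```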
+ +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +python main.py discover --config secrets/config.json +python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + + + +#### Build +**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** +```bash +airbyte-ci connectors --name=destination-kvdb build +``` + +An image will be built with the tag `airbyte/destination-kvdb:dev`. + +**Via `docker build`:** +```bash +docker build -t airbyte/destination-kvdb:dev . +``` +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/destination-kvdb:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-kvdb:dev check --config /secrets/config.json +# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages +cat messages.jsonl | docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-kvdb:dev write --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. +First install test dependencies into your virtual environment: +``` +pip install .[tests] +``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +python -m pytest unit_tests +``` + +### Integration Tests +There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all destination connectors) and custom integration tests (which are specific to this connector). +#### Custom Integration tests +Place custom tests inside `integration_tests/` folder, then, from the connector root, run +``` +python -m pytest integration_tests +``` +#### Acceptance Tests +You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): +```bash +airbyte-ci connectors --name=destination-kvdb test +``` + + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-kvdb test` +2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). +3. Make sure the `metadata.yaml` content is up to date. +4. Make the connector documentation and its changelog is up to date (`docs/integrations/destinations/kvdb.md`). +5. 
Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). +6. Pat yourself on the back for being an awesome contributor. +7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. + diff --git a/airbyte-integrations/connectors/destination-kvdb/destination_kvdb/__init__.py b/airbyte-integrations/connectors/destination-kvdb/destination_kvdb/__init__.py new file mode 100644 index 000000000000..5f3b041035bf --- /dev/null +++ b/airbyte-integrations/connectors/destination-kvdb/destination_kvdb/__init__.py @@ -0,0 +1,26 @@ +# MIT License +# +# Copyright (c) 2020 Airbyte +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + + +from .destination import DestinationKvdb + +__all__ = ["DestinationKvdb"] diff --git a/airbyte-integrations/connectors/destination-kvdb/destination_kvdb/client.py b/airbyte-integrations/connectors/destination-kvdb/destination_kvdb/client.py new file mode 100644 index 000000000000..74d9f41176f5 --- /dev/null +++ b/airbyte-integrations/connectors/destination-kvdb/destination_kvdb/client.py @@ -0,0 +1,78 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + +from typing import Any, Iterable, List, Mapping, Tuple, Union + +import requests + + +class KvDbClient: + base_url = "https://kvdb.io" + PAGE_SIZE = 1000 + + def __init__(self, bucket_id: str, secret_key: str = None): + self.secret_key = secret_key + self.bucket_id = bucket_id + + def write(self, key: str, value: Mapping[str, Any]): + return self.batch_write([(key, value)]) + + def batch_write(self, keys_and_values: List[Tuple[str, Mapping[str, Any]]]): + """ + https://kvdb.io/docs/api/#execute-transaction + """ + request_body = {"txn": [{"set": key, "value": value} for key, value in keys_and_values]} + return self._request("POST", json=request_body) + + def list_keys(self, list_values: bool = False, prefix: str = None) -> Iterable[Union[str, List]]: + """ + https://kvdb.io/docs/api/#list-keys + """ + # TODO handle rate limiting + pagination_complete = False + offset = 0 + + while not pagination_complete: + response = self._request( + "GET", + params={ + "limit": self.PAGE_SIZE, + "skip": offset, + "format": "json", + "prefix": prefix or "", + "values": "true" if list_values else "false", + }, + endpoint="/", # the "list" endpoint doesn't work without adding a trailing slash to the URL + ) + + response_json = response.json() + yield from response_json + + pagination_complete = len(response_json) < self.PAGE_SIZE + offset += self.PAGE_SIZE + + def delete(self, key: Union[str, List[str]]): + """ + https://kvdb.io/docs/api/#execute-transaction + """ + key_list = key if isinstance(key, List) else [key] + request_body = {"txn": [{"delete": k} for k in key_list]} + return self._request("POST", json=request_body) + + def _get_base_url(self) -> str: + return f"{self.base_url}/{self.bucket_id}" + + def _get_auth_headers(self) -> Mapping[str, Any]: + return {"Authorization": f"Bearer {self.secret_key}"} if self.secret_key else {} + + def _request( + self, http_method: str, endpoint: str = None, params: Mapping[str, Any] = None, json: Mapping[str, Any] = None + ) -> requests.Response: + url = self._get_base_url() + (endpoint or "") + headers = {"Accept": "application/json", **self._get_auth_headers()} + + response = requests.request(method=http_method, params=params, url=url, headers=headers, json=json) + + response.raise_for_status() + return response diff --git a/airbyte-integrations/connectors/destination-kvdb/destination_kvdb/destination.py b/airbyte-integrations/connectors/destination-kvdb/destination_kvdb/destination.py new file mode 100644 index 000000000000..33ab8565fae4 --- /dev/null +++ b/airbyte-integrations/connectors/destination-kvdb/destination_kvdb/destination.py @@ -0,0 +1,72 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import time +import traceback +import uuid +from typing import Any, Iterable, Mapping + +from airbyte_cdk import AirbyteLogger +from airbyte_cdk.destinations import Destination +from airbyte_cdk.models import AirbyteConnectionStatus, AirbyteMessage, ConfiguredAirbyteCatalog, DestinationSyncMode, Status, Type +from destination_kvdb.client import KvDbClient +from destination_kvdb.writer import KvDbWriter + + +class DestinationKvdb(Destination): + def write( + self, config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog, input_messages: Iterable[AirbyteMessage] + ) -> Iterable[AirbyteMessage]: + + """ + Reads the input stream of messages, config, and catalog to write data to the destination. 
+ + This method returns an iterable (typically a generator of AirbyteMessages via yield) containing state messages received + in the input message stream. Outputting a state message means that every AirbyteRecordMessage which came before it has been + successfully persisted to the destination. This is used to ensure fault tolerance in the case that a sync fails before fully completing, + then the source is given the last state message output from this method as the starting point of the next sync. + """ + writer = KvDbWriter(KvDbClient(**config)) + + for configured_stream in configured_catalog.streams: + if configured_stream.destination_sync_mode == DestinationSyncMode.overwrite: + writer.delete_stream_entries(configured_stream.stream.name) + + for message in input_messages: + if message.type == Type.STATE: + # Emitting a state message indicates that all records which came before it have been written to the destination. So we flush + # the queue to ensure writes happen, then output the state message to indicate it's safe to checkpoint state + writer.flush() + yield message + elif message.type == Type.RECORD: + record = message.record + writer.queue_write_operation( + record.stream, record.data, time.time_ns() / 1_000_000 + ) # convert from nanoseconds to milliseconds + else: + # ignore other message types for now + continue + + # Make sure to flush any records still in the queue + writer.flush() + + def check(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> AirbyteConnectionStatus: + """ + Tests if the input configuration can be used to successfully connect to the destination with the needed permissions + e.g: if a provided API token or password can be used to connect and write to the destination. + """ + try: + # Verify write access by attempting to write and then delete to a random key + client = KvDbClient(**config) + random_key = str(uuid.uuid4()) + client.write(random_key, {"value": "_airbyte_connection_check"}) + client.delete(random_key) + except Exception as e: + traceback.print_exc() + return AirbyteConnectionStatus( + status=Status.FAILED, message=f"An exception occurred: {e}. 
\nStacktrace: \n{traceback.format_exc()}" + ) + else: + return AirbyteConnectionStatus(status=Status.SUCCEEDED) diff --git a/airbyte-integrations/connectors/destination-kvdb/destination_kvdb/spec.json b/airbyte-integrations/connectors/destination-kvdb/destination_kvdb/spec.json new file mode 100644 index 000000000000..0ced52c17a22 --- /dev/null +++ b/airbyte-integrations/connectors/destination-kvdb/destination_kvdb/spec.json @@ -0,0 +1,26 @@ +{ + "documentationUrl": "https://kvdb.io/docs/api/", + "supported_destination_sync_modes": ["overwrite", "append"], + "supportsIncremental": true, + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Destination KVdb", + "type": "object", + "required": ["bucket_id", "secret_key"], + "additionalProperties": false, + "properties": { + "bucket_id": { + "title": "Bucket ID", + "type": "string", + "description": "The ID of your KVdb bucket.", + "order": 1 + }, + "secret_key": { + "title": "Secret Key", + "type": "string", + "description": "Your bucket Secret Key.", + "order": 2 + } + } + } +} diff --git a/airbyte-integrations/connectors/destination-kvdb/destination_kvdb/writer.py b/airbyte-integrations/connectors/destination-kvdb/destination_kvdb/writer.py new file mode 100644 index 000000000000..33acbf8a22fb --- /dev/null +++ b/airbyte-integrations/connectors/destination-kvdb/destination_kvdb/writer.py @@ -0,0 +1,46 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from collections import Mapping + +from destination_kvdb.client import KvDbClient + + +class KvDbWriter: + """ + Data is written to KvDB in the following format: + key: stream_name__ab__ + value: a JSON object representing the record's data + + This is because unless a data source explicitly designates a primary key, we don't know what to key the record on. + Since KvDB allows reading records with certain prefixes, we treat it more like a message queue, expecting the reader to + read messages with a particular prefix e.g: name__ab__123, where 123 is the timestamp they last read data from. + """ + + write_buffer = [] + flush_interval = 1000 + + def __init__(self, client: KvDbClient): + self.client = client + + def delete_stream_entries(self, stream_name: str): + """Deletes all the records belonging to the input stream""" + keys_to_delete = [] + for key in self.client.list_keys(prefix=f"{stream_name}__ab__"): + keys_to_delete.append(key) + if len(keys_to_delete) == self.flush_interval: + self.client.delete(keys_to_delete) + keys_to_delete.clear() + if len(keys_to_delete) > 0: + self.client.delete(keys_to_delete) + + def queue_write_operation(self, stream_name: str, record: Mapping, written_at: int): + kv_pair = (f"{stream_name}__ab__{written_at}", record) + self.write_buffer.append(kv_pair) + if len(self.write_buffer) == self.flush_interval: + self.flush() + + def flush(self): + self.client.batch_write(self.write_buffer) + self.write_buffer.clear() diff --git a/airbyte-integrations/connectors/destination-kvdb/main.py b/airbyte-integrations/connectors/destination-kvdb/main.py new file mode 100644 index 000000000000..178789589e5a --- /dev/null +++ b/airbyte-integrations/connectors/destination-kvdb/main.py @@ -0,0 +1,11 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + + +import sys + +from destination_kvdb import DestinationKvdb + +if __name__ == "__main__": + DestinationKvdb().run(sys.argv[1:]) diff --git a/airbyte-integrations/connectors/destination-kvdb/metadata.yaml b/airbyte-integrations/connectors/destination-kvdb/metadata.yaml index 64a38cfa1441..f74cb81b3905 100644 --- a/airbyte-integrations/connectors/destination-kvdb/metadata.yaml +++ b/airbyte-integrations/connectors/destination-kvdb/metadata.yaml @@ -9,7 +9,7 @@ data: connectorSubtype: api connectorType: destination definitionId: f2e549cd-8e2a-48f8-822d-cc13630eb42d - dockerImageTag: 0.1.3 + dockerImageTag: 0.1.4 dockerRepository: airbyte/destination-kvdb githubIssueLabel: destination-kvdb icon: kvdb.svg @@ -23,5 +23,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: archived + supportLevel: community metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-kvdb/poetry.lock b/airbyte-integrations/connectors/destination-kvdb/poetry.lock new file mode 100644 index 000000000000..7835868ea21e --- /dev/null +++ b/airbyte-integrations/connectors/destination-kvdb/poetry.lock @@ -0,0 +1,1108 @@ +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. + +[[package]] +name = "airbyte-cdk" +version = "0.62.2" +description = "A framework for writing Airbyte Connectors." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte-cdk-0.62.2.tar.gz", hash = "sha256:bf45cb847e2d2ab7063d0e1989f6c9cf022771c6ae4fb1e854438c3b8377da85"}, + {file = "airbyte_cdk-0.62.2-py3-none-any.whl", hash = "sha256:6d04d2e8a9a32aa707ddf27a1916ac76969fb50ac39d60582ad2daa08ef832ef"}, +] + +[package.dependencies] +airbyte-protocol-models = "0.5.1" +backoff = "*" +cachetools = "*" +Deprecated = ">=1.2,<2.0" +dpath = ">=2.0.1,<2.1.0" +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<1.0" +jsonschema = ">=3.2.0,<3.3.0" +pendulum = "<3.0.0" +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +python-dateutil = "*" +PyYAML = ">=6.0.1" +requests = "*" +requests-cache = "*" +wcmatch = "8.4" + +[package.extras] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.5.1" +description = "Declares the Airbyte Protocol." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "atomicwrites" +version = "1.4.1" +description = "Atomic file writes." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.2" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, + {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = 
"sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = 
"sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "freezegun" +version = "1.4.0" +description = "Let your Python tests travel through time" +optional = false +python-versions = ">=3.7" +files = [ + {file = "freezegun-1.4.0-py3-none-any.whl", hash = "sha256:55e0fc3c84ebf0a96a5aa23ff8b53d70246479e9a68863f1fcac5a3e52f19dd6"}, + {file = "freezegun-1.4.0.tar.gz", hash = "sha256:10939b0ba0ff5adaecf3b06a5c2f73071d9678e507c5eaedb23c761d56ac774b"}, +] + +[package.dependencies] +python-dateutil = ">=2.7" + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "graphql-core" +version = "3.2.3" +description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL." 
+optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "graphql-core-3.2.3.tar.gz", hash = "sha256:06d2aad0ac723e35b1cb47885d3e5c45e956a53bc1b209a9fc5369007fe46676"}, + {file = "graphql_core-3.2.3-py3-none-any.whl", hash = "sha256:5766780452bd5ec8ba133f8bf287dc92713e3868ddd83aee4faab9fc3e303dc3"}, +] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonref" +version = "0.3.0" +description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +optional = false +python-versions = ">=3.3,<4.0" +files = [ + {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, + {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = 
"MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = 
"sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + +[[package]] +name = "pydantic" +version = "1.10.14" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, + {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, + {file = 
"pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, + {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, + {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, + {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, + {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, + {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = 
"sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, + {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, + {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = 
"pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytest" +version = "6.2.5" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, +] + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama 
= {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +toml = "*" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +name = "pytest-mock" +version = "3.12.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, + {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, +] + +[package.dependencies] +pytest = ">=5.0" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = 
"PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "requests-mock" +version = "1.11.0" +description = "Mock out responses from the requests package" +optional = false +python-versions = "*" +files = [ + {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, + {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, +] + +[package.dependencies] +requests = ">=2.3,<3" +six = "*" + +[package.extras] +fixture = ["fixtures"] +test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] + +[[package]] +name = "responses" +version = "0.23.3" +description = "A utility library for mocking out the `requests` Python library." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "responses-0.23.3-py3-none-any.whl", hash = "sha256:e6fbcf5d82172fecc0aa1860fd91e58cbfd96cee5e96da5b63fa6eb3caa10dd3"}, + {file = "responses-0.23.3.tar.gz", hash = "sha256:205029e1cb334c21cb4ec64fc7599be48b859a0fd381a42443cdd600bfe8b16a"}, +] + +[package.dependencies] +pyyaml = "*" +requests = ">=2.30.0,<3.0" +types-PyYAML = "*" +urllib3 = ">=1.25.10,<3.0" + +[package.extras] +tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-requests"] + +[[package]] +name = "setuptools" +version = "69.1.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, + {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "sgqlc" +version = "16.3" +description = "Simple GraphQL Client" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "sgqlc-16.3-py3-none-any.whl", hash = "sha256:89d468386a4ba4b5ade991623228b6fb0a25bea1f25643ccac130fb3ef565b72"}, + {file = "sgqlc-16.3.tar.gz", hash = "sha256:be08857775aa3e65ef7b2c1f0cdcc65dd5794907b162b393c189187fee664558"}, +] + +[package.dependencies] +graphql-core = ">=3.1.7,<4.0.0" + +[package.extras] +requests = ["requests"] +websocket = ["websocket-client"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "types-pyyaml" +version = "6.0.12.12" +description = 
"Typing stubs for PyYAML" +optional = false +python-versions = "*" +files = [ + {file = "types-PyYAML-6.0.12.12.tar.gz", hash = "sha256:334373d392fde0fdf95af5c3f1661885fa10c52167b14593eb856289e1855062"}, + {file = "types_PyYAML-6.0.12.12-py3-none-any.whl", hash = "sha256:c05bc6c158facb0676674b7f11fe3960db4f389718e19e62bd2b84d6205cfd24"}, +] + +[[package]] +name = "typing-extensions" +version = "4.9.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, + {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, +] + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = 
"wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = 
"wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", 
hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "40cc246c45e6c2d626e016673f3aa60794f3464d82c8ccd0b62a6b66df2b30da" diff --git a/airbyte-integrations/connectors/destination-kvdb/pyproject.toml b/airbyte-integrations/connectors/destination-kvdb/pyproject.toml new file mode 100644 index 000000000000..3236b1c96ef0 --- /dev/null +++ b/airbyte-integrations/connectors/destination-kvdb/pyproject.toml @@ -0,0 +1,31 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +version = "0.1.4" +name = "destination-kvdb" +description = "Destination implementation for kvdb." +authors = [ "Airbyte ",] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/destinations/kvdb" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" +[[tool.poetry.packages]] +include = "destination_kvdb" + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +airbyte-cdk = "^0.62.1" +sgqlc = "==16.3" + +[tool.poetry.scripts] +destination-kvdb = "destination_kvdb.run:run" + +[tool.poetry.group.dev.dependencies] +requests-mock = "^1.9.3" +freezegun = "^1.2" +pytest-mock = "^3.6.1" +pytest = "^6.2" +responses = "^0.23.1" diff --git a/airbyte-integrations/connectors/destination-kvdb/requirements.txt b/airbyte-integrations/connectors/destination-kvdb/requirements.txt new file mode 100644 index 000000000000..d6e1198b1ab1 --- /dev/null +++ b/airbyte-integrations/connectors/destination-kvdb/requirements.txt @@ -0,0 +1 @@ +-e . diff --git a/airbyte-integrations/connectors/destination-kvdb/unit_tests/unit_test.py b/airbyte-integrations/connectors/destination-kvdb/unit_tests/unit_test.py new file mode 100644 index 000000000000..219ae0142c72 --- /dev/null +++ b/airbyte-integrations/connectors/destination-kvdb/unit_tests/unit_test.py @@ -0,0 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +def test_example_method(): + assert True diff --git a/airbyte-integrations/connectors/destination-meilisearch/.dockerignore b/airbyte-integrations/connectors/destination-meilisearch/.dockerignore new file mode 100644 index 000000000000..6d35a84f68b8 --- /dev/null +++ b/airbyte-integrations/connectors/destination-meilisearch/.dockerignore @@ -0,0 +1,5 @@ +* +!Dockerfile +!main.py +!destination_meilisearch +!setup.py diff --git a/airbyte-integrations/connectors/destination-meilisearch/Dockerfile b/airbyte-integrations/connectors/destination-meilisearch/Dockerfile new file mode 100644 index 000000000000..7fda83cf0d7d --- /dev/null +++ b/airbyte-integrations/connectors/destination-meilisearch/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . 
+
+# build a clean environment
+FROM base
+WORKDIR /airbyte/integration_code
+
+# copy all loaded and built libraries to a pure basic image
+COPY --from=builder /install /usr/local
+# add default timezone settings
+COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime
+RUN echo "Etc/UTC" > /etc/timezone
+
+# bash is installed for more convenient debugging.
+RUN apk --no-cache add bash
+
+# copy payload code only
+COPY main.py ./
+COPY destination_meilisearch ./destination_meilisearch
+
+ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py"
+ENTRYPOINT ["python", "/airbyte/integration_code/main.py"]
+
+LABEL io.airbyte.version=1.0.2
+LABEL io.airbyte.name=airbyte/destination-meilisearch
diff --git a/airbyte-integrations/connectors/destination-meilisearch/README.md b/airbyte-integrations/connectors/destination-meilisearch/README.md
new file mode 100644
index 000000000000..207e2898208e
--- /dev/null
+++ b/airbyte-integrations/connectors/destination-meilisearch/README.md
@@ -0,0 +1,99 @@
+# Meilisearch Destination
+
+This is the repository for the Meilisearch destination connector, written in Python.
+For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/destinations/meilisearch).
+
+## Local development
+
+### Prerequisites
+**To iterate on this connector, make sure to complete this prerequisites section.**
+
+#### Minimum Python version required `= 3.7.0`
+
+#### Build & Activate Virtual Environment and install dependencies
+From this connector directory, create a virtual environment:
+```
+python -m venv .venv
+```
+
+This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your
+development environment of choice. To activate it from the terminal, run:
+```
+source .venv/bin/activate
+pip install -r requirements.txt
+```
+If you are in an IDE, follow your IDE's instructions to activate the virtualenv.
+
+Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is
+used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`.
+If this sounds like mumbo jumbo to you, don't worry about it: put your deps in `setup.py` but install using `pip install -r requirements.txt`,
+and everything should work as you expect.
+
+#### Create credentials
+**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/destinations/meilisearch)
+to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `destination_meilisearch/spec.json` file.
+Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information.
+See `integration_tests/sample_config.json` for a sample config file.
+
+**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `destination meilisearch test creds`
+and place them into `secrets/config.json`.
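+
+For reference, a minimal `secrets/config.json` might look like the sketch below. The
+field names come from `destination_meilisearch/spec.json`; the values are placeholders
+for a local MeiliSearch instance, not real credentials:
+
+```json
+{
+  "host": "http://localhost:7700",
+  "api_key": "your-meilisearch-api-key"
+}
+```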
+
+### Locally running the connector
+```
+python main.py spec
+python main.py check --config secrets/config.json
+python main.py discover --config secrets/config.json
+python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json
+```
+
+### Locally running the connector docker image
+
+
+#### Build
+**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):**
+```bash
+airbyte-ci connectors --name=destination-meilisearch build
+```
+
+An image will be built with the tag `airbyte/destination-meilisearch:dev`.
+
+**Via `docker build`:**
+```bash
+docker build -t airbyte/destination-meilisearch:dev .
+```
+
+#### Run
+Then run any of the connector commands as follows:
+```
+docker run --rm airbyte/destination-meilisearch:dev spec
+docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-meilisearch:dev check --config /secrets/config.json
+# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages
+cat messages.jsonl | docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-meilisearch:dev write --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
+```
+
+## Testing
+You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md):
+```bash
+airbyte-ci connectors --name=destination-meilisearch test
+```
+
+### Customizing Acceptance Tests
+Customize the `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information.
+If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`.
+
+## Dependency Management
+All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
+We split dependencies into two groups:
+* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list.
+* dependencies required for testing go in the `TEST_REQUIREMENTS` list.
+
+### Publishing a new version of the connector
+You've checked out the repo, implemented a million-dollar feature, and you're ready to share your changes with the world. Now what?
+1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-meilisearch test`
+2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors).
+3. Make sure the `metadata.yaml` content is up to date.
+4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/destinations/meilisearch.md`).
+5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention).
+6. Pat yourself on the back for being an awesome contributor.
+7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
+
diff --git a/airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/__init__.py b/airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/__init__.py
new file mode 100644
index 000000000000..f83a392b80f7
--- /dev/null
+++ b/airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/__init__.py
@@ -0,0 +1,8 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
+
+
+from .destination import DestinationMeilisearch
+
+__all__ = ["DestinationMeilisearch"]
diff --git a/airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/destination.py b/airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/destination.py
new file mode 100644
index 000000000000..32d08b787bf1
--- /dev/null
+++ b/airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/destination.py
@@ -0,0 +1,84 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
+
+
+from logging import Logger, getLogger
+from typing import Any, Dict, Iterable, Mapping
+
+from airbyte_cdk.destinations import Destination
+from airbyte_cdk.models import AirbyteConnectionStatus, AirbyteMessage, ConfiguredAirbyteCatalog, DestinationSyncMode, Status, Type
+from destination_meilisearch.writer import MeiliWriter
+from meilisearch import Client
+
+logger = getLogger("airbyte")
+
+
+def get_client(config: Mapping[str, Any]) -> Client:
+    host = config.get("host")
+    api_key = config.get("api_key")
+    return Client(host, api_key)
+
+
+class DestinationMeilisearch(Destination):
+    primary_key = "_ab_pk"
+
+    def _flush_streams(self, streams: Dict[str, MeiliWriter]) -> None:
+        for stream in streams:
+            streams[stream].flush()
+
+    def write(
+        self, config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog, input_messages: Iterable[AirbyteMessage]
+    ) -> Iterable[AirbyteMessage]:
+        client = get_client(config=config)
+        # Creating Meilisearch writers
+        writers = {s.stream.name: MeiliWriter(client, s.stream.name, self.primary_key) for s in configured_catalog.streams}
+
+        for configured_stream in configured_catalog.streams:
+            stream_name = configured_stream.stream.name
+            # Deleting index in Meilisearch if sync mode is overwrite
+            if configured_stream.destination_sync_mode == DestinationSyncMode.overwrite:
+                logger.debug(f"Deleting index: {stream_name}.")
+                client.delete_index(stream_name)
+            # Creating index in Meilisearch
+            client.create_index(stream_name, {"primaryKey": self.primary_key})
+            logger.debug(f"Creating index: {stream_name}.")
+
+        for message in input_messages:
+            if message.type == Type.STATE:
+                yield message
+            elif message.type == Type.RECORD:
+                data = message.record.data
+                stream = message.record.stream
+                # Skip unselected streams
+                if stream not in writers:
+                    logger.debug(f"Stream {stream} was not present in configured streams, skipping")
+                    continue
+                writers[stream].queue_write_operation(data)
+            else:
+                logger.info(f"Unhandled message type {message.type}: {message}")
+
+        # Flush any leftover messages
+        self._flush_streams(writers)
+
+    def check(self, logger: Logger, config: Mapping[str, Any]) -> AirbyteConnectionStatus:
+        try:
+            client = get_client(config=config)
+
+            client.create_index("_airbyte", {"primaryKey": "id"})
+
+            client.index("_airbyte").add_documents(
+                [
+                    {
+                        "id": 287947,
+                        "title": "Shazam",
+                        "overview": "A boy is given the ability",
+                    }
+                ]
+            )
+
+            client.delete_index("_airbyte")
+            return AirbyteConnectionStatus(status=Status.SUCCEEDED)
+        except
Exception as e: + logger.error(f"Check connection failed. Error: {e}") + return AirbyteConnectionStatus(status=Status.FAILED, message=f"An exception occurred: {repr(e)}") diff --git a/airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/spec.json b/airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/spec.json new file mode 100644 index 000000000000..f3fe7aaeda47 --- /dev/null +++ b/airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/spec.json @@ -0,0 +1,27 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/destinations/meilisearch", + "supported_destination_sync_modes": ["overwrite", "append"], + "supportsIncremental": true, + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Destination Meilisearch", + "type": "object", + "required": ["host"], + "additionalProperties": false, + "properties": { + "host": { + "title": "Host", + "description": "Hostname of the MeiliSearch instance.", + "type": "string", + "order": 0 + }, + "api_key": { + "title": "API Key", + "airbyte_secret": true, + "description": "MeiliSearch API Key. See the docs for more information on how to obtain this key.", + "type": "string", + "order": 1 + } + } + } +} diff --git a/airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/writer.py b/airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/writer.py new file mode 100644 index 000000000000..e2450f825106 --- /dev/null +++ b/airbyte-integrations/connectors/destination-meilisearch/destination_meilisearch/writer.py @@ -0,0 +1,39 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from collections.abc import Mapping +from logging import getLogger +from uuid import uuid4 + +from meilisearch import Client + +logger = getLogger("airbyte") + + +class MeiliWriter: + flush_interval = 50000 + + def __init__(self, client: Client, stream_name: str, primary_key: str): + self.client = client + self.primary_key = primary_key + self.stream_name: str = stream_name + self._write_buffer = [] + + logger.info(f"Creating MeiliWriter for {self.stream_name}") + + def queue_write_operation(self, data: Mapping): + random_key = str(uuid4()) + self._write_buffer.append({**data, self.primary_key: random_key}) + if len(self._write_buffer) == self.flush_interval: + logger.debug(f"Reached limit size: flushing records for {self.stream_name}") + self.flush() + + def flush(self): + buffer_size = len(self._write_buffer) + if buffer_size == 0: + return + logger.info(f"Flushing {buffer_size} records") + response = self.client.index(self.stream_name).add_documents(self._write_buffer) + self.client.wait_for_task(response.task_uid, 1800000, 1000) + self._write_buffer.clear() diff --git a/airbyte-integrations/connectors/destination-meilisearch/integration_tests/integration_test.py b/airbyte-integrations/connectors/destination-meilisearch/integration_tests/integration_test.py new file mode 100644 index 000000000000..1d9687e97c7d --- /dev/null +++ b/airbyte-integrations/connectors/destination-meilisearch/integration_tests/integration_test.py @@ -0,0 +1,103 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+#
+
+import json
+import logging
+from typing import Any, Dict, Mapping
+
+import pytest
+from airbyte_cdk.models import (
+    AirbyteMessage,
+    AirbyteRecordMessage,
+    AirbyteStateMessage,
+    AirbyteStream,
+    ConfiguredAirbyteCatalog,
+    ConfiguredAirbyteStream,
+    DestinationSyncMode,
+    Status,
+    SyncMode,
+    Type,
+)
+from destination_meilisearch.destination import DestinationMeilisearch, get_client
+from meilisearch import Client
+
+
+@pytest.fixture(name="config")
+def config_fixture() -> Mapping[str, Any]:
+    with open("secrets/config.json", "r") as f:
+        return json.loads(f.read())
+
+
+@pytest.fixture(name="configured_catalog")
+def configured_catalog_fixture() -> ConfiguredAirbyteCatalog:
+    stream_schema = {"type": "object", "properties": {"string_col": {"type": "string"}, "int_col": {"type": "integer"}}}
+
+    overwrite_stream = ConfiguredAirbyteStream(
+        stream=AirbyteStream(
+            name="_airbyte", json_schema=stream_schema, supported_sync_modes=[SyncMode.incremental, SyncMode.full_refresh]
+        ),
+        sync_mode=SyncMode.incremental,
+        destination_sync_mode=DestinationSyncMode.overwrite,
+    )
+
+    return ConfiguredAirbyteCatalog(streams=[overwrite_stream])
+
+
+@pytest.fixture(autouse=True)
+def teardown(config: Mapping):
+    yield
+    client = get_client(config=config)
+    client.delete_index("_airbyte")
+
+
+@pytest.fixture(name="client")
+def client_fixture(config) -> Client:
+    client = get_client(config=config)
+    resp = client.create_index("_airbyte", {"primaryKey": "_ab_pk"})
+    client.wait_for_task(_handle_breaking_wait_for_task(resp))
+    return client
+
+
+def test_check_valid_config(config: Mapping):
+    outcome = DestinationMeilisearch().check(logging.getLogger("airbyte"), config)
+    assert outcome.status == Status.SUCCEEDED
+
+
+def test_check_invalid_config():
+    outcome = DestinationMeilisearch().check(
+        logging.getLogger("airbyte"), {"api_key": "not_a_real_key", "host": "https://www.meilisearch.com"}
+    )
+    assert outcome.status == Status.FAILED
+
+
+def _state(data: Dict[str, Any]) -> AirbyteMessage:
+    return AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(data=data))
+
+
+def _record(stream: str, str_value: str, int_value: int) -> AirbyteMessage:
+    return AirbyteMessage(
+        type=Type.RECORD, record=AirbyteRecordMessage(stream=stream, data={"str_col": str_value, "int_col": int_value}, emitted_at=0)
+    )
+
+
+def _handle_breaking_wait_for_task(task: Any) -> int:
+    if type(task) is dict:
+        return task["taskUid"]
+    else:
+        return task.task_uid
+
+
+def records_count(client: Client) -> int:
+    documents_results = client.index("_airbyte").get_documents()
+    return documents_results.total
+
+
+def test_write(config: Mapping, configured_catalog: ConfiguredAirbyteCatalog, client: Client):
+    overwrite_stream = configured_catalog.streams[0].stream.name
+    first_state_message = _state({"state": "1"})
+    first_record_chunk = [_record(overwrite_stream, str(i), i) for i in range(2)]
+
+    destination = DestinationMeilisearch()
+    list(destination.write(config, configured_catalog, [*first_record_chunk, first_state_message]))
+    assert records_count(client) == 2
diff --git a/airbyte-integrations/connectors/destination-meilisearch/integration_tests/messages.jsonl b/airbyte-integrations/connectors/destination-meilisearch/integration_tests/messages.jsonl
new file mode 100644
index 000000000000..e1d0682f9dad
--- /dev/null
+++ b/airbyte-integrations/connectors/destination-meilisearch/integration_tests/messages.jsonl
@@ -0,0 +1,2 @@
+{"type": "RECORD", "record": {"stream": "ab-airbyte-testing", "data": {"_ab_pk":
"my_value", "column2": 221, "column3": "2021-01-01T20:10:22", "column4": 1.214, "column5": [1,2,3]}, "emitted_at": 1626172757000}} +{"type": "RECORD", "record": {"stream": "ab-airbyte-testing", "data": {"_ab_pk": "my_value2", "column2": 222, "column3": "2021-01-02T22:10:22", "column5": [1,2,null]}, "emitted_at": 1626172757000}} diff --git a/airbyte-integrations/connectors/destination-meilisearch/main.py b/airbyte-integrations/connectors/destination-meilisearch/main.py new file mode 100644 index 000000000000..a5eba931a2fb --- /dev/null +++ b/airbyte-integrations/connectors/destination-meilisearch/main.py @@ -0,0 +1,11 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import sys + +from destination_meilisearch import DestinationMeilisearch + +if __name__ == "__main__": + DestinationMeilisearch().run(sys.argv[1:]) diff --git a/airbyte-integrations/connectors/destination-meilisearch/metadata.yaml b/airbyte-integrations/connectors/destination-meilisearch/metadata.yaml index 1b30cb150c98..401a4c3756d7 100644 --- a/airbyte-integrations/connectors/destination-meilisearch/metadata.yaml +++ b/airbyte-integrations/connectors/destination-meilisearch/metadata.yaml @@ -2,7 +2,7 @@ data: connectorSubtype: api connectorType: destination definitionId: af7c921e-5892-4ff2-b6c1-4a5ab258fb7e - dockerImageTag: 1.0.1 + dockerImageTag: 1.0.2 dockerRepository: airbyte/destination-meilisearch githubIssueLabel: destination-meilisearch icon: meilisearch.svg @@ -20,5 +20,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: archived + supportLevel: community metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-meilisearch/requirements.txt b/airbyte-integrations/connectors/destination-meilisearch/requirements.txt new file mode 100644 index 000000000000..d6e1198b1ab1 --- /dev/null +++ b/airbyte-integrations/connectors/destination-meilisearch/requirements.txt @@ -0,0 +1 @@ +-e . diff --git a/airbyte-integrations/connectors/destination-meilisearch/sample_files/configured_catalog.json b/airbyte-integrations/connectors/destination-meilisearch/sample_files/configured_catalog.json new file mode 100644 index 000000000000..9ac002e358d3 --- /dev/null +++ b/airbyte-integrations/connectors/destination-meilisearch/sample_files/configured_catalog.json @@ -0,0 +1,27 @@ +{ + "streams": [ + { + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite", + "stream": { + "name": "ab-airbyte-testing", + "supported_sync_modes": ["full_refresh"], + "source_defined_cursor": false, + "json_schema": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "body": { + "type": "string" + }, + "attributes": { + "type": ["null", "object"] + } + } + } + } + } + ] +} diff --git a/airbyte-integrations/connectors/destination-meilisearch/setup.py b/airbyte-integrations/connectors/destination-meilisearch/setup.py new file mode 100644 index 000000000000..9d9bfe3e6e16 --- /dev/null +++ b/airbyte-integrations/connectors/destination-meilisearch/setup.py @@ -0,0 +1,23 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = ["airbyte-cdk", "meilisearch>=0.22.0"] + +TEST_REQUIREMENTS = ["pytest~=6.1"] + +setup( + name="destination_meilisearch", + description="Destination implementation for Meilisearch.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/destination-meilisearch/unit_tests/unit_test.py b/airbyte-integrations/connectors/destination-meilisearch/unit_tests/unit_test.py new file mode 100644 index 000000000000..c09a3f7d8744 --- /dev/null +++ b/airbyte-integrations/connectors/destination-meilisearch/unit_tests/unit_test.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from unittest.mock import patch + +from destination_meilisearch.writer import MeiliWriter + + +@patch("meilisearch.Client") +def test_queue_write_operation(client): + writer = MeiliWriter(client, "stream_name", "primary_key") + writer.queue_write_operation({"a": "a"}) + assert len(writer._write_buffer) == 1 + writer.queue_write_operation({"b": "b"}) + assert len(writer._write_buffer) == 2 + writer2 = MeiliWriter(client, "stream_name2", "primary_key") + writer2.queue_write_operation({"a": "a"}) + assert len(writer2._write_buffer) == 1 + assert len(writer._write_buffer) == 2 + + +@patch("meilisearch.Client") +def test_flush(client): + writer = MeiliWriter(client, "stream_name", "primary_key") + writer.queue_write_operation({"a": "a"}) + writer.flush() + client.index.assert_called_once_with("stream_name") + client.wait_for_task.assert_called_once() diff --git a/airbyte-integrations/connectors/destination-rabbitmq/.dockerignore b/airbyte-integrations/connectors/destination-rabbitmq/.dockerignore new file mode 100644 index 000000000000..f3757e1aa586 --- /dev/null +++ b/airbyte-integrations/connectors/destination-rabbitmq/.dockerignore @@ -0,0 +1,5 @@ +* +!Dockerfile +!main.py +!destination_rabbitmq +!setup.py diff --git a/airbyte-integrations/connectors/destination-rabbitmq/Dockerfile b/airbyte-integrations/connectors/destination-rabbitmq/Dockerfile new file mode 100644 index 000000000000..a3e040a70787 --- /dev/null +++ b/airbyte-integrations/connectors/destination-rabbitmq/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. 
+RUN apk --no-cache add bash
+
+# copy payload code only
+COPY main.py ./
+COPY destination_rabbitmq ./destination_rabbitmq
+
+ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py"
+ENTRYPOINT ["python", "/airbyte/integration_code/main.py"]
+
+LABEL io.airbyte.version=0.1.2
+LABEL io.airbyte.name=airbyte/destination-rabbitmq
diff --git a/airbyte-integrations/connectors/destination-rabbitmq/README.md b/airbyte-integrations/connectors/destination-rabbitmq/README.md
new file mode 100644
index 000000000000..f6952028a518
--- /dev/null
+++ b/airbyte-integrations/connectors/destination-rabbitmq/README.md
@@ -0,0 +1,99 @@
+# Rabbitmq Destination
+
+This is the repository for the Rabbitmq destination connector, written in Python.
+For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/destinations/rabbitmq).
+
+## Local development
+
+### Prerequisites
+**To iterate on this connector, make sure to complete this prerequisites section.**
+
+#### Minimum Python version required `= 3.7.0`
+
+#### Build & Activate Virtual Environment and install dependencies
+From this connector directory, create a virtual environment:
+```
+python -m venv .venv
+```
+
+This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your
+development environment of choice. To activate it from the terminal, run:
+```
+source .venv/bin/activate
+pip install -r requirements.txt
+```
+If you are in an IDE, follow your IDE's instructions to activate the virtualenv.
+
+Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is
+used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`.
+If this sounds like mumbo jumbo to you, don't worry about it: put your deps in `setup.py` but install using `pip install -r requirements.txt`,
+and everything should work as you expect.
+
+#### Create credentials
+**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/destinations/rabbitmq)
+to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `destination_rabbitmq/spec.json` file.
+Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information.
+See `integration_tests/sample_config.json` for a sample config file.
+
+**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `destination rabbitmq test creds`
+and place them into `secrets/config.json`.
+
+### Locally running the connector
+```
+python main.py spec
+python main.py check --config secrets/config.json
+python main.py discover --config secrets/config.json
+python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json
+```
+
+### Locally running the connector docker image
+
+
+#### Build
+**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):**
+```bash
+airbyte-ci connectors --name=destination-rabbitmq build
+```
+
+An image will be built with the tag `airbyte/destination-rabbitmq:dev`.
+
+**Via `docker build`:**
+```bash
+docker build -t airbyte/destination-rabbitmq:dev .
+```
+
+#### Run
+Then run any of the connector commands as follows:
+```
+docker run --rm airbyte/destination-rabbitmq:dev spec
+docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-rabbitmq:dev check --config /secrets/config.json
+# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages
+cat messages.jsonl | docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-rabbitmq:dev write --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
+```
+
+## Testing
+You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md):
+```bash
+airbyte-ci connectors --name=destination-rabbitmq test
+```
+
+### Customizing Acceptance Tests
+Customize the `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information.
+If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`.
+
+## Dependency Management
+All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
+We split dependencies into two groups:
+* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list.
+* dependencies required for testing go in the `TEST_REQUIREMENTS` list.
+
+### Publishing a new version of the connector
+You've checked out the repo, implemented a million-dollar feature, and you're ready to share your changes with the world. Now what?
+1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-rabbitmq test`
+2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors).
+3. Make sure the `metadata.yaml` content is up to date.
+4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/destinations/rabbitmq.md`).
+5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention).
+6. Pat yourself on the back for being an awesome contributor.
+7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
+
diff --git a/airbyte-integrations/connectors/destination-rabbitmq/destination_rabbitmq/__init__.py b/airbyte-integrations/connectors/destination-rabbitmq/destination_rabbitmq/__init__.py
new file mode 100644
index 000000000000..db4e71f357d9
--- /dev/null
+++ b/airbyte-integrations/connectors/destination-rabbitmq/destination_rabbitmq/__init__.py
@@ -0,0 +1,8 @@
+#
+# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+# + + +from .destination import DestinationRabbitmq + +__all__ = ["DestinationRabbitmq"] diff --git a/airbyte-integrations/connectors/destination-rabbitmq/destination_rabbitmq/__init__.pyc b/airbyte-integrations/connectors/destination-rabbitmq/destination_rabbitmq/__init__.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7f48a3bafd1aadfe7ceb85fa60d5e98e7083500c GIT binary patch literal 300 zcmYk2F;4?A427M$3M#5fNS)blX|pf_Vqj)y#lm9cE>UI3C2i~u{oVWl98}<<XBS45M7LjT^0VpMYr~iLtz_F#P(3~M;L^yqtc%x{MI!k w3H$Ps%XY{qsVnQPN!Yqv9yAs4Pei0dg19-HWv;OgWA>*Qp02d#`YE@~H#L<^E&u=k literal 0 HcmV?d00001 diff --git a/airbyte-integrations/connectors/destination-rabbitmq/destination_rabbitmq/destination.py b/airbyte-integrations/connectors/destination-rabbitmq/destination_rabbitmq/destination.py new file mode 100644 index 000000000000..162a7a048e00 --- /dev/null +++ b/airbyte-integrations/connectors/destination-rabbitmq/destination_rabbitmq/destination.py @@ -0,0 +1,84 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import json +from typing import Any, Iterable, Mapping + +import pika +from airbyte_cdk import AirbyteLogger +from airbyte_cdk.destinations import Destination +from airbyte_cdk.models import AirbyteConnectionStatus, AirbyteMessage, ConfiguredAirbyteCatalog, Status, Type +from pika.adapters.blocking_connection import BlockingConnection +from pika.spec import BasicProperties + +_DEFAULT_PORT = 5672 + + +def create_connection(config: Mapping[str, Any]) -> BlockingConnection: + host = config.get("host") + port = config.get("port") or _DEFAULT_PORT + username = config.get("username") + password = config.get("password") + virtual_host = config.get("virtual_host", "") + ssl_enabled = config.get("ssl", False) + amqp_protocol = "amqp" + host_url = host + if ssl_enabled: + amqp_protocol = "amqps" + if port: + host_url = host + ":" + str(port) + credentials = f"{username}:{password}@" if username and password else "" + params = pika.URLParameters(f"{amqp_protocol}://{credentials}{host_url}/{virtual_host}") + return BlockingConnection(params) + + +class DestinationRabbitmq(Destination): + def write( + self, config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog, input_messages: Iterable[AirbyteMessage] + ) -> Iterable[AirbyteMessage]: + exchange = config.get("exchange") + routing_key = config["routing_key"] + connection = create_connection(config=config) + channel = connection.channel() + + streams = {s.stream.name for s in configured_catalog.streams} + try: + for message in input_messages: + if message.type == Type.STATE: + # Emitting a state message means all records that came before it + # have already been published. + yield message + elif message.type == Type.RECORD: + record = message.record + if record.stream not in streams: + # Message contains record from a stream that is not in the catalog. Skip it! + continue + headers = {"stream": record.stream, "emitted_at": record.emitted_at, "namespace": record.namespace} + properties = BasicProperties(content_type="application/json", headers=headers) + channel.basic_publish( + exchange=exchange or "", routing_key=routing_key, properties=properties, body=json.dumps(record.data) + ) + else: + # Let's ignore other message types for now + continue + finally: + connection.close() + + def check(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> AirbyteConnectionStatus: + try: + connection = create_connection(config=config) + except Exception as e: + logger.error(f"Failed to create connection. 
Error: {e}") + return AirbyteConnectionStatus(status=Status.FAILED, message=f"Could not create connection: {repr(e)}") + try: + channel = connection.channel() + if channel.is_open: + return AirbyteConnectionStatus(status=Status.SUCCEEDED) + return AirbyteConnectionStatus(status=Status.FAILED, message="Could not open channel") + except Exception as e: + logger.error(f"Failed to open RabbitMQ channel. Error: {e}") + return AirbyteConnectionStatus(status=Status.FAILED, message=f"An exception occurred: {repr(e)}") + finally: + connection.close() diff --git a/airbyte-integrations/connectors/destination-rabbitmq/destination_rabbitmq/spec.json b/airbyte-integrations/connectors/destination-rabbitmq/destination_rabbitmq/spec.json new file mode 100644 index 000000000000..cbeb330e5a96 --- /dev/null +++ b/airbyte-integrations/connectors/destination-rabbitmq/destination_rabbitmq/spec.json @@ -0,0 +1,49 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/destinations/rabbitmq", + "supported_destination_sync_modes": ["append"], + "supportsIncremental": true, + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Destination Rabbitmq", + "type": "object", + "required": ["host", "routing_key"], + "additionalProperties": false, + "properties": { + "ssl": { + "type": "boolean", + "description": "SSL enabled.", + "default": true + }, + "host": { + "type": "string", + "description": "The RabbitMQ host name." + }, + "port": { + "type": "integer", + "description": "The RabbitMQ port." + }, + "virtual_host": { + "type": "string", + "description": "The RabbitMQ virtual host name." + }, + "username": { + "type": "string", + "description": "The username to connect." + }, + "password": { + "type": "string", + "title": "Password", + "description": "The password to connect.", + "airbyte_secret": true + }, + "exchange": { + "type": "string", + "description": "The exchange name." + }, + "routing_key": { + "type": "string", + "description": "The routing key." + } + } + } +} diff --git a/airbyte-integrations/connectors/destination-rabbitmq/integration_tests/integration_test.py b/airbyte-integrations/connectors/destination-rabbitmq/integration_tests/integration_test.py new file mode 100644 index 000000000000..f99c64178d4f --- /dev/null +++ b/airbyte-integrations/connectors/destination-rabbitmq/integration_tests/integration_test.py @@ -0,0 +1,90 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + +import json +from unittest.mock import Mock + +from airbyte_cdk.models import AirbyteMessage, Status, Type +from airbyte_cdk.models.airbyte_protocol import ( + AirbyteRecordMessage, + AirbyteStateMessage, + AirbyteStream, + ConfiguredAirbyteCatalog, + ConfiguredAirbyteStream, + DestinationSyncMode, + SyncMode, +) +from destination_rabbitmq.destination import DestinationRabbitmq, create_connection + +TEST_STREAM = "animals" +TEST_NAMESPACE = "test_namespace" +TEST_MESSAGE = {"name": "cat"} + + +def _configured_catalog() -> ConfiguredAirbyteCatalog: + stream_schema = {"type": "object", "properties": {"name": {"type": "string"}}} + append_stream = ConfiguredAirbyteStream( + stream=AirbyteStream(name=TEST_STREAM, json_schema=stream_schema, supported_sync_modes=[SyncMode.incremental]), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.append, + ) + return ConfiguredAirbyteCatalog(streams=[append_stream]) + + +def consume(config): + connection = create_connection(config=config) + channel = connection.channel() + + def assert_message(ch, method, properties, body): + assert json.loads(body) == TEST_MESSAGE + assert properties.content_type == "application/json" + assert properties.headers["stream"] == TEST_STREAM + assert properties.headers["namespace"] == TEST_NAMESPACE + assert "emitted_at" in properties.headers + channel.stop_consuming() + + channel.basic_consume(queue=config["routing_key"], on_message_callback=assert_message, auto_ack=True) + channel.start_consuming() + + +def _state() -> AirbyteMessage: + return AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(data={})) + + +def _record() -> AirbyteMessage: + return AirbyteMessage( + type=Type.RECORD, record=AirbyteRecordMessage(stream=TEST_STREAM, data=TEST_MESSAGE, emitted_at=0, namespace=TEST_NAMESPACE) + ) + + +def test_check_fails(): + f = open( + "integration_tests/invalid_config.json", + ) + config = json.load(f) + destination = DestinationRabbitmq() + status = destination.check(logger=Mock(), config=config) + assert status.status == Status.FAILED + + +def test_check_succeeds(): + f = open( + "secrets/config.json", + ) + config = json.load(f) + destination = DestinationRabbitmq() + status = destination.check(logger=Mock(), config=config) + assert status.status == Status.SUCCEEDED + + +def test_write(): + f = open( + "secrets/config.json", + ) + config = json.load(f) + messages = [_record(), _state()] + destination = DestinationRabbitmq() + for m in destination.write(config=config, configured_catalog=_configured_catalog(), input_messages=messages): + assert m.type == Type.STATE + consume(config) diff --git a/airbyte-integrations/connectors/destination-rabbitmq/integration_tests/invalid_config.json b/airbyte-integrations/connectors/destination-rabbitmq/integration_tests/invalid_config.json new file mode 100644 index 000000000000..a482e4705f21 --- /dev/null +++ b/airbyte-integrations/connectors/destination-rabbitmq/integration_tests/invalid_config.json @@ -0,0 +1,9 @@ +{ + "host": "invalid.host.io", + "port": 5672, + "virtual_host": "invalid_vh", + "username": "invalid_username", + "password": "invalid_password", + "routing_key": "test_queue", + "exchange": "test_exchange" +} diff --git a/airbyte-integrations/connectors/destination-rabbitmq/main.py b/airbyte-integrations/connectors/destination-rabbitmq/main.py new file mode 100644 index 000000000000..fc09374015c7 --- /dev/null +++ b/airbyte-integrations/connectors/destination-rabbitmq/main.py @@ -0,0 +1,11 @@ +# +# Copyright (c) 2023 Airbyte, 
Inc., all rights reserved. +# + + +import sys + +from destination_rabbitmq import DestinationRabbitmq + +if __name__ == "__main__": + DestinationRabbitmq().run(sys.argv[1:]) diff --git a/airbyte-integrations/connectors/destination-rabbitmq/metadata.yaml b/airbyte-integrations/connectors/destination-rabbitmq/metadata.yaml index 56bd8775f7d2..0b320468490d 100644 --- a/airbyte-integrations/connectors/destination-rabbitmq/metadata.yaml +++ b/airbyte-integrations/connectors/destination-rabbitmq/metadata.yaml @@ -2,7 +2,7 @@ data: connectorSubtype: database connectorType: destination definitionId: e06ad785-ad6f-4647-b2e8-3027a5c59454 - dockerImageTag: 0.1.1 + dockerImageTag: 0.1.2 dockerRepository: airbyte/destination-rabbitmq githubIssueLabel: destination-rabbitmq icon: pulsar.svg @@ -20,5 +20,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: archived + supportLevel: community metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-rabbitmq/requirements.txt b/airbyte-integrations/connectors/destination-rabbitmq/requirements.txt new file mode 100644 index 000000000000..d6e1198b1ab1 --- /dev/null +++ b/airbyte-integrations/connectors/destination-rabbitmq/requirements.txt @@ -0,0 +1 @@ +-e . diff --git a/airbyte-integrations/connectors/destination-rabbitmq/setup.py b/airbyte-integrations/connectors/destination-rabbitmq/setup.py new file mode 100644 index 000000000000..352ded5f8b4e --- /dev/null +++ b/airbyte-integrations/connectors/destination-rabbitmq/setup.py @@ -0,0 +1,23 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = ["airbyte-cdk", "pika>=1.1.0"] + +TEST_REQUIREMENTS = ["pytest~=6.1"] + +setup( + name="destination_rabbitmq", + description="Destination implementation for Rabbitmq.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/destination-rabbitmq/unit_tests/unit_test.py b/airbyte-integrations/connectors/destination-rabbitmq/unit_tests/unit_test.py new file mode 100644 index 000000000000..57c34b6f9f58 --- /dev/null +++ b/airbyte-integrations/connectors/destination-rabbitmq/unit_tests/unit_test.py @@ -0,0 +1,130 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + +import json +from typing import Any, Dict +from unittest import mock +from unittest.mock import Mock + +from airbyte_cdk.models import AirbyteMessage, Status, Type +from airbyte_cdk.models.airbyte_protocol import ( + AirbyteRecordMessage, + AirbyteStateMessage, + AirbyteStream, + ConfiguredAirbyteCatalog, + ConfiguredAirbyteStream, + DestinationSyncMode, + SyncMode, +) +from destination_rabbitmq.destination import DestinationRabbitmq +from pika.spec import Queue + +config = { + "host": "test.rabbitmq", + "port": 5672, + "virtual_host": "test_vh", + "username": "john.doe", + "password": "secret", + "exchange": "test_exchange", + "routing_key": "test_routing_key", +} + + +def _init_mocks(connection_init): + connection, channel = Mock(), Mock() + connection_init.return_value = connection + connection.channel.return_value = channel + return channel + + +@mock.patch("destination_rabbitmq.destination.BlockingConnection") +def test_check_succeeds(connection_init): + result = Mock() + result.method = Queue.DeclareOk() + channel = _init_mocks(connection_init=connection_init) + channel.queue_declare.return_value = result + destination = DestinationRabbitmq() + status = destination.check(logger=Mock(), config=config) + assert status.status == Status.SUCCEEDED + + +@mock.patch("destination_rabbitmq.destination.BlockingConnection") +def test_check_fails_on_getting_channel(connection_init): + connection = Mock() + connection_init.return_value = connection + connection.channel.side_effect = Exception("Failed to get channel") + destination = DestinationRabbitmq() + status = destination.check(logger=Mock(), config=config) + assert status.status == Status.FAILED + + +@mock.patch("destination_rabbitmq.destination.BlockingConnection") +def test_check_fails_on_creating_connection(connection_init): + connection_init.side_effect = Exception("Could not open connection") + destination = DestinationRabbitmq() + status = destination.check(logger=Mock(), config=config) + assert status.status == Status.FAILED + + +def _state() -> AirbyteMessage: + return AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(data={})) + + +def _record(stream: str, data: Dict[str, Any]) -> AirbyteMessage: + return AirbyteMessage(type=Type.RECORD, record=AirbyteRecordMessage(stream=stream, data=data, emitted_at=0)) + + +def _configured_catalog() -> ConfiguredAirbyteCatalog: + stream_schema = {"type": "object", "properties": {"name": {"type": "string"}, "email": {"type": "string"}}} + append_stream = ConfiguredAirbyteStream( + stream=AirbyteStream(name="people", json_schema=stream_schema, supported_sync_modes=[SyncMode.incremental]), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.append, + ) + return ConfiguredAirbyteCatalog(streams=[append_stream]) + + +@mock.patch("destination_rabbitmq.destination.BlockingConnection") +def test_write_succeeds(connection_init): + stream = "people" + data = {"name": "John Doe", "email": "john.doe@example.com"} + channel = _init_mocks(connection_init=connection_init) + input_messages = [_record(stream=stream, data=data), _state()] + destination = DestinationRabbitmq() + for m in destination.write(config=config, configured_catalog=_configured_catalog(), input_messages=input_messages): + assert m.type == Type.STATE + _, _, args = channel.basic_publish.mock_calls[0] + assert args["exchange"] == "test_exchange" + assert args["routing_key"] == "test_routing_key" + assert args["properties"].content_type == "application/json" + assert args["properties"].headers["stream"] 
== stream + assert json.loads(args["body"]) == data + + +@mock.patch("destination_rabbitmq.destination.BlockingConnection") +def test_write_succeeds_with_direct_exchange(connection_init): + stream = "people" + data = {"name": "John Doe", "email": "john.doe@example.com"} + channel = _init_mocks(connection_init=connection_init) + input_messages = [_record(stream=stream, data=data), _state()] + custom_config = dict(config) + del custom_config["exchange"] + destination = DestinationRabbitmq() + for m in destination.write(config=custom_config, configured_catalog=_configured_catalog(), input_messages=input_messages): + assert m.type == Type.STATE + _, _, args = channel.basic_publish.mock_calls[0] + assert args["exchange"] == "" + assert json.loads(args["body"]) == data + + +@mock.patch("destination_rabbitmq.destination.BlockingConnection") +def test_write_skips_message_from_unknown_stream(connection_init): + stream = "shapes" + data = {"name": "Rectangle", "color": "blue"} + channel = _init_mocks(connection_init=connection_init) + input_messages = [_record(stream=stream, data=data), _state()] + destination = DestinationRabbitmq() + for m in destination.write(config=config, configured_catalog=_configured_catalog(), input_messages=input_messages): + assert m.type == Type.STATE + channel.basic_publish.assert_not_called() diff --git a/airbyte-integrations/connectors/destination-timeplus/.dockerignore b/airbyte-integrations/connectors/destination-timeplus/.dockerignore new file mode 100755 index 000000000000..40dea8ad1f6f --- /dev/null +++ b/airbyte-integrations/connectors/destination-timeplus/.dockerignore @@ -0,0 +1,5 @@ +* +!Dockerfile +!main.py +!destination_timeplus +!setup.py diff --git a/airbyte-integrations/connectors/destination-timeplus/Dockerfile b/airbyte-integrations/connectors/destination-timeplus/Dockerfile new file mode 100755 index 000000000000..4baf8d7560d8 --- /dev/null +++ b/airbyte-integrations/connectors/destination-timeplus/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY destination_timeplus ./destination_timeplus + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.1 +LABEL io.airbyte.name=airbyte/destination-timeplus diff --git a/airbyte-integrations/connectors/destination-timeplus/README.md b/airbyte-integrations/connectors/destination-timeplus/README.md new file mode 100755 index 000000000000..6ba14518f631 --- /dev/null +++ b/airbyte-integrations/connectors/destination-timeplus/README.md @@ -0,0 +1,108 @@ +# Timeplus Destination + +This is the repository for the Timeplus destination connector, written in Python. 
+For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/destinations/timeplus). + +## Local development + +### Prerequisites + +#### Minimum Python version required `= 3.9.0` + +#### Build & Activate Virtual Environment and install dependencies + +From this connector directory, create a virtual environment: + +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: + +``` +source .venv/bin/activate +pip install -r requirements.txt +``` + +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Create credentials + +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/destinations/timeplus) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `destination_timeplus/spec.json` file. +Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `destination timeplus test creds` +and place them into `secrets/config.json`. + +### Locally running the connector + +``` +python main.py spec +python main.py check --config secrets/config.json +cat integration_tests/messages.jsonl | python main.py write --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + + +#### Build +**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** +```bash +airbyte-ci connectors --name=destination-timeplus build +``` + +An image will be built with the tag `airbyte/destination-timeplus:dev`. + +**Via `docker build`:** +```bash +docker build -t airbyte/destination-timeplus:dev . +``` + +#### Run + +Then run any of the connector commands as follows: + +``` +docker run --rm airbyte/destination-timeplus:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-timeplus:dev check --config /secrets/config.json +# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages +cat messages.jsonl | docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-timeplus:dev write --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` + + +## Testing +You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): +```bash +airbyte-ci connectors --name=destination-timeplus test +``` + +### Customizing acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. 
See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information.
+If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`.
+
+## Dependency Management
+
+All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
+We split dependencies into two groups:
+
+- dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list.
+- dependencies required for testing go in the `TEST_REQUIREMENTS` list.
+
+### Publishing a new version of the connector
+You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what?
+1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-timeplus test`
+2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors).
+3. Make sure the `metadata.yaml` content is up to date.
+4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/destinations/timeplus.md`).
+5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention).
+6. Pat yourself on the back for being an awesome contributor.
+7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
+
diff --git a/airbyte-integrations/connectors/destination-timeplus/destination_timeplus/__init__.py b/airbyte-integrations/connectors/destination-timeplus/destination_timeplus/__init__.py
new file mode 100755
index 000000000000..fa8a30eb633c
--- /dev/null
+++ b/airbyte-integrations/connectors/destination-timeplus/destination_timeplus/__init__.py
@@ -0,0 +1,8 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
+
+
+from .destination import DestinationTimeplus
+
+__all__ = ["DestinationTimeplus"]
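The `destination.py` module added below drives stream setup off each stream's `DestinationSyncMode`. Its branching reduces to a small truth table; here is a condensed sketch of that decision (the helper name `plan_stream_setup` is hypothetical, while the two flags mirror `need_delete_stream` and `need_create_stream` in the `write` method that follows):

```python
def plan_stream_setup(is_overwrite: bool, stream_exists: bool) -> tuple[bool, bool]:
    """Return (need_delete_stream, need_create_stream) for one configured stream."""
    if is_overwrite:
        # overwrite: drop any existing data by deleting and recreating the stream
        return (stream_exists, True)
    # append: create the stream only if it is missing, never delete
    return (False, not stream_exists)
```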
diff --git a/airbyte-integrations/connectors/destination-timeplus/destination_timeplus/destination.py b/airbyte-integrations/connectors/destination-timeplus/destination_timeplus/destination.py
new file mode 100755
index 000000000000..3cf5c8920e78
--- /dev/null
+++ b/airbyte-integrations/connectors/destination-timeplus/destination_timeplus/destination.py
@@ -0,0 +1,160 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
+
+
+from logging import getLogger
+from typing import Any, Iterable, Mapping
+
+from airbyte_cdk import AirbyteLogger
+from airbyte_cdk.destinations import Destination
+from airbyte_cdk.models import (
+    AirbyteConnectionStatus,
+    AirbyteMessage,
+    AirbyteStream,
+    ConfiguredAirbyteCatalog,
+    DestinationSyncMode,
+    Status,
+    Type,
+)
+from timeplus import Environment, Stream
+
+logger = getLogger("airbyte")
+
+
+class DestinationTimeplus(Destination):
+    def write(
+        self, config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog, input_messages: Iterable[AirbyteMessage]
+    ) -> Iterable[AirbyteMessage]:
+        """
+        Reads the input stream of messages, config, and catalog to write data to the destination.
+
+        This method returns an iterable (typically a generator of AirbyteMessages via yield) containing state messages received
+        in the input message stream. Outputting a state message means that every AirbyteRecordMessage which came before it has been
+        successfully persisted to the destination. This is used to ensure fault tolerance in the case that a sync fails before fully completing,
+        then the source is given the last state message output from this method as the starting point of the next sync.
+
+        :param config: dict of JSON configuration matching the configuration declared in spec.json
+        :param configured_catalog: The Configured Catalog describing the schema of the data being received and how it should be persisted in the
+        destination
+        :param input_messages: The stream of input messages received from the source
+        :return: Iterable of AirbyteStateMessages wrapped in AirbyteMessage structs
+        """
+        endpoint = config["endpoint"]
+        apikey = config["apikey"]
+        if endpoint[-1] == "/":
+            endpoint = endpoint[:-1]
+        env = Environment().address(endpoint).apikey(apikey)
+        stream_list = Stream(env=env).list()
+        all_streams = {s.name for s in stream_list}
+
+        # only support "overwrite", "append"
+        for configured_stream in configured_catalog.streams:
+            is_overwrite = configured_stream.destination_sync_mode == DestinationSyncMode.overwrite
+            stream_exists = configured_stream.stream.name in all_streams
+            logger.info(f"Stream {configured_stream.stream.name} {configured_stream.destination_sync_mode}")
+            need_delete_stream = False
+            need_create_stream = False
+            if is_overwrite:
+                if stream_exists:
+                    # delete all data in the existing stream and recreate the stream.
+                    need_delete_stream = True
+                    need_create_stream = True
+                else:
+                    # only need to create the stream
+                    need_create_stream = True
+            else:
+                if stream_exists:
+                    # for append mode, just add more data to the existing stream. No need to do anything.
+                    pass
+                else:
+                    # for append mode, create the stream and append data to it.
+                    need_create_stream = True
+
+            if need_delete_stream:
+                # delete the existing stream
+                Stream(env=env).name(configured_stream.stream.name).get().delete()
+                logger.info(f"Stream {configured_stream.stream.name} deleted successfully")
+            if need_create_stream:
+                # create a new stream
+                DestinationTimeplus.create_stream(env, configured_stream.stream)
+                logger.info(f"Stream {configured_stream.stream.name} created successfully")
+
+        for message in input_messages:
+            if message.type == Type.STATE:
+                # Emitting a state message indicates that all records which came before it have been written to the destination. So we flush
+                # the queue to ensure writes happen, then output the state message to indicate it's safe to checkpoint state
+                yield message
+            elif message.type == Type.RECORD:
+                record = message.record
+
+                # this code is to send data to a single-column stream
+                # Stream(env=env).name(record.stream).column("raw", "string").ingest(payload=record.data)
+
+                Stream(env=env).name(record.stream).ingest(payload=record.data, format="streaming")
+            else:
+                # ignore other message types for now
+                continue
+
+    @staticmethod
+    def create_stream(env, stream: AirbyteStream):
+        # single-column stream
+        # Stream(env=env).name(stream.name).column('raw','string').create()
+
+        tp_stream = Stream(env=env).name(stream.name.strip())
+        for name, v in stream.json_schema["properties"].items():
+            tp_stream.column(name.strip(), DestinationTimeplus.type_mapping(v))
+        tp_stream.create()
+
+    @staticmethod
+    def type_mapping(v) -> str:
+        airbyte_type = v["type"]
+        if type(airbyte_type) is list:
+            for t in list(airbyte_type):
+                if t != "null":
+                    type_def = {"type": t}
+                    if t == "array":
+                        type_def["items"] = v["items"]
+                    return DestinationTimeplus.type_mapping(type_def)
+        if airbyte_type == "number":
+            return "float"
+        elif airbyte_type == "integer":
+            return "integer"
+        elif airbyte_type == "boolean":
+            return "bool"
+        elif airbyte_type == "object":
+            return "string"
+        elif airbyte_type == "array":
+            return f"array({DestinationTimeplus.type_mapping(v['items'])})"
+        else:
+            return "string"
+
+    def check(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> AirbyteConnectionStatus:
+        """
+        Tests if the input configuration can be used to successfully connect to the destination with the needed permissions
+        e.g: if a provided API token or password can be used to connect and write to the destination.
+
+        :param logger: Logging object to display debug/info/error to the logs
+            (logs will not be accessible via airbyte UI if they are not passed to this logger)
+        :param config: Json object containing the configuration of this destination, content of this json is as specified in
+        the properties of the spec.json file
+
+        :return: AirbyteConnectionStatus indicating a Success or Failure
+        """
+        try:
+            endpoint = config["endpoint"]
+            apikey = config["apikey"]
+            if not endpoint.startswith("http"):
+                return AirbyteConnectionStatus(status=Status.FAILED, message="Endpoint must start with http or https")
+            if len(apikey) != 60:
+                return AirbyteConnectionStatus(status=Status.FAILED, message="API Key must be 60 characters")
+            if endpoint[-1] == "/":
+                endpoint = endpoint[:-1]
+            env = Environment().address(endpoint).apikey(apikey)
+            Stream(env=env).list()
+            logger.info("Successfully connected to " + endpoint)
+            return AirbyteConnectionStatus(status=Status.SUCCEEDED)
+        except Exception as e:
+            return AirbyteConnectionStatus(
+                status=Status.FAILED, message=f"Failed to connect to Timeplus endpoint with the given API key: {repr(e)}"
+            )
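`type_mapping` above recursively flattens Airbyte's JSON-schema types, including nullable `["null", T]` unions and nested arrays, into Timeplus column type names. A few input/output pairs make the recursion concrete; they match the expectations in the connector's unit test further down:

```python
from destination_timeplus import DestinationTimeplus

# scalar types map directly
assert DestinationTimeplus.type_mapping({"type": "number"}) == "float"
assert DestinationTimeplus.type_mapping({"type": "boolean"}) == "bool"
# nullable unions recurse on the first non-null member
assert DestinationTimeplus.type_mapping({"type": ["null", "integer"]}) == "integer"
# arrays recurse into their item type
assert DestinationTimeplus.type_mapping({"type": "array", "items": {"type": "integer"}}) == "array(integer)"
# objects fall back to string serialization
assert DestinationTimeplus.type_mapping({"type": "object"}) == "string"
```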
diff --git a/airbyte-integrations/connectors/destination-timeplus/destination_timeplus/spec.json b/airbyte-integrations/connectors/destination-timeplus/destination_timeplus/spec.json
new file mode 100755
index 000000000000..6a56f1b0252e
--- /dev/null
+++ b/airbyte-integrations/connectors/destination-timeplus/destination_timeplus/spec.json
@@ -0,0 +1,31 @@
+{
+  "documentationUrl": "https://docs.timeplus.com",
+  "supported_destination_sync_modes": ["overwrite", "append"],
+  "supportsIncremental": true,
+  "supportsDBT": false,
+  "supportsNormalization": false,
+  "connectionSpecification": {
+    "$schema": "http://json-schema.org/draft-07/schema#",
+    "title": "Destination Timeplus",
+    "type": "object",
+    "required": ["endpoint", "apikey"],
+    "additionalProperties": false,
+    "properties": {
+      "endpoint": {
+        "title": "Endpoint",
+        "description": "Timeplus workspace endpoint",
+        "type": "string",
+        "default": "https://us.timeplus.cloud/",
+        "examples": ["https://us.timeplus.cloud/workspace_id"],
+        "order": 0
+      },
+      "apikey": {
+        "title": "API key",
+        "description": "Personal API key",
+        "type": "string",
+        "airbyte_secret": true,
+        "order": 1
+      }
+    }
+  }
+}
diff --git a/airbyte-integrations/connectors/destination-timeplus/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/destination-timeplus/integration_tests/configured_catalog.json
new file mode 100644
index 000000000000..96540519acb1
--- /dev/null
+++ b/airbyte-integrations/connectors/destination-timeplus/integration_tests/configured_catalog.json
@@ -0,0 +1,263 @@
+{
+  "streams": [
+    {
+      "stream": {
+        "name": "airbyte_single_str_col",
+        "supported_sync_modes": ["full_refresh"],
+        "source_defined_cursor": false,
+        "json_schema": {
+          "type": "object",
+          "properties": {
+            "raw": {
+              "type": "string"
+            }
+          }
+        }
+      },
+      "sync_mode": "full_refresh",
+      "destination_sync_mode": "overwrite"
+    },
+    {
+      "stream": {
+        "name": "airbyte_acceptance_table",
+        "supported_sync_modes": ["full_refresh"],
+        "source_defined_cursor": false,
+        "json_schema": {
+          "type": "object",
+          "properties": {
+            "column1": {
+              "type": "string"
+            },
+            "column2": {
+              "type": "number"
+            },
+            "column3": {
+              "type": "string",
+              "format": "datetime",
+              "airbyte_type": "timestamp_without_timezone"
+            },
+            "column4": {
+              "type": "number"
+            },
+            "column5": {
+              "type": "array",
+              "items": {
+                "type": "integer"
+              }
+            }
+          }
+        }
+      },
"sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "airbyte_test_boolean", + "supported_sync_modes": ["full_refresh"], + "source_defined_cursor": false, + "json_schema": { + "type": "object", + "properties": { + "column1": { + "type": "boolean" + }, + "column2": { + "type": "number" + } + } + } + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "destination_sync_mode": "overwrite", + "stream": { + "json_schema": { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { "type": ["null", "integer"] }, + "name": { "type": ["null", "string"] }, + "base_experience": { "type": ["null", "integer"] }, + "height": { "type": ["null", "integer"] }, + "is_default": { "type": ["null", "boolean"] }, + "order": { "type": ["null", "integer"] }, + "weight": { "type": ["null", "integer"] }, + "abilities": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "is_hidden": { "type": ["null", "boolean"] }, + "slot": { "type": ["null", "integer"] }, + "ability": { + "type": ["null", "object"], + "properties": { + "name": { "type": ["null", "string"] }, + "url": { "type": ["null", "string"] } + } + } + } + } + }, + "forms": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "name": { "type": ["null", "string"] }, + "url": { "type": ["null", "string"] } + } + } + }, + "game_indices": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "game_index": { "type": ["null", "integer"] }, + "version": { + "type": ["null", "object"], + "properties": { + "name": { "type": ["null", "string"] }, + "url": { "type": ["null", "string"] } + } + } + } + } + }, + "held_items": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "item": { + "type": ["null", "object"], + "properties": { + "name": { "type": ["null", "string"] }, + "url": { "type": ["null", "string"] } + } + }, + "version_details": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "version": { + "type": ["null", "object"], + "properties": { + "name": { "type": ["null", "string"] }, + "url": { "type": ["null", "string"] } + } + }, + "rarity": { "type": ["null", "integer"] } + } + } + } + } + } + }, + "location_area_encounters": { "type": ["null", "string"] }, + "moves": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "move": { + "type": ["null", "object"], + "properties": { + "name": { "type": ["null", "string"] }, + "url": { "type": ["null", "string"] } + } + }, + "version_group_details": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "move_learn_method": { + "type": ["null", "object"], + "properties": { + "name": { "type": ["null", "string"] }, + "url": { "type": ["null", "string"] } + } + }, + "version_group": { + "type": ["null", "object"], + "properties": { + "name": { "type": ["null", "string"] }, + "url": { "type": ["null", "string"] } + } + }, + "level_learned_at": { "type": ["null", "integer"] } + } + } + } + } + } + }, + "sprites": { + "type": ["null", "object"], + "properties": { + "front_default": { "type": ["null", "string"] }, + "front_shiny": { "type": ["null", "string"] }, + "front_female": { "type": ["null", "string"] }, + "front_shiny_female": { "type": ["null", "string"] }, + "back_default": { "type": ["null", "string"] }, + 
"back_shiny": { "type": ["null", "string"] }, + "back_female": { "type": ["null", "string"] }, + "back_shiny_female": { "type": ["null", "string"] } + } + }, + "species": { + "type": ["null", "object"], + "properties": { + "name": { "type": ["null", "string"] }, + "url": { "type": ["null", "string"] } + } + }, + "stats": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "stat": { + "type": ["null", "object"], + "properties": { + "name": { "type": ["null", "string"] }, + "url": { "type": ["null", "string"] } + } + }, + "effort": { "type": ["null", "integer"] }, + "base_stat": { "type": ["null", "integer"] } + } + } + }, + "types": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "slot": { "type": ["null", "integer"] }, + "type": { + "type": ["null", "object"], + "properties": { + "name": { "type": ["null", "string"] }, + "url": { "type": ["null", "string"] } + } + } + } + } + } + } + }, + "name": "pokemon", + "source_defined_cursor": false, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh" + } + ] +} diff --git a/airbyte-integrations/connectors/destination-timeplus/integration_tests/integration_test.py b/airbyte-integrations/connectors/destination-timeplus/integration_tests/integration_test.py new file mode 100755 index 000000000000..e3de7dac9e71 --- /dev/null +++ b/airbyte-integrations/connectors/destination-timeplus/integration_tests/integration_test.py @@ -0,0 +1,74 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +import json +import logging +from datetime import datetime +from typing import Any, Mapping + +import pytest +from airbyte_cdk.models import ( + AirbyteMessage, + AirbyteRecordMessage, + AirbyteStream, + ConfiguredAirbyteCatalog, + ConfiguredAirbyteStream, + DestinationSyncMode, + Status, + SyncMode, + Type, +) +from destination_timeplus import DestinationTimeplus + + +@pytest.fixture(name="config") +def config_fixture() -> Mapping[str, Any]: + with open("secrets/config.json", "r") as f: + return json.loads(f.read()) + + +@pytest.fixture(name="configured_catalog") +def configured_catalog_fixture() -> ConfiguredAirbyteCatalog: + stream_schema = {"type": "object", "properties": {"string_col": {"type": "str"}, "int_col": {"type": "integer"}}} + append_stream = ConfiguredAirbyteStream( + stream=AirbyteStream(name="append_stream", json_schema=stream_schema, supported_sync_modes=[SyncMode.incremental]), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.append, + ) + + overwrite_stream = ConfiguredAirbyteStream( + stream=AirbyteStream(name="overwrite_stream", json_schema=stream_schema, supported_sync_modes=[SyncMode.incremental]), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.overwrite, + ) + + return ConfiguredAirbyteCatalog(streams=[append_stream, overwrite_stream]) + + +def test_check_valid_config(config: Mapping): + outcome = DestinationTimeplus().check(logging.getLogger("airbyte"), config) + assert outcome.status == Status.SUCCEEDED + + +def test_check_invalid_config(): + outcome = DestinationTimeplus().check(logging.getLogger("airbyte"), {"secret_key": "not_a_real_secret"}) + assert outcome.status == Status.FAILED + + +def test_write(config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog): + records = [ + AirbyteMessage( + type=Type.RECORD, + record=AirbyteRecordMessage( + stream="append_stream", + data={ + "string_col": "example", + "int_col": 1, + }, + 
diff --git a/airbyte-integrations/connectors/destination-timeplus/integration_tests/messages.jsonl b/airbyte-integrations/connectors/destination-timeplus/integration_tests/messages.jsonl
new file mode 100644
index 000000000000..6db122f96411
--- /dev/null
+++ b/airbyte-integrations/connectors/destination-timeplus/integration_tests/messages.jsonl
@@ -0,0 +1,5 @@
+{"type": "RECORD", "record": {"stream": "airbyte_single_str_col", "data": {"raw": "my_value"}, "emitted_at": 1626172757000}}
+{"type": "RECORD", "record": {"stream": "airbyte_acceptance_table", "data": {"column1": "my_value", "column2": 221, "column3": "2021-01-01T20:10:22", "column4": 1.214, "column5": [1,2,3]}, "emitted_at": 1626172757000}}
+{"type": "RECORD", "record": {"stream": "airbyte_acceptance_table", "data": {"column1": "my_value2", "column2": 222, "column3": "2021-01-02T22:10:22", "column5": [1,2,null]}, "emitted_at": 1626172757000}}
+{"type": "RECORD", "record": {"stream": "airbyte_test_boolean", "data": {"column1": true, "column2": 222}, "emitted_at": 1626172757000}}
+{"type": "RECORD", "record": {"stream": "pokemon","data": { "abilities": [ { "ability": { "name": "limber", "url": "https://pokeapi.co/api/v2/ability/7/" }, "is_hidden": false, "slot": 1 }, { "ability": { "name": "imposter", "url": "https://pokeapi.co/api/v2/ability/150/" }, "is_hidden": true, "slot": 3 } ], "base_experience": 101, "forms": [ { "name": "ditto", "url": "https://pokeapi.co/api/v2/pokemon-form/132/" } ], "game_indices": [ { "game_index": 76, "version": { "name": "red", "url": "https://pokeapi.co/api/v2/version/1/" } }, { "game_index": 76, "version": { "name": "blue", "url": "https://pokeapi.co/api/v2/version/2/" } }, { "game_index": 76, "version": { "name": "yellow", "url": "https://pokeapi.co/api/v2/version/3/" } }, { "game_index": 132, "version": { "name": "gold", "url": "https://pokeapi.co/api/v2/version/4/" } }, { "game_index": 132, "version": { "name": "silver", "url": "https://pokeapi.co/api/v2/version/5/" } }, { "game_index": 132, "version": { "name": "crystal", "url": "https://pokeapi.co/api/v2/version/6/" } }, { "game_index": 132, "version": { "name": "ruby", "url": "https://pokeapi.co/api/v2/version/7/" } }, { "game_index": 132, "version": { "name": "sapphire", "url": "https://pokeapi.co/api/v2/version/8/" } }, { "game_index": 132, "version": { "name": "emerald", "url": "https://pokeapi.co/api/v2/version/9/" } }, { "game_index": 132, "version": { "name": "firered", "url": "https://pokeapi.co/api/v2/version/10/" } }, { "game_index": 132, "version": { "name": "leafgreen", "url": "https://pokeapi.co/api/v2/version/11/" } }, { "game_index": 132, "version": { "name": "diamond", "url": "https://pokeapi.co/api/v2/version/12/" } }, { "game_index": 132, "version": { "name": "pearl", "url": "https://pokeapi.co/api/v2/version/13/" } }, { "game_index": 132, "version": { "name": "platinum", "url": "https://pokeapi.co/api/v2/version/14/" } }, { "game_index": 132, "version": { "name": "heartgold", "url": "https://pokeapi.co/api/v2/version/15/" } }, { "game_index": 132, "version": { "name": "soulsilver", "url": "https://pokeapi.co/api/v2/version/16/" } }, { "game_index": 132, "version": { "name": "black", "url": "https://pokeapi.co/api/v2/version/17/" } }, { "game_index": 132, "version": { "name": "white", "url": "https://pokeapi.co/api/v2/version/18/" } }, { "game_index": 132, "version": { "name":
"black-2", "url": "https://pokeapi.co/api/v2/version/21/" } }, { "game_index": 132, "version": { "name": "white-2", "url": "https://pokeapi.co/api/v2/version/22/" } } ], "height": 3, "held_items": [ { "item": { "name": "metal-powder", "url": "https://pokeapi.co/api/v2/item/234/" }, "version_details": [ { "rarity": 5, "version": { "name": "ruby", "url": "https://pokeapi.co/api/v2/version/7/" } }, { "rarity": 5, "version": { "name": "sapphire", "url": "https://pokeapi.co/api/v2/version/8/" } }, { "rarity": 5, "version": { "name": "emerald", "url": "https://pokeapi.co/api/v2/version/9/" } }, { "rarity": 5, "version": { "name": "firered", "url": "https://pokeapi.co/api/v2/version/10/" } }, { "rarity": 5, "version": { "name": "leafgreen", "url": "https://pokeapi.co/api/v2/version/11/" } }, { "rarity": 5, "version": { "name": "diamond", "url": "https://pokeapi.co/api/v2/version/12/" } }, { "rarity": 5, "version": { "name": "pearl", "url": "https://pokeapi.co/api/v2/version/13/" } }, { "rarity": 5, "version": { "name": "platinum", "url": "https://pokeapi.co/api/v2/version/14/" } }, { "rarity": 5, "version": { "name": "heartgold", "url": "https://pokeapi.co/api/v2/version/15/" } }, { "rarity": 5, "version": { "name": "soulsilver", "url": "https://pokeapi.co/api/v2/version/16/" } }, { "rarity": 5, "version": { "name": "black", "url": "https://pokeapi.co/api/v2/version/17/" } }, { "rarity": 5, "version": { "name": "white", "url": "https://pokeapi.co/api/v2/version/18/" } }, { "rarity": 5, "version": { "name": "black-2", "url": "https://pokeapi.co/api/v2/version/21/" } }, { "rarity": 5, "version": { "name": "white-2", "url": "https://pokeapi.co/api/v2/version/22/" } }, { "rarity": 5, "version": { "name": "x", "url": "https://pokeapi.co/api/v2/version/23/" } }, { "rarity": 5, "version": { "name": "y", "url": "https://pokeapi.co/api/v2/version/24/" } }, { "rarity": 5, "version": { "name": "omega-ruby", "url": "https://pokeapi.co/api/v2/version/25/" } }, { "rarity": 5, "version": { "name": "alpha-sapphire", "url": "https://pokeapi.co/api/v2/version/26/" } }, { "rarity": 5, "version": { "name": "sun", "url": "https://pokeapi.co/api/v2/version/27/" } }, { "rarity": 5, "version": { "name": "moon", "url": "https://pokeapi.co/api/v2/version/28/" } }, { "rarity": 5, "version": { "name": "ultra-sun", "url": "https://pokeapi.co/api/v2/version/29/" } }, { "rarity": 5, "version": { "name": "ultra-moon", "url": "https://pokeapi.co/api/v2/version/30/" } } ] }, { "item": { "name": "quick-powder", "url": "https://pokeapi.co/api/v2/item/251/" }, "version_details": [ { "rarity": 50, "version": { "name": "diamond", "url": "https://pokeapi.co/api/v2/version/12/" } }, { "rarity": 50, "version": { "name": "pearl", "url": "https://pokeapi.co/api/v2/version/13/" } }, { "rarity": 50, "version": { "name": "platinum", "url": "https://pokeapi.co/api/v2/version/14/" } }, { "rarity": 50, "version": { "name": "heartgold", "url": "https://pokeapi.co/api/v2/version/15/" } }, { "rarity": 50, "version": { "name": "soulsilver", "url": "https://pokeapi.co/api/v2/version/16/" } }, { "rarity": 50, "version": { "name": "black", "url": "https://pokeapi.co/api/v2/version/17/" } }, { "rarity": 50, "version": { "name": "white", "url": "https://pokeapi.co/api/v2/version/18/" } }, { "rarity": 50, "version": { "name": "black-2", "url": "https://pokeapi.co/api/v2/version/21/" } }, { "rarity": 50, "version": { "name": "white-2", "url": "https://pokeapi.co/api/v2/version/22/" } }, { "rarity": 50, "version": { "name": "x", "url": 
"https://pokeapi.co/api/v2/version/23/" } }, { "rarity": 50, "version": { "name": "y", "url": "https://pokeapi.co/api/v2/version/24/" } }, { "rarity": 50, "version": { "name": "omega-ruby", "url": "https://pokeapi.co/api/v2/version/25/" } }, { "rarity": 50, "version": { "name": "alpha-sapphire", "url": "https://pokeapi.co/api/v2/version/26/" } }, { "rarity": 50, "version": { "name": "sun", "url": "https://pokeapi.co/api/v2/version/27/" } }, { "rarity": 50, "version": { "name": "moon", "url": "https://pokeapi.co/api/v2/version/28/" } }, { "rarity": 50, "version": { "name": "ultra-sun", "url": "https://pokeapi.co/api/v2/version/29/" } }, { "rarity": 50, "version": { "name": "ultra-moon", "url": "https://pokeapi.co/api/v2/version/30/" } } ] } ], "id": 132, "is_default": true, "location_area_encounters": "https://pokeapi.co/api/v2/pokemon/132/encounters", "moves": [ { "move": { "name": "transform", "url": "https://pokeapi.co/api/v2/move/144/" }, "version_group_details": [ { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "red-blue", "url": "https://pokeapi.co/api/v2/version-group/1/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "yellow", "url": "https://pokeapi.co/api/v2/version-group/2/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "gold-silver", "url": "https://pokeapi.co/api/v2/version-group/3/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "crystal", "url": "https://pokeapi.co/api/v2/version-group/4/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "ruby-sapphire", "url": "https://pokeapi.co/api/v2/version-group/5/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "emerald", "url": "https://pokeapi.co/api/v2/version-group/6/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "firered-leafgreen", "url": "https://pokeapi.co/api/v2/version-group/7/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "diamond-pearl", "url": "https://pokeapi.co/api/v2/version-group/8/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "platinum", "url": "https://pokeapi.co/api/v2/version-group/9/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "heartgold-soulsilver", "url": "https://pokeapi.co/api/v2/version-group/10/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "black-white", "url": "https://pokeapi.co/api/v2/version-group/11/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": 
"https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "colosseum", "url": "https://pokeapi.co/api/v2/version-group/12/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "xd", "url": "https://pokeapi.co/api/v2/version-group/13/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "black-2-white-2", "url": "https://pokeapi.co/api/v2/version-group/14/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "x-y", "url": "https://pokeapi.co/api/v2/version-group/15/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "omega-ruby-alpha-sapphire", "url": "https://pokeapi.co/api/v2/version-group/16/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "sun-moon", "url": "https://pokeapi.co/api/v2/version-group/17/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "ultra-sun-ultra-moon", "url": "https://pokeapi.co/api/v2/version-group/18/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "lets-go-pikachu-lets-go-eevee", "url": "https://pokeapi.co/api/v2/version-group/19/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "sword-shield", "url": "https://pokeapi.co/api/v2/version-group/20/" } }, { "level_learned_at": 1, "move_learn_method": { "name": "level-up", "url": "https://pokeapi.co/api/v2/move-learn-method/1/" }, "version_group": { "name": "scarlet-violet", "url": "https://pokeapi.co/api/v2/version-group/25/" } } ] } ], "name": "ditto", "order": 214, "species": { "name": "ditto", "url": "https://pokeapi.co/api/v2/pokemon-species/132/" }, "sprites": { "back_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/back/132.png", "back_female": null, "back_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/back/shiny/132.png", "back_shiny_female": null, "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/132.png", "front_female": null, "front_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/shiny/132.png", "front_shiny_female": null, "other": { "dream_world": { "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/other/dream-world/132.svg", "front_female": null }, "home": { "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/other/home/132.png", "front_female": null, "front_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/other/home/shiny/132.png", "front_shiny_female": null }, "official-artwork": { "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/other/official-artwork/132.png", "front_shiny": 
"https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/other/official-artwork/shiny/132.png" } }, "versions": { "generation-i": { "red-blue": { "back_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-i/red-blue/back/132.png", "back_gray": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-i/red-blue/back/gray/132.png", "back_transparent": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-i/red-blue/transparent/back/132.png", "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-i/red-blue/132.png", "front_gray": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-i/red-blue/gray/132.png", "front_transparent": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-i/red-blue/transparent/132.png" }, "yellow": { "back_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-i/yellow/back/132.png", "back_gray": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-i/yellow/back/gray/132.png", "back_transparent": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-i/yellow/transparent/back/132.png", "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-i/yellow/132.png", "front_gray": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-i/yellow/gray/132.png", "front_transparent": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-i/yellow/transparent/132.png" } }, "generation-ii": { "crystal": { "back_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/crystal/back/132.png", "back_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/crystal/back/shiny/132.png", "back_shiny_transparent": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/crystal/transparent/back/shiny/132.png", "back_transparent": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/crystal/transparent/back/132.png", "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/crystal/132.png", "front_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/crystal/shiny/132.png", "front_shiny_transparent": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/crystal/transparent/shiny/132.png", "front_transparent": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/crystal/transparent/132.png" }, "gold": { "back_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/gold/back/132.png", "back_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/gold/back/shiny/132.png", "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/gold/132.png", "front_shiny": 
"https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/gold/shiny/132.png", "front_transparent": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/gold/transparent/132.png" }, "silver": { "back_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/silver/back/132.png", "back_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/silver/back/shiny/132.png", "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/silver/132.png", "front_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/silver/shiny/132.png", "front_transparent": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-ii/silver/transparent/132.png" } }, "generation-iii": { "emerald": { "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iii/emerald/132.png", "front_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iii/emerald/shiny/132.png" }, "firered-leafgreen": { "back_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iii/firered-leafgreen/back/132.png", "back_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iii/firered-leafgreen/back/shiny/132.png", "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iii/firered-leafgreen/132.png", "front_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iii/firered-leafgreen/shiny/132.png" }, "ruby-sapphire": { "back_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iii/ruby-sapphire/back/132.png", "back_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iii/ruby-sapphire/back/shiny/132.png", "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iii/ruby-sapphire/132.png", "front_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iii/ruby-sapphire/shiny/132.png" } }, "generation-iv": { "diamond-pearl": { "back_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iv/diamond-pearl/back/132.png", "back_female": null, "back_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iv/diamond-pearl/back/shiny/132.png", "back_shiny_female": null, "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iv/diamond-pearl/132.png", "front_female": null, "front_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iv/diamond-pearl/shiny/132.png", "front_shiny_female": null }, "heartgold-soulsilver": { "back_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iv/heartgold-soulsilver/back/132.png", "back_female": null, "back_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iv/heartgold-soulsilver/back/shiny/132.png", 
"back_shiny_female": null, "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iv/heartgold-soulsilver/132.png", "front_female": null, "front_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iv/heartgold-soulsilver/shiny/132.png", "front_shiny_female": null }, "platinum": { "back_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iv/platinum/back/132.png", "back_female": null, "back_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iv/platinum/back/shiny/132.png", "back_shiny_female": null, "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iv/platinum/132.png", "front_female": null, "front_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-iv/platinum/shiny/132.png", "front_shiny_female": null } }, "generation-v": { "black-white": { "animated": { "back_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-v/black-white/animated/back/132.gif", "back_female": null, "back_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-v/black-white/animated/back/shiny/132.gif", "back_shiny_female": null, "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-v/black-white/animated/132.gif", "front_female": null, "front_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-v/black-white/animated/shiny/132.gif", "front_shiny_female": null }, "back_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-v/black-white/back/132.png", "back_female": null, "back_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-v/black-white/back/shiny/132.png", "back_shiny_female": null, "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-v/black-white/132.png", "front_female": null, "front_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-v/black-white/shiny/132.png", "front_shiny_female": null } }, "generation-vi": { "omegaruby-alphasapphire": { "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-vi/omegaruby-alphasapphire/132.png", "front_female": null, "front_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-vi/omegaruby-alphasapphire/shiny/132.png", "front_shiny_female": null }, "x-y": { "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-vi/x-y/132.png", "front_female": null, "front_shiny": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-vi/x-y/shiny/132.png", "front_shiny_female": null } }, "generation-vii": { "icons": { "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-vii/icons/132.png", "front_female": null }, "ultra-sun-ultra-moon": { "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-vii/ultra-sun-ultra-moon/132.png", "front_female": null, "front_shiny": 
"https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-vii/ultra-sun-ultra-moon/shiny/132.png", "front_shiny_female": null } }, "generation-viii": { "icons": { "front_default": "https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/versions/generation-viii/icons/132.png", "front_female": null } } } }, "stats": [ { "base_stat": 48, "effort": 1, "stat": { "name": "hp", "url": "https://pokeapi.co/api/v2/stat/1/" } }, { "base_stat": 48, "effort": 0, "stat": { "name": "attack", "url": "https://pokeapi.co/api/v2/stat/2/" } }, { "base_stat": 48, "effort": 0, "stat": { "name": "defense", "url": "https://pokeapi.co/api/v2/stat/3/" } }, { "base_stat": 48, "effort": 0, "stat": { "name": "special-attack", "url": "https://pokeapi.co/api/v2/stat/4/" } }, { "base_stat": 48, "effort": 0, "stat": { "name": "special-defense", "url": "https://pokeapi.co/api/v2/stat/5/" } }, { "base_stat": 48, "effort": 0, "stat": { "name": "speed", "url": "https://pokeapi.co/api/v2/stat/6/" } } ], "types": [ { "slot": 1, "type": { "name": "normal", "url": "https://pokeapi.co/api/v2/type/1/" } } ], "weight": 40 }, "emitted_at": 1673989852906 }} \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-timeplus/main.py b/airbyte-integrations/connectors/destination-timeplus/main.py new file mode 100755 index 000000000000..a6f1b6b49d3c --- /dev/null +++ b/airbyte-integrations/connectors/destination-timeplus/main.py @@ -0,0 +1,11 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import sys + +from destination_timeplus import DestinationTimeplus + +if __name__ == "__main__": + DestinationTimeplus().run(sys.argv[1:]) diff --git a/airbyte-integrations/connectors/destination-timeplus/metadata.yaml b/airbyte-integrations/connectors/destination-timeplus/metadata.yaml index 9cb94f5d8584..917a78f8494d 100644 --- a/airbyte-integrations/connectors/destination-timeplus/metadata.yaml +++ b/airbyte-integrations/connectors/destination-timeplus/metadata.yaml @@ -2,7 +2,7 @@ data: connectorSubtype: database connectorType: destination definitionId: f70a8ece-351e-4790-b37b-cb790bcd6d54 - dockerImageTag: 0.1.0 + dockerImageTag: 0.1.1 dockerRepository: airbyte/destination-timeplus githubIssueLabel: destination-timeplus icon: timeplus.svg @@ -20,5 +20,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: archived + supportLevel: community metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-timeplus/requirements.txt b/airbyte-integrations/connectors/destination-timeplus/requirements.txt new file mode 100755 index 000000000000..d6e1198b1ab1 --- /dev/null +++ b/airbyte-integrations/connectors/destination-timeplus/requirements.txt @@ -0,0 +1 @@ +-e . diff --git a/airbyte-integrations/connectors/destination-timeplus/setup.py b/airbyte-integrations/connectors/destination-timeplus/setup.py new file mode 100755 index 000000000000..c082df533d8c --- /dev/null +++ b/airbyte-integrations/connectors/destination-timeplus/setup.py @@ -0,0 +1,26 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk", + "timeplus~=1.2.1", +] + +TEST_REQUIREMENTS = ["pytest~=6.2"] + +setup( + name="destination_timeplus", + description="Destination implementation for Timeplus.", + author="Airbyte", + author_email="jove@timeplus.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/destination-timeplus/unit_tests/unit_test.py b/airbyte-integrations/connectors/destination-timeplus/unit_tests/unit_test.py new file mode 100755 index 000000000000..0b6359090af8 --- /dev/null +++ b/airbyte-integrations/connectors/destination-timeplus/unit_tests/unit_test.py @@ -0,0 +1,17 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from destination_timeplus import DestinationTimeplus + + +def test_type_mapping(): + expected = { + "float": {"type": "number"}, + "bool": {"type": "boolean"}, + "string": {"type": "string"}, + "integer": {"type": "integer"}, + "array(integer)": {"type": "array", "items": {"type": "integer"}}, + } + for k, v in expected.items(): + assert k == DestinationTimeplus.type_mapping(v) diff --git a/airbyte-integrations/connectors/destination-xata/.dockerignore b/airbyte-integrations/connectors/destination-xata/.dockerignore new file mode 100644 index 000000000000..40370594ddc6 --- /dev/null +++ b/airbyte-integrations/connectors/destination-xata/.dockerignore @@ -0,0 +1,5 @@ +* +!Dockerfile +!main.py +!destination_xata +!setup.py diff --git a/airbyte-integrations/connectors/destination-xata/Dockerfile b/airbyte-integrations/connectors/destination-xata/Dockerfile new file mode 100644 index 000000000000..1be67c517ca9 --- /dev/null +++ b/airbyte-integrations/connectors/destination-xata/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY destination_xata ./destination_xata + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.2 +LABEL io.airbyte.name=airbyte/destination-xata diff --git a/airbyte-integrations/connectors/destination-xata/README.md b/airbyte-integrations/connectors/destination-xata/README.md new file mode 100644 index 000000000000..e6153ac20ba1 --- /dev/null +++ b/airbyte-integrations/connectors/destination-xata/README.md @@ -0,0 +1,99 @@ +# Xata Destination + +This is the repository for the Xata destination connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/destinations/xata). 
+ +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.7.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/destinations/xata) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `destination_xata/spec.json` file. +Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `destination xata test creds` +and place them into `secrets/config.json`. + +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +python main.py discover --config secrets/config.json +python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + + +#### Build +**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):** +```bash +airbyte-ci connectors --name=destination-xata build +``` + +An image will be built with the tag `airbyte/destination-xata:dev`. + +**Via `docker build`:** +```bash +docker build -t airbyte/destination-xata:dev . +``` + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/destination-xata:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-xata:dev check --config /secrets/config.json +# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages +cat messages.jsonl | docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-xata:dev write --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` + +## Testing +You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): +```bash +airbyte-ci connectors --name=destination-xata test +``` + +### Customizing acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. 
See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. +If your connector requires you to create or destroy resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`. + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to the `MAIN_REQUIREMENTS` list. +* required for testing need to go to the `TEST_REQUIREMENTS` list. + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=destination-xata test` +2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). +3. Make sure the `metadata.yaml` content is up to date. +4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/destinations/xata.md`). +5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). +6. Pat yourself on the back for being an awesome contributor. +7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. + diff --git a/airbyte-integrations/connectors/destination-xata/bootstrap.md b/airbyte-integrations/connectors/destination-xata/bootstrap.md new file mode 100644 index 000000000000..bac35e3ae53c --- /dev/null +++ b/airbyte-integrations/connectors/destination-xata/bootstrap.md @@ -0,0 +1 @@ +# Xata Destination Connector diff --git a/airbyte-integrations/connectors/destination-xata/destination_xata/__init__.py b/airbyte-integrations/connectors/destination-xata/destination_xata/__init__.py new file mode 100644 index 000000000000..d03079997c13 --- /dev/null +++ b/airbyte-integrations/connectors/destination-xata/destination_xata/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +from .destination import DestinationXata + +__all__ = ["DestinationXata"] diff --git a/airbyte-integrations/connectors/destination-xata/destination_xata/destination.py b/airbyte-integrations/connectors/destination-xata/destination_xata/destination.py new file mode 100644 index 000000000000..a9698c49c446 --- /dev/null +++ b/airbyte-integrations/connectors/destination-xata/destination_xata/destination.py @@ -0,0 +1,79 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+# + +import logging +from typing import Any, Iterable, Mapping + +from airbyte_cdk import AirbyteLogger +from airbyte_cdk.destinations import Destination +from airbyte_cdk.models import AirbyteConnectionStatus, AirbyteMessage, ConfiguredAirbyteCatalog, Status, Type +from xata.client import XataClient +from xata.helpers import BulkProcessor + +__version__ = "0.0.1" + +logger = logging.getLogger("airbyte") + + +class DestinationXata(Destination): + def write( + self, config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog, input_messages: Iterable[AirbyteMessage] + ) -> Iterable[AirbyteMessage]: + """ + Reads the input stream of messages, config, and catalog to write data to the destination. + + This method returns an iterable (typically a generator of AirbyteMessages via yield) containing state messages received + in the input message stream. Outputting a state message means that every AirbyteRecordMessage which came before it has been + successfully persisted to the destination. This is used to ensure fault tolerance: if a sync fails before fully completing, + the source is given the last state message output from this method as the starting point of the next sync. + + :param config: dict of JSON configuration matching the configuration declared in spec.json + :param configured_catalog: The Configured Catalog describing the schema of the data being received and how it should be persisted in the + destination + :param input_messages: The stream of input messages received from the source + :return: Iterable of AirbyteStateMessages wrapped in AirbyteMessage structs + """ + + xata = XataClient(api_key=config["api_key"], db_url=config["db_url"]) + xata.set_header("user-agent", f"airbyte/destination-xata:{__version__}") + + bp = BulkProcessor(xata) + count = 0 + for message in input_messages: + if message.type == Type.RECORD: + # Put the record into the bulk processing queue + bp.put_record(message.record.stream, message.record.data) + count += 1 + if message.type == Type.STATE: + yield message + bp.flush_queue() + logger.info(bp.get_stats()) + if count != bp.get_stats()["total"] or bp.get_stats()["failed_batches"] != 0: + raise Exception( + "inconsistency found, expected %d records pushed, actual: %d with %d failures." + % (count, bp.get_stats()["total"], bp.get_stats()["failed_batches"]) + ) + + def check(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> AirbyteConnectionStatus: + """ + Tests if the input configuration can be used to successfully connect to the destination with the needed permissions, + e.g. if a provided API token or password can be used to connect and write to the destination.
+ + :param logger: Logging object to display debug/info/error to the logs + (logs will not be accessible via airbyte UI if they are not passed to this logger) + :param config: Json object containing the configuration of this destination, content of this json is as specified in + the properties of the spec.json file + + :return: AirbyteConnectionStatus indicating a Success or Failure + """ + try: + xata = XataClient(api_key=config["api_key"], db_url=config["db_url"]) + xata.set_header("user-agent", f"airbyte/destination-xata:{__version__}") + + r = xata.users().getUser() + if r.status_code != 200: + raise Exception("Invalid connection parameters.") + return AirbyteConnectionStatus(status=Status.SUCCEEDED) + except Exception as e: + return AirbyteConnectionStatus(status=Status.FAILED, message=f"An exception occurred: {repr(e)}") diff --git a/airbyte-integrations/connectors/destination-xata/destination_xata/spec.json b/airbyte-integrations/connectors/destination-xata/destination_xata/spec.json new file mode 100644 index 000000000000..6e73b6cec519 --- /dev/null +++ b/airbyte-integrations/connectors/destination-xata/destination_xata/spec.json @@ -0,0 +1,28 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/destinations/xata", + "supported_destination_sync_modes": ["append"], + "supportsIncremental": false, + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Destination Xata", + "type": "object", + "required": ["api_key", "db_url"], + "additionalProperties": true, + "properties": { + "api_key": { + "title": "API Key", + "description": "API Key to connect.", + "type": "string", + "order": 0, + "airbyte_secret": true + }, + "db_url": { + "title": "Database URL", + "description": "URL pointing to your workspace.", + "type": "string", + "order": 1, + "example": "https://my-workspace-abc123.us-east-1.xata.sh/db/nyc-taxi-fares:main" + } + } + } +} diff --git a/airbyte-integrations/connectors/destination-xata/integration_tests/integration_test.py b/airbyte-integrations/connectors/destination-xata/integration_tests/integration_test.py new file mode 100644 index 000000000000..b98d151d31d3 --- /dev/null +++ b/airbyte-integrations/connectors/destination-xata/integration_tests/integration_test.py @@ -0,0 +1,120 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + +import json +from typing import Any, Mapping +from unittest.mock import Mock + +import pytest +from airbyte_cdk.models import ( + AirbyteMessage, + AirbyteRecordMessage, + AirbyteStream, + ConfiguredAirbyteCatalog, + ConfiguredAirbyteStream, + DestinationSyncMode, + Status, + SyncMode, + Type, +) +from destination_xata import DestinationXata +from xata.client import XataClient + + +@pytest.fixture(name="config") +def config_fixture() -> Mapping[str, Any]: + with open("secrets/config.json", "r") as f: + return json.loads(f.read()) + + +@pytest.fixture(name="configured_catalog") +def configured_catalog_fixture() -> ConfiguredAirbyteCatalog: + stream_schema = {"type": "object", "properties": {"string_col": {"type": "string"}, "int_col": {"type": "integer"}}} + + append_stream = ConfiguredAirbyteStream( + stream=AirbyteStream(name="append_stream", json_schema=stream_schema, supported_sync_modes=[SyncMode.incremental]), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.append, + ) + # TODO implement overwrite + """ + overwrite_stream = ConfiguredAirbyteStream( + stream=AirbyteStream(name="overwrite_stream", json_schema=stream_schema, supported_sync_modes=[SyncMode.incremental]), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.overwrite, + ) + """ + return ConfiguredAirbyteCatalog(streams=[append_stream]) + + +def test_check_valid_config(config: Mapping): + outcome = DestinationXata().check(logger=Mock(), config=config) + assert outcome.status == Status.SUCCEEDED + + +def test_check_invalid_config(): + with open("integration_tests/invalid_config.json") as f: + config = json.load(f) + outcome = DestinationXata().check(logger=Mock(), config=config) + assert outcome.status == Status.FAILED + + +def test_write(config: Mapping): + test_schema = {"type": "object", "properties": {"str_col": {"type": "string"}, "int_col": {"type": "integer"}}} + + test_stream = ConfiguredAirbyteStream( + stream=AirbyteStream(name="test_stream", json_schema=test_schema, supported_sync_modes=[SyncMode.incremental]), + sync_mode=SyncMode.incremental, + destination_sync_mode=DestinationSyncMode.append, + ) + + records = [ + AirbyteMessage( + type=Type.RECORD, + record=AirbyteRecordMessage( + stream="test_stream", + data={ + "str_col": "example", + "int_col": 1, + }, + emitted_at=0, + ), + ) + ] + + # setup Xata workspace + xata = XataClient(api_key=config["api_key"], db_url=config["db_url"]) + db_name = xata.get_config()["dbName"] + # database exists ? + assert xata.databases().getDatabaseMetadata(db_name).status_code == 200, f"database '{db_name}' does not exist." + assert xata.table().createTable("test_stream").status_code == 201, "could not create table, if it already exists, please delete it."
+ assert ( + xata.table() + .setTableSchema( + "test_stream", + { + "columns": [ + {"name": "str_col", "type": "string"}, + {"name": "int_col", "type": "int"}, + ] + }, + ) + .status_code + == 200 + ), "failed to set table schema" + + dest = DestinationXata() + list(dest.write(config=config, configured_catalog=ConfiguredAirbyteCatalog(streams=[test_stream]), input_messages=records)) + + # fetch record + result = xata.data().queryTable("test_stream", {}) + assert result.status_code == 200 + assert len(result.json()["records"]) == 1 + + proof = result.json()["records"][0] + assert proof["str_col"] == "example" + assert proof["int_col"] == 1 + + # cleanup + assert xata.table().deleteTable("test_stream").status_code == 200 diff --git a/airbyte-integrations/connectors/destination-xata/integration_tests/invalid_config.json b/airbyte-integrations/connectors/destination-xata/integration_tests/invalid_config.json new file mode 100644 index 000000000000..36bd35acc0b5 --- /dev/null +++ b/airbyte-integrations/connectors/destination-xata/integration_tests/invalid_config.json @@ -0,0 +1,4 @@ +{ + "api_key": "husenvasen", + "database_url": "https://invalid" +} diff --git a/airbyte-integrations/connectors/destination-xata/main.py b/airbyte-integrations/connectors/destination-xata/main.py new file mode 100644 index 000000000000..76e7d8f087c0 --- /dev/null +++ b/airbyte-integrations/connectors/destination-xata/main.py @@ -0,0 +1,11 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import sys + +from destination_xata import DestinationXata + +if __name__ == "__main__": + DestinationXata().run(sys.argv[1:]) diff --git a/airbyte-integrations/connectors/destination-xata/metadata.yaml b/airbyte-integrations/connectors/destination-xata/metadata.yaml index cdb67521b926..9ff802b51f58 100644 --- a/airbyte-integrations/connectors/destination-xata/metadata.yaml +++ b/airbyte-integrations/connectors/destination-xata/metadata.yaml @@ -7,7 +7,7 @@ data: connectorSubtype: database connectorType: destination definitionId: 2a51c92d-0fb4-4e54-94d2-cce631f24d1f - dockerImageTag: 0.1.1 + dockerImageTag: 0.1.2 dockerRepository: airbyte/destination-xata githubIssueLabel: destination-xata icon: xata.svg @@ -20,5 +20,5 @@ data: ab_internal: sl: 100 ql: 100 - supportLevel: archived + supportLevel: community metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/destination-xata/requirements.txt b/airbyte-integrations/connectors/destination-xata/requirements.txt new file mode 100644 index 000000000000..d6e1198b1ab1 --- /dev/null +++ b/airbyte-integrations/connectors/destination-xata/requirements.txt @@ -0,0 +1 @@ +-e . diff --git a/airbyte-integrations/connectors/destination-xata/sample_files/configured_catalog.json b/airbyte-integrations/connectors/destination-xata/sample_files/configured_catalog.json new file mode 100644 index 000000000000..f526611d3df1 --- /dev/null +++ b/airbyte-integrations/connectors/destination-xata/sample_files/configured_catalog.json @@ -0,0 +1,13 @@ +{ + "streams": [ + { + "stream": { + "name": "issues", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + } + ] +} diff --git a/airbyte-integrations/connectors/destination-xata/setup.py b/airbyte-integrations/connectors/destination-xata/setup.py new file mode 100644 index 000000000000..5fcb33e94fbb --- /dev/null +++ b/airbyte-integrations/connectors/destination-xata/setup.py @@ -0,0 +1,23 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = ["airbyte-cdk", "xata==0.10.1"] + +TEST_REQUIREMENTS = ["pytest~=6.2"] + +setup( + name="destination_xata", + description="Destination implementation for Xata.io", + author="Philip Krauss ", + author_email="support@xata.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/destination-xata/unit_tests/unit_test.py b/airbyte-integrations/connectors/destination-xata/unit_tests/unit_test.py new file mode 100644 index 000000000000..51726247685a --- /dev/null +++ b/airbyte-integrations/connectors/destination-xata/unit_tests/unit_test.py @@ -0,0 +1,28 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +import unittest + +from xata.client import XataClient +from xata.helpers import BulkProcessor + + +class DestinationConnectorXataTestCase(unittest.TestCase): + def test_request(self): + xata = XataClient(db_url="https://unit_tests-mock.results-store.xata.sh/db/mock-db", api_key="mock-key") + bp = BulkProcessor(xata, thread_pool_size=1, batch_size=2, flush_interval=1) + stats = bp.get_stats() + + assert "total" in stats + assert "queue" in stats + assert "failed_batches" in stats + assert "tables" in stats + + assert stats["total"] == 0 + assert stats["queue"] == 0 + assert stats["failed_batches"] == 0 + + +if __name__ == "__main__": + unittest.main() diff --git a/docs/integrations/destinations/amazon-sqs.md b/docs/integrations/destinations/amazon-sqs.md index 1cf727ff6a34..f92c1a997e37 100644 --- a/docs/integrations/destinations/amazon-sqs.md +++ b/docs/integrations/destinations/amazon-sqs.md @@ -121,5 +121,6 @@ The output SQS message would contain: | Version | Date | Pull Request | Subject | | :------ | :--------- | :-------------------------------------------------------- | :-------------------------------- | +| 0.1.2 | 2024-03-05 | [#35838](https://github.com/airbytehq/airbyte/pull/35838) | Un-archive connector | | 0.1.1 | 2024-01-03 | [#33924](https://github.com/airbytehq/airbyte/pull/33924) | Add new ap-southeast-3 AWS region | -| 0.1.0 | 2021-10-27 | [\#0000](https://github.com/airbytehq/airbyte/pull/0000) | Initial version | +| 0.1.0 | 2021-10-27 | [#0000](https://github.com/airbytehq/airbyte/pull/0000) | Initial version | diff --git a/docs/integrations/destinations/cumulio.md b/docs/integrations/destinations/cumulio.md index 6cae834e0cd0..17f292f77cc4 100644 --- a/docs/integrations/destinations/cumulio.md +++ b/docs/integrations/destinations/cumulio.md @@ -2,24 +2,40 @@ ## General -The Airbyte Cumul.io destination connector allows you to stream data into Cumul.io from [any Airbyte Source](https://airbyte.io/connectors?connector-type=Sources). +The Airbyte Cumul.io destination connector allows you to stream data into Cumul.io from +[any Airbyte Source](https://airbyte.io/connectors?connector-type=Sources). -Cumul.io is an **[Embedded analytics SaaS solution](https://cumul.io/product/embedded-analytics)** that enables other SaaS companies to grow with an **engaging customer analytics experience**, seamlessly embedded in their product. Cumul.io's intuitive, low-code interface empowers business users with insight-driven actions in record time **without straining engineering resources from the core product**. 
+Cumul.io is an **[Embedded analytics SaaS solution](https://cumul.io/product/embedded-analytics)** +that enables other SaaS companies to grow with an **engaging customer analytics experience**, +seamlessly embedded in their product. Cumul.io's intuitive, low-code interface empowers business +users with insight-driven actions in record time **without straining engineering resources from the +core product**. ## Getting started -In order to use the Cumul.io destination, you'll first need to **create a [Cumul.io account](https://app.cumul.io/signup)** (if you don’t already have one). -After logging in to Cumul.io, you can **generate an API key and token** in your [Profile -> API Tokens](https://app.cumul.io/start/profile/integration). -To set up the destination connector in Airbyte, you'll need to provide the following Cumul.io properties: - -- "**Cumul.io API Host URL**": the API host URL for the **Cumul.io environment** where your **Cumul.io account resides** (i.e. `https://api.cumul.io` for EU multi-tenant users, `https://api.us.cumul.io/` for US multi-tenant users, or a VPC-specific address). This property depends on the environment in which your Cumul.io account was created (e.g. if you have signed up via https://app.us.cumul.io/signup, the API host URL would be `https://api.us.cumul.io/`). +In order to use the Cumul.io destination, you'll first need to **create a +[Cumul.io account](https://app.cumul.io/signup)** (if you don’t already have one). After logging in +to Cumul.io, you can **generate an API key and token** in your +[Profile -> API Tokens](https://app.cumul.io/start/profile/integration). To set up the destination +connector in Airbyte, you'll need to provide the following Cumul.io properties: + +- "**Cumul.io API Host URL**": the API host URL for the **Cumul.io environment** where your + **Cumul.io account resides** (i.e. `https://api.cumul.io` for EU multi-tenant users, + `https://api.us.cumul.io/` for US multi-tenant users, or a VPC-specific address). This property + depends on the environment in which your Cumul.io account was created (e.g. if you have signed up + via https://app.us.cumul.io/signup, the API host URL would be `https://api.us.cumul.io/`). - "**Cumul.io API key**": a Cumul.io API key (see above how to generate an API key-token pair) -- "**Cumul.io API token**": the corresponding Cumul.io API token (see above how to generate an API key-token pair) +- "**Cumul.io API token**": the corresponding Cumul.io API token (see above how to generate an API + key-token pair) -As soon as you've connected a source and the **first stream synchronization** has **succeeded**, the desired **Dataset(s)** will be **available in Cumul.io to build dashboards on** (Cumul.io's ["Getting started" Academy course](https://academy.cumul.io/course/a0bf5530-edfb-441e-901b-e1fcb95dfac7) might be interesting to get familiar with its platform). -Depending on the **synchronization mode** set up, the **next synchronizations** will either **replace/append data in/to these datasets**! +As soon as you've connected a source and the **first stream synchronization** has **succeeded**, the +desired **Dataset(s)** will be **available in Cumul.io to build dashboards on** (Cumul.io's +["Getting started" Academy course](https://academy.cumul.io/course/a0bf5530-edfb-441e-901b-e1fcb95dfac7) +might be interesting to get familiar with its platform). Depending on the **synchronization mode** +set up, the **next synchronizations** will either **replace or append data in these datasets**!
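+
+For illustration only, a destination configuration for these three properties might look like the
+following sketch (the property names here are assumed for readability; the authoritative names are
+defined in this connector's `spec.json`):
+
+```json
+{
+  "api_host": "https://api.cumul.io",
+  "api_key": "<your Cumul.io API key>",
+  "api_token": "<your Cumul.io API token>"
+}
+```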
-_If you have any questions or want to get started with Cumul.io, don't hesitate to reach out via [our contact page](https://cumul.io/contact)._ +_If you have any questions or want to get started with Cumul.io, don't hesitate to reach out via +[our contact page](https://cumul.io/contact)._ ## Connector overview @@ -50,48 +66,97 @@ _If you have any questions or want to get started with Cumul.io, don't hesitate | Boolean | Boolean values **will be stringified** ([recommended by Airbyte](https://docs.airbyte.com/understanding-airbyte/supported-data-types/#unsupported-types)) and result in a hierarchy column type (i.e. text/string, see [Cumul.io's data types Academy article](https://academy.cumul.io/article/p68253bn)). You could use Cumul.io's hierarchy translation (see [this Academy article](https://academy.cumul.io/article/dqgn0316)) to assign translations to `true` and `false` that are meaningful to the business user in the column's context. | | All other data types | Should be supported and correctly interpreted by Cumul.io's Data API service\*. | -\*_Note: It might be that Cumul.io's automatic typing could initially interpret this type of data wrongly due to its format (see `Possible future improvements` below), you could then alter the column type in the Cumul.io UI to try changing it manually._ +\*_Note: Cumul.io's automatic typing might initially misinterpret this type of data due to its +format (see `Possible future improvements` below); in that case, you can manually alter the column +type in the Cumul.io UI._ ### Output schema in Cumul.io -Each replicated stream from Airbyte will output data into a corresponding dataset in Cumul.io. Each dataset will **initially** have an **`Airbyte - <stream name>` English name** which can be **further adapted in Cumul.io's UI**, or even [via API](https://developer.cumul.io/#dashboard_update). If the request of pushing a batch of data fails, the connector will gracefully retry pushing the batch up to three times, with a backoff interval of 5 minutes, 10 minutes, and 20 minutes, respectively. +Each replicated stream from Airbyte will output data into a corresponding dataset in Cumul.io. Each +dataset will **initially** have an **`Airbyte - <stream name>` English name** which can +be **further adapted in Cumul.io's UI**, or even +[via API](https://developer.cumul.io/#dashboard_update). If a request to push a batch of data +fails, the connector will gracefully retry pushing the batch up to three times, with a backoff +interval of 5 minutes, 10 minutes, and 20 minutes, respectively (a minimal sketch of this +batch-and-retry behaviour follows the `Data recommendations` section below). The connector will **associate one or more of the following tags with each dataset**: -- `[AIRBYTE - DO NOT DELETE] - <stream name>`: this tag will be **used to retrieve the dataset ID and its current columns** from Cumul.io, and will be associated with the dataset after the first batch of data is written to a new dataset. -- `[AIRBYTE - DO NOT DELETE] - REPLACE DATA`: this tag will be **associated to a dataset** when it should be "resetted" (i.e. the **existing data should be replaced**, see `Feature` -> `Reset data` above). The first batch of data of the next synchronization will replace all existing data if this tag is present on a dataset. +- `[AIRBYTE - DO NOT DELETE] - <stream name>`: this tag will be **used to retrieve the + dataset ID and its current columns** from Cumul.io, and will be associated with the dataset after + the first batch of data is written to a new dataset.
+- `[AIRBYTE - DO NOT DELETE] - REPLACE DATA`: this tag will be **associated with a dataset** when it + should be "reset" (i.e. the **existing data should be replaced**, see `Feature` -> `Reset data` + above). The first batch of data of the next synchronization will replace all existing data if this + tag is present on a dataset. -As noted in the tag name, it is important to **never remove such tags from the dataset(s) nor manually set them** on other datasets. Doing so might break existing or new synchronizations! +As noted in the tag name, it is important to **never remove such tags from the dataset(s) nor +manually set them** on other datasets. Doing so might break existing or new synchronizations! ## Data recommendations ### Data structure -To ensure the most performant queries, we recommend to **denormalize your data as much as possible beforehand** (this ensures that the least amount of joins are required to achieve your desired insights). Denormalized datasets also ensure that they can be easily consumed by less technical users, who often do not understand relations between tables! Instead of denormalizing your datasets to specific insights, it is recommended to **set up one or more dimensional data models** that support all kinds of slicing and dicing within a dashboard: this ensures a **flexible & scalable setup** which is **easy-to-understand and performant-to-query**! +To ensure the most performant queries, we recommend **denormalizing your data as much as possible +beforehand** (this ensures that the fewest joins are required to achieve your desired +insights). Denormalized datasets also ensure that they can be easily consumed by less technical +users, who often do not understand relations between tables! Instead of denormalizing your datasets +to specific insights, it is recommended to **set up one or more dimensional data models** that +support all kinds of slicing and dicing within a dashboard: this ensures a **flexible & scalable +setup** which is **easy-to-understand and performant-to-query**! -This Cumul.io blog post goes into more detail on why customer-facing analytics requires a simple data model: https://blog.cumul.io/2022/12/07/why-a-dimensional-data-model-for-embedded-analytics/. +This Cumul.io blog post goes into more detail on why customer-facing analytics requires a simple +data model: https://blog.cumul.io/2022/12/07/why-a-dimensional-data-model-for-embedded-analytics/. ### Pushing data -Cumul.io uses an **OLAP database** to **ensure the most performant concurrent "Read" queries** on large amounts of data. OLAP databases, such as Cumul.io's database, are however often less suitable for a lot of "Write" queries with small amounts of data. +Cumul.io uses an **OLAP database** to **ensure the most performant concurrent "Read" queries** on +large amounts of data. OLAP databases, such as Cumul.io's database, are, however, often less +suitable for frequent "Write" queries with small amounts of data. -To ensure the best performance when writing data, we **recommend synchronizing larger amounts of data less frequently** rather than _smaller amounts of data more frequently_! +To ensure the best performance when writing data, we **recommend synchronizing larger amounts of +data less frequently** rather than _smaller amounts of data more frequently_!
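+
+A minimal sketch of the batch-and-retry behaviour referenced in the `Output schema` section above
+(`push_batch` is a hypothetical stand-in for the connector's Data API call; the real logic lives in
+`destination_cumulio/writer.py` and `destination_cumulio/client.py`):
+
+```python
+import time
+
+BACKOFF_MINUTES = (5, 10, 20)  # intervals between the three retries described above
+FLUSH_INTERVAL = 10_000  # maximum number of data points per Data API request
+
+
+def write_records(records, push_batch):
+    """Buffer records and push them in large batches, retrying failed pushes."""
+    buffer = []
+    for record in records:
+        buffer.append(record)
+        if len(buffer) >= FLUSH_INTERVAL:
+            push_with_retry(push_batch, buffer)
+            buffer = []
+    if buffer:  # flush whatever remains at the end of the sync
+        push_with_retry(push_batch, buffer)
+
+
+def push_with_retry(push_batch, batch):
+    for attempt, pause_minutes in enumerate((0,) + BACKOFF_MINUTES):
+        time.sleep(pause_minutes * 60)
+        try:
+            push_batch(batch)
+            return
+        except Exception:
+            if attempt == len(BACKOFF_MINUTES):  # initial attempt plus three retries exhausted
+                raise
+```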
## Possible future improvements -- In case of many concurrent synchronizations, the following issues might arise at one point (not evaluated yet): - - The combination of all write buffers' data could cause memory overload, in that case it might be interesting to alter the flush rate by changing the `flush_interval` variable in `destination_cumulio/writer.py` (currently set to 10 000, which is the maximum amount of data points that can be sent via Cumul.io's Data API service in a single request, see note [here](https://developer.cumul.io/#data_create)). We do recommend keeping the `flush_interval` value **as high as possible** to ensure the least amount of total overhead on all batches pushed! - - Having more than 200 concurrent Airbyte connections flushing the data simultaneously, and using the same Cumul.io API key and token for each connection, might run into [Cumul.io's API Rate limit](https://developer.cumul.io/#core_api_ratelimiting). As this will rarely occur due to Cumul.io's burstable rate limit, we recommend using separate API key and tokens for identical destination connectors in case you would expect such concurrency. Note that synchronizing multiple streams in a single connection will happen sequentially and thus not run into the rate limit. -The current connector will not take into account the Airbyte source data types, instead Cumul.io's API will automatically detect column types based on a random data sample. If Cumul.io's detected data type is not as desired, it's possible to alter the column's type via Cumul.io's UI to manually change the column type (e.g. if a `VARCHAR` column would only contain numeric values, it could initially be interpreted as a `numeric` column in Cumul.io but can at any point be changed to `hierarchy` if more appropriate). +- In case of many concurrent synchronizations, the following issues might arise at one point (not + evaluated yet): + - The combination of all write buffers' data could cause memory overload; in that case it might be + worth altering the flush rate by changing the `flush_interval` variable in + `destination_cumulio/writer.py` (currently set to 10 000, which is the maximum amount of data + points that can be sent via Cumul.io's Data API service in a single request, see note + [here](https://developer.cumul.io/#data_create)). We do recommend keeping the `flush_interval` + value **as high as possible** to ensure the least amount of total overhead on all batches + pushed! - Having more than 200 concurrent Airbyte connections flushing the data simultaneously, and using + the same Cumul.io API key and token for each connection, might run into + [Cumul.io's API Rate limit](https://developer.cumul.io/#core_api_ratelimiting). As this will + rarely occur due to Cumul.io's burstable rate limit, we recommend using separate API keys and + tokens for identical destination connectors in case you would expect such concurrency. Note that + synchronizing multiple streams in a single connection will happen sequentially and thus not run + into the rate limit. +- The current connector will not take into account the Airbyte source data types; instead, Cumul.io's + API will automatically detect column types based on a random data sample. If Cumul.io's detected + data type is not as desired, it's possible to manually change the column type via Cumul.io's UI + (e.g.
if a `VARCHAR` column would only contain numeric values, it + could initially be interpreted as a `numeric` column in Cumul.io but can at any point be changed + to `hierarchy` if more appropriate). - As a future improvement, it is possible to: - 1. Create a new dataset - [Create Dataset API Documentation](https://developer.cumul.io/#dataset_create) - 2. Create the appropriate tag (`[AIRBYTE - DO NOT DELETE] - <stream name>`) and associate it with the newly created dataset (in `destination_cumulio/client.py`, a method `_validate_tag_dataset_id_association(stream_name, dataset_id)` is defined which could be used for this step) - 3. Create each column with the correct Cumul.io type - [Create Column API Documentation](https://developer.cumul.io/#column_create) - 4. Associate each column with the dataset - [Associate Dataset Column API Documentation](https://developer.cumul.io/#column_assoc_dataset) - 5. From there on out, you can replace/append data for this dataset based on the tag (already implemented). + As a future improvement, it is possible to: + 1. Create a new dataset - + [Create Dataset API Documentation](https://developer.cumul.io/#dataset_create) + 2. Create the appropriate tag (`[AIRBYTE - DO NOT DELETE] - <stream name>`) and associate it + with the newly created dataset (in `destination_cumulio/client.py`, a method + `_validate_tag_dataset_id_association(stream_name, dataset_id)` is defined which could be + used for this step) + 3. Create each column with the correct Cumul.io type - + [Create Column API Documentation](https://developer.cumul.io/#column_create) + 4. Associate each column with the dataset - + [Associate Dataset Column API Documentation](https://developer.cumul.io/#column_assoc_dataset) + 5. From there on out, you can replace/append data for this dataset based on the tag (already + implemented). ## CHANGELOG -| Version | Date | Pull Request | Subject | -| :------ | :--------- | :----------- | :-------------------------------------------------- | -| 0.1.0 | 2023-02-16 | | Initial release of Cumul.io's Destination connector | +| Version | Date | Pull Request | Subject | +| :------ | :--------- | :-------------------------------------------------------- | :-------------------------------------------------- | +| 0.1.1 | 2024-03-05 | [#35838](https://github.com/airbytehq/airbyte/pull/35838) | Un-archive connector | +| 0.1.0 | 2023-02-16 | | Initial release of Cumul.io's Destination connector | diff --git a/docs/integrations/destinations/databend.md b/docs/integrations/destinations/databend.md index 444a47473a6d..ed99cee48691 100644 --- a/docs/integrations/destinations/databend.md +++ b/docs/integrations/destinations/databend.md @@ -1,27 +1,35 @@ # Databend -This page guides you through the process of setting up the [Databend](https://databend.rs/) destination connector. +This page guides you through the process of setting up the [Databend](https://databend.rs/) +destination connector. ## Features -| Feature | Supported?\(Yes/No\) | Notes | -| :--- | :--- | :--- | -| Full Refresh Sync | Yes | | -| Incremental - Append Sync | Yes | | - +| Feature | Supported?\(Yes/No\) | Notes | +| :------------------------ | :------------------- | :---- | +| Full Refresh Sync | Yes | | +| Incremental - Append Sync | Yes | | #### Output Schema Each stream will be output into its own table in Databend. Each table will contain 3 columns: -* `_airbyte_ab_id`: a uuid assigned by Airbyte to each event that is processed. The column type in Databend is `VARCHAR`. -* `_airbyte_emitted_at`: a timestamp representing when the event was pulled from the data source.
The column type in Databend is `TIMESTAMP`. -* `_airbyte_data`: a json blob representing with the event data. The column type in Databend is `VARVHAR`. +- `_airbyte_ab_id`: a uuid assigned by Airbyte to each event that is processed. The column type in + Databend is `VARCHAR`. +- `_airbyte_emitted_at`: a timestamp representing when the event was pulled from the data source. + The column type in Databend is `TIMESTAMP`. +- `_airbyte_data`: a json blob representing the event data. The column type in Databend is + `VARCHAR` (a DDL sketch of this layout is shown below, just before the changelog). + ## Getting Started (Airbyte Cloud) + Coming soon... ## Getting Started (Airbyte Open Source) -You can follow the [Connecting to a Warehouse docs](https://docs.databend.com/using-databend-cloud/warehouses/connecting-a-warehouse) to get the user, password, host etc. + +You can follow the +[Connecting to a Warehouse docs](https://docs.databend.com/using-databend-cloud/warehouses/connecting-a-warehouse) +to get the user, password, host, etc. Or you can create such a user by running: @@ -31,34 +39,39 @@ GRANT CREATE ON * TO airbyte_user; Make sure the Databend user has the following permissions: -* can create tables and write rows. -* can create databases e.g: +- can create tables and write rows. +- can create databases e.g: You can also use a pre-existing user but we highly recommend creating a dedicated user for Airbyte. - #### Target Database -You will need to choose an existing database or create a new database that will be used to store synced data from Airbyte. +You will need to choose an existing database or create a new database that will be used to store +synced data from Airbyte. ### Setup the Databend Destination in Airbyte -You should now have all the requirements needed to configure Databend as a destination in the UI. You'll need the following information to configure the Databend destination: +You should now have all the requirements needed to configure Databend as a destination in the UI. +You'll need the following information to configure the Databend destination: -* **Host** -* **Port** -* **Username** -* **Password** -* **Database** +- **Host** +- **Port** +- **Username** +- **Password** +- **Database** ## Compatibility -If your databend version >= v0.9.0 or later, you need to use databend-sqlalchemy version >= v0.1.0. And the [Databend Cloud](https://app.databend.com/) will only support databend version > 0.9.0. -## Changelog +If your databend version is v0.9.0 or later, you need to use databend-sqlalchemy version >= v0.1.0. +Note that [Databend Cloud](https://app.databend.com/) will only support databend version > 0.9.0.
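+
+As a concrete illustration of the Output Schema section above, the raw table this connector
+produces for a stream would look roughly like the following DDL (the table name is hypothetical;
+the actual statement is generated by the connector):
+
+```sql
+CREATE TABLE _airbyte_raw_users (
+    _airbyte_ab_id      VARCHAR,   -- uuid assigned by Airbyte to each event
+    _airbyte_emitted_at TIMESTAMP, -- when the event was pulled from the source
+    _airbyte_data       VARCHAR    -- the event data as a JSON blob
+);
+```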
-| Version | Date | Pull Request | Subject | -|:--------|:-----------|:---------------------------------------------------------|:------------------------------------------------| -| 0.1.2 | 2023-02-11 | [22855](https://github.com/airbytehq/airbyte/pull/22855) | Fix compatibility issue with databend-query 0.9 | -| 0.1.1 | 2022-01-09 | [21182](https://github.com/airbytehq/airbyte/pull/21182) | Remove protocol option and enforce HTTPS | -| 0.1.0 | 2022-01-09 | [20909](https://github.com/airbytehq/airbyte/pull/20909) | Destination Databend | +## Changelog +| Version | Date | Pull Request | Subject | +| :------ | :--------- | :-------------------------------------------------------- | :------------------------------------------------ | +| 0.1.3 | 2024-03-05 | [#35838](https://github.com/airbytehq/airbyte/pull/35838) | Un-archive connector | +| 0.1.2 | 2023-02-11 | [22855](https://github.com/airbytehq/airbyte/pull/22855) | Fix compatibility issue with databend-query 0.9 | +| 0.1.1 | 2022-01-09 | [21182](https://github.com/airbytehq/airbyte/pull/21182) | Remove protocol option and enforce HTTPS | +| 0.1.0 | 2022-01-09 | [20909](https://github.com/airbytehq/airbyte/pull/20909) | Destination Databend | diff --git a/docs/integrations/destinations/firebolt.md b/docs/integrations/destinations/firebolt.md index 555dbae6ac4d..2af5901da50d 100644 --- a/docs/integrations/destinations/firebolt.md +++ b/docs/integrations/destinations/firebolt.md @@ -6,69 +6,92 @@ This page guides you through the process of setting up the Firebolt destination This Firebolt destination connector has two replication strategies: -1. SQL: Replicates data via SQL INSERT queries. This leverages [Firebolt SDK](https://pypi.org/project/firebolt-sdk/) to execute queries directly on Firebolt [Engines](https://docs.firebolt.io/working-with-engines/understanding-engine-fundamentals.html). **Not recommended for production workloads as this does not scale well**. +1. SQL: Replicates data via SQL INSERT queries. This leverages + [Firebolt SDK](https://pypi.org/project/firebolt-sdk/) to execute queries directly on Firebolt + [Engines](https://docs.firebolt.io/working-with-engines/understanding-engine-fundamentals.html). + **Not recommended for production workloads as this does not scale well**. -2. S3: Replicates data by first uploading data to an S3 bucket, creating an External Table and writing into a final Fact Table. This is the recommended loading [approach](https://docs.firebolt.io/loading-data/loading-data.html). Requires an S3 bucket and credentials in addition to Firebolt credentials. +2. S3: Replicates data by first uploading data to an S3 bucket, creating an External Table and + writing into a final Fact Table. This is the recommended loading + [approach](https://docs.firebolt.io/loading-data/loading-data.html). Requires an S3 bucket and + credentials in addition to Firebolt credentials. For SQL strategy: -* **Host** -* **Username** -* **Password** -* **Database** -* **Engine (optional)** +- **Host** +- **Username** +- **Password** +- **Database** +- **Engine (optional)** -Airbyte automatically picks an approach depending on the given configuration - if S3 configuration is present, Airbyte will use the S3 strategy. +Airbyte automatically picks an approach depending on the given configuration - if S3 configuration +is present, Airbyte will use the S3 strategy.
For S3 strategy: -* **Username** -* **Password** -* **Database** -* **S3 Bucket Name** - * See [this](https://docs.aws.amazon.com/AmazonS3/latest/userguide/create-bucket-overview.html) to create an S3 bucket. -* **S3 Bucket Region** - * Create the S3 bucket on the same region as the Firebolt database. -* **Access Key Id** - * See [this](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys) on how to generate an access key. - * We recommend creating an Airbyte-specific user. This user will require [read, write and delete permissions](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_examples_s3_rw-bucket.html) to objects in the staging bucket. -* **Secret Access Key** - * Corresponding key to the above key id. -* **Host (optional)** - * Firebolt backend URL. Can be left blank for most usecases. -* **Engine (optional)** - * If connecting to a non-default engine you should specify its name or url here. +- **Username** +- **Password** +- **Database** +- **S3 Bucket Name** + - See [this](https://docs.aws.amazon.com/AmazonS3/latest/userguide/create-bucket-overview.html) to + create an S3 bucket. +- **S3 Bucket Region** + - Create the S3 bucket in the same region as the Firebolt database. +- **Access Key Id** + - See + [this](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys) + on how to generate an access key. + - We recommend creating an Airbyte-specific user. This user will require + [read, write and delete permissions](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_examples_s3_rw-bucket.html) + to objects in the staging bucket. +- **Secret Access Key** + - Corresponding key to the above key id. +- **Host (optional)** + - Firebolt backend URL. Can be left blank for most use cases. +- **Engine (optional)** + - If connecting to a non-default engine, you should specify its name or URL here. ## Setup guide -1. Create a Firebolt account following the [guide](https://docs.firebolt.io/managing-your-account/creating-an-account.html) -1. Follow the getting started [tutorial](https://docs.firebolt.io/getting-started.html) to setup a database. -1. Create a General Purpose (read-write) engine as described in [here](https://docs.firebolt.io/working-with-engines/working-with-engines-using-the-firebolt-manager.html) -1. (Optional) [Create](https://docs.aws.amazon.com/AmazonS3/latest/userguide/create-bucket-overview.html) a staging S3 bucket \(for the S3 strategy\). -1. (Optional) [Create](https://docs.aws.amazon.com/AmazonS3/latest/userguide/using-iam-policies.html) an IAM with programmatic access to read, write and delete objects from an S3 bucket. - +1. Create a Firebolt account following the + [guide](https://docs.firebolt.io/managing-your-account/creating-an-account.html) +1. Follow the getting started [tutorial](https://docs.firebolt.io/getting-started.html) to set up a + database. +1. Create a General Purpose (read-write) engine as described + [here](https://docs.firebolt.io/working-with-engines/working-with-engines-using-the-firebolt-manager.html) +1. (Optional) + [Create](https://docs.aws.amazon.com/AmazonS3/latest/userguide/create-bucket-overview.html) a + staging S3 bucket \(for the S3 strategy\). +1. (Optional) + [Create](https://docs.aws.amazon.com/AmazonS3/latest/userguide/using-iam-policies.html) an IAM + user with programmatic access to read, write and delete objects from an S3 bucket.
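+
+Putting the fields above together, an S3-strategy configuration might look like the following
+sketch (all property names and values are illustrative placeholders; the authoritative names are
+defined in the connector's spec):
+
+```json
+{
+  "username": "airbyte@example.com",
+  "password": "***",
+  "database": "airbyte_db",
+  "s3_bucket": "airbyte-firebolt-staging",
+  "s3_region": "us-east-1",
+  "aws_key_id": "AKIAXXXXXXXXXXXXXXXX",
+  "aws_key_secret": "***"
+}
+```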
## Supported sync modes -The Firebolt destination connector supports the following [sync modes](https://docs.airbyte.com/cloud/core-concepts/#connection-sync-mode): +The Firebolt destination connector supports the following +[sync modes](https://docs.airbyte.com/cloud/core-concepts/#connection-sync-mode): + - Full Refresh - Incremental - Append Sync - ## Connector-specific features & highlights - ### Output schema -Each stream will be output into its own raw [Fact table](https://docs.firebolt.io/working-with-tables.html#fact-and-dimension-tables) in Firebolt. Each table will contain 3 columns: - -* `_airbyte_ab_id`: a uuid assigned by Airbyte to each event that is processed. The column type in Firebolt is `VARCHAR`. -* `_airbyte_emitted_at`: a timestamp representing when the event was pulled from the data source. The column type in Firebolt is `TIMESTAMP`. -* `_airbyte_data`: a json blob representing the event data. The column type in Firebolt is `VARCHAR` but can be be parsed with JSON functions. +Each stream will be output into its own raw +[Fact table](https://docs.firebolt.io/working-with-tables.html#fact-and-dimension-tables) in +Firebolt. Each table will contain 3 columns: +- `_airbyte_ab_id`: a uuid assigned by Airbyte to each event that is processed. The column type in + Firebolt is `VARCHAR`. +- `_airbyte_emitted_at`: a timestamp representing when the event was pulled from the data source. + The column type in Firebolt is `TIMESTAMP`. +- `_airbyte_data`: a json blob representing the event data. The column type in Firebolt is `VARCHAR` + but can be parsed with JSON functions. ## Changelog -| Version | Date | Pull Request | Subject | -|:--------|:-----------| :----- | :------ | -| 0.1.0 | 2022-05-18 | [13118](https://github.com/airbytehq/airbyte/pull/13118) | New Destination: Firebolt | +| Version | Date | Pull Request | Subject | +| :------ | :--------- | :-------------------------------------------------------- | :------------------------ | +| 0.1.1 | 2024-03-05 | [#35838](https://github.com/airbytehq/airbyte/pull/35838) | Un-archive connector | +| 0.1.0 | 2022-05-18 | [13118](https://github.com/airbytehq/airbyte/pull/13118) | New Destination: Firebolt | diff --git a/docs/integrations/destinations/kvdb.md b/docs/integrations/destinations/kvdb.md index cb8ba2c53cb1..d548c50ce341 100644 --- a/docs/integrations/destinations/kvdb.md +++ b/docs/integrations/destinations/kvdb.md @@ -20,9 +20,10 @@ TODO ## Changelog -| Version | Date | Pull Request | Subject | -| :------ | :--------- | :------------------------------------------------------- | :------------------------------------------------------------------------- | -| 0.1.3 | 2024-02-19 | [xxx](https://github.com/airbytehq/airbyte/pull/xxx) | bump connector version to publish, convert to base docker image and poetry | -| 0.1.2 | 2024-02-19 | [35422](https://github.com/airbytehq/airbyte/pull/35422) | bump connector version to publish | -| 0.1.1 | 2024-02-16 | [35370](https://github.com/airbytehq/airbyte/pull/35370) | bump connector version to publish | -| 0.1.0 | 2021-07-19 | [4786](https://github.com/airbytehq/airbyte/pull/4786) | Python Demo Destination: KVDB | +| Version | Date | Pull Request | Subject | +| :------ | :--------- | :-------------------------------------------------------- | :------------------------------------------------------------------------- | +| 0.1.4 | 2024-03-05 | [#35838](https://github.com/airbytehq/airbyte/pull/35838) | Un-archive connector | +| 0.1.3 | 2024-02-19 |
[xxx](https://github.com/airbytehq/airbyte/pull/xxx) | bump connector version to publish, convert to base docker image and poetry | +| 0.1.2 | 2024-02-19 | [35422](https://github.com/airbytehq/airbyte/pull/35422) | bump connector version to publish | +| 0.1.1 | 2024-02-16 | [35370](https://github.com/airbytehq/airbyte/pull/35370) | bump connector version to publish | +| 0.1.0 | 2021-07-19 | [4786](https://github.com/airbytehq/airbyte/pull/4786) | Python Demo Destination: KVDB | diff --git a/docs/integrations/destinations/meilisearch.md b/docs/integrations/destinations/meilisearch.md index f788f9613057..86992d21198e 100644 --- a/docs/integrations/destinations/meilisearch.md +++ b/docs/integrations/destinations/meilisearch.md @@ -2,13 +2,17 @@ ## Overview -The Airbyte MeilSearch destination allows you to sync data to MeiliSearch. MeiliSearch is a search engine that makes it easy for a non-developer to search through data. It does not require any SQL. +The Airbyte MeiliSearch destination allows you to sync data to MeiliSearch. MeiliSearch is a search +engine that makes it easy for a non-developer to search through data. It does not require any SQL. ### Sync overview #### Output schema -Each stream will be output into its own index in MeiliSearch. Each table will be named after the stream with all non-alpha numeric characters removed. Each table will contain one column per top-levelfield in a stream. In addition, it will contain a table called `_ab_pk`. This column is used internally by Airbyte to prevent records from getting overwritten and can be ignored. +Each stream will be output into its own index in MeiliSearch. Each table will be named after the +stream with all non-alphanumeric characters removed. Each table will contain one column per +top-level field in a stream. In addition, it will contain a column called `_ab_pk`. This column is +used internally by Airbyte to prevent records from getting overwritten and can be ignored. #### Features @@ -23,19 +27,27 @@ Each stream will be output into its own index in MeiliSearch. Each table will be ### Requirements -To use the MeiliSearch destination, you'll need an existing MeiliSearch instance. You can learn about how to create one in the [MeiliSearch docs](https://www.meilisearch.com/docs/learn/getting_started/installation). +To use the MeiliSearch destination, you'll need an existing MeiliSearch instance. You can learn +about how to create one in the +[MeiliSearch docs](https://www.meilisearch.com/docs/learn/getting_started/installation). ### Setup guide -The setup only requires two fields. First is the `host` which is the address at which MeiliSearch can be reached. If running on a localhost by default it will be on `http://localhost:7700`. Note that you must include the protocol. The second piece of information is the API key. If no API key is set for your MeiliSearch instance, then this field can be left blank. If it is set, you can find the value for your API by following these [instructions](https://docs.meilisearch.com/reference/features/authentication.html#master-key). in the MeiliSearch docs. +The setup only requires two fields. First is the `host` which is the address at which MeiliSearch +can be reached. If running on localhost, by default it will be at `http://localhost:7700`. Note +that you must include the protocol. The second piece of information is the API key. If no API key is +set for your MeiliSearch instance, then this field can be left blank.
If it is set, you can find the +value for your API by following these +[instructions](https://docs.meilisearch.com/reference/features/authentication.html#master-key). in +the MeiliSearch docs. ## Changelog -| Version | Date | Pull Request | Subject | -| :------ | :--------- | :------------------------------------------------------- | :----------------------------------------------------- | -| 1.0.1 | 2023-12-19 | [27692](https://github.com/airbytehq/airbyte/pull/27692) | Fix incomplete data indexing | -| 1.0.0 | 2022-10-26 | [18036](https://github.com/airbytehq/airbyte/pull/18036) | Migrate MeiliSearch to Python CDK | -| 0.2.13 | 2022-06-17 | [13864](https://github.com/airbytehq/airbyte/pull/13864) | Updated stacktrace format for any trace message errors | -| 0.2.12 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option | -| 0.2.11 | 2021-12-28 | [9156](https://github.com/airbytehq/airbyte/pull/9156) | Update connector fields title/description | - +| Version | Date | Pull Request | Subject | +| :------ | :--------- | :-------------------------------------------------------- | :----------------------------------------------------- | +| 1.0.2 | 2024-03-05 | [#35838](https://github.com/airbytehq/airbyte/pull/35838) | Un-archive connector | +| 1.0.1 | 2023-12-19 | [27692](https://github.com/airbytehq/airbyte/pull/27692) | Fix incomplete data indexing | +| 1.0.0 | 2022-10-26 | [18036](https://github.com/airbytehq/airbyte/pull/18036) | Migrate MeiliSearch to Python CDK | +| 0.2.13 | 2022-06-17 | [13864](https://github.com/airbytehq/airbyte/pull/13864) | Updated stacktrace format for any trace message errors | +| 0.2.12 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option | +| 0.2.11 | 2021-12-28 | [9156](https://github.com/airbytehq/airbyte/pull/9156) | Update connector fields title/description | diff --git a/docs/integrations/destinations/rabbitmq.md b/docs/integrations/destinations/rabbitmq.md index b1cb1a730236..71ccdc0238f6 100644 --- a/docs/integrations/destinations/rabbitmq.md +++ b/docs/integrations/destinations/rabbitmq.md @@ -2,7 +2,8 @@ ## Overview -The RabbitMQ destination allows you to send/stream data to a RabbitMQ routing key. RabbitMQ is one of the most popular open source message brokers. +The RabbitMQ destination allows you to send/stream data to a RabbitMQ routing key. RabbitMQ is one +of the most popular open source message brokers. ### Sync overview @@ -35,13 +36,15 @@ To use the RabbitMQ destination, you'll need: - A RabbitMQ host and credentials (username/password) to publish messages, if required. - A RabbitMQ routing key. -- RabbitMQ exchange is optional. If specified, a binding between exchange and routing key is required. +- RabbitMQ exchange is optional. If specified, a binding between exchange and routing key is + required. - RabbitMQ port is optional (it defaults to 5672). - RabbitMQ virtual host is also optional. 
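
The settings above map one-to-one onto an AMQP client connection. As a minimal sketch using the `pika` library — with placeholder host, credentials, and routing key, illustrating the configuration rather than the connector's own implementation — publishing a record looks like this:

```python
import json

import pika  # standard Python RabbitMQ client

# Placeholder connection details matching the requirements listed above.
credentials = pika.PlainCredentials("guest", "guest")
parameters = pika.ConnectionParameters(
    host="localhost",       # RabbitMQ host
    port=5672,              # optional; 5672 is the default
    virtual_host="/",       # optional virtual host
    credentials=credentials,
)

connection = pika.BlockingConnection(parameters)
channel = connection.channel()

# With the default ("") exchange, the routing key addresses a queue directly;
# a named exchange must already be bound to the routing key.
channel.basic_publish(exchange="", routing_key="airbyte.records", body=json.dumps({"id": 1}))
connection.close()
```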
## CHANGELOG

-| Version | Date             | Pull Request | Subject |
-| :------ | :--------------- | :------------------------------------------------------- | :---------------------------------------------- |
-| 0.1.1   | 2022-09-09       | [16528](https://github.com/airbytehq/airbyte/pull/16528) | Marked password field in spec as airbyte_secret |
-| 0.1.0   | October 29, 2021 | [\#7560](https://github.com/airbytehq/airbyte/pull/7560) | Initial release                                 |
+| Version | Date             | Pull Request                                               | Subject                                          |
+| :------ | :--------------- | :--------------------------------------------------------- | :----------------------------------------------- |
+| 0.1.2   | 2024-03-05       | [#35838](https://github.com/airbytehq/airbyte/pull/35838) | Un-archive connector                             |
+| 0.1.1   | 2022-09-09       | [16528](https://github.com/airbytehq/airbyte/pull/16528)  | Marked password field in spec as airbyte_secret |
+| 0.1.0   | October 29, 2021 | [\#7560](https://github.com/airbytehq/airbyte/pull/7560)  | Initial release                                 |
diff --git a/docs/integrations/destinations/timeplus.md b/docs/integrations/destinations/timeplus.md
index d883fc1b3726..f7c21bf4fe6e 100644
--- a/docs/integrations/destinations/timeplus.md
+++ b/docs/integrations/destinations/timeplus.md
@@ -1,37 +1,42 @@
 # Timeplus

-This page guides you through the process of setting up the [Timeplus](https://timeplus.com) destination connector.
+This page guides you through the process of setting up the [Timeplus](https://timeplus.com)
+destination connector.

 ## Features

-| Feature | Supported?\(Yes/No\) | Notes |
-| :--- | :--- | :--- |
-| Overwrite | Yes | |
-| Incremental - Append Sync | Yes | |
-
+| Feature                   | Supported?\(Yes/No\) | Notes |
+| :------------------------ | :------------------- | :---- |
+| Overwrite                 | Yes                  |       |
+| Incremental - Append Sync | Yes                  |       |

 #### Output Schema

 Each stream will be output into its own stream in Timeplus, with corresponding schema/columns.
+
 ## Getting Started (Airbyte Cloud)
+
 Coming soon...

 ## Getting Started (Airbyte Open Source)
-You can follow the [Quickstart with Timeplus Ingestion API](https://docs.timeplus.com/quickstart-ingest-api) to createa a workspace and API key.
+
+You can follow the
+[Quickstart with Timeplus Ingestion API](https://docs.timeplus.com/quickstart-ingest-api) to create
+a workspace and API key.

 ### Setup the Timeplus Destination in Airbyte

-You should now have all the requirements needed to configure Timeplus as a destination in the UI. You'll need the following information to configure the Timeplus destination:
+You should now have all the requirements needed to configure Timeplus as a destination in the UI.
+You'll need the following information to configure the Timeplus destination:

-* **Endpoint** example https://us.timeplus.cloud/randomId123
-* **API key**
+- **Endpoint**, for example `https://us.timeplus.cloud/randomId123`
+- **API key**

 ## Compatibility

-
 ## Changelog

-| Version | Date       | Pull Request                                              | Subject                                          |
-|:--------|:-----------|:---------------------------------------------------------|:-------------------------------------------------|
-| 0.1.0   | 2023-06-14 | [21226](https://github.com/airbytehq/airbyte/pull/21226)  | Destination Timeplus                             |
-
+| Version | Date       | Pull Request                                               | Subject              |
+| :------ | :--------- | :--------------------------------------------------------- | :------------------- |
+| 0.1.1   | 2024-03-05 | [#35838](https://github.com/airbytehq/airbyte/pull/35838) | Un-archive connector |
+| 0.1.0   | 2023-06-14 | [21226](https://github.com/airbytehq/airbyte/pull/21226)  | Destination Timeplus |
diff --git a/docs/integrations/destinations/xata.md b/docs/integrations/destinations/xata.md
index 148cf17a77e0..3206de4768e8 100644
--- a/docs/integrations/destinations/xata.md
+++ b/docs/integrations/destinations/xata.md
@@ -11,8 +11,9 @@ Conventions:

 - The `stream` name will define the name of the table in Xata.
 - The `message` data will be mapped one by one to the table schema.

-For example, a stream name `nyc_taxi_fares_2022` will attempt to write to a table with the same name.
-If the message has the following shape:
+For example, a stream name `nyc_taxi_fares_2022` will attempt to write to a table with the same
+name. If the message has the following shape:
+
 ```
 {
   "name": "Yellow Cab, co",
@@ -20,17 +21,22 @@ If the message has the following shape:
   "driver": "Joe Doe"
 }
 ```
-the table must have the same columns, mapping the names and [data types](https://xata.io/docs/concepts/data-model), one-by-one.
+
+the table must have the same columns, mapping the names and
+[data types](https://xata.io/docs/concepts/data-model), one-by-one.

 ## Getting Started

 In order to connect, you need:
+
-* API Key: go to your [account settings](https://app.xata.io/settings) to generate a key.
-* Database URL: navigate to the configuration tab in your workspace and copy the `Workspace API base URL`.
+- API Key: go to your [account settings](https://app.xata.io/settings) to generate a key.
+- Database URL: navigate to the configuration tab in your workspace and copy the
+  `Workspace API base URL`.
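
To make the table-per-stream convention concrete, here is a rough sketch of writing one record shaped like the `nyc_taxi_fares_2022` example over HTTP. The `/tables/<stream>/data` path is an assumed endpoint shape for illustration only, and the key and URL are placeholders; consult the Xata API reference for the exact route:

```python
import requests

API_KEY = "xau_placeholder"  # generated in your Xata account settings
DATABASE_URL = "https://example.xata.sh/db/demo:main"  # the `Workspace API base URL` (placeholder)

# One message, shaped like the example above; its fields must match the table's columns.
record = {"name": "Yellow Cab, co", "date": "2022-06-01", "driver": "Joe Doe"}

# Assumed endpoint shape: one table per stream, one insert per message.
response = requests.post(
    f"{DATABASE_URL}/tables/nyc_taxi_fares_2022/data",
    headers={"Authorization": f"Bearer {API_KEY}"},
    json=record,
)
response.raise_for_status()
```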
## CHANGELOG -| Version | Date | Pull Request | Subject | -|:--------|:-----------|:--------------------------------------------------------------|:------------------------| -| 0.1.1 | 2023-06-21 | [#27542](https://github.com/airbytehq/airbyte/pull/27542) | Mark api_key as Airbyte Secret | -| 0.1.0 | 2023-06-14 | [#24192](https://github.com/airbytehq/airbyte/pull/24192) | New Destination Connector Xata | +| Version | Date | Pull Request | Subject | +| :------ | :--------- | :-------------------------------------------------------- | :----------------------------- | +| 0.1.2 | 2024-03-05 | [#35838](https://github.com/airbytehq/airbyte/pull/35838) | Un-archive connector | +| 0.1.1 | 2023-06-21 | [#27542](https://github.com/airbytehq/airbyte/pull/27542) | Mark api_key as Airbyte Secret | +| 0.1.0 | 2023-06-14 | [#24192](https://github.com/airbytehq/airbyte/pull/24192) | New Destination Connector Xata | From f55abc1fdcfed1d857d541b7fbfb8314a2c09941 Mon Sep 17 00:00:00 2001 From: Alexandre Girard Date: Tue, 5 Mar 2024 18:50:42 -0800 Subject: [PATCH 093/172] :bug: low-code: Fix incremental substreams (#35471) --- .../declarative/datetime/min_max_datetime.py | 43 ++-- .../sources/declarative/declarative_stream.py | 6 +- .../incremental/datetime_based_cursor.py | 92 ++++---- .../incremental/per_partition_cursor.py | 179 +++++++-------- .../declarative/interpolation/jinja.py | 18 +- .../list_partition_router.py | 15 +- .../single_partition_router.py | 2 +- .../substream_partition_router.py | 32 +-- .../declarative/requesters/http_requester.py | 3 + .../request_options_provider.py | 8 +- .../declarative/retrievers/retriever.py | 3 +- .../retrievers/simple_retriever.py | 22 +- .../airbyte_cdk/sources/declarative/types.py | 67 +++++- .../datetime/test_min_max_datetime.py | 15 ++ .../incremental/test_datetime_based_cursor.py | 54 ++++- .../incremental/test_per_partition_cursor.py | 170 ++++++++++++--- .../test_per_partition_cursor_integration.py | 206 ++++++++++++++++-- .../test_model_to_component_factory.py | 26 +-- .../test_list_partition_router.py | 30 ++- .../test_single_partition_router.py | 3 +- .../test_substream_partition_router.py | 32 ++- .../declarative/test_declarative_stream.py | 66 ++++-- .../sources/declarative/test_types.py | 39 ++++ 23 files changed, 833 insertions(+), 298 deletions(-) create mode 100644 airbyte-cdk/python/unit_tests/sources/declarative/test_types.py diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/datetime/min_max_datetime.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/datetime/min_max_datetime.py index 2e76f49a396a..2694da2762ca 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/datetime/min_max_datetime.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/datetime/min_max_datetime.py @@ -4,7 +4,7 @@ import datetime as dt from dataclasses import InitVar, dataclass, field -from typing import Any, Mapping, Union +from typing import Any, Mapping, Optional, Union from airbyte_cdk.sources.declarative.datetime.datetime_parser import DatetimeParser from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString @@ -37,13 +37,13 @@ class MinMaxDatetime: min_datetime: Union[InterpolatedString, str] = "" max_datetime: Union[InterpolatedString, str] = "" - def __post_init__(self, parameters: Mapping[str, Any]): + def __post_init__(self, parameters: Mapping[str, Any]) -> None: self.datetime = InterpolatedString.create(self.datetime, parameters=parameters or {}) self._parser = DatetimeParser() - 
self.min_datetime = InterpolatedString.create(self.min_datetime, parameters=parameters) if self.min_datetime else None - self.max_datetime = InterpolatedString.create(self.max_datetime, parameters=parameters) if self.max_datetime else None + self.min_datetime = InterpolatedString.create(self.min_datetime, parameters=parameters) if self.min_datetime else None # type: ignore + self.max_datetime = InterpolatedString.create(self.max_datetime, parameters=parameters) if self.max_datetime else None # type: ignore - def get_datetime(self, config, **additional_parameters) -> dt.datetime: + def get_datetime(self, config: Mapping[str, Any], **additional_parameters: Mapping[str, Any]) -> dt.datetime: """ Evaluates and returns the datetime :param config: The user-provided configuration as specified by the source's spec @@ -55,29 +55,44 @@ def get_datetime(self, config, **additional_parameters) -> dt.datetime: if not datetime_format: datetime_format = "%Y-%m-%dT%H:%M:%S.%f%z" - time = self._parser.parse(str(self.datetime.eval(config, **additional_parameters)), datetime_format) + time = self._parser.parse(str(self.datetime.eval(config, **additional_parameters)), datetime_format) # type: ignore # datetime is always cast to an interpolated string if self.min_datetime: - min_time = str(self.min_datetime.eval(config, **additional_parameters)) + min_time = str(self.min_datetime.eval(config, **additional_parameters)) # type: ignore # min_datetime is always cast to an interpolated string if min_time: - min_time = self._parser.parse(min_time, datetime_format) - time = max(time, min_time) + min_datetime = self._parser.parse(min_time, datetime_format) # type: ignore # min_datetime is always cast to an interpolated string + time = max(time, min_datetime) if self.max_datetime: - max_time = str(self.max_datetime.eval(config, **additional_parameters)) + max_time = str(self.max_datetime.eval(config, **additional_parameters)) # type: ignore # max_datetime is always cast to an interpolated string if max_time: - max_time = self._parser.parse(max_time, datetime_format) - time = min(time, max_time) + max_datetime = self._parser.parse(max_time, datetime_format) + time = min(time, max_datetime) return time - @property + @property # type: ignore # properties don't play well with dataclasses... 
def datetime_format(self) -> str: """The format of the string representing the datetime""" return self._datetime_format @datetime_format.setter - def datetime_format(self, value: str): + def datetime_format(self, value: str) -> None: """Setter for the datetime format""" # Covers the case where datetime_format is not provided in the constructor, which causes the property object # to be set which we need to avoid doing if not isinstance(value, property): self._datetime_format = value + + @classmethod + def create( + cls, + interpolated_string_or_min_max_datetime: Union[InterpolatedString, str, "MinMaxDatetime"], + parameters: Optional[Mapping[str, Any]] = None, + ) -> "MinMaxDatetime": + if parameters is None: + parameters = {} + if isinstance(interpolated_string_or_min_max_datetime, InterpolatedString) or isinstance( + interpolated_string_or_min_max_datetime, str + ): + return MinMaxDatetime(datetime=interpolated_string_or_min_max_datetime, parameters=parameters) + else: + return interpolated_string_or_min_max_datetime diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_stream.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_stream.py index f74ed377c4ab..d56e7c99a545 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_stream.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_stream.py @@ -10,7 +10,7 @@ from airbyte_cdk.sources.declarative.retrievers.retriever import Retriever from airbyte_cdk.sources.declarative.schema import DefaultSchemaLoader from airbyte_cdk.sources.declarative.schema.schema_loader import SchemaLoader -from airbyte_cdk.sources.declarative.types import Config +from airbyte_cdk.sources.declarative.types import Config, StreamSlice from airbyte_cdk.sources.streams.core import Stream @@ -101,6 +101,8 @@ def read_records( """ :param: stream_state We knowingly avoid using stream_state as we want cursors to manage their own state. """ + if not isinstance(stream_slice, StreamSlice): + raise ValueError(f"DeclarativeStream does not support stream_slices that are not StreamSlice. Got {stream_slice}") yield from self.retriever.read_records(self.get_json_schema(), stream_slice) def get_json_schema(self) -> Mapping[str, Any]: # type: ignore @@ -114,7 +116,7 @@ def get_json_schema(self) -> Mapping[str, Any]: # type: ignore def stream_slices( self, *, sync_mode: SyncMode, cursor_field: Optional[List[str]] = None, stream_state: Optional[Mapping[str, Any]] = None - ) -> Iterable[Optional[Mapping[str, Any]]]: + ) -> Iterable[Optional[StreamSlice]]: """ Override to define the slices for this stream. See the stream slicing section of the docs for more information. 
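
Stepping out of the diff for a moment: the `MinMaxDatetime.create` classmethod introduced above centralizes the normalization that callers such as `DatetimeBasedCursor` previously did with `isinstance` checks. A minimal sketch of its three accepted input shapes (the values are illustrative):

```python
from airbyte_cdk.sources.declarative.datetime.min_max_datetime import MinMaxDatetime
from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString

# A plain string or an InterpolatedString is wrapped in a new MinMaxDatetime...
from_string = MinMaxDatetime.create("2022-01-01T00:00:00", parameters={})
from_interpolated = MinMaxDatetime.create(
    InterpolatedString.create("{{ config['start_date'] }}", parameters={}), parameters={}
)

# ...while an existing MinMaxDatetime is returned unchanged.
existing = MinMaxDatetime("2022-01-01T00:00:00", parameters={})
assert MinMaxDatetime.create(existing, parameters={}) is existing
```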
diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/datetime_based_cursor.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/datetime_based_cursor.py index e2a5f27d1ef3..0124b93e7553 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/datetime_based_cursor.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/datetime_based_cursor.py @@ -4,7 +4,7 @@ import datetime from dataclasses import InitVar, dataclass, field -from typing import Any, Iterable, List, Mapping, Optional, Union +from typing import Any, Callable, Iterable, List, Mapping, MutableMapping, Optional, Union from airbyte_cdk.models import AirbyteLogMessage, AirbyteMessage, Level, Type from airbyte_cdk.sources.declarative.datetime.datetime_parser import DatetimeParser @@ -70,10 +70,8 @@ def __post_init__(self, parameters: Mapping[str, Any]) -> None: f"If step is defined, cursor_granularity should be as well and vice-versa. " f"Right now, step is `{self.step}` and cursor_granularity is `{self.cursor_granularity}`" ) - if not isinstance(self.start_datetime, MinMaxDatetime): - self.start_datetime = MinMaxDatetime(self.start_datetime, parameters) - if self.end_datetime and not isinstance(self.end_datetime, MinMaxDatetime): - self.end_datetime = MinMaxDatetime(self.end_datetime, parameters) + self._start_datetime = MinMaxDatetime.create(self.start_datetime, parameters) + self._end_datetime = None if not self.end_datetime else MinMaxDatetime.create(self.end_datetime, parameters) self._timezone = datetime.timezone.utc self._interpolation = JinjaInterpolation() @@ -84,23 +82,23 @@ def __post_init__(self, parameters: Mapping[str, Any]) -> None: else datetime.timedelta.max ) self._cursor_granularity = self._parse_timedelta(self.cursor_granularity) - self.cursor_field = InterpolatedString.create(self.cursor_field, parameters=parameters) - self.lookback_window = InterpolatedString.create(self.lookback_window, parameters=parameters) - self.partition_field_start = InterpolatedString.create(self.partition_field_start or "start_time", parameters=parameters) - self.partition_field_end = InterpolatedString.create(self.partition_field_end or "end_time", parameters=parameters) + self._cursor_field = InterpolatedString.create(self.cursor_field, parameters=parameters) + self._lookback_window = InterpolatedString.create(self.lookback_window, parameters=parameters) if self.lookback_window else None + self._partition_field_start = InterpolatedString.create(self.partition_field_start or "start_time", parameters=parameters) + self._partition_field_end = InterpolatedString.create(self.partition_field_end or "end_time", parameters=parameters) self._parser = DatetimeParser() # If datetime format is not specified then start/end datetime should inherit it from the stream slicer - if not self.start_datetime.datetime_format: - self.start_datetime.datetime_format = self.datetime_format - if self.end_datetime and not self.end_datetime.datetime_format: - self.end_datetime.datetime_format = self.datetime_format + if not self._start_datetime.datetime_format: + self._start_datetime.datetime_format = self.datetime_format + if self._end_datetime and not self._end_datetime.datetime_format: + self._end_datetime.datetime_format = self.datetime_format if not self.cursor_datetime_formats: self.cursor_datetime_formats = [self.datetime_format] def get_stream_state(self) -> StreamState: - return {self.cursor_field.eval(self.config): self._cursor} if self._cursor else {} + return 
{self._cursor_field.eval(self.config): self._cursor} if self._cursor else {} def set_initial_state(self, stream_state: StreamState) -> None: """ @@ -109,17 +107,22 @@ def set_initial_state(self, stream_state: StreamState) -> None: :param stream_state: The state of the stream as returned by get_stream_state """ - self._cursor = stream_state.get(self.cursor_field.eval(self.config)) if stream_state else None + self._cursor = stream_state.get(self._cursor_field.eval(self.config)) if stream_state else None def close_slice(self, stream_slice: StreamSlice, most_recent_record: Optional[Record]) -> None: - last_record_cursor_value = most_recent_record.get(self.cursor_field.eval(self.config)) if most_recent_record else None - stream_slice_value_end = stream_slice.get(self.partition_field_end.eval(self.config)) + if stream_slice.partition: + raise ValueError(f"Stream slice {stream_slice} should not have a partition. Got {stream_slice.partition}.") + last_record_cursor_value = most_recent_record.get(self._cursor_field.eval(self.config)) if most_recent_record else None + stream_slice_value_end = stream_slice.get(self._partition_field_end.eval(self.config)) + potential_cursor_values = [ + cursor_value for cursor_value in [self._cursor, last_record_cursor_value, stream_slice_value_end] if cursor_value + ] cursor_value_str_by_cursor_value_datetime = dict( map( # we need to ensure the cursor value is preserved as is in the state else the CATs might complain of something like # 2023-01-04T17:30:19.000Z' <= '2023-01-04T17:30:19.000000Z' lambda datetime_str: (self.parse_date(datetime_str), datetime_str), - filter(lambda item: item, [self._cursor, last_record_cursor_value, stream_slice_value_end]), + potential_cursor_values, ) ) self._cursor = ( @@ -142,37 +145,43 @@ def stream_slices(self) -> Iterable[StreamSlice]: return self._partition_daterange(start_datetime, end_datetime, self._step) def _calculate_earliest_possible_value(self, end_datetime: datetime.datetime) -> datetime.datetime: - lookback_delta = self._parse_timedelta(self.lookback_window.eval(self.config) if self.lookback_window else "P0D") - earliest_possible_start_datetime = min(self.start_datetime.get_datetime(self.config), end_datetime) + lookback_delta = self._parse_timedelta(self._lookback_window.eval(self.config) if self.lookback_window else "P0D") + earliest_possible_start_datetime = min(self._start_datetime.get_datetime(self.config), end_datetime) cursor_datetime = self._calculate_cursor_datetime_from_state(self.get_stream_state()) return max(earliest_possible_start_datetime, cursor_datetime) - lookback_delta def _select_best_end_datetime(self) -> datetime.datetime: now = datetime.datetime.now(tz=self._timezone) - if not self.end_datetime: + if not self._end_datetime: return now - return min(self.end_datetime.get_datetime(self.config), now) + return min(self._end_datetime.get_datetime(self.config), now) def _calculate_cursor_datetime_from_state(self, stream_state: Mapping[str, Any]) -> datetime.datetime: - if self.cursor_field.eval(self.config, stream_state=stream_state) in stream_state: - return self.parse_date(stream_state[self.cursor_field.eval(self.config)]) + if self._cursor_field.eval(self.config, stream_state=stream_state) in stream_state: + return self.parse_date(stream_state[self._cursor_field.eval(self.config)]) return datetime.datetime.min.replace(tzinfo=datetime.timezone.utc) def _format_datetime(self, dt: datetime.datetime) -> str: return self._parser.format(dt, self.datetime_format) - def _partition_daterange(self, start: 
datetime.datetime, end: datetime.datetime, step: Union[datetime.timedelta, Duration]): - start_field = self.partition_field_start.eval(self.config) - end_field = self.partition_field_end.eval(self.config) + def _partition_daterange( + self, start: datetime.datetime, end: datetime.datetime, step: Union[datetime.timedelta, Duration] + ) -> List[StreamSlice]: + start_field = self._partition_field_start.eval(self.config) + end_field = self._partition_field_end.eval(self.config) dates = [] while start <= end: next_start = self._evaluate_next_start_date_safely(start, step) end_date = self._get_date(next_start - self._cursor_granularity, end, min) - dates.append({start_field: self._format_datetime(start), end_field: self._format_datetime(end_date)}) + dates.append( + StreamSlice( + partition={}, cursor_slice={start_field: self._format_datetime(start), end_field: self._format_datetime(end_date)} + ) + ) start = next_start return dates - def _evaluate_next_start_date_safely(self, start, step): + def _evaluate_next_start_date_safely(self, start: datetime.datetime, step: datetime.timedelta) -> datetime.datetime: """ Given that we set the default step at datetime.timedelta.max, we will generate an OverflowError when evaluating the next start_date This method assumes that users would never enter a step that would generate an overflow. Given that would be the case, the code @@ -183,7 +192,12 @@ def _evaluate_next_start_date_safely(self, start, step): except OverflowError: return datetime.datetime.max.replace(tzinfo=datetime.timezone.utc) - def _get_date(self, cursor_value, default_date: datetime.datetime, comparator) -> datetime.datetime: + def _get_date( + self, + cursor_value: datetime.datetime, + default_date: datetime.datetime, + comparator: Callable[[datetime.datetime, datetime.datetime], datetime.datetime], + ) -> datetime.datetime: cursor_date = cursor_value or default_date return comparator(cursor_date, default_date) @@ -196,7 +210,7 @@ def parse_date(self, date: str) -> datetime.datetime: raise ValueError(f"No format in {self.cursor_datetime_formats} matching {date}") @classmethod - def _parse_timedelta(cls, time_str) -> Union[datetime.timedelta, Duration]: + def _parse_timedelta(cls, time_str: Optional[str]) -> Union[datetime.timedelta, Duration]: """ :return Parses an ISO 8601 durations into datetime.timedelta or Duration objects. 
""" @@ -244,18 +258,20 @@ def request_kwargs(self) -> Mapping[str, Any]: # Never update kwargs return {} - def _get_request_options(self, option_type: RequestOptionType, stream_slice: StreamSlice): - options = {} + def _get_request_options(self, option_type: RequestOptionType, stream_slice: Optional[StreamSlice]) -> Mapping[str, Any]: + options: MutableMapping[str, Any] = {} + if not stream_slice: + return options if self.start_time_option and self.start_time_option.inject_into == option_type: - options[self.start_time_option.field_name.eval(config=self.config)] = stream_slice.get( - self.partition_field_start.eval(self.config) + options[self.start_time_option.field_name.eval(config=self.config)] = stream_slice.get( # type: ignore # field_name is always casted to an interpolated string + self._partition_field_start.eval(self.config) ) if self.end_time_option and self.end_time_option.inject_into == option_type: - options[self.end_time_option.field_name.eval(config=self.config)] = stream_slice.get(self.partition_field_end.eval(self.config)) + options[self.end_time_option.field_name.eval(config=self.config)] = stream_slice.get(self._partition_field_end.eval(self.config)) # type: ignore # field_name is always casted to an interpolated string return options def should_be_synced(self, record: Record) -> bool: - cursor_field = self.cursor_field.eval(self.config) + cursor_field = self._cursor_field.eval(self.config) record_cursor_value = record.get(cursor_field) if not record_cursor_value: self._send_log( @@ -278,7 +294,7 @@ def _send_log(self, level: Level, message: str) -> None: ) def is_greater_than_or_equal(self, first: Record, second: Record) -> bool: - cursor_field = self.cursor_field.eval(self.config) + cursor_field = self._cursor_field.eval(self.config) first_cursor_value = first.get(cursor_field) second_cursor_value = second.get(cursor_field) if first_cursor_value and second_cursor_value: diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/per_partition_cursor.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/per_partition_cursor.py index 75af991970d3..39dfa8f1fe1f 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/per_partition_cursor.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/per_partition_cursor.py @@ -3,7 +3,7 @@ # import json -from typing import Any, Callable, Iterable, Mapping, Optional +from typing import Any, Callable, Iterable, Mapping, MutableMapping, Optional, Union from airbyte_cdk.sources.declarative.incremental.cursor import Cursor from airbyte_cdk.sources.declarative.stream_slicers.stream_slicer import StreamSlicer @@ -24,73 +24,15 @@ def to_partition_key(to_serialize: Any) -> str: return json.dumps(to_serialize, indent=None, separators=(",", ":"), sort_keys=True) @staticmethod - def to_partition(to_deserialize: Any): - return json.loads(to_deserialize) - - -class PerPartitionStreamSlice(StreamSlice): - def __init__(self, partition: Mapping[str, Any], cursor_slice: Mapping[str, Any]): - self._partition = partition - self._cursor_slice = cursor_slice - if partition.keys() & cursor_slice.keys(): - raise ValueError("Keys for partition and incremental sync cursor should not overlap") - self._stream_slice = dict(partition) | dict(cursor_slice) - - @property - def partition(self): - return self._partition - - @property - def cursor_slice(self): - return self._cursor_slice - - def __repr__(self): - return repr(self._stream_slice) - - def __setitem__(self, key: str, value: Any): - raise 
ValueError("PerPartitionStreamSlice is immutable") - - def __getitem__(self, key: str): - return self._stream_slice[key] - - def __len__(self): - return len(self._stream_slice) - - def __iter__(self): - return iter(self._stream_slice) - - def __contains__(self, item: str): - return item in self._stream_slice - - def keys(self): - return self._stream_slice.keys() - - def items(self): - return self._stream_slice.items() - - def values(self): - return self._stream_slice.values() - - def get(self, key: str, default: Any) -> Any: - return self._stream_slice.get(key, default) - - def __eq__(self, other): - if isinstance(other, dict): - return self._stream_slice == other - if isinstance(other, PerPartitionStreamSlice): - # noinspection PyProtectedMember - return self._partition == other._partition and self._cursor_slice == other._cursor_slice - return False - - def __ne__(self, other): - return not self.__eq__(other) + def to_partition(to_deserialize: Any) -> Mapping[str, Any]: + return json.loads(to_deserialize) # type: ignore # The partition is known to be a dict, but the type hint is Any class CursorFactory: - def __init__(self, create_function: Callable[[], StreamSlicer]): + def __init__(self, create_function: Callable[[], Cursor]): self._create_function = create_function - def create(self) -> StreamSlicer: + def create(self) -> Cursor: return self._create_function() @@ -115,27 +57,27 @@ class PerPartitionCursor(Cursor): Therefore, we need to manage state per partition. """ - _NO_STATE = {} - _NO_CURSOR_STATE = {} + _NO_STATE: Mapping[str, Any] = {} + _NO_CURSOR_STATE: Mapping[str, Any] = {} _KEY = 0 _VALUE = 1 def __init__(self, cursor_factory: CursorFactory, partition_router: StreamSlicer): self._cursor_factory = cursor_factory self._partition_router = partition_router - self._cursor_per_partition = {} + self._cursor_per_partition: MutableMapping[str, Cursor] = {} self._partition_serializer = PerPartitionKeySerializer() - def stream_slices(self) -> Iterable[PerPartitionStreamSlice]: + def stream_slices(self) -> Iterable[StreamSlice]: slices = self._partition_router.stream_slices() for partition in slices: - cursor = self._cursor_per_partition.get(self._to_partition_key(partition)) + cursor = self._cursor_per_partition.get(self._to_partition_key(partition.partition)) if not cursor: cursor = self._create_cursor(self._NO_CURSOR_STATE) - self._cursor_per_partition[self._to_partition_key(partition)] = cursor + self._cursor_per_partition[self._to_partition_key(partition.partition)] = cursor for cursor_slice in cursor.stream_slices(): - yield PerPartitionStreamSlice(partition, cursor_slice) + yield StreamSlice(partition=partition, cursor_slice=cursor_slice) def set_initial_state(self, stream_state: StreamState) -> None: if not stream_state: @@ -147,10 +89,12 @@ def set_initial_state(self, stream_state: StreamState) -> None: def close_slice(self, stream_slice: StreamSlice, most_recent_record: Optional[Record]) -> None: try: cursor_most_recent_record = ( - Record(most_recent_record.data, stream_slice.cursor_slice) if most_recent_record else most_recent_record + Record(most_recent_record.data, StreamSlice(partition={}, cursor_slice=stream_slice.cursor_slice)) + if most_recent_record + else most_recent_record ) self._cursor_per_partition[self._to_partition_key(stream_slice.partition)].close_slice( - stream_slice.cursor_slice, cursor_most_recent_record + StreamSlice(partition={}, cursor_slice=stream_slice.cursor_slice), cursor_most_recent_record ) except KeyError as exception: raise ValueError( @@ 
-179,16 +123,16 @@ def _get_state_for_partition(self, partition: Mapping[str, Any]) -> Optional[Str return None @staticmethod - def _is_new_state(stream_state): + def _is_new_state(stream_state: Mapping[str, Any]) -> bool: return not bool(stream_state) - def _to_partition_key(self, partition) -> tuple: + def _to_partition_key(self, partition: Mapping[str, Any]) -> str: return self._partition_serializer.to_partition_key(partition) - def _to_dict(self, partition_key: tuple) -> StreamSlice: + def _to_dict(self, partition_key: str) -> Mapping[str, Any]: return self._partition_serializer.to_partition(partition_key) - def select_state(self, stream_slice: Optional[PerPartitionStreamSlice] = None) -> Optional[StreamState]: + def select_state(self, stream_slice: Optional[StreamSlice] = None) -> Optional[StreamState]: if not stream_slice: raise ValueError("A partition needs to be provided in order to extract a state") @@ -197,7 +141,7 @@ def select_state(self, stream_slice: Optional[PerPartitionStreamSlice] = None) - return self._get_state_for_partition(stream_slice.partition) - def _create_cursor(self, cursor_state: Any) -> StreamSlicer: + def _create_cursor(self, cursor_state: Any) -> Cursor: cursor = self._cursor_factory.create() cursor.set_initial_state(cursor_state) return cursor @@ -209,11 +153,18 @@ def get_request_params( stream_slice: Optional[StreamSlice] = None, next_page_token: Optional[Mapping[str, Any]] = None, ) -> Mapping[str, Any]: - return self._partition_router.get_request_params( - stream_state=stream_state, stream_slice=stream_slice.partition, next_page_token=next_page_token - ) | self._cursor_per_partition[self._to_partition_key(stream_slice.partition)].get_request_params( - stream_state=stream_state, stream_slice=stream_slice.cursor_slice, next_page_token=next_page_token - ) + if stream_slice: + return self._partition_router.get_request_params( # type: ignore # this always returns a mapping + stream_state=stream_state, + stream_slice=StreamSlice(partition=stream_slice.partition, cursor_slice={}), + next_page_token=next_page_token, + ) | self._cursor_per_partition[self._to_partition_key(stream_slice.partition)].get_request_params( + stream_state=stream_state, + stream_slice=StreamSlice(partition={}, cursor_slice=stream_slice.cursor_slice), + next_page_token=next_page_token, + ) + else: + raise ValueError("A partition needs to be provided in order to get request params") def get_request_headers( self, @@ -222,11 +173,18 @@ def get_request_headers( stream_slice: Optional[StreamSlice] = None, next_page_token: Optional[Mapping[str, Any]] = None, ) -> Mapping[str, Any]: - return self._partition_router.get_request_headers( - stream_state=stream_state, stream_slice=stream_slice.partition, next_page_token=next_page_token - ) | self._cursor_per_partition[self._to_partition_key(stream_slice.partition)].get_request_headers( - stream_state=stream_state, stream_slice=stream_slice.cursor_slice, next_page_token=next_page_token - ) + if stream_slice: + return self._partition_router.get_request_headers( # type: ignore # this always returns a mapping + stream_state=stream_state, + stream_slice=StreamSlice(partition=stream_slice.partition, cursor_slice={}), + next_page_token=next_page_token, + ) | self._cursor_per_partition[self._to_partition_key(stream_slice.partition)].get_request_headers( + stream_state=stream_state, + stream_slice=StreamSlice(partition={}, cursor_slice=stream_slice.cursor_slice), + next_page_token=next_page_token, + ) + else: + raise ValueError("A partition needs to be 
provided in order to get request headers") def get_request_body_data( self, @@ -234,12 +192,19 @@ def get_request_body_data( stream_state: Optional[StreamState] = None, stream_slice: Optional[StreamSlice] = None, next_page_token: Optional[Mapping[str, Any]] = None, - ) -> Mapping[str, Any]: - return self._partition_router.get_request_body_data( - stream_state=stream_state, stream_slice=stream_slice.partition, next_page_token=next_page_token - ) | self._cursor_per_partition[self._to_partition_key(stream_slice.partition)].get_request_body_data( - stream_state=stream_state, stream_slice=stream_slice.cursor_slice, next_page_token=next_page_token - ) + ) -> Union[Mapping[str, Any], str]: + if stream_slice: + return self._partition_router.get_request_body_data( # type: ignore # this always returns a mapping + stream_state=stream_state, + stream_slice=StreamSlice(partition=stream_slice.partition, cursor_slice={}), + next_page_token=next_page_token, + ) | self._cursor_per_partition[self._to_partition_key(stream_slice.partition)].get_request_body_data( + stream_state=stream_state, + stream_slice=StreamSlice(partition={}, cursor_slice=stream_slice.cursor_slice), + next_page_token=next_page_token, + ) + else: + raise ValueError("A partition needs to be provided in order to get request body data") def get_request_body_json( self, @@ -248,16 +213,25 @@ def get_request_body_json( stream_slice: Optional[StreamSlice] = None, next_page_token: Optional[Mapping[str, Any]] = None, ) -> Mapping[str, Any]: - return self._partition_router.get_request_body_json( - stream_state=stream_state, stream_slice=stream_slice.partition, next_page_token=next_page_token - ) | self._cursor_per_partition[self._to_partition_key(stream_slice.partition)].get_request_body_json( - stream_state=stream_state, stream_slice=stream_slice.cursor_slice, next_page_token=next_page_token - ) + if stream_slice: + return self._partition_router.get_request_body_json( # type: ignore # this always returns a mapping + stream_state=stream_state, + stream_slice=StreamSlice(partition=stream_slice.partition, cursor_slice={}), + next_page_token=next_page_token, + ) | self._cursor_per_partition[self._to_partition_key(stream_slice.partition)].get_request_body_json( + stream_state=stream_state, + stream_slice=StreamSlice(partition={}, cursor_slice=stream_slice.cursor_slice), + next_page_token=next_page_token, + ) + else: + raise ValueError("A partition needs to be provided in order to get request body json") def should_be_synced(self, record: Record) -> bool: return self._get_cursor(record).should_be_synced(self._convert_record_to_cursor_record(record)) def is_greater_than_or_equal(self, first: Record, second: Record) -> bool: + if not first.associated_slice or not second.associated_slice: + raise ValueError(f"Both records should have an associated slice but got {first.associated_slice} and {second.associated_slice}") if first.associated_slice.partition != second.associated_slice.partition: raise ValueError( f"To compare records, partition should be the same but got {first.associated_slice.partition} and {second.associated_slice.partition}" @@ -268,10 +242,15 @@ def is_greater_than_or_equal(self, first: Record, second: Record) -> bool: ) @staticmethod - def _convert_record_to_cursor_record(record: Record): - return Record(record.data, record.associated_slice.cursor_slice) + def _convert_record_to_cursor_record(record: Record) -> Record: + return Record( + record.data, + StreamSlice(partition={}, cursor_slice=record.associated_slice.cursor_slice) if 
record.associated_slice else None, + ) def _get_cursor(self, record: Record) -> Cursor: + if not record.associated_slice: + raise ValueError("Invalid state as stream slices that are emitted should refer to an existing cursor") partition_key = self._to_partition_key(record.associated_slice.partition) if partition_key not in self._cursor_per_partition: raise ValueError("Invalid state as stream slices that are emitted should refer to an existing cursor") diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/interpolation/jinja.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/interpolation/jinja.py index 91d52c7579f4..d2ef7a9d0464 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/interpolation/jinja.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/interpolation/jinja.py @@ -3,7 +3,7 @@ # import ast -from typing import Any, Optional, Tuple, Type +from typing import Any, Mapping, Optional, Tuple, Type from airbyte_cdk.sources.declarative.interpolation.filters import filters from airbyte_cdk.sources.declarative.interpolation.interpolation import Interpolation @@ -48,7 +48,7 @@ class JinjaInterpolation(Interpolation): # Please add a unit test to test_jinja.py when adding a restriction. RESTRICTED_BUILTIN_FUNCTIONS = ["range"] # The range function can cause very expensive computations - def __init__(self): + def __init__(self) -> None: self._environment = Environment() self._environment.filters.update(**filters) self._environment.globals.update(**macros) @@ -64,8 +64,8 @@ def eval( config: Config, default: Optional[str] = None, valid_types: Optional[Tuple[Type[Any]]] = None, - **additional_parameters, - ): + **additional_parameters: Any, + ) -> Any: context = {"config": config, **additional_parameters} for alias, equivalent in self.ALIASES.items(): @@ -90,23 +90,23 @@ def eval( # If result is empty or resulted in an undefined error, evaluate and return the default string return self._literal_eval(self._eval(default, context), valid_types) - def _literal_eval(self, result, valid_types: Optional[Tuple[Type[Any]]]): + def _literal_eval(self, result: Optional[str], valid_types: Optional[Tuple[Type[Any]]]) -> Any: try: - evaluated = ast.literal_eval(result) + evaluated = ast.literal_eval(result) # type: ignore # literal_eval is able to handle None except (ValueError, SyntaxError): return result if not valid_types or (valid_types and isinstance(evaluated, valid_types)): return evaluated return result - def _eval(self, s: str, context): + def _eval(self, s: Optional[str], context: Mapping[str, Any]) -> Optional[str]: try: - ast = self._environment.parse(s) + ast = self._environment.parse(s) # type: ignore # parse is able to handle None undeclared = meta.find_undeclared_variables(ast) undeclared_not_in_context = {var for var in undeclared if var not in context} if undeclared_not_in_context: raise ValueError(f"Jinja macro has undeclared variables: {undeclared_not_in_context}. 
Context: {context}") - return self._environment.from_string(s).render(context) + return self._environment.from_string(s).render(context) # type: ignore # from_string is able to handle None except TypeError: # The string is a static value, not a jinja template # It can be returned as is diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/partition_routers/list_partition_router.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/partition_routers/list_partition_router.py index 5413709d9615..3490c02f7a0c 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/partition_routers/list_partition_router.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/partition_routers/list_partition_router.py @@ -30,11 +30,12 @@ class ListPartitionRouter(StreamSlicer): parameters: InitVar[Mapping[str, Any]] request_option: Optional[RequestOption] = None - def __post_init__(self, parameters: Mapping[str, Any]): + def __post_init__(self, parameters: Mapping[str, Any]) -> None: if isinstance(self.values, str): self.values = InterpolatedString.create(self.values, parameters=parameters).eval(self.config) - if isinstance(self.cursor_field, str): - self.cursor_field = InterpolatedString(string=self.cursor_field, parameters=parameters) + self._cursor_field = ( + InterpolatedString(string=self.cursor_field, parameters=parameters) if isinstance(self.cursor_field, str) else self.cursor_field + ) self._cursor = None @@ -75,13 +76,13 @@ def get_request_body_json( return self._get_request_option(RequestOptionType.body_json, stream_slice) def stream_slices(self) -> Iterable[StreamSlice]: - return [{self.cursor_field.eval(self.config): slice_value} for slice_value in self.values] + return [StreamSlice(partition={self._cursor_field.eval(self.config): slice_value}, cursor_slice={}) for slice_value in self.values] - def _get_request_option(self, request_option_type: RequestOptionType, stream_slice: StreamSlice): + def _get_request_option(self, request_option_type: RequestOptionType, stream_slice: Optional[StreamSlice]) -> Mapping[str, Any]: if self.request_option and self.request_option.inject_into == request_option_type and stream_slice: - slice_value = stream_slice.get(self.cursor_field.eval(self.config)) + slice_value = stream_slice.get(self._cursor_field.eval(self.config)) if slice_value: - return {self.request_option.field_name.eval(self.config): slice_value} + return {self.request_option.field_name.eval(self.config): slice_value} # type: ignore # field_name is always casted to InterpolatedString else: return {} else: diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/partition_routers/single_partition_router.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/partition_routers/single_partition_router.py index 4697d114eb1a..d1e7bab68e40 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/partition_routers/single_partition_router.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/partition_routers/single_partition_router.py @@ -48,4 +48,4 @@ def get_request_body_json( return {} def stream_slices(self) -> Iterable[StreamSlice]: - yield dict() + yield StreamSlice(partition={}, cursor_slice={}) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/partition_routers/substream_partition_router.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/partition_routers/substream_partition_router.py index 3e915168c059..7d93bacb084f 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/partition_routers/substream_partition_router.py +++ 
b/airbyte-cdk/python/airbyte_cdk/sources/declarative/partition_routers/substream_partition_router.py @@ -3,7 +3,7 @@ # from dataclasses import InitVar, dataclass -from typing import Any, Iterable, List, Mapping, Optional, Union +from typing import TYPE_CHECKING, Any, Iterable, List, Mapping, Optional, Union import dpath.util from airbyte_cdk.models import AirbyteMessage, SyncMode, Type @@ -11,7 +11,9 @@ from airbyte_cdk.sources.declarative.requesters.request_option import RequestOption, RequestOptionType from airbyte_cdk.sources.declarative.stream_slicers.stream_slicer import StreamSlicer from airbyte_cdk.sources.declarative.types import Config, Record, StreamSlice, StreamState -from airbyte_cdk.sources.streams.core import Stream + +if TYPE_CHECKING: + from airbyte_cdk.sources.declarative.declarative_stream import DeclarativeStream @dataclass @@ -25,14 +27,14 @@ class ParentStreamConfig: request_option: How to inject the slice value on an outgoing HTTP request """ - stream: Stream + stream: "DeclarativeStream" # Parent streams must be DeclarativeStream because we can't know which part of the stream slice is a partition for regular Stream parent_key: Union[InterpolatedString, str] partition_field: Union[InterpolatedString, str] config: Config parameters: InitVar[Mapping[str, Any]] request_option: Optional[RequestOption] = None - def __post_init__(self, parameters: Mapping[str, Any]): + def __post_init__(self, parameters: Mapping[str, Any]) -> None: self.parent_key = InterpolatedString.create(self.parent_key, parameters=parameters) self.partition_field = InterpolatedString.create(self.partition_field, parameters=parameters) @@ -51,7 +53,7 @@ class SubstreamPartitionRouter(StreamSlicer): config: Config parameters: InitVar[Mapping[str, Any]] - def __post_init__(self, parameters: Mapping[str, Any]): + def __post_init__(self, parameters: Mapping[str, Any]) -> None: if not self.parent_stream_configs: raise ValueError("SubstreamPartitionRouter needs at least 1 parent stream") self._parameters = parameters @@ -88,19 +90,19 @@ def get_request_body_json( stream_state: Optional[StreamState] = None, stream_slice: Optional[StreamSlice] = None, next_page_token: Optional[Mapping[str, Any]] = None, - ) -> Optional[Mapping]: + ) -> Mapping[str, Any]: # Pass the stream_slice from the argument, not the cursor because the cursor is updated after processing the response return self._get_request_option(RequestOptionType.body_json, stream_slice) - def _get_request_option(self, option_type: RequestOptionType, stream_slice: StreamSlice): + def _get_request_option(self, option_type: RequestOptionType, stream_slice: Optional[StreamSlice]) -> Mapping[str, Any]: params = {} if stream_slice: for parent_config in self.parent_stream_configs: if parent_config.request_option and parent_config.request_option.inject_into == option_type: - key = parent_config.partition_field.eval(self.config) + key = parent_config.partition_field.eval(self.config) # type: ignore # partition_field is always casted to an interpolated string value = stream_slice.get(key) if value: - params.update({parent_config.request_option.field_name.eval(config=self.config): value}) + params.update({parent_config.request_option.field_name.eval(config=self.config): value}) # type: ignore # field_name is always casted to an interpolated string return params def stream_slices(self) -> Iterable[StreamSlice]: @@ -123,13 +125,13 @@ def stream_slices(self) -> Iterable[StreamSlice]: else: for parent_stream_config in self.parent_stream_configs: parent_stream = 
parent_stream_config.stream - parent_field = parent_stream_config.parent_key.eval(self.config) - stream_state_field = parent_stream_config.partition_field.eval(self.config) + parent_field = parent_stream_config.parent_key.eval(self.config) # type: ignore # parent_key is always casted to an interpolated string + partition_field = parent_stream_config.partition_field.eval(self.config) # type: ignore # partition_field is always casted to an interpolated string for parent_stream_slice in parent_stream.stream_slices( sync_mode=SyncMode.full_refresh, cursor_field=None, stream_state=None ): empty_parent_slice = True - parent_slice = parent_stream_slice + parent_partition = parent_stream_slice.partition if parent_stream_slice else {} for parent_record in parent_stream.read_records( sync_mode=SyncMode.full_refresh, cursor_field=None, stream_slice=parent_stream_slice, stream_state=None @@ -143,12 +145,14 @@ def stream_slices(self) -> Iterable[StreamSlice]: elif isinstance(parent_record, Record): parent_record = parent_record.data try: - stream_state_value = dpath.util.get(parent_record, parent_field) + partition_value = dpath.util.get(parent_record, parent_field) except KeyError: pass else: empty_parent_slice = False - yield {stream_state_field: stream_state_value, "parent_slice": parent_slice} + yield StreamSlice( + partition={partition_field: partition_value, "parent_slice": parent_partition}, cursor_slice={} + ) # If the parent slice contains no records, if empty_parent_slice: yield from [] diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/http_requester.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/http_requester.py index 20c18ec9ba6c..98e12eef908a 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/http_requester.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/http_requester.py @@ -456,6 +456,9 @@ def send_request( json=self._request_body_json(stream_state, stream_slice, next_page_token, request_body_json), data=self._request_body_data(stream_state, stream_slice, next_page_token, request_body_data), ) + import time + + time.sleep(1) response = self._send_with_retry(request, log_formatter=log_formatter) return self._validate_response(response) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/request_options/request_options_provider.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/request_options/request_options_provider.py index b07ffb3f6f08..c03a232e368e 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/request_options/request_options_provider.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/request_options/request_options_provider.py @@ -4,7 +4,7 @@ from abc import abstractmethod from dataclasses import dataclass -from typing import Any, Mapping, MutableMapping, Optional, Union +from typing import Any, Mapping, Optional, Union from airbyte_cdk.sources.declarative.types import StreamSlice, StreamState @@ -28,7 +28,7 @@ def get_request_params( stream_state: Optional[StreamState] = None, stream_slice: Optional[StreamSlice] = None, next_page_token: Optional[Mapping[str, Any]] = None, - ) -> MutableMapping[str, Any]: + ) -> Mapping[str, Any]: """ Specifies the query parameters that should be set on an outgoing HTTP request given the inputs. 
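
Many of the signature changes in this commit lean on the `StreamSlice` class that now lives in `types.py` (shown further below): it keeps the partition and the cursor window apart while still behaving like a read-only mapping over their union. A small sketch of that contract, using illustrative keys:

```python
from airbyte_cdk.sources.declarative.types import StreamSlice

stream_slice = StreamSlice(
    partition={"parent_id": "13"},
    cursor_slice={"start_time": "2021-01-01", "end_time": "2021-01-31"},
)

# Reads like a plain mapping over the union of both parts...
assert stream_slice["parent_id"] == "13"
assert stream_slice.partition == {"parent_id": "13"}
assert stream_slice.cursor_slice == {"start_time": "2021-01-01", "end_time": "2021-01-31"}

# ...but overlapping keys are rejected at construction time, and the slice is immutable.
try:
    stream_slice["start_time"] = "2021-02-01"
except ValueError:
    pass  # StreamSlice raises instead of mutating
```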
@@ -53,7 +53,7 @@ def get_request_body_data( stream_state: Optional[StreamState] = None, stream_slice: Optional[StreamSlice] = None, next_page_token: Optional[Mapping[str, Any]] = None, - ) -> Optional[Union[Mapping[str, Any], str]]: + ) -> Union[Mapping[str, Any], str]: """ Specifies how to populate the body of the request with a non-JSON payload. @@ -71,7 +71,7 @@ def get_request_body_json( stream_state: Optional[StreamState] = None, stream_slice: Optional[StreamSlice] = None, next_page_token: Optional[Mapping[str, Any]] = None, - ) -> Optional[Mapping[str, Any]]: + ) -> Mapping[str, Any]: """ Specifies how to populate the body of the request with a JSON payload. diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/retrievers/retriever.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/retrievers/retriever.py index bf4247a4f441..cd6310a0948f 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/retrievers/retriever.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/retrievers/retriever.py @@ -6,7 +6,8 @@ from dataclasses import dataclass from typing import Any, Iterable, Mapping, Optional -from airbyte_cdk.sources.declarative.types import StreamSlice, StreamState +from airbyte_cdk.sources.declarative.incremental.per_partition_cursor import StreamSlice +from airbyte_cdk.sources.declarative.types import StreamState from airbyte_cdk.sources.streams.core import StreamData diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/retrievers/simple_retriever.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/retrievers/simple_retriever.py index a9c946044922..7028850bcaaa 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/retrievers/simple_retriever.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/retrievers/simple_retriever.py @@ -258,7 +258,7 @@ def _next_page_token(self, response: requests.Response) -> Optional[Mapping[str, return self._paginator.next_page_token(response, self._records_from_last_response) def _fetch_next_page( - self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any], next_page_token: Optional[Mapping[str, Any]] = None + self, stream_state: Mapping[str, Any], stream_slice: StreamSlice, next_page_token: Optional[Mapping[str, Any]] = None ) -> Optional[requests.Response]: return self.requester.send_request( path=self._paginator_path(), @@ -280,7 +280,7 @@ def _read_pages( self, records_generator_fn: Callable[[Optional[requests.Response]], Iterable[StreamData]], stream_state: Mapping[str, Any], - stream_slice: Mapping[str, Any], + stream_slice: StreamSlice, ) -> Iterable[StreamData]: pagination_complete = False next_page_token = None @@ -310,7 +310,7 @@ def read_records( :param stream_slice: The stream slice to read data for :return: The records read from the API source """ - stream_slice = stream_slice or {} # None-check + _slice = stream_slice or StreamSlice(partition={}, cursor_slice={}) # None-check # Fixing paginator types has a long tail of dependencies self._paginator.reset() @@ -318,15 +318,15 @@ def read_records( record_generator = partial( self._parse_records, stream_state=self.state or {}, - stream_slice=stream_slice, + stream_slice=_slice, records_schema=records_schema, ) - for stream_data in self._read_pages(record_generator, self.state, stream_slice): - most_recent_record_from_slice = self._get_most_recent_record(most_recent_record_from_slice, stream_data, stream_slice) + for stream_data in self._read_pages(record_generator, self.state, _slice): + most_recent_record_from_slice = 
self._get_most_recent_record(most_recent_record_from_slice, stream_data, _slice) yield stream_data if self.cursor: - self.cursor.close_slice(stream_slice, most_recent_record_from_slice) + self.cursor.close_slice(_slice, most_recent_record_from_slice) return def _get_most_recent_record( @@ -356,7 +356,7 @@ def _extract_record(stream_data: StreamData, stream_slice: StreamSlice) -> Optio return None # stream_slices is defined with arguments on http stream and fixing this has a long tail of dependencies. Will be resolved by the decoupling of http stream and simple retriever - def stream_slices(self) -> Iterable[Optional[Mapping[str, Any]]]: # type: ignore + def stream_slices(self) -> Iterable[Optional[StreamSlice]]: # type: ignore """ Specifies the slices for this stream. See the stream slicing section of the docs for more information. @@ -382,7 +382,7 @@ def _parse_records( response: Optional[requests.Response], stream_state: Mapping[str, Any], records_schema: Mapping[str, Any], - stream_slice: Optional[Mapping[str, Any]], + stream_slice: Optional[StreamSlice], ) -> Iterable[StreamData]: yield from self._parse_response( response, @@ -412,11 +412,11 @@ def __post_init__(self, options: Mapping[str, Any]) -> None: ) # stream_slices is defined with arguments on http stream and fixing this has a long tail of dependencies. Will be resolved by the decoupling of http stream and simple retriever - def stream_slices(self) -> Iterable[Optional[Mapping[str, Any]]]: # type: ignore + def stream_slices(self) -> Iterable[Optional[StreamSlice]]: # type: ignore return islice(super().stream_slices(), self.maximum_number_of_slices) def _fetch_next_page( - self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any], next_page_token: Optional[Mapping[str, Any]] = None + self, stream_state: Mapping[str, Any], stream_slice: StreamSlice, next_page_token: Optional[Mapping[str, Any]] = None ) -> Optional[requests.Response]: return self.requester.send_request( path=self._paginator_path(), diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/types.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/types.py index fd0eba51676c..734ff29fffb7 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/types.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/types.py @@ -4,14 +4,13 @@ from __future__ import annotations -from typing import Any, List, Mapping, Optional +from typing import Any, ItemsView, Iterator, KeysView, List, Mapping, Optional, ValuesView # A FieldPointer designates a path to a field inside a mapping. 
For example, retrieving ["k1", "k1.2"] in the object {"k1": {"k1.2": # "hello"}} returns "hello" FieldPointer = List[str] Config = Mapping[str, Any] ConnectionDefinition = Mapping[str, Any] -StreamSlice = Mapping[str, Any] StreamState = Mapping[str, Any] @@ -51,3 +50,67 @@ def __eq__(self, other: object) -> bool: def __ne__(self, other: object) -> bool: return not self.__eq__(other) + + +class StreamSlice(Mapping[str, Any]): + def __init__(self, *, partition: Mapping[str, Any], cursor_slice: Mapping[str, Any]) -> None: + self._partition = partition + self._cursor_slice = cursor_slice + if partition.keys() & cursor_slice.keys(): + raise ValueError("Keys for partition and incremental sync cursor should not overlap") + self._stream_slice = dict(partition) | dict(cursor_slice) + + @property + def partition(self) -> Mapping[str, Any]: + p = self._partition + while isinstance(p, StreamSlice): + p = p.partition + return p + + @property + def cursor_slice(self) -> Mapping[str, Any]: + c = self._cursor_slice + while isinstance(c, StreamSlice): + c = c.cursor_slice + return c + + def __repr__(self) -> str: + return repr(self._stream_slice) + + def __setitem__(self, key: str, value: Any) -> None: + raise ValueError("StreamSlice is immutable") + + def __getitem__(self, key: str) -> Any: + return self._stream_slice[key] + + def __len__(self) -> int: + return len(self._stream_slice) + + def __iter__(self) -> Iterator[str]: + return iter(self._stream_slice) + + def __contains__(self, item: Any) -> bool: + return item in self._stream_slice + + def keys(self) -> KeysView[str]: + return self._stream_slice.keys() + + def items(self) -> ItemsView[str, Any]: + return self._stream_slice.items() + + def values(self) -> ValuesView[Any]: + return self._stream_slice.values() + + def get(self, key: str, default: Any = None) -> Optional[Any]: + return self._stream_slice.get(key, default) + + def __eq__(self, other: Any) -> bool: + if isinstance(other, dict): + return self._stream_slice == other + if isinstance(other, StreamSlice): + # noinspection PyProtectedMember + return self._partition == other._partition and self._cursor_slice == other._cursor_slice + return False + + def __ne__(self, other: Any) -> bool: + return not self.__eq__(other) diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/datetime/test_min_max_datetime.py b/airbyte-cdk/python/unit_tests/sources/declarative/datetime/test_min_max_datetime.py index b23f6e2fffe9..84a63969cec6 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/datetime/test_min_max_datetime.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/datetime/test_min_max_datetime.py @@ -6,6 +6,7 @@ import pytest from airbyte_cdk.sources.declarative.datetime.min_max_datetime import MinMaxDatetime +from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString date_format = "%Y-%m-%dT%H:%M:%S.%f%z" @@ -110,3 +111,17 @@ def test_min_max_datetime_lazy_eval(): assert datetime.datetime(2021, 1, 1, 0, 0, tzinfo=datetime.timezone.utc) == MinMaxDatetime( **kwargs, parameters={"max_datetime": "2021-01-01T00:00:00"} ).get_datetime({}) + + +@pytest.mark.parametrize( + "input_datetime", [ + pytest.param("2022-01-01T00:00:00", id="test_create_min_max_datetime_from_string"), + pytest.param(InterpolatedString.create("2022-01-01T00:00:00", parameters={}), id="test_create_min_max_datetime_from_interpolated_string"), + pytest.param(MinMaxDatetime("2022-01-01T00:00:00", parameters={}), id="test_create_min_max_datetime_from_minmaxdatetime") + ] +) +def
test_create_min_max_datetime(input_datetime): + minMaxDatetime = MinMaxDatetime.create(input_datetime, parameters={}) + expected_value = "2022-01-01T00:00:00" + + assert minMaxDatetime.datetime.eval(config={}) == expected_value diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_datetime_based_cursor.py b/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_datetime_based_cursor.py index c128f04f391d..6d93dd50c764 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_datetime_based_cursor.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_datetime_based_cursor.py @@ -10,7 +10,7 @@ from airbyte_cdk.sources.declarative.incremental import DatetimeBasedCursor from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString from airbyte_cdk.sources.declarative.requesters.request_option import RequestOption, RequestOptionType -from airbyte_cdk.sources.declarative.types import Record +from airbyte_cdk.sources.declarative.types import Record, StreamSlice datetime_format = "%Y-%m-%dT%H:%M:%S.%f%z" cursor_granularity = "PT0.000001S" @@ -343,35 +343,35 @@ def test_stream_slices( ( "test_close_slice_previous_cursor_is_highest", "2023-01-01", - {"end_time": "2022-01-01"}, + StreamSlice(partition={}, cursor_slice={"end_time": "2022-01-01"}), {cursor_field: "2021-01-01"}, {cursor_field: "2023-01-01"}, ), ( "test_close_slice_stream_slice_partition_end_is_highest", "2021-01-01", - {"end_time": "2023-01-01"}, + StreamSlice(partition={}, cursor_slice={"end_time": "2023-01-01"}), {cursor_field: "2021-01-01"}, {cursor_field: "2023-01-01"}, ), ( "test_close_slice_latest_record_cursor_value_is_highest", "2021-01-01", - {"end_time": "2022-01-01"}, + StreamSlice(partition={}, cursor_slice={"end_time": "2022-01-01"}), {cursor_field: "2023-01-01"}, {cursor_field: "2023-01-01"}, ), ( "test_close_slice_without_latest_record", "2021-01-01", - {"end_time": "2022-01-01"}, + StreamSlice(partition={}, cursor_slice={"end_time": "2022-01-01"}), None, {cursor_field: "2022-01-01"}, ), ( "test_close_slice_without_cursor", None, - {"end_time": "2022-01-01"}, + StreamSlice(partition={}, cursor_slice={"end_time": "2022-01-01"}), {cursor_field: "2023-01-01"}, {cursor_field: "2023-01-01"}, ), @@ -391,6 +391,19 @@ def test_close_slice(test_name, previous_cursor, stream_slice, latest_record_dat assert updated_state == expected_state +def test_close_slice_fails_if_slice_has_a_partition(): + cursor = DatetimeBasedCursor( + start_datetime=MinMaxDatetime(datetime="2021-01-01T00:00:00.000000+0000", parameters={}), + cursor_field=InterpolatedString(string=cursor_field, parameters={}), + datetime_format="%Y-%m-%d", + config=config, + parameters={}, + ) + stream_slice = StreamSlice(partition={"key": "value"}, cursor_slice={"end_time": "2022-01-01"}) + with pytest.raises(ValueError): + cursor.close_slice(stream_slice, Record({"id": 1}, stream_slice)) + + def test_given_different_format_and_slice_is_highest_when_close_slice_then_slice_datetime_format(): cursor = DatetimeBasedCursor( start_datetime=MinMaxDatetime(datetime="2021-01-01T00:00:00.000000+0000", parameters={}), @@ -401,7 +414,7 @@ def test_given_different_format_and_slice_is_highest_when_close_slice_then_slice parameters={}, ) - _slice = {"end_time": "2023-01-04T17:30:19.000Z"} + _slice = StreamSlice(partition={}, cursor_slice={"end_time": "2023-01-04T17:30:19.000Z"}) record_cursor_value = "2023-01-03" cursor.close_slice(_slice, Record({cursor_field: 
record_cursor_value}, _slice)) @@ -418,7 +431,7 @@ def test_given_partition_end_is_specified_and_greater_than_record_when_close_sli config=config, parameters={}, ) - stream_slice = {partition_field_end: "2025-01-01"} + stream_slice = StreamSlice(partition={}, cursor_slice={partition_field_end: "2025-01-01"}) cursor.close_slice(stream_slice, Record({cursor_field: "2020-01-01"}, stream_slice)) updated_state = cursor.get_stream_state() assert {cursor_field: "2025-01-01"} == updated_state @@ -489,6 +502,31 @@ def test_request_option(test_name, inject_into, field_name, expected_req_params, assert expected_body_data == slicer.get_request_body_data(stream_slice=stream_slice) +@pytest.mark.parametrize( + "stream_slice", [ + pytest.param(None, id="test_none_stream_slice"), + pytest.param({}, id="test_empty_stream_slice"), + ] +) +def test_request_option_with_empty_stream_slice(stream_slice): + start_request_option = RequestOption(inject_into=RequestOptionType.request_parameter, parameters={}, field_name="starttime") + end_request_option = RequestOption(inject_into=RequestOptionType.request_parameter, parameters={}, field_name="endtime") + slicer = DatetimeBasedCursor( + start_datetime=MinMaxDatetime(datetime="2021-01-01T00:00:00.000000+0000", parameters={}), + end_datetime=MinMaxDatetime(datetime="2021-01-10T00:00:00.000000+0000", parameters={}), + step="P1D", + cursor_field=InterpolatedString(string=cursor_field, parameters={}), + datetime_format=datetime_format, + cursor_granularity=cursor_granularity, + lookback_window=InterpolatedString(string="P0D", parameters={}), + start_time_option=start_request_option, + end_time_option=end_request_option, + config=config, + parameters={}, + ) + assert {} == slicer.get_request_params(stream_slice=stream_slice) + + @pytest.mark.parametrize( "test_name, input_date, date_format, date_format_granularity, expected_output_date", [ diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor.py b/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor.py index cb7857c9352a..769f3e073fcc 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor.py @@ -7,11 +7,7 @@ import pytest from airbyte_cdk.sources.declarative.incremental.cursor import Cursor -from airbyte_cdk.sources.declarative.incremental.per_partition_cursor import ( - PerPartitionCursor, - PerPartitionKeySerializer, - PerPartitionStreamSlice, -) +from airbyte_cdk.sources.declarative.incremental.per_partition_cursor import PerPartitionCursor, PerPartitionKeySerializer, StreamSlice from airbyte_cdk.sources.declarative.stream_slicers.stream_slicer import StreamSlicer from airbyte_cdk.sources.declarative.types import Record @@ -78,13 +74,13 @@ def test_given_tuples_in_json_then_deserialization_convert_to_list(): def test_stream_slice_merge_dictionaries(): - stream_slice = PerPartitionStreamSlice({"partition key": "partition value"}, {"cursor key": "cursor value"}) + stream_slice = StreamSlice(partition={"partition key": "partition value"}, cursor_slice={"cursor key": "cursor value"}) assert stream_slice == {"partition key": "partition value", "cursor key": "cursor value"} def test_overlapping_slice_keys_raise_error(): with pytest.raises(ValueError): - PerPartitionStreamSlice({"overlapping key": "partition value"}, {"overlapping key": "cursor value"}) + StreamSlice(partition={"overlapping key":
"partition value"}, cursor_slice={"overlapping key": "cursor value"}) class MockedCursorBuilder: @@ -131,7 +127,7 @@ def test_given_no_partition_when_stream_slices_then_no_slices(mocked_cursor_fact def test_given_partition_router_without_state_has_one_partition_then_return_one_slice_per_cursor_slice( mocked_cursor_factory, mocked_partition_router ): - partition = {"partition_field_1": "a value", "partition_field_2": "another value"} + partition = StreamSlice(partition={"partition_field_1": "a value", "partition_field_2": "another value"}, cursor_slice={}) mocked_partition_router.stream_slices.return_value = [partition] cursor_slices = [{"start_datetime": 1}, {"start_datetime": 2}] mocked_cursor_factory.create.return_value = MockedCursorBuilder().with_stream_slices(cursor_slices).build() @@ -139,19 +135,19 @@ def test_given_partition_router_without_state_has_one_partition_then_return_one_ slices = cursor.stream_slices() - assert list(slices) == [PerPartitionStreamSlice(partition, cursor_slice) for cursor_slice in cursor_slices] + assert list(slices) == [StreamSlice(partition=partition, cursor_slice=cursor_slice) for cursor_slice in cursor_slices] def test_given_partition_associated_with_state_when_stream_slices_then_do_not_recreate_cursor( mocked_cursor_factory, mocked_partition_router ): - partition = {"partition_field_1": "a value", "partition_field_2": "another value"} + partition = StreamSlice(partition={"partition_field_1": "a value", "partition_field_2": "another value"}, cursor_slice={}) mocked_partition_router.stream_slices.return_value = [partition] cursor_slices = [{"start_datetime": 1}] mocked_cursor_factory.create.return_value = MockedCursorBuilder().with_stream_slices(cursor_slices).build() cursor = PerPartitionCursor(mocked_cursor_factory, mocked_partition_router) - cursor.set_initial_state({"states": [{"partition": partition, "cursor": CURSOR_STATE}]}) + cursor.set_initial_state({"states": [{"partition": partition.partition, "cursor": CURSOR_STATE}]}) mocked_cursor_factory.create.assert_called_once() slices = list(cursor.stream_slices()) @@ -161,7 +157,7 @@ def test_given_partition_associated_with_state_when_stream_slices_then_do_not_re def test_given_multiple_partitions_then_each_have_their_state(mocked_cursor_factory, mocked_partition_router): first_partition = {"first_partition_key": "first_partition_value"} - mocked_partition_router.stream_slices.return_value = [first_partition, {"second_partition_key": "second_partition_value"}] + mocked_partition_router.stream_slices.return_value = [StreamSlice(partition=first_partition, cursor_slice={}), StreamSlice(partition={"second_partition_key": "second_partition_value"}, cursor_slice={})] first_cursor = MockedCursorBuilder().with_stream_slices([{CURSOR_SLICE_FIELD: "first slice cursor value"}]).build() second_cursor = MockedCursorBuilder().with_stream_slices([{CURSOR_SLICE_FIELD: "second slice cursor value"}]).build() mocked_cursor_factory.create.side_effect = [first_cursor, second_cursor] @@ -173,10 +169,10 @@ def test_given_multiple_partitions_then_each_have_their_state(mocked_cursor_fact first_cursor.stream_slices.assert_called_once() second_cursor.stream_slices.assert_called_once() assert slices == [ - PerPartitionStreamSlice( + StreamSlice( partition={"first_partition_key": "first_partition_value"}, cursor_slice={CURSOR_SLICE_FIELD: "first slice cursor value"} ), - PerPartitionStreamSlice( + StreamSlice( partition={"second_partition_key": "second_partition_value"}, cursor_slice={CURSOR_SLICE_FIELD: "second slice cursor 
value"} ), ] @@ -187,7 +183,7 @@ def test_given_stream_slices_when_get_stream_state_then_return_updated_state(moc MockedCursorBuilder().with_stream_state({CURSOR_STATE_KEY: "first slice cursor value"}).build(), MockedCursorBuilder().with_stream_state({CURSOR_STATE_KEY: "second slice cursor value"}).build(), ] - mocked_partition_router.stream_slices.return_value = [{"partition key": "first partition"}, {"partition key": "second partition"}] + mocked_partition_router.stream_slices.return_value = [StreamSlice(partition={"partition key": "first partition"}, cursor_slice={}), StreamSlice(partition={"partition key": "second partition"}, cursor_slice={})] cursor = PerPartitionCursor(mocked_cursor_factory, mocked_partition_router) list(cursor.stream_slices()) assert cursor.get_stream_state() == { @@ -201,7 +197,7 @@ def test_given_stream_slices_when_get_stream_state_then_return_updated_state(moc def test_when_get_stream_state_then_delegate_to_underlying_cursor(mocked_cursor_factory, mocked_partition_router): underlying_cursor = MockedCursorBuilder().with_stream_slices([{CURSOR_SLICE_FIELD: "first slice cursor value"}]).build() mocked_cursor_factory.create.side_effect = [underlying_cursor] - mocked_partition_router.stream_slices.return_value = [{"partition key": "first partition"}] + mocked_partition_router.stream_slices.return_value = [StreamSlice(partition={"partition key": "first partition"}, cursor_slice={})] cursor = PerPartitionCursor(mocked_cursor_factory, mocked_partition_router) first_slice = list(cursor.stream_slices())[0] @@ -213,8 +209,8 @@ def test_when_get_stream_state_then_delegate_to_underlying_cursor(mocked_cursor_ def test_close_slice(mocked_cursor_factory, mocked_partition_router): underlying_cursor = MockedCursorBuilder().with_stream_slices([{CURSOR_SLICE_FIELD: "first slice cursor value"}]).build() mocked_cursor_factory.create.side_effect = [underlying_cursor] - stream_slice = PerPartitionStreamSlice(partition={"partition key": "first partition"}, cursor_slice={}) - mocked_partition_router.stream_slices.return_value = [stream_slice.partition] + stream_slice = StreamSlice(partition={"partition key": "first partition"}, cursor_slice={}) + mocked_partition_router.stream_slices.return_value = [stream_slice] cursor = PerPartitionCursor(mocked_cursor_factory, mocked_partition_router) last_record = Mock() list(cursor.stream_slices()) # generate internal state @@ -227,8 +223,8 @@ def test_close_slice(mocked_cursor_factory, mocked_partition_router): def test_given_no_last_record_when_close_slice_then_do_not_raise_error(mocked_cursor_factory, mocked_partition_router): underlying_cursor = MockedCursorBuilder().with_stream_slices([{CURSOR_SLICE_FIELD: "first slice cursor value"}]).build() mocked_cursor_factory.create.side_effect = [underlying_cursor] - stream_slice = PerPartitionStreamSlice(partition={"partition key": "first partition"}, cursor_slice={}) - mocked_partition_router.stream_slices.return_value = [stream_slice.partition] + stream_slice = StreamSlice(partition={"partition key": "first partition"}, cursor_slice={}) + mocked_partition_router.stream_slices.return_value = [stream_slice] cursor = PerPartitionCursor(mocked_cursor_factory, mocked_partition_router) list(cursor.stream_slices()) # generate internal state @@ -241,7 +237,7 @@ def test_given_unknown_partition_when_close_slice_then_raise_error(): any_cursor_factory = Mock() any_partition_router = Mock() cursor = PerPartitionCursor(any_cursor_factory, any_partition_router) - stream_slice = 
PerPartitionStreamSlice(partition={"unknown_partition": "unknown"}, cursor_slice={}) + stream_slice = StreamSlice(partition={"unknown_partition": "unknown"}, cursor_slice={}) with pytest.raises(ValueError): cursor.close_slice(stream_slice, Record({}, stream_slice)) @@ -251,7 +247,7 @@ def test_given_unknown_partition_when_should_be_synced_then_raise_error(): any_partition_router = Mock() cursor = PerPartitionCursor(any_cursor_factory, any_partition_router) with pytest.raises(ValueError): - cursor.should_be_synced(Record({}, PerPartitionStreamSlice(partition={"unknown_partition": "unknown"}, cursor_slice={}))) + cursor.should_be_synced(Record({}, StreamSlice(partition={"unknown_partition": "unknown"}, cursor_slice={}))) def test_given_records_with_different_slice_when_is_greater_than_or_equal_then_raise_error(): @@ -260,8 +256,26 @@ def test_given_records_with_different_slice_when_is_greater_than_or_equal_then_r cursor = PerPartitionCursor(any_cursor_factory, any_partition_router) with pytest.raises(ValueError): cursor.is_greater_than_or_equal( - Record({}, PerPartitionStreamSlice(partition={"a slice": "value"}, cursor_slice={})), - Record({}, PerPartitionStreamSlice(partition={"another slice": "value"}, cursor_slice={})), + Record({}, StreamSlice(partition={"a slice": "value"}, cursor_slice={})), + Record({}, StreamSlice(partition={"another slice": "value"}, cursor_slice={})), + ) + + +@pytest.mark.parametrize( + "first_record_slice, second_record_slice", + [ + pytest.param(StreamSlice(partition={"a slice": "value"}, cursor_slice={}), None, id="second record does not have a slice"), + pytest.param(None, StreamSlice(partition={"a slice": "value"}, cursor_slice={}), id="first record does not have a slice"), + ] +) +def test_given_records_without_a_slice_when_is_greater_than_or_equal_then_raise_error(first_record_slice, second_record_slice): + any_cursor_factory = Mock() + any_partition_router = Mock() + cursor = PerPartitionCursor(any_cursor_factory, any_partition_router) + with pytest.raises(ValueError): + cursor.is_greater_than_or_equal( + Record({}, first_record_slice), + Record({}, second_record_slice) ) @@ -271,16 +285,16 @@ def test_given_slice_is_unknown_when_is_greater_than_or_equal_then_raise_error() cursor = PerPartitionCursor(any_cursor_factory, any_partition_router) with pytest.raises(ValueError): cursor.is_greater_than_or_equal( - Record({}, PerPartitionStreamSlice(partition={"a slice": "value"}, cursor_slice={})), - Record({}, PerPartitionStreamSlice(partition={"a slice": "value"}, cursor_slice={})), + Record({}, StreamSlice(partition={"a slice": "value"}, cursor_slice={})), + Record({}, StreamSlice(partition={"a slice": "value"}, cursor_slice={})), ) def test_when_is_greater_than_or_equal_then_return_underlying_cursor_response(mocked_cursor_factory, mocked_partition_router): underlying_cursor = MockedCursorBuilder().with_stream_slices([{CURSOR_SLICE_FIELD: "first slice cursor value"}]).build() mocked_cursor_factory.create.side_effect = [underlying_cursor] - stream_slice = PerPartitionStreamSlice(partition={"partition key": "first partition"}, cursor_slice={}) - mocked_partition_router.stream_slices.return_value = [stream_slice.partition] + stream_slice = StreamSlice(partition={"partition key": "first partition"}, cursor_slice={}) + mocked_partition_router.stream_slices.return_value = [stream_slice] cursor = PerPartitionCursor(mocked_cursor_factory, mocked_partition_router) first_record = Record({"first": "value"}, stream_slice) second_record = Record({"second": "value"}, 
stream_slice) @@ -290,3 +304,103 @@ def test_when_is_greater_than_or_equal_then_return_underlying_cursor_response(mo assert result == underlying_cursor.is_greater_than_or_equal.return_value underlying_cursor.is_greater_than_or_equal.assert_called_once_with(first_record, second_record) + + +@pytest.mark.parametrize( + "stream_slice, expected_output", + [ + pytest.param(StreamSlice(partition={"partition key": "first partition"}, cursor_slice={}), {"cursor": "params", "router": "params"}, id="first partition"), + pytest.param(None, None, id="no partition"), + ] +) +def test_get_request_params(mocked_cursor_factory, mocked_partition_router, stream_slice, expected_output): + underlying_cursor = MockedCursorBuilder().with_stream_slices([{CURSOR_SLICE_FIELD: "first slice cursor value"}]).build() + underlying_cursor.get_request_params.return_value = {"cursor": "params"} + mocked_cursor_factory.create.side_effect = [underlying_cursor] + mocked_partition_router.stream_slices.return_value = [stream_slice] + mocked_partition_router.get_request_params.return_value = {"router": "params"} + cursor = PerPartitionCursor(mocked_cursor_factory, mocked_partition_router) + if stream_slice: + cursor.set_initial_state({"states": [{"partition": stream_slice.partition, "cursor": CURSOR_STATE}]}) + params = cursor.get_request_params(stream_slice=stream_slice) + assert params == expected_output + mocked_partition_router.get_request_params.assert_called_once_with(stream_state=None, stream_slice=stream_slice, next_page_token=None) + underlying_cursor.get_request_params.assert_called_once_with(stream_state=None, stream_slice={}, next_page_token=None) + else: + with pytest.raises(ValueError): + cursor.get_request_params(stream_slice=stream_slice) + + +@pytest.mark.parametrize( + "stream_slice, expected_output", + [ + pytest.param(StreamSlice(partition={"partition key": "first partition"}, cursor_slice={}), {"cursor": "params", "router": "params"}, id="first partition"), + pytest.param(None, None, id="no partition"), + ] +) +def test_get_request_headers(mocked_cursor_factory, mocked_partition_router, stream_slice, expected_output): + underlying_cursor = MockedCursorBuilder().with_stream_slices([{CURSOR_SLICE_FIELD: "first slice cursor value"}]).build() + underlying_cursor.get_request_headers.return_value = {"cursor": "params"} + mocked_cursor_factory.create.side_effect = [underlying_cursor] + mocked_partition_router.stream_slices.return_value = [stream_slice] + mocked_partition_router.get_request_headers.return_value = {"router": "params"} + cursor = PerPartitionCursor(mocked_cursor_factory, mocked_partition_router) + if stream_slice: + cursor.set_initial_state({"states": [{"partition": stream_slice.partition, "cursor": CURSOR_STATE}]}) + params = cursor.get_request_headers(stream_slice=stream_slice) + assert params == expected_output + mocked_partition_router.get_request_headers.assert_called_once_with(stream_state=None, stream_slice=stream_slice, next_page_token=None) + underlying_cursor.get_request_headers.assert_called_once_with(stream_state=None, stream_slice={}, next_page_token=None) + else: + with pytest.raises(ValueError): + cursor.get_request_headers(stream_slice=stream_slice) + + +@pytest.mark.parametrize( + "stream_slice, expected_output", + [ + pytest.param(StreamSlice(partition={"partition key": "first partition"}, cursor_slice={}), {"cursor": "params", "router": "params"}, id="first partition"), + pytest.param(None, None, id="no partition"), + ] +) +def
test_get_request_body_data(mocked_cursor_factory, mocked_partition_router, stream_slice, expected_output): + underlying_cursor = MockedCursorBuilder().with_stream_slices([{CURSOR_SLICE_FIELD: "first slice cursor value"}]).build() + underlying_cursor.get_request_body_data.return_value = {"cursor": "params"} + mocked_cursor_factory.create.side_effect = [underlying_cursor] + mocked_partition_router.stream_slices.return_value = [stream_slice] + mocked_partition_router.get_request_body_data.return_value = {"router": "params"} + cursor = PerPartitionCursor(mocked_cursor_factory, mocked_partition_router) + if stream_slice: + cursor.set_initial_state({"states": [{"partition": stream_slice.partition, "cursor": CURSOR_STATE}]}) + params = cursor.get_request_body_data(stream_slice=stream_slice) + assert params == expected_output + mocked_partition_router.get_request_body_data.assert_called_once_with(stream_state=None, stream_slice=stream_slice, next_page_token=None) + underlying_cursor.get_request_body_data.assert_called_once_with(stream_state=None, stream_slice={}, next_page_token=None) + else: + with pytest.raises(ValueError): + cursor.get_request_body_data(stream_slice=stream_slice) + + +@pytest.mark.parametrize( + "stream_slice, expected_output", + [ + pytest.param(StreamSlice(partition={"partition key": "first partition"}, cursor_slice={}), {"cursor": "params", "router": "params"}, id="first partition"), + pytest.param(None, None, id="no partition"), + ] +) +def test_get_request_body_json(mocked_cursor_factory, mocked_partition_router, stream_slice, expected_output): + underlying_cursor = MockedCursorBuilder().with_stream_slices([{CURSOR_SLICE_FIELD: "first slice cursor value"}]).build() + underlying_cursor.get_request_body_json.return_value = {"cursor": "params"} + mocked_cursor_factory.create.side_effect = [underlying_cursor] + mocked_partition_router.stream_slices.return_value = [stream_slice] + mocked_partition_router.get_request_body_json.return_value = {"router": "params"} + cursor = PerPartitionCursor(mocked_cursor_factory, mocked_partition_router) + if stream_slice: + cursor.set_initial_state({"states": [{"partition": stream_slice.partition, "cursor": CURSOR_STATE}]}) + params = cursor.get_request_body_json(stream_slice=stream_slice) + assert params == expected_output + mocked_partition_router.get_request_body_json.assert_called_once_with(stream_state=None, stream_slice=stream_slice, next_page_token=None) + underlying_cursor.get_request_body_json.assert_called_once_with(stream_state=None, stream_slice={}, next_page_token=None) + else: + with pytest.raises(ValueError): + cursor.get_request_body_json(stream_slice=stream_slice) diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor_integration.py b/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor_integration.py index ef1f123fd124..e5080f1286a2 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor_integration.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/incremental/test_per_partition_cursor_integration.py @@ -4,8 +4,9 @@ from unittest.mock import patch -from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources.declarative.incremental.per_partition_cursor import PerPartitionStreamSlice +from airbyte_cdk.logger import init_logger +from airbyte_cdk.models import ConfiguredAirbyteCatalog, SyncMode, Type +from airbyte_cdk.sources.declarative.incremental.per_partition_cursor import StreamSlice
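The four request-option tests above all assert the same delegation pattern. A rough sketch of the behaviour they imply, inferred from the assertions rather than copied from the CDK source:

# Inferred behaviour: the per-partition cursor forwards the full slice to the
# partition router, forwards only the cursor_slice portion to the underlying
# cursor, merges both result mappings, and raises without a slice.
from typing import Any, Mapping, Optional


def merged_request_options(
    router_options: Mapping[str, Any],
    cursor_options: Mapping[str, Any],
    stream_slice: Optional[Mapping[str, Any]],
) -> Mapping[str, Any]:
    if stream_slice is None:
        raise ValueError("A partition is required to build request options")
    return {**router_options, **cursor_options}


print(merged_request_options({"router": "params"}, {"cursor": "params"}, {"partition key": "first partition"}))
# {'router': 'params', 'cursor': 'params'}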
from airbyte_cdk.sources.declarative.manifest_declarative_source import ManifestDeclarativeSource from airbyte_cdk.sources.declarative.retrievers.simple_retriever import SimpleRetriever from airbyte_cdk.sources.declarative.types import Record @@ -16,19 +17,35 @@ class ManifestBuilder: def __init__(self): - self._incremental_sync = None - self._partition_router = None + self._incremental_sync = {} + self._partition_router = {} + self._substream_partition_router = {} - def with_list_partition_router(self, cursor_field, partitions): - self._partition_router = { + def with_list_partition_router(self, stream_name, cursor_field, partitions): + self._partition_router[stream_name] = { "type": "ListPartitionRouter", "cursor_field": cursor_field, "values": partitions, } return self - def with_incremental_sync(self, start_datetime, end_datetime, datetime_format, cursor_field, step, cursor_granularity): - self._incremental_sync = { + def with_substream_partition_router(self, stream_name): + self._substream_partition_router[stream_name] = { + "type": "SubstreamPartitionRouter", + "parent_stream_configs": [ + { + "type": "ParentStreamConfig", + "stream": "#/definitions/Rates", + "parent_key": "id", + "partition_field": "parent_id", + + } + ] + } + return self + + def with_incremental_sync(self, stream_name, start_datetime, end_datetime, datetime_format, cursor_field, step, cursor_granularity): + self._incremental_sync[stream_name] = { "type": "DatetimeBasedCursor", "start_datetime": start_datetime, "end_datetime": end_datetime, @@ -44,8 +61,27 @@ def build(self): "version": "0.34.2", "type": "DeclarativeSource", "check": {"type": "CheckStream", "stream_names": ["Rates"]}, - "streams": [ - { + "definitions": { + "AnotherStream": { + "type": "DeclarativeStream", + "name": "AnotherStream", + "primary_key": [], + "schema_loader": { + "type": "InlineSchemaLoader", + "schema": {"$schema": "http://json-schema.org/schema#", "properties": {"id": {"type": "string"}}, "type": "object"}, + }, + "retriever": { + "type": "SimpleRetriever", + "requester": { + "type": "HttpRequester", + "url_base": "https://api.apilayer.com", + "path": "/exchangerates_data/latest", + "http_method": "GET", + }, + "record_selector": {"type": "RecordSelector", "extractor": {"type": "DpathExtractor", "field_path": []}}, + }, + }, + "Rates": { "type": "DeclarativeStream", "name": "Rates", "primary_key": [], @@ -63,7 +99,11 @@ def build(self): }, "record_selector": {"type": "RecordSelector", "extractor": {"type": "DpathExtractor", "field_path": []}}, }, - } + }, + }, + "streams": [ + {"$ref": "#/definitions/Rates"}, + {"$ref": "#/definitions/AnotherStream"} ], "spec": { "connection_specification": { @@ -77,18 +117,21 @@ def build(self): "type": "Spec", }, } - if self._incremental_sync: - manifest["streams"][0]["incremental_sync"] = self._incremental_sync - if self._partition_router: - manifest["streams"][0]["retriever"]["partition_router"] = self._partition_router + for stream_name, incremental_sync_definition in self._incremental_sync.items(): + manifest["definitions"][stream_name]["incremental_sync"] = incremental_sync_definition + for stream_name, partition_router_definition in self._partition_router.items(): + manifest["definitions"][stream_name]["retriever"]["partition_router"] = partition_router_definition + for stream_name, partition_router_definition in self._substream_partition_router.items(): + manifest["definitions"][stream_name]["retriever"]["partition_router"] = partition_router_definition return manifest def 
test_given_state_for_only_some_partition_when_stream_slices_then_create_slices_using_state_or_start_from_start_datetime(): source = ManifestDeclarativeSource( source_config=ManifestBuilder() - .with_list_partition_router("partition_field", ["1", "2"]) + .with_list_partition_router("Rates", "partition_field", ["1", "2"]) .with_incremental_sync( + "Rates", start_datetime="2022-01-01", end_datetime="2022-02-28", datetime_format="%Y-%m-%d", @@ -123,8 +166,9 @@ def test_given_state_for_only_some_partition_when_stream_slices_then_create_slic def test_given_record_for_partition_when_read_then_update_state(): source = ManifestDeclarativeSource( source_config=ManifestBuilder() - .with_list_partition_router("partition_field", ["1", "2"]) + .with_list_partition_router("Rates", "partition_field", ["1", "2"]) .with_incremental_sync( + "Rates", start_datetime="2022-01-01", end_datetime="2022-02-28", datetime_format="%Y-%m-%d", @@ -137,9 +181,11 @@ def test_given_record_for_partition_when_read_then_update_state(): stream_instance = source.streams({})[0] list(stream_instance.stream_slices(sync_mode=SYNC_MODE)) - stream_slice = PerPartitionStreamSlice({"partition_field": "1"}, {"start_time": "2022-01-01", "end_time": "2022-01-31"}) + stream_slice = StreamSlice(partition={"partition_field": "1"}, + cursor_slice={"start_time": "2022-01-01", "end_time": "2022-01-31"}) with patch.object( - SimpleRetriever, "_read_pages", side_effect=[[Record({"a record key": "a record value", CURSOR_FIELD: "2022-01-15"}, stream_slice)]] + SimpleRetriever, "_read_pages", + side_effect=[[Record({"a record key": "a record value", CURSOR_FIELD: "2022-01-15"}, stream_slice)]] ): list( stream_instance.read_records( @@ -158,3 +204,125 @@ def test_given_record_for_partition_when_read_then_update_state(): } ] } + + +def test_substream_without_input_state(): + source = ManifestDeclarativeSource( + source_config=ManifestBuilder() + .with_substream_partition_router("AnotherStream") + .with_incremental_sync( + "Rates", + start_datetime="2022-01-01", + end_datetime="2022-02-28", + datetime_format="%Y-%m-%d", + cursor_field=CURSOR_FIELD, + step="P1M", + cursor_granularity="P1D", + ) + .with_incremental_sync( + "AnotherStream", + start_datetime="2022-01-01", + end_datetime="2022-02-28", + datetime_format="%Y-%m-%d", + cursor_field=CURSOR_FIELD, + step="P1M", + cursor_granularity="P1D", + ) + .build() + ) + + stream_instance = source.streams({})[1] + + stream_slice = StreamSlice(partition={"parent_id": "1"}, + cursor_slice={"start_time": "2022-01-01", "end_time": "2022-01-31"}) + + with patch.object( + SimpleRetriever, "_read_pages", side_effect=[[Record({"id": "1", CURSOR_FIELD: "2022-01-15"}, stream_slice)], + Record({"id": "2", CURSOR_FIELD: "2022-01-15"}, stream_slice)] + ): + slices = list(stream_instance.stream_slices(sync_mode=SYNC_MODE)) + assert list(slices) == [ + StreamSlice(partition={"parent_id": "1", "parent_slice": {}, }, + cursor_slice={"start_time": "2022-01-01", "end_time": "2022-01-31"}), + StreamSlice(partition={"parent_id": "1", "parent_slice": {}, }, + cursor_slice={"start_time": "2022-02-01", "end_time": "2022-02-28"}), + ] + + +def test_substream_with_legacy_input_state(): + source = ManifestDeclarativeSource( + source_config=ManifestBuilder() + .with_substream_partition_router("AnotherStream") + .with_incremental_sync( + "Rates", + start_datetime="2022-01-01", + end_datetime="2022-02-28", + datetime_format="%Y-%m-%d", + cursor_field=CURSOR_FIELD, + step="P1M", + cursor_granularity="P1D", + ) + .with_incremental_sync( + 
"AnotherStream", + start_datetime="2022-01-01", + end_datetime="2022-02-28", + datetime_format="%Y-%m-%d", + cursor_field=CURSOR_FIELD, + step="P1M", + cursor_granularity="P1D", + ) + .build() + ) + + stream_instance = source.streams({})[1] + + input_state = { + "states": [ + { + "partition": {"item_id": "an_item_id", + "parent_slice": {"end_time": "1629640663", "start_time": "1626962264"}, + }, + "cursor": { + "updated_at": "1709058818" + } + } + ] + } + stream_instance.state = input_state + + stream_slice = StreamSlice(partition={"parent_id": "1"}, + cursor_slice={"start_time": "2022-01-01", "end_time": "2022-01-31"}) + + logger = init_logger("airbyte") + configured_catalog = ConfiguredAirbyteCatalog( + streams=[ + { + "stream": {"name": "AnotherStream", "json_schema": {}, "supported_sync_modes": ["incremental"]}, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite", + }, + ] + ) + + with patch.object( + SimpleRetriever, "_read_pages", side_effect=[ + [Record({"id": "1", CURSOR_FIELD: "2022-01-15"}, stream_slice)], + [Record({"parent_id": "1"}, stream_slice)], + [Record({"id": "2", CURSOR_FIELD: "2022-01-15"}, stream_slice)], + [Record({"parent_id": "2", CURSOR_FIELD: "2022-01-15"}, stream_slice)] + ] + ): + messages = list(source.read(logger, {}, configured_catalog, input_state)) + + output_state_message = [message for message in messages if message.type == Type.STATE][0] + + expected_state = {"states": [ + { + "cursor": { + "cursor_field": "2022-01-31" + }, + "partition": {"parent_id": "1", "parent_slice": {}} + } + ]} + + assert output_state_message.state.stream.stream_state == expected_state diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py b/airbyte-cdk/python/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py index c0eee22f471a..0a5a796566c9 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py @@ -525,23 +525,23 @@ def test_datetime_based_cursor(): assert isinstance(stream_slicer, DatetimeBasedCursor) assert stream_slicer._step == datetime.timedelta(days=10) - assert stream_slicer.cursor_field.string == "created" + assert stream_slicer._cursor_field.string == "created" assert stream_slicer.cursor_granularity == "PT0.000001S" - assert stream_slicer.lookback_window.string == "P5D" + assert stream_slicer._lookback_window.string == "P5D" assert stream_slicer.start_time_option.inject_into == RequestOptionType.request_parameter assert stream_slicer.start_time_option.field_name.eval(config=input_config | {"cursor_field": "updated_at"}) == "since_updated_at" assert stream_slicer.end_time_option.inject_into == RequestOptionType.body_json assert stream_slicer.end_time_option.field_name.eval({}) == "before_created_at" - assert stream_slicer.partition_field_start.eval({}) == "star" - assert stream_slicer.partition_field_end.eval({}) == "en" + assert stream_slicer._partition_field_start.eval({}) == "star" + assert stream_slicer._partition_field_end.eval({}) == "en" - assert isinstance(stream_slicer.start_datetime, MinMaxDatetime) + assert isinstance(stream_slicer._start_datetime, MinMaxDatetime) assert stream_slicer.start_datetime._datetime_format == "%Y-%m-%dT%H:%M:%S.%f%z" assert stream_slicer.start_datetime.datetime.string == "{{ config['start_time'] }}" assert stream_slicer.start_datetime.min_datetime.string == "{{ config['start_time'] + 
day_delta(2) }}" - assert isinstance(stream_slicer.end_datetime, MinMaxDatetime) - assert stream_slicer.end_datetime.datetime.string == "{{ config['end_time'] }}" + assert isinstance(stream_slicer._end_datetime, MinMaxDatetime) + assert stream_slicer._end_datetime.datetime.string == "{{ config['end_time'] }}" def test_stream_with_incremental_and_retriever_with_partition_router(): @@ -636,17 +636,17 @@ def test_stream_with_incremental_and_retriever_with_partition_router(): datetime_stream_slicer = stream.retriever.stream_slicer._cursor_factory.create() assert isinstance(datetime_stream_slicer, DatetimeBasedCursor) - assert isinstance(datetime_stream_slicer.start_datetime, MinMaxDatetime) - assert datetime_stream_slicer.start_datetime.datetime.string == "{{ config['start_time'] }}" - assert isinstance(datetime_stream_slicer.end_datetime, MinMaxDatetime) - assert datetime_stream_slicer.end_datetime.datetime.string == "{{ config['end_time'] }}" + assert isinstance(datetime_stream_slicer._start_datetime, MinMaxDatetime) + assert datetime_stream_slicer._start_datetime.datetime.string == "{{ config['start_time'] }}" + assert isinstance(datetime_stream_slicer._end_datetime, MinMaxDatetime) + assert datetime_stream_slicer._end_datetime.datetime.string == "{{ config['end_time'] }}" assert datetime_stream_slicer.step == "P10D" - assert datetime_stream_slicer.cursor_field.string == "created" + assert datetime_stream_slicer._cursor_field.string == "created" list_stream_slicer = stream.retriever.stream_slicer._partition_router assert isinstance(list_stream_slicer, ListPartitionRouter) assert list_stream_slicer.values == ["airbyte", "airbyte-cloud"] - assert list_stream_slicer.cursor_field.string == "a_key" + assert list_stream_slicer._cursor_field.string == "a_key" def test_incremental_data_feed(): diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_list_partition_router.py b/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_list_partition_router.py index 3a83af1eb714..b98f8f82d0b7 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_list_partition_router.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_list_partition_router.py @@ -5,6 +5,7 @@ import pytest as pytest from airbyte_cdk.sources.declarative.partition_routers.list_partition_router import ListPartitionRouter from airbyte_cdk.sources.declarative.requesters.request_option import RequestOption, RequestOptionType +from airbyte_cdk.sources.declarative.types import StreamSlice partition_values = ["customer", "store", "subscription"] cursor_field = "owner_resource" @@ -17,17 +18,23 @@ ( ["customer", "store", "subscription"], "owner_resource", - [{"owner_resource": "customer"}, {"owner_resource": "store"}, {"owner_resource": "subscription"}], + [StreamSlice(partition={"owner_resource": "customer"}, cursor_slice={}), + StreamSlice(partition={"owner_resource": "store"}, cursor_slice={}), + StreamSlice(partition={"owner_resource": "subscription"}, cursor_slice={})], ), ( '["customer", "store", "subscription"]', "owner_resource", - [{"owner_resource": "customer"}, {"owner_resource": "store"}, {"owner_resource": "subscription"}], + [StreamSlice(partition={"owner_resource": "customer"}, cursor_slice={}), + StreamSlice(partition={"owner_resource": "store"}, cursor_slice={}), + StreamSlice(partition={"owner_resource": "subscription"}, cursor_slice={})], ), ( '["customer", "store", "subscription"]', "{{ parameters['cursor_field'] }}", - 
[{"owner_resource": "customer"}, {"owner_resource": "store"}, {"owner_resource": "subscription"}], + [StreamSlice(partition={"owner_resource": "customer"}, cursor_slice={}), + StreamSlice(partition={"owner_resource": "store"}, cursor_slice={}), + StreamSlice(partition={"owner_resource": "subscription"}, cursor_slice={})], ), ], ids=[ @@ -40,6 +47,7 @@ def test_list_partition_router(partition_values, cursor_field, expected_slices): slicer = ListPartitionRouter(values=partition_values, cursor_field=cursor_field, config={}, parameters=parameters) slices = [s for s in slicer.stream_slices()] assert slices == expected_slices + assert all(isinstance(s, StreamSlice) for s in slices) @pytest.mark.parametrize( @@ -93,6 +101,22 @@ def test_request_option(request_option, expected_req_params, expected_headers, e assert expected_body_data == partition_router.get_request_body_data(stream_slice=stream_slice) +@pytest.mark.parametrize( + "stream_slice", + [ + pytest.param({}, id="test_request_option_is_empty_if_empty_stream_slice"), + pytest.param({"not the cursor": "value"}, id="test_request_option_is_empty_if_the_stream_slice_does_not_have_cursor_field"), + pytest.param(None, id="test_request_option_is_empty_if_no_stream_slice") + ] +) +def test_request_option_is_empty_if_no_stream_slice(stream_slice): + request_option = RequestOption(inject_into=RequestOptionType.body_data, parameters={}, field_name="owner_resource") + partition_router = ListPartitionRouter( + values=partition_values, cursor_field=cursor_field, config={}, request_option=request_option, parameters={} + ) + assert {} == partition_router.get_request_body_data(stream_slice=stream_slice) + + @pytest.mark.parametrize( "field_name_interpolation, expected_request_params", [ diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_single_partition_router.py b/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_single_partition_router.py index 1f9570955038..008283d5dced 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_single_partition_router.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_single_partition_router.py @@ -3,6 +3,7 @@ # from airbyte_cdk.sources.declarative.partition_routers.single_partition_router import SinglePartitionRouter +from airbyte_cdk.sources.declarative.types import StreamSlice def test(): @@ -10,4 +11,4 @@ def test(): stream_slices = iterator.stream_slices() next_slice = next(stream_slices) - assert next_slice == dict() + assert next_slice == StreamSlice(partition={}, cursor_slice={}) diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_substream_partition_router.py b/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_substream_partition_router.py index 618a0fdb23e9..664dcaa73421 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_substream_partition_router.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/partition_routers/test_substream_partition_router.py @@ -6,10 +6,11 @@ import pytest as pytest from airbyte_cdk.models import AirbyteMessage, AirbyteRecordMessage, SyncMode, Type +from airbyte_cdk.sources.declarative.declarative_stream import DeclarativeStream +from airbyte_cdk.sources.declarative.incremental.per_partition_cursor import StreamSlice from airbyte_cdk.sources.declarative.partition_routers.substream_partition_router import ParentStreamConfig, SubstreamPartitionRouter from 
airbyte_cdk.sources.declarative.requesters.request_option import RequestOption, RequestOptionType from airbyte_cdk.sources.declarative.types import Record -from airbyte_cdk.sources.streams.core import Stream parent_records = [{"id": 1, "data": "data1"}, {"id": 2, "data": "data2"}] more_records = [{"id": 10, "data": "data10", "slice": "second_parent"}, {"id": 20, "data": "data20", "slice": "second_parent"}] @@ -19,10 +20,10 @@ data_third_parent_slice = [] all_parent_data = data_first_parent_slice + data_second_parent_slice + data_third_parent_slice parent_slices = [{"slice": "first"}, {"slice": "second"}, {"slice": "third"}] -second_parent_stream_slice = [{"slice": "second_parent"}] +second_parent_stream_slice = [StreamSlice(partition={"slice": "second_parent"}, cursor_slice={})] -class MockStream(Stream): +class MockStream(DeclarativeStream): def __init__(self, slices, records, name): self._slices = slices self._records = records @@ -38,8 +39,12 @@ def primary_key(self) -> Optional[Union[str, List[str], List[List[str]]]]: def stream_slices( self, *, sync_mode: SyncMode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None - ) -> Iterable[Optional[Mapping[str, Any]]]: - yield from self._slices + ) -> Iterable[Optional[StreamSlice]]: + for s in self._slices: + if isinstance(s, StreamSlice): + yield s + else: + yield StreamSlice(partition=s, cursor_slice={}) def read_records( self, @@ -100,6 +105,22 @@ def read_records( {"parent_slice": {"slice": "second"}, "first_stream_id": 2}, ], ), + ( + [ + ParentStreamConfig( + stream=MockStream([StreamSlice(partition=p, cursor_slice={"start": 0, "end": 1}) for p in parent_slices], all_parent_data, "first_stream"), + parent_key="id", + partition_field="first_stream_id", + parameters={}, + config={}, + ) + ], + [ + {"parent_slice": {"slice": "first"}, "first_stream_id": 0}, + {"parent_slice": {"slice": "first"}, "first_stream_id": 1}, + {"parent_slice": {"slice": "second"}, "first_stream_id": 2}, + ], + ), ( [ ParentStreamConfig( @@ -164,6 +185,7 @@ def read_records( "test_single_parent_slices_with_records", "test_with_parent_slices_and_records", "test_multiple_parent_streams", + "test_cursor_values_are_removed_from_parent_slices", "test_missed_parent_key", "test_dpath_extraction", ], diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/test_declarative_stream.py b/airbyte-cdk/python/unit_tests/sources/declarative/test_declarative_stream.py index a5da7e092139..e5e16e66044a 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/test_declarative_stream.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/test_declarative_stream.py @@ -4,20 +4,21 @@ from unittest.mock import MagicMock +import pytest from airbyte_cdk.models import AirbyteLogMessage, AirbyteMessage, AirbyteTraceMessage, Level, SyncMode, TraceType, Type from airbyte_cdk.sources.declarative.declarative_stream import DeclarativeStream +from airbyte_cdk.sources.declarative.types import StreamSlice SLICE_NOT_CONSIDERED_FOR_EQUALITY = {} +_name = "stream" +_primary_key = "pk" +_cursor_field = "created_at" +_json_schema = {"name": {"type": "string"}} -def test_declarative_stream(): - name = "stream" - primary_key = "pk" - cursor_field = "created_at" - schema_loader = MagicMock() - json_schema = {"name": {"type": "string"}} - schema_loader.get_json_schema.return_value = json_schema +def test_declarative_stream(): + schema_loader = _schema_loader() state = MagicMock() records = [ @@ -27,9 +28,9 @@ def test_declarative_stream(): AirbyteMessage(type=Type.TRACE, 
trace=AirbyteTraceMessage(type=TraceType.ERROR, emitted_at=12345)), ] stream_slices = [ - {"date": "2021-01-01"}, - {"date": "2021-01-02"}, - {"date": "2021-01-03"}, + StreamSlice(partition={}, cursor_slice={"date": "2021-01-01"}), + StreamSlice(partition={}, cursor_slice={"date": "2021-01-02"}), + StreamSlice(partition={}, cursor_slice={"date": "2021-01-03"}), ] retriever = MagicMock() @@ -40,8 +41,8 @@ def test_declarative_stream(): config = {"api_key": "open_sesame"} stream = DeclarativeStream( - name=name, - primary_key=primary_key, + name=_name, + primary_key=_primary_key, stream_cursor_field="{{ parameters['cursor_field'] }}", schema_loader=schema_loader, retriever=retriever, @@ -49,14 +50,37 @@ def test_declarative_stream(): parameters={"cursor_field": "created_at"}, ) - assert stream.name == name - assert stream.get_json_schema() == json_schema + assert stream.name == _name + assert stream.get_json_schema() == _json_schema assert stream.state == state input_slice = stream_slices[0] - assert list(stream.read_records(SyncMode.full_refresh, cursor_field, input_slice, state)) == records - assert stream.primary_key == primary_key - assert stream.cursor_field == cursor_field - assert stream.stream_slices(sync_mode=SyncMode.incremental, cursor_field=cursor_field, stream_state=None) == stream_slices + assert list(stream.read_records(SyncMode.full_refresh, _cursor_field, input_slice, state)) == records + assert stream.primary_key == _primary_key + assert stream.cursor_field == _cursor_field + assert stream.stream_slices(sync_mode=SyncMode.incremental, cursor_field=_cursor_field, stream_state=None) == stream_slices + + +def test_read_records_raises_exception_if_stream_slice_is_not_per_partition_stream_slice(): + schema_loader = _schema_loader() + + retriever = MagicMock() + retriever.state = MagicMock() + retriever.read_records.return_value = [] + stream_slice = {"date": "2021-01-01"} + retriever.stream_slices.return_value = [stream_slice] + + stream = DeclarativeStream( + name=_name, + primary_key=_primary_key, + stream_cursor_field="{{ parameters['cursor_field'] }}", + schema_loader=schema_loader, + retriever=retriever, + config={}, + parameters={"cursor_field": "created_at"}, + ) + + with pytest.raises(ValueError): + list(stream.read_records(SyncMode.full_refresh, _cursor_field, stream_slice, MagicMock())) def test_state_checkpoint_interval(): @@ -71,3 +95,9 @@ def test_state_checkpoint_interval(): ) assert stream.state_checkpoint_interval is None + + +def _schema_loader(): + schema_loader = MagicMock() + schema_loader.get_json_schema.return_value = _json_schema + return schema_loader diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/test_types.py b/airbyte-cdk/python/unit_tests/sources/declarative/test_types.py new file mode 100644 index 000000000000..8fea30c128c7 --- /dev/null +++ b/airbyte-cdk/python/unit_tests/sources/declarative/test_types.py @@ -0,0 +1,39 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
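Before the body of the new test_types.py, a short usage sketch of the StreamSlice semantics under test (illustrative only; it assumes the patched airbyte_cdk package is importable):

# StreamSlice reads as a single mapping over partition | cursor_slice,
# rejects writes, and unwraps nested StreamSlices through its
# partition/cursor_slice properties -- what the parametrized tests below verify.
from airbyte_cdk.sources.declarative.types import StreamSlice

s = StreamSlice(partition={"parent_id": "1"}, cursor_slice={"start": "2022-01-01"})
assert s["parent_id"] == "1" and s["start"] == "2022-01-01"
assert s.partition == {"parent_id": "1"} and s.cursor_slice == {"start": "2022-01-01"}

nested = StreamSlice(partition=s, cursor_slice={})
assert nested.partition == {"parent_id": "1"}  # unwrapped to the innermost mapping

try:
    s["parent_id"] = "2"
except ValueError:
    pass  # __setitem__ always raises: slices are immutable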
+ +import pytest +from airbyte_cdk.sources.declarative.types import StreamSlice + + +@pytest.mark.parametrize( + "stream_slice, expected_partition", + [ + pytest.param(StreamSlice(partition={}, cursor_slice={}), {}, id="test_partition_with_empty_partition"), + pytest.param(StreamSlice(partition=StreamSlice(partition={}, cursor_slice={}), cursor_slice={}), {}, id="test_partition_nested_empty"), + pytest.param(StreamSlice(partition={"key": "value"}, cursor_slice={}), {"key": "value"}, id="test_partition_with_mapping_partition"), + pytest.param(StreamSlice(partition={}, cursor_slice={"cursor": "value"}), {}, id="test_partition_with_only_cursor"), + pytest.param(StreamSlice(partition=StreamSlice(partition={}, cursor_slice={}), cursor_slice={"cursor": "value"}), {}, id="test_partition_nested_empty_and_cursor_value_mapping"), + pytest.param(StreamSlice(partition=StreamSlice(partition={}, cursor_slice={"cursor": "value"}), cursor_slice={}), {}, id="test_partition_nested_empty_and_cursor_value"), + ] +) +def test_partition(stream_slice, expected_partition): + partition = stream_slice.partition + + assert partition == expected_partition + + +@pytest.mark.parametrize( + "stream_slice, expected_cursor_slice", + [ + pytest.param(StreamSlice(partition={}, cursor_slice={}), {}, id="test_cursor_slice_with_empty_cursor"), + pytest.param(StreamSlice(partition={}, cursor_slice=StreamSlice(partition={}, cursor_slice={})), {}, id="test_cursor_slice_nested_empty"), + + pytest.param(StreamSlice(partition={}, cursor_slice={"key": "value"}), {"key": "value"}, id="test_cursor_slice_with_mapping_cursor_slice"), + pytest.param(StreamSlice(partition={"partition": "value"}, cursor_slice={}), {}, id="test_cursor_slice_with_only_partition"), + pytest.param(StreamSlice(partition={"partition": "value"}, cursor_slice=StreamSlice(partition={}, cursor_slice={})), {}, id="test_cursor_slice_nested_empty_and_partition_mapping"), + pytest.param(StreamSlice(partition=StreamSlice(partition={"partition": "value"}, cursor_slice={}), cursor_slice={}), {}, id="test_cursor_slice_nested_empty_and_partition"), + ] +) +def test_cursor_slice(stream_slice, expected_cursor_slice): + cursor_slice = stream_slice.cursor_slice + + assert cursor_slice == expected_cursor_slice From 1d9e5463c6e331730285131a1022525d8b5d7251 Mon Sep 17 00:00:00 2001 From: girarda Date: Wed, 6 Mar 2024 02:58:12 +0000 Subject: [PATCH 094/172] =?UTF-8?q?=F0=9F=A4=96=20Bump=20patch=20version?= =?UTF-8?q?=20of=20Python=20CDK?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- airbyte-cdk/python/.bumpversion.cfg | 2 +- airbyte-cdk/python/CHANGELOG.md | 3 +++ airbyte-cdk/python/Dockerfile | 4 ++-- airbyte-cdk/python/setup.py | 2 +- 4 files changed, 7 insertions(+), 4 deletions(-) diff --git a/airbyte-cdk/python/.bumpversion.cfg b/airbyte-cdk/python/.bumpversion.cfg index 865bbfae53c1..f050d088b6a3 100644 --- a/airbyte-cdk/python/.bumpversion.cfg +++ b/airbyte-cdk/python/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.68.1 +current_version = 0.68.2 commit = False [bumpversion:file:setup.py] diff --git a/airbyte-cdk/python/CHANGELOG.md b/airbyte-cdk/python/CHANGELOG.md index 2c168c8ade77..ceb58df39fc6 100644 --- a/airbyte-cdk/python/CHANGELOG.md +++ b/airbyte-cdk/python/CHANGELOG.md @@ -1,5 +1,8 @@ # Changelog +## 0.68.2 +low-code: remove parent cursor component from incremental substreams' state message + ## 0.68.1 no-op republish of 0.68.0 diff --git a/airbyte-cdk/python/Dockerfile b/airbyte-cdk/python/Dockerfile
index dfdf5cd1c2d4..e79934ce9c55 100644 --- a/airbyte-cdk/python/Dockerfile +++ b/airbyte-cdk/python/Dockerfile @@ -10,7 +10,7 @@ RUN apk --no-cache upgrade \ && apk --no-cache add tzdata build-base # install airbyte-cdk -RUN pip install --prefix=/install airbyte-cdk==0.68.1 +RUN pip install --prefix=/install airbyte-cdk==0.68.2 # build a clean environment FROM base @@ -32,5 +32,5 @@ ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] # needs to be the same as CDK -LABEL io.airbyte.version=0.68.1 +LABEL io.airbyte.version=0.68.2 LABEL io.airbyte.name=airbyte/source-declarative-manifest diff --git a/airbyte-cdk/python/setup.py b/airbyte-cdk/python/setup.py index e61fe0aa93b0..b3754c7859e8 100644 --- a/airbyte-cdk/python/setup.py +++ b/airbyte-cdk/python/setup.py @@ -36,7 +36,7 @@ name="airbyte-cdk", # The version of the airbyte-cdk package is used at runtime to validate manifests. That validation must be # updated if our semver format changes such as using release candidate versions. - version="0.68.1", + version="0.68.2", description="A framework for writing Airbyte Connectors.", long_description=README, long_description_content_type="text/markdown", From e10826ce83c2867554f5abcd843ec77d3fc21b10 Mon Sep 17 00:00:00 2001 From: Artem Inzhyyants <36314070+artem1205@users.noreply.github.com> Date: Wed, 6 Mar 2024 10:44:12 +0100 Subject: [PATCH 095/172] =?UTF-8?q?=F0=9F=9A=A8=F0=9F=9A=A8=20Source=20Fac?= =?UTF-8?q?ebook=20Marketing:=20update=20API=20to=20`v19.0`=20(#35746)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Artem Inzhyyants --- .../acceptance-test-config.yml | 2 +- .../integration_tests/spec.json | 28 +++- .../integration_tests/test_streams.py | 42 ++---- .../source-facebook-marketing/metadata.yaml | 20 ++- .../source-facebook-marketing/poetry.lock | 23 ++- .../source-facebook-marketing/pyproject.toml | 5 +- .../schemas/ads_insights.json | 33 +---- .../unit_tests/conftest.py | 3 +- .../unit_tests/integration/request_builder.py | 2 +- .../test_ads_insights_action_product_id.py | 136 +++++------------- .../unit_tests/integration/test_videos.py | 125 +++++++--------- .../unit_tests/test_api.py | 28 +--- .../unit_tests/test_async_job.py | 39 ++--- .../unit_tests/test_async_job_manager.py | 96 ++++--------- .../unit_tests/test_base_insight_streams.py | 88 +++--------- .../unit_tests/test_base_streams.py | 26 +--- .../unit_tests/test_client.py | 30 +--- .../unit_tests/test_config_migrations.py | 1 - .../unit_tests/test_errors.py | 82 +++-------- .../unit_tests/test_source.py | 44 ++---- .../unit_tests/test_streams.py | 26 +--- .../unit_tests/test_utils.py | 9 +- .../sources/facebook-marketing-migrations.md | 37 +++++ .../sources/facebook-marketing.md | 3 +- 24 files changed, 314 insertions(+), 614 deletions(-) create mode 100644 docs/integrations/sources/facebook-marketing-migrations.md diff --git a/airbyte-integrations/connectors/source-facebook-marketing/acceptance-test-config.yml b/airbyte-integrations/connectors/source-facebook-marketing/acceptance-test-config.yml index 9417cf6a8600..c1ab05885be6 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-facebook-marketing/acceptance-test-config.yml @@ -37,7 +37,7 @@ acceptance_tests: incremental: tests: - config_path: "secrets/config.json" - timeout_seconds: 4800 + timeout_seconds: 6000 future_state: future_state_path: 
"integration_tests/future_state.json" full_refresh: diff --git a/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/spec.json b/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/spec.json index 4649cf17fdfe..328659d53acc 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/spec.json +++ b/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/spec.json @@ -11,11 +11,11 @@ "order": 0, "pattern_descriptor": "The Ad Account ID must be a number.", "examples": ["111111111111111"], - "type": "array", "minItems": 1, + "type": "array", "items": { - "pattern": "^[0-9]+$", - "type": "string" + "type": "string", + "pattern": "^[0-9]+$" }, "uniqueItems": true }, @@ -175,6 +175,7 @@ "catalog_segment_value_omni_purchase_roas", "catalog_segment_value_website_purchase_roas", "clicks", + "conversion_lead_rate", "conversion_rate_ranking", "conversion_values", "conversions", @@ -185,6 +186,7 @@ "cost_per_action_type", "cost_per_ad_click", "cost_per_conversion", + "cost_per_conversion_lead", "cost_per_dda_countby_convs", "cost_per_estimated_ad_recallers", "cost_per_inline_link_click", @@ -229,6 +231,9 @@ "interactive_component_tap", "labels", "location", + "marketing_messages_cost_per_delivered", + "marketing_messages_cost_per_link_btn_click", + "marketing_messages_spend", "mobile_app_purchase_roas", "objective", "optimization_goal", @@ -238,9 +243,6 @@ "purchase_roas", "qualifying_question_qualify_answer_rate", "quality_ranking", - "quality_score_ectr", - "quality_score_ecvr", - "quality_score_organic", "reach", "social_spend", "spend", @@ -309,7 +311,16 @@ "image_asset", "impression_device", "is_conversion_id_modeled", + "landing_destination", "link_url_asset", + "marketing_messages_btn_name", + "mdsa_landing_destination", + "media_asset_url", + "media_creator", + "media_destination_url", + "media_format", + "media_origin_url", + "media_text_content", "mmm", "place_page_id", "platform_position", @@ -320,6 +331,8 @@ "region", "skan_campaign_id", "skan_conversion_id", + "skan_version", + "standard_event_content_type", "title_asset", "video_asset" ] @@ -343,7 +356,8 @@ "action_target_id", "action_type", "action_video_sound", - "action_video_type" + "action_video_type", + "standard_event_content_type" ] } }, diff --git a/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/test_streams.py b/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/test_streams.py index 30d7784bb579..073577cea786 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/test_streams.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/test_streams.py @@ -31,11 +31,7 @@ def configured_catalog_fixture(config) -> ConfiguredAirbyteCatalog: streams = [] # Prefer incremental if available for stream in catalog.streams: - sync_mode = ( - SyncMode.incremental - if SyncMode.incremental in stream.supported_sync_modes - else SyncMode.full_refresh - ) + sync_mode = SyncMode.incremental if SyncMode.incremental in stream.supported_sync_modes else SyncMode.full_refresh streams.append( ConfiguredAirbyteStream( stream=stream, @@ -56,9 +52,7 @@ class TestFacebookMarketingSource: ("ad_sets", "23846541706990398"), ], ) - def test_streams_with_include_deleted( - self, stream_name, deleted_id, config_with_include_deleted, configured_catalog - ): + def test_streams_with_include_deleted(self, stream_name, deleted_id, config_with_include_deleted, 
configured_catalog): catalog = self._slice_catalog(configured_catalog, {stream_name}) records, states = self._read_records(config_with_include_deleted, catalog) deleted_records = list(filter(self._deleted_record, records)) @@ -67,16 +61,10 @@ def test_streams_with_include_deleted( assert states, "incremental read should produce states" for name, state in states[-1].state.data.items(): - assert ( - "filter_statuses" in state[account_id] - ), f"State for {name} should include `filter_statuses` flag" + assert "filter_statuses" in state[account_id], f"State for {name} should include `filter_statuses` flag" - assert ( - deleted_records - ), f"{stream_name} stream should have deleted records returned" - assert ( - is_specific_deleted_pulled - ), f"{stream_name} stream should have a deleted record with id={deleted_id}" + assert deleted_records, f"{stream_name} stream should have deleted records returned" + assert is_specific_deleted_pulled, f"{stream_name} stream should have a deleted record with id={deleted_id}" @pytest.mark.parametrize( "stream_name, deleted_num, filter_statuses", @@ -146,14 +134,10 @@ def test_streams_with_include_deleted_and_state( value["filter_statuses"] = filter_statuses catalog = self._slice_catalog(configured_catalog, {stream_name}) - records, states = self._read_records( - config_with_include_deleted, catalog, state=state - ) + records, states = self._read_records(config_with_include_deleted, catalog, state=state) deleted_records = list(filter(self._deleted_record, records)) - assert ( - len(deleted_records) == deleted_num - ), f"{stream_name} should have {deleted_num} deleted records returned" + assert len(deleted_records) == deleted_num, f"{stream_name} should have {deleted_num} deleted records returned" @staticmethod def _deleted_record(record: AirbyteMessage) -> bool: @@ -164,9 +148,7 @@ def _object_id(record: AirbyteMessage) -> str: return str(record.record.data["id"]) @staticmethod - def _slice_catalog( - catalog: ConfiguredAirbyteCatalog, streams: Set[str] - ) -> ConfiguredAirbyteCatalog: + def _slice_catalog(catalog: ConfiguredAirbyteCatalog, streams: Set[str]) -> ConfiguredAirbyteCatalog: sliced_catalog = ConfiguredAirbyteCatalog(streams=[]) for stream in catalog.streams: if stream.stream.name in streams: @@ -174,14 +156,10 @@ def _slice_catalog( return sliced_catalog @staticmethod - def _read_records( - conf, catalog, state=None - ) -> Tuple[List[AirbyteMessage], List[AirbyteMessage]]: + def _read_records(conf, catalog, state=None) -> Tuple[List[AirbyteMessage], List[AirbyteMessage]]: records = [] states = [] - for message in SourceFacebookMarketing().read( - logging.getLogger("airbyte"), conf, catalog, state=state - ): + for message in SourceFacebookMarketing().read(logging.getLogger("airbyte"), conf, catalog, state=state): if message.type == Type.RECORD: records.append(message) elif message.type == Type.STATE: diff --git a/airbyte-integrations/connectors/source-facebook-marketing/metadata.yaml b/airbyte-integrations/connectors/source-facebook-marketing/metadata.yaml index 4f3f93f23537..4db3a3242c63 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/metadata.yaml +++ b/airbyte-integrations/connectors/source-facebook-marketing/metadata.yaml @@ -6,11 +6,11 @@ data: hosts: - graph.facebook.com connectorBuildOptions: - baseImage: docker.io/airbyte/python-connector-base:1.1.0@sha256:bd98f6505c6764b1b5f99d3aedc23dfc9e9af631a62533f60eb32b1d3dbab20c + baseImage: 
docker.io/airbyte/python-connector-base:1.2.0@sha256:c22a9d97464b69d6ef01898edf3f8612dc11614f05a84984451dde195f337db9 connectorSubtype: api connectorType: source definitionId: e7778cfc-e97c-4458-9ecb-b4f2bba8946c - dockerImageTag: 1.4.2 + dockerImageTag: 2.0.0 dockerRepository: airbyte/source-facebook-marketing documentationUrl: https://docs.airbyte.com/integrations/sources/facebook-marketing githubIssueLabel: source-facebook-marketing @@ -27,6 +27,22 @@ data: oss: enabled: true releaseStage: generally_available + releases: + breakingChanges: + 2.0.0: + message: "All Ads-Insights-* streams now have updated schemas. Users will need to retest source configuration, refresh the source schema and reset affected streams after upgrading. For more information [visit](https://docs.airbyte.com/integrations/sources/facebook-marketing-migrations)" + upgradeDeadline: "2024-03-17" + scopedImpact: + - scopeType: stream + impactedScopes: + - "ads_insights" + - "ads_insights_age_and_gender" + - "ads_insights_action_type" + - "ads_insights_country" + - "ads_insights_platform_and_device" + - "ads_insights_region" + - "ads_insights_dma" + - "ads_insights_action_product_id" suggestedStreams: streams: - ads_insights diff --git a/airbyte-integrations/connectors/source-facebook-marketing/poetry.lock b/airbyte-integrations/connectors/source-facebook-marketing/poetry.lock index 3cfd31f7261a..64bb0c4e7927 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/poetry.lock +++ b/airbyte-integrations/connectors/source-facebook-marketing/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "aiohttp" @@ -432,6 +432,18 @@ files = [ {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, ] +[[package]] +name = "enum34" +version = "1.1.10" +description = "Python 3.4 Enum backported to 3.3, 3.2, 3.1, 2.7, 2.6, 2.5, and 2.4" +optional = false +python-versions = "*" +files = [ + {file = "enum34-1.1.10-py2-none-any.whl", hash = "sha256:a98a201d6de3f2ab3db284e70a33b0f896fbf35f8086594e8c9e74b909058d53"}, + {file = "enum34-1.1.10-py3-none-any.whl", hash = "sha256:c3858660960c984d6ab0ebad691265180da2b43f07e061c0f8dca9ef3cffd328"}, + {file = "enum34-1.1.10.tar.gz", hash = "sha256:cce6a7477ed816bd2542d03d53db9f0db935dd013b70f336a95c73979289f248"}, +] + [[package]] name = "exceptiongroup" version = "1.2.0" @@ -448,18 +460,19 @@ test = ["pytest (>=6)"] [[package]] name = "facebook-business" -version = "17.0.0" +version = "19.0.0" description = "Facebook Business SDK" optional = false python-versions = "*" files = [ - {file = "facebook_business-17.0.0-py3-none-any.whl", hash = "sha256:f4b87a940a068d94ace6dc2dde7e0d43602264da18375ebfb0a8059a48a47012"}, - {file = "facebook_business-17.0.0.tar.gz", hash = "sha256:6a1c11185384325b49d640a7abb60e610b8f8561a8add1206d8e7e5f24626cf2"}, + {file = "facebook_business-19.0.0-py3-none-any.whl", hash = "sha256:591deedc010cefeb49151bbfadf72659cf262056072b437ca3dbf0ba37b3fa43"}, + {file = "facebook_business-19.0.0.tar.gz", hash = "sha256:e12ea2a13d1703922d1b5d3921bc67bd10176596770ce154f287019738775800"}, ] [package.dependencies] aiohttp = {version = "*", markers = "python_version >= \"3.5.3\""} curlify = ">=2.1.0" +enum34 = {version = "*", markers = "python_version >= \"3\""} pycountry = ">=19.8.18" requests = ">=2.3.0" six = ">=1.7.3" @@ -1507,4 +1520,4 @@
multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "5753d144dc008fabd12b18d9e28d148ee96976d7b83cdcf0a82b3ea22f8f315f" +content-hash = "cac4564b0e204ad1f4b5d0d3abce8cb436e80193351a8253cf3c27b677ee908e" diff --git a/airbyte-integrations/connectors/source-facebook-marketing/pyproject.toml b/airbyte-integrations/connectors/source-facebook-marketing/pyproject.toml index 04043b38a353..fb1913198496 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/pyproject.toml +++ b/airbyte-integrations/connectors/source-facebook-marketing/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "1.4.2" +version = "2.0.0" name = "source-facebook-marketing" description = "Source implementation for Facebook Marketing." authors = [ "Airbyte ",] @@ -18,9 +18,8 @@ include = "source_facebook_marketing" [tool.poetry.dependencies] python = "^3.9,<3.12" airbyte-cdk = "==0.62.1" -facebook-business = "==17.0.0" +facebook-business = "19.0.0" cached-property = "==1.5.2" -pendulum = "==2.1.2" [tool.poetry.scripts] source-facebook-marketing = "source_facebook_marketing.run:run" diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ads_insights.json b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ads_insights.json index 1428e2963307..5578558f1213 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ads_insights.json +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ads_insights.json @@ -33,9 +33,6 @@ "adset_name": { "type": ["null", "string"] }, - "age_targeting": { - "type": ["null", "string"] - }, "attribution_setting": { "type": ["null", "string"] }, @@ -81,6 +78,9 @@ "clicks": { "type": ["null", "integer"] }, + "conversion_lead_rate": { + "type": ["null", "number"] + }, "conversion_rate_ranking": { "type": ["null", "string"] }, @@ -111,6 +111,9 @@ "cost_per_conversion": { "$ref": "ads_action_stats.json" }, + "cost_per_conversion_lead": { + "type": ["null", "number"] + }, "cost_per_estimated_ad_recallers": { "type": ["null", "number"] }, @@ -165,24 +168,9 @@ "engagement_rate_ranking": { "type": ["null", "string"] }, - "estimated_ad_recall_rate": { - "type": ["null", "number"] - }, - "estimated_ad_recall_rate_lower_bound": { - "type": ["null", "number"] - }, - "estimated_ad_recall_rate_upper_bound": { - "type": ["null", "number"] - }, "estimated_ad_recallers": { "type": ["null", "number"] }, - "estimated_ad_recallers_lower_bound": { - "type": ["null", "number"] - }, - "estimated_ad_recallers_upper_bound": { - "type": ["null", "number"] - }, "frequency": { "type": ["null", "number"] }, @@ -192,9 +180,6 @@ "full_view_reach": { "type": ["null", "number"] }, - "gender_targeting": { - "type": ["null", "string"] - }, "impressions": { "type": ["null", "integer"] }, @@ -216,12 +201,6 @@ "instant_experience_outbound_clicks": { "$ref": "ads_action_stats.json" }, - "labels": { - "type": ["null", "string"] - }, - "location": { - "type": ["null", "string"] - }, "mobile_app_purchase_roas": { "$ref": "ads_action_stats.json" }, diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/conftest.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/conftest.py index 7c0d34ae8139..7a7cbaa39b9e 100644 --- 
a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/conftest.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/conftest.py @@ -70,8 +70,7 @@ def api_fixture(some_config, requests_mock, fb_account_response): ) requests_mock.register_uri( "GET", - FacebookSession.GRAPH - + f"/{FB_API_VERSION}/act_{some_config['account_ids'][0]}/", + FacebookSession.GRAPH + f"/{FB_API_VERSION}/act_{some_config['account_ids'][0]}/", [fb_account_response], ) return api diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/integration/request_builder.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/integration/request_builder.py index a07c81b13448..073fad339a34 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/integration/request_builder.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/integration/request_builder.py @@ -70,7 +70,7 @@ def with_body(self, body: Union[str, bytes, Mapping[str, Any]]) -> RequestBuilde def build(self) -> HttpRequest: return HttpRequest( - url=f"https://graph.facebook.com/v17.0/{self._account_sub_path()}{self._resource}", + url=f"https://graph.facebook.com/v19.0/{self._account_sub_path()}{self._resource}", query_params=self._query_params, body=self._body, ) diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/integration/test_ads_insights_action_product_id.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/integration/test_ads_insights_action_product_id.py index 2fe71e37f271..bc0907575c56 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/integration/test_ads_insights_action_product_id.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/integration/test_ads_insights_action_product_id.py @@ -63,7 +63,6 @@ def _job_start_request( "ad_name", "adset_id", "adset_name", - "age_targeting", "attribution_setting", "auction_bid", "auction_competitiveness", @@ -79,6 +78,7 @@ def _job_start_request( "catalog_segment_value_omni_purchase_roas", "catalog_segment_value_website_purchase_roas", "clicks", + "conversion_lead_rate", "conversion_rate_ranking", "conversion_values", "conversions", @@ -89,6 +89,7 @@ def _job_start_request( "cost_per_action_type", "cost_per_ad_click", "cost_per_conversion", + "cost_per_conversion_lead", "cost_per_estimated_ad_recallers", "cost_per_inline_link_click", "cost_per_inline_post_engagement", @@ -106,16 +107,10 @@ def _job_start_request( "date_start", "date_stop", "engagement_rate_ranking", - "estimated_ad_recall_rate", - "estimated_ad_recall_rate_lower_bound", - "estimated_ad_recall_rate_upper_bound", "estimated_ad_recallers", - "estimated_ad_recallers_lower_bound", - "estimated_ad_recallers_upper_bound", "frequency", "full_view_impressions", "full_view_reach", - "gender_targeting", "impressions", "inline_link_click_ctr", "inline_link_clicks", @@ -123,8 +118,6 @@ def _job_start_request( "instant_experience_clicks_to_open", "instant_experience_clicks_to_start", "instant_experience_outbound_clicks", - "labels", - "location", "mobile_app_purchase_roas", "objective", "optimization_goal", @@ -168,9 +161,7 @@ def _job_start_request( "action_attribution_windows": ["1d_click", "7d_click", "28d_click", "1d_view", "7d_view", "28d_view"], "time_range": {"since": since, "until": until}, } - return RequestBuilder.get_insights_endpoint(access_token=ACCESS_TOKEN, account_id=account_id).with_body( - 
encode_request_body(body) - ) + return RequestBuilder.get_insights_endpoint(access_token=ACCESS_TOKEN, account_id=account_id).with_body(encode_request_body(body)) def _job_status_request(report_run_ids: Union[str, List[str]]) -> RequestBuilder: @@ -206,12 +197,9 @@ def _job_status_response( job_ids = [job_ids] body = [ { - "body": json.dumps( - { - "id": job_id, "account_id": account_id, "async_status": status, "async_percent_completion": 100 - } - ), - } for job_id in job_ids + "body": json.dumps({"id": job_id, "account_id": account_id, "async_status": status, "async_percent_completion": 100}), + } + for job_id in job_ids ] return build_response(body=body, status_code=HTTPStatus.OK) @@ -236,7 +224,6 @@ def _ads_insights_action_product_id_record() -> RecordBuilder: @freezegun.freeze_time(NOW.isoformat()) class TestFullRefresh(TestCase): - @staticmethod def _read(config_: ConfigBuilder, expecting_exception: bool = False) -> EntrypointOutput: return read_output( @@ -272,9 +259,7 @@ def test_given_one_page_when_read_then_return_records(self, http_mocker: HttpMoc _insights_response().with_record(_ads_insights_action_product_id_record()).build(), ) - output = self._read( - config().with_account_ids([client_side_account_id]).with_start_date(start_date).with_end_date(end_date) - ) + output = self._read(config().with_account_ids([client_side_account_id]).with_start_date(start_date).with_end_date(end_date)) assert len(output.records) == 1 @HttpMocker() @@ -289,9 +274,10 @@ def test_given_multiple_pages_when_read_then_return_records(self, http_mocker: H ) http_mocker.get( _get_insights_request(_JOB_ID).with_next_page_token(NEXT_PAGE_TOKEN).build(), - _insights_response().with_record(_ads_insights_action_product_id_record()).with_record( - _ads_insights_action_product_id_record() - ).build(), + _insights_response() + .with_record(_ads_insights_action_product_id_record()) + .with_record(_ads_insights_action_product_id_record()) + .build(), ) output = self._read(config()) @@ -330,15 +316,9 @@ def test_given_multiple_days_when_read_then_return_records(self, http_mocker: Ht http_mocker.get(get_account_request().build(), get_account_response()) http_mocker.get(_update_api_throttle_limit_request().build(), _update_api_throttle_limit_response()) - http_mocker.post( - _job_start_request(since=start_date, until=start_date).build(), _job_start_response(report_run_id_1) - ) - http_mocker.post( - _job_start_request(since=end_date, until=end_date).build(), _job_start_response(report_run_id_2) - ) - http_mocker.post( - _job_status_request([report_run_id_1, report_run_id_2]).build(), _job_status_response([job_id_1, job_id_2]) - ) + http_mocker.post(_job_start_request(since=start_date, until=start_date).build(), _job_start_response(report_run_id_1)) + http_mocker.post(_job_start_request(since=end_date, until=end_date).build(), _job_start_response(report_run_id_2)) + http_mocker.post(_job_status_request([report_run_id_1, report_run_id_2]).build(), _job_status_response([job_id_1, job_id_2])) http_mocker.get( _get_insights_request(job_id_1).build(), _insights_response().with_record(_ads_insights_action_product_id_record()).build(), @@ -352,9 +332,7 @@ def test_given_multiple_days_when_read_then_return_records(self, http_mocker: Ht assert len(output.records) == 2 @HttpMocker() - def test_given_multiple_account_ids_when_read_then_return_records_from_all_accounts( - self, http_mocker: HttpMocker - ) -> None: + def test_given_multiple_account_ids_when_read_then_return_records_from_all_accounts(self, http_mocker: 
HttpMocker) -> None: account_id_1 = "123123123" account_id_2 = "321321321" report_run_id_1 = "1571860060019500" @@ -364,35 +342,19 @@ def test_given_multiple_account_ids_when_read_then_return_records_from_all_accou api_throttle_limit_response = _update_api_throttle_limit_response() - http_mocker.get( - get_account_request().with_account_id(account_id_1).build(), get_account_response(account_id=account_id_1) - ) - http_mocker.get( - _update_api_throttle_limit_request().with_account_id(account_id_1).build(), api_throttle_limit_response - ) - http_mocker.post( - _job_start_request().with_account_id(account_id_1).build(), _job_start_response(report_run_id_1) - ) - http_mocker.post( - _job_status_request(report_run_id_1).build(), _job_status_response(job_id_1, account_id=account_id_1) - ) + http_mocker.get(get_account_request().with_account_id(account_id_1).build(), get_account_response(account_id=account_id_1)) + http_mocker.get(_update_api_throttle_limit_request().with_account_id(account_id_1).build(), api_throttle_limit_response) + http_mocker.post(_job_start_request().with_account_id(account_id_1).build(), _job_start_response(report_run_id_1)) + http_mocker.post(_job_status_request(report_run_id_1).build(), _job_status_response(job_id_1, account_id=account_id_1)) http_mocker.get( _get_insights_request(job_id_1).build(), _insights_response().with_record(_ads_insights_action_product_id_record()).build(), ) - http_mocker.get( - get_account_request().with_account_id(account_id_2).build(), get_account_response(account_id=account_id_2) - ) - http_mocker.get( - _update_api_throttle_limit_request().with_account_id(account_id_2).build(), api_throttle_limit_response - ) - http_mocker.post( - _job_start_request().with_account_id(account_id_2).build(), _job_start_response(report_run_id_2) - ) - http_mocker.post( - _job_status_request(report_run_id_2).build(), _job_status_response(job_id_2, account_id=account_id_2) - ) + http_mocker.get(get_account_request().with_account_id(account_id_2).build(), get_account_response(account_id=account_id_2)) + http_mocker.get(_update_api_throttle_limit_request().with_account_id(account_id_2).build(), api_throttle_limit_response) + http_mocker.post(_job_start_request().with_account_id(account_id_2).build(), _job_start_response(report_run_id_2)) + http_mocker.post(_job_status_request(report_run_id_2).build(), _job_status_response(job_id_2, account_id=account_id_2)) http_mocker.get( _get_insights_request(job_id_2).build(), _insights_response().with_record(_ads_insights_action_product_id_record()).build(), @@ -436,16 +398,12 @@ def _read( ) @HttpMocker() - def test_when_read_then_state_message_produced_and_state_match_start_interval( - self, http_mocker: HttpMocker - ) -> None: + def test_when_read_then_state_message_produced_and_state_match_start_interval(self, http_mocker: HttpMocker) -> None: account_id = "123123123" start_date = NOW.set(hour=0, minute=0, second=0) end_date = NOW.set(hour=23, minute=59, second=59) - http_mocker.get( - get_account_request().with_account_id(account_id).build(), get_account_response(account_id=account_id) - ) + http_mocker.get(get_account_request().with_account_id(account_id).build(), get_account_response(account_id=account_id)) http_mocker.get( _update_api_throttle_limit_request().with_account_id(account_id).build(), _update_api_throttle_limit_response(), @@ -454,18 +412,14 @@ def test_when_read_then_state_message_produced_and_state_match_start_interval( _job_start_request(since=start_date, 
until=end_date).with_account_id(account_id).build(), _job_start_response(_REPORT_RUN_ID), ) - http_mocker.post( - _job_status_request(_REPORT_RUN_ID).build(), _job_status_response(_JOB_ID, account_id=account_id) - ) + http_mocker.post(_job_status_request(_REPORT_RUN_ID).build(), _job_status_response(_JOB_ID, account_id=account_id)) http_mocker.get( _get_insights_request(_JOB_ID).build(), _insights_response().with_record(_ads_insights_action_product_id_record()).build(), ) output = self._read(config().with_account_ids([account_id]).with_start_date(start_date).with_end_date(end_date)) - cursor_value_from_state_message = output.most_recent_state.get(_STREAM_NAME, {}).get(account_id, {}).get( - _CURSOR_FIELD - ) + cursor_value_from_state_message = output.most_recent_state.get(_STREAM_NAME, {}).get(account_id, {}).get(_CURSOR_FIELD) assert cursor_value_from_state_message == start_date.strftime(DATE_FORMAT) @HttpMocker() @@ -483,51 +437,33 @@ def test_given_multiple_account_ids_when_read_then_state_produced_by_account_id_ api_throttle_limit_response = _update_api_throttle_limit_response() - http_mocker.get( - get_account_request().with_account_id(account_id_1).build(), get_account_response(account_id=account_id_1) - ) - http_mocker.get( - _update_api_throttle_limit_request().with_account_id(account_id_1).build(), api_throttle_limit_response - ) + http_mocker.get(get_account_request().with_account_id(account_id_1).build(), get_account_response(account_id=account_id_1)) + http_mocker.get(_update_api_throttle_limit_request().with_account_id(account_id_1).build(), api_throttle_limit_response) http_mocker.post( _job_start_request(since=start_date, until=end_date).with_account_id(account_id_1).build(), _job_start_response(report_run_id_1), ) - http_mocker.post( - _job_status_request(report_run_id_1).build(), _job_status_response(job_id_1, account_id=account_id_1) - ) + http_mocker.post(_job_status_request(report_run_id_1).build(), _job_status_response(job_id_1, account_id=account_id_1)) http_mocker.get( _get_insights_request(job_id_1).build(), _insights_response().with_record(_ads_insights_action_product_id_record()).build(), ) - http_mocker.get( - get_account_request().with_account_id(account_id_2).build(), get_account_response(account_id=account_id_2) - ) - http_mocker.get( - _update_api_throttle_limit_request().with_account_id(account_id_2).build(), api_throttle_limit_response - ) + http_mocker.get(get_account_request().with_account_id(account_id_2).build(), get_account_response(account_id=account_id_2)) + http_mocker.get(_update_api_throttle_limit_request().with_account_id(account_id_2).build(), api_throttle_limit_response) http_mocker.post( _job_start_request(since=start_date, until=end_date).with_account_id(account_id_2).build(), _job_start_response(report_run_id_2), ) - http_mocker.post( - _job_status_request(report_run_id_2).build(), _job_status_response(job_id_2, account_id=account_id_2) - ) + http_mocker.post(_job_status_request(report_run_id_2).build(), _job_status_response(job_id_2, account_id=account_id_2)) http_mocker.get( _get_insights_request(job_id_2).build(), _insights_response().with_record(_ads_insights_action_product_id_record()).build(), ) - output = self._read( - config().with_account_ids([account_id_1, account_id_2]).with_start_date(start_date).with_end_date(end_date) - ) - cursor_value_from_state_account_1 = output.most_recent_state.get(_STREAM_NAME, {}).get(account_id_1, {}).get( - _CURSOR_FIELD - ) - cursor_value_from_state_account_2 = 
output.most_recent_state.get(_STREAM_NAME, {}).get(account_id_2, {}).get( - _CURSOR_FIELD - ) + output = self._read(config().with_account_ids([account_id_1, account_id_2]).with_start_date(start_date).with_end_date(end_date)) + cursor_value_from_state_account_1 = output.most_recent_state.get(_STREAM_NAME, {}).get(account_id_1, {}).get(_CURSOR_FIELD) + cursor_value_from_state_account_2 = output.most_recent_state.get(_STREAM_NAME, {}).get(account_id_2, {}).get(_CURSOR_FIELD) expected_cursor_value = start_date.strftime(DATE_FORMAT) assert cursor_value_from_state_account_1 == expected_cursor_value assert cursor_value_from_state_account_2 == expected_cursor_value diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/integration/test_videos.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/integration/test_videos.py index f2aa2990f3fb..17c88f1c9c61 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/integration/test_videos.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/integration/test_videos.py @@ -62,18 +62,19 @@ def _get_videos_request(account_id: Optional[str] = ACCOUNT_ID) -> RequestBuilder: - return RequestBuilder.get_videos_endpoint( - access_token=ACCESS_TOKEN, account_id=account_id - ).with_limit(100).with_fields(_FIELDS).with_summary() + return ( + RequestBuilder.get_videos_endpoint(access_token=ACCESS_TOKEN, account_id=account_id) + .with_limit(100) + .with_fields(_FIELDS) + .with_summary() + ) def _get_videos_response() -> HttpResponseBuilder: return create_response_builder( response_template=find_template(_STREAM_NAME, __file__), records_path=FieldPath("data"), - pagination_strategy=FacebookMarketingPaginationStrategy( - request=_get_videos_request().build(), next_page_token=NEXT_PAGE_TOKEN - ), + pagination_strategy=FacebookMarketingPaginationStrategy(request=_get_videos_request().build(), next_page_token=NEXT_PAGE_TOKEN), ) @@ -88,7 +89,6 @@ def _video_record() -> RecordBuilder: @freezegun.freeze_time(NOW.isoformat()) class TestFullRefresh(TestCase): - @staticmethod def _read(config_: ConfigBuilder, expecting_exception: bool = False) -> EntrypointOutput: return read_output( @@ -131,22 +131,16 @@ def test_given_multiple_pages_when_read_then_return_records(self, http_mocker: H assert len(output.records) == 3 @HttpMocker() - def test_given_multiple_account_ids_when_read_then_return_records_from_all_accounts( - self, http_mocker: HttpMocker - ) -> None: + def test_given_multiple_account_ids_when_read_then_return_records_from_all_accounts(self, http_mocker: HttpMocker) -> None: account_id_1 = "123123123" account_id_2 = "321321321" - http_mocker.get( - get_account_request().with_account_id(account_id_1).build(), get_account_response(account_id=account_id_1) - ) + http_mocker.get(get_account_request().with_account_id(account_id_1).build(), get_account_response(account_id=account_id_1)) http_mocker.get( _get_videos_request().with_account_id(account_id_1).build(), _get_videos_response().with_record(_video_record()).build(), ) - http_mocker.get( - get_account_request().with_account_id(account_id_2).build(), get_account_response(account_id=account_id_2) - ) + http_mocker.get(get_account_request().with_account_id(account_id_2).build(), get_account_response(account_id=account_id_2)) http_mocker.get( _get_videos_request().with_account_id(account_id_2).build(), _get_videos_response().with_record(_video_record()).build(), @@ -159,9 +153,7 @@ def 
test_given_multiple_account_ids_when_read_then_return_records_from_all_accou def test_when_read_then_add_account_id_field(self, http_mocker: HttpMocker) -> None: account_id = "123123123" - http_mocker.get( - get_account_request().with_account_id(account_id).build(), get_account_response(account_id=account_id) - ) + http_mocker.get(get_account_request().with_account_id(account_id).build(), get_account_response(account_id=account_id)) http_mocker.get( _get_videos_request().with_account_id(account_id).build(), _get_videos_response().with_record(_video_record()).build(), @@ -179,9 +171,7 @@ def test_when_read_then_datetime_fields_transformed(self, http_mocker: HttpMocke http_mocker.get(get_account_request().build(), get_account_response()) http_mocker.get( _get_videos_request().with_fields(_FIELDS).with_summary().build(), - _get_videos_response().with_record( - _video_record().with_field(FieldPath(created_time_field), input_datetime_value) - ).build(), + _get_videos_response().with_record(_video_record().with_field(FieldPath(created_time_field), input_datetime_value)).build(), ) output = self._read(config()) @@ -224,20 +214,17 @@ def test_when_read_then_state_message_produced_and_state_match_latest_record(sel max_cursor_value = "2024-02-01T00:00:00+00:00" account_id = "123123123" - http_mocker.get( - get_account_request().with_account_id(account_id).build(), get_account_response(account_id=account_id) - ) + http_mocker.get(get_account_request().with_account_id(account_id).build(), get_account_response(account_id=account_id)) http_mocker.get( _get_videos_request().with_account_id(account_id).build(), - _get_videos_response().with_record(_video_record().with_cursor(max_cursor_value)).with_record( - _video_record().with_cursor(min_cursor_value) - ).build(), + _get_videos_response() + .with_record(_video_record().with_cursor(max_cursor_value)) + .with_record(_video_record().with_cursor(min_cursor_value)) + .build(), ) output = self._read(config().with_account_ids([account_id])) - cursor_value_from_state_message = output.most_recent_state.get(_STREAM_NAME, {}).get(account_id, {}).get( - _CURSOR_FIELD - ) + cursor_value_from_state_message = output.most_recent_state.get(_STREAM_NAME, {}).get(account_id, {}).get(_CURSOR_FIELD) assert cursor_value_from_state_message == max_cursor_value @HttpMocker() @@ -251,54 +238,44 @@ def test_given_multiple_account_ids_when_read_then_state_produced_by_account_id_ min_cursor_value_account_id_2 = "2024-03-01T00:00:00+00:00" max_cursor_value_account_id_2 = "2024-04-01T00:00:00+00:00" - http_mocker.get( - get_account_request().with_account_id(account_id_1).build(), get_account_response(account_id=account_id_1) - ) + http_mocker.get(get_account_request().with_account_id(account_id_1).build(), get_account_response(account_id=account_id_1)) http_mocker.get( _get_videos_request().with_account_id(account_id_1).build(), - _get_videos_response().with_record(_video_record().with_cursor(max_cursor_value_account_id_1)).with_record( - _video_record().with_cursor(min_cursor_value_account_id_1) - ).build(), - ) - http_mocker.get( - get_account_request().with_account_id(account_id_2).build(), get_account_response(account_id=account_id_2) + _get_videos_response() + .with_record(_video_record().with_cursor(max_cursor_value_account_id_1)) + .with_record(_video_record().with_cursor(min_cursor_value_account_id_1)) + .build(), ) + http_mocker.get(get_account_request().with_account_id(account_id_2).build(), get_account_response(account_id=account_id_2)) http_mocker.get( 
_get_videos_request().with_account_id(account_id_2).build(), - _get_videos_response().with_record(_video_record().with_cursor(max_cursor_value_account_id_2)).with_record( - _video_record().with_cursor(min_cursor_value_account_id_2) - ).build(), + _get_videos_response() + .with_record(_video_record().with_cursor(max_cursor_value_account_id_2)) + .with_record(_video_record().with_cursor(min_cursor_value_account_id_2)) + .build(), ) output = self._read(config().with_account_ids([account_id_1, account_id_2])) - cursor_value_from_state_account_1 = output.most_recent_state.get(_STREAM_NAME, {}).get(account_id_1, {}).get( - _CURSOR_FIELD - ) - cursor_value_from_state_account_2 = output.most_recent_state.get(_STREAM_NAME, {}).get(account_id_2, {}).get( - _CURSOR_FIELD - ) + cursor_value_from_state_account_1 = output.most_recent_state.get(_STREAM_NAME, {}).get(account_id_1, {}).get(_CURSOR_FIELD) + cursor_value_from_state_account_2 = output.most_recent_state.get(_STREAM_NAME, {}).get(account_id_2, {}).get(_CURSOR_FIELD) assert cursor_value_from_state_account_1 == max_cursor_value_account_id_1 assert cursor_value_from_state_account_2 == max_cursor_value_account_id_2 @HttpMocker() - def test_given_state_when_read_then_records_with_cursor_value_less_than_state_filtered( - self, http_mocker: HttpMocker - ) -> None: + def test_given_state_when_read_then_records_with_cursor_value_less_than_state_filtered(self, http_mocker: HttpMocker) -> None: account_id = "123123123" cursor_value_1 = "2024-01-01T00:00:00+00:00" cursor_value_2 = "2024-01-02T00:00:00+00:00" cursor_value_3 = "2024-01-03T00:00:00+00:00" - http_mocker.get( - get_account_request().with_account_id(account_id).build(), get_account_response(account_id=account_id) - ) + http_mocker.get(get_account_request().with_account_id(account_id).build(), get_account_response(account_id=account_id)) http_mocker.get( _get_videos_request().with_account_id(account_id).build(), - _get_videos_response().with_record(_video_record().with_cursor(cursor_value_3)).with_record( - _video_record().with_cursor(cursor_value_2) - ).with_record( - _video_record().with_cursor(cursor_value_1) - ).build(), + _get_videos_response() + .with_record(_video_record().with_cursor(cursor_value_3)) + .with_record(_video_record().with_cursor(cursor_value_2)) + .with_record(_video_record().with_cursor(cursor_value_1)) + .build(), ) output = self._read( @@ -317,27 +294,23 @@ def test_given_state_and_multiple_account_ids_when_read_then_records_with_cursor cursor_value_2 = "2024-01-02T00:00:00+00:00" cursor_value_3 = "2024-01-03T00:00:00+00:00" - http_mocker.get( - get_account_request().with_account_id(account_id_1).build(), get_account_response(account_id=account_id_1) - ) + http_mocker.get(get_account_request().with_account_id(account_id_1).build(), get_account_response(account_id=account_id_1)) http_mocker.get( _get_videos_request().with_account_id(account_id_1).build(), - _get_videos_response().with_record(_video_record().with_cursor(cursor_value_3)).with_record( - _video_record().with_cursor(cursor_value_2) - ).with_record( - _video_record().with_cursor(cursor_value_1) - ).build(), - ) - http_mocker.get( - get_account_request().with_account_id(account_id_2).build(), get_account_response(account_id=account_id_2) + _get_videos_response() + .with_record(_video_record().with_cursor(cursor_value_3)) + .with_record(_video_record().with_cursor(cursor_value_2)) + .with_record(_video_record().with_cursor(cursor_value_1)) + .build(), ) + 
http_mocker.get(get_account_request().with_account_id(account_id_2).build(), get_account_response(account_id=account_id_2)) http_mocker.get( _get_videos_request().with_account_id(account_id_2).build(), - _get_videos_response().with_record(_video_record().with_cursor(cursor_value_3)).with_record( - _video_record().with_cursor(cursor_value_2) - ).with_record( - _video_record().with_cursor(cursor_value_1) - ).build(), + _get_videos_response() + .with_record(_video_record().with_cursor(cursor_value_3)) + .with_record(_video_record().with_cursor(cursor_value_2)) + .with_record(_video_record().with_cursor(cursor_value_1)) + .build(), ) stream_state = {account_id_1: {_CURSOR_FIELD: cursor_value_2}, account_id_2: {_CURSOR_FIELD: cursor_value_2}} diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_api.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_api.py index d8aae90765b7..c09279ca1d8d 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_api.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_api.py @@ -14,9 +14,7 @@ class TestMyFacebookAdsApi: @pytest.fixture def fb_api(self): - return source_facebook_marketing.api.MyFacebookAdsApi.init( - access_token="foo", crash_log=False - ) + return source_facebook_marketing.api.MyFacebookAdsApi.init(access_token="foo", crash_log=False) @pytest.mark.parametrize( "max_rate,max_pause_interval,min_pause_interval,usage,pause_interval,expected_pause_interval", @@ -120,9 +118,7 @@ def test__get_max_usage_pause_interval_from_batch( ] mock_parse_call_rate_header = mocker.Mock(side_effect=usages_pause_intervals) - mocker.patch.object( - fb_api, "_parse_call_rate_header", mock_parse_call_rate_header - ) + mocker.patch.object(fb_api, "_parse_call_rate_header", mock_parse_call_rate_header) mocker.patch.object(fb_api, "MIN_PAUSE_INTERVAL", min_pause_interval) output = fb_api._get_max_usage_pause_interval_from_batch(records) @@ -145,9 +141,7 @@ def test__get_max_usage_pause_interval_from_batch( (["not_batch"], 2, 1, False), ], ) - def test__handle_call_rate_limit( - self, mocker, fb_api, params, min_rate, usage, expect_sleep - ): + def test__handle_call_rate_limit(self, mocker, fb_api, params, min_rate, usage, expect_sleep): pause_interval = 1 mock_response = mocker.Mock() @@ -167,20 +161,12 @@ def test__handle_call_rate_limit( mocker.patch.object(source_facebook_marketing.api, "sleep") assert fb_api._handle_call_rate_limit(mock_response, params) is None if "batch" in params: - fb_api._get_max_usage_pause_interval_from_batch.assert_called_with( - mock_response.json.return_value - ) + fb_api._get_max_usage_pause_interval_from_batch.assert_called_with(mock_response.json.return_value) else: - fb_api._parse_call_rate_header.assert_called_with( - mock_response.headers.return_value - ) + fb_api._parse_call_rate_header.assert_called_with(mock_response.headers.return_value) if expect_sleep: - fb_api._compute_pause_interval.assert_called_with( - usage=usage, pause_interval=pause_interval - ) - source_facebook_marketing.api.sleep.assert_called_with( - fb_api._compute_pause_interval.return_value.total_seconds() - ) + fb_api._compute_pause_interval.assert_called_with(usage=usage, pause_interval=pause_interval) + source_facebook_marketing.api.sleep.assert_called_with(fb_api._compute_pause_interval.return_value.total_seconds()) source_facebook_marketing.api.logger.warning.assert_called_with( f"Utilization is too high ({usage})%, pausing for 
{fb_api._compute_pause_interval.return_value}" ) diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_async_job.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_async_job.py index 1c4bb0f67c37..b35930672f86 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_async_job.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_async_job.py @@ -267,9 +267,7 @@ def test_update_job_with_batch(self, started_job, adreport, mocker): kwargs["failure"](response) def test_elapsed_time(self, job, api, adreport): - assert ( - job.elapsed_time is None - ), "should be None for the job that is not started" + assert job.elapsed_time is None, "should be None for the job that is not started" job.start() adreport["async_status"] = Status.COMPLETED.value @@ -317,10 +315,7 @@ def test_str(self, api, account): job_timeout=pendulum.duration(minutes=60), ) - assert ( - str(job) - == f"InsightAsyncJob(id=, {account}, time_range= 2011-01-01]>, breakdowns=[10, 20])" - ) + assert str(job) == f"InsightAsyncJob(id=, {account}, time_range= 2011-01-01]>, breakdowns=[10, 20])" def test_get_result(self, job, adreport, api): job.start() @@ -375,9 +370,7 @@ def test_get_result_when_job_is_failed(self, failed_job): def test_split_job(self, mocker, api, edge_class, next_edge_class, id_field): """Test that split will correctly downsize edge_object""" today = pendulum.today().date() - start, end = today - pendulum.duration( - days=365 * 3 + 20 - ), today - pendulum.duration(days=365 * 3 + 10) + start, end = today - pendulum.duration(days=365 * 3 + 20), today - pendulum.duration(days=365 * 3 + 10) params = {"time_increment": 1, "breakdowns": []} job = InsightAsyncJob( api=api, @@ -404,9 +397,7 @@ def test_split_job(self, mocker, api, edge_class, next_edge_class, id_field): # with the one 37 months ago, that's why current date is frozen. # For a different date the since date would be also different. # See facebook_marketing.utils.validate_start_date for reference - "since": ( - today - pendulum.duration(months=37) + pendulum.duration(days=1) - ).to_date_string(), + "since": (today - pendulum.duration(months=37) + pendulum.duration(days=1)).to_date_string(), "until": end.to_date_string(), }, } @@ -415,16 +406,11 @@ def test_split_job(self, mocker, api, edge_class, next_edge_class, id_field): assert all(j.interval == job.interval for j in small_jobs) for i, small_job in enumerate(small_jobs, start=1): assert small_job._params["time_range"] == job._params["time_range"] - assert ( - str(small_job) - == f"InsightAsyncJob(id=, {next_edge_class(i)}, time_range={job.interval}, breakdowns={[]})" - ) + assert str(small_job) == f"InsightAsyncJob(id=, {next_edge_class(i)}, time_range={job.interval}, breakdowns={[]})" def test_split_job_smallest(self, mocker, api): """Test that split will correctly downsize edge_object""" - interval = pendulum.Period( - pendulum.Date(2010, 1, 1), pendulum.Date(2010, 1, 10) - ) + interval = pendulum.Period(pendulum.Date(2010, 1, 1), pendulum.Date(2010, 1, 10)) params = {"time_increment": 1, "breakdowns": []} job = InsightAsyncJob( api=api, @@ -434,9 +420,7 @@ def test_split_job_smallest(self, mocker, api): job_timeout=pendulum.duration(minutes=60), ) - with pytest.raises( - ValueError, match="The job is already splitted to the smallest size." 
- ): + with pytest.raises(ValueError, match="The job is already splitted to the smallest size."): job.split_job() @@ -511,9 +495,7 @@ def test_split_job(self, parent_job, grouped_jobs, mocker): small_jobs = parent_job.split_job() - assert ( - len(small_jobs) == len(grouped_jobs) + 5 - 2 - ), "each failed job must be replaced with its split" + assert len(small_jobs) == len(grouped_jobs) + 5 - 2, "each failed job must be replaced with its split" for i, job in enumerate(grouped_jobs): if i in (0, 5): job.split_job.assert_called_once() @@ -535,7 +517,4 @@ def test_split_job_smallest(self, parent_job, grouped_jobs): count += 1 def test_str(self, parent_job, grouped_jobs): - assert ( - str(parent_job) - == f"ParentAsyncJob({grouped_jobs[0]} ... {len(grouped_jobs) - 1} jobs more)" - ) + assert str(parent_job) == f"ParentAsyncJob({grouped_jobs[0]} ... {len(grouped_jobs) - 1} jobs more)" diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_async_job_manager.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_async_job_manager.py index 77d38e96a19f..45d5199f37fb 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_async_job_manager.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_async_job_manager.py @@ -25,17 +25,13 @@ def time_mock_fixture(mocker): @pytest.fixture(name="update_job_mock") def update_job_mock_fixture(mocker): - return mocker.patch( - "source_facebook_marketing.streams.async_job_manager.update_in_batch" - ) + return mocker.patch("source_facebook_marketing.streams.async_job_manager.update_in_batch") class TestInsightAsyncManager: def test_jobs_empty(self, api, some_config): """Should work even without jobs""" - manager = InsightAsyncJobManager( - api=api, jobs=[], account_id=some_config["account_ids"][0] - ) + manager = InsightAsyncJobManager(api=api, jobs=[], account_id=some_config["account_ids"][0]) jobs = list(manager.completed_jobs()) assert not jobs @@ -45,9 +41,7 @@ def test_jobs_completed_immediately(self, api, mocker, time_mock, some_config): mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False), mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False), ] - manager = InsightAsyncJobManager( - api=api, jobs=jobs, account_id=some_config["account_ids"][0] - ) + manager = InsightAsyncJobManager(api=api, jobs=jobs, account_id=some_config["account_ids"][0]) completed_jobs = list(manager.completed_jobs()) assert jobs == completed_jobs time_mock.sleep.assert_not_called() @@ -64,16 +58,10 @@ def update_job_behaviour(): update_job_mock.side_effect = update_job_behaviour() jobs = [ - mocker.Mock( - spec=InsightAsyncJob, attempt_number=1, failed=False, completed=False - ), - mocker.Mock( - spec=InsightAsyncJob, attempt_number=1, failed=False, completed=False - ), + mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False, completed=False), + mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False, completed=False), ] - manager = InsightAsyncJobManager( - api=api, jobs=jobs, account_id=some_config["account_ids"][0] - ) + manager = InsightAsyncJobManager(api=api, jobs=jobs, account_id=some_config["account_ids"][0]) job = next(manager.completed_jobs(), None) assert job == jobs[1] job = next(manager.completed_jobs(), None) assert job == jobs[0] - time_mock.sleep.assert_called_with( - InsightAsyncJobManager.JOB_STATUS_UPDATE_SLEEP_SECONDS - ) +
time_mock.sleep.assert_called_with(InsightAsyncJobManager.JOB_STATUS_UPDATE_SLEEP_SECONDS) job = next(manager.completed_jobs(), None) assert job is None @@ -100,16 +86,10 @@ def update_job_behaviour(): update_job_mock.side_effect = update_job_behaviour() jobs = [ - mocker.Mock( - spec=InsightAsyncJob, attempt_number=1, failed=False, completed=True - ), - mocker.Mock( - spec=InsightAsyncJob, attempt_number=1, failed=False, completed=False - ), + mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False, completed=True), + mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False, completed=False), ] - manager = InsightAsyncJobManager( - api=api, jobs=jobs, account_id=some_config["account_ids"][0] - ) + manager = InsightAsyncJobManager(api=api, jobs=jobs, account_id=some_config["account_ids"][0]) job = next(manager.completed_jobs(), None) assert job == jobs[0] @@ -131,27 +111,17 @@ def update_job_behaviour(): update_job_mock.side_effect = update_job_behaviour() jobs = [ - mocker.Mock( - spec=InsightAsyncJob, attempt_number=1, failed=False, completed=True - ), - mocker.Mock( - spec=InsightAsyncJob, attempt_number=1, failed=False, completed=False - ), + mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False, completed=True), + mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False, completed=False), ] sub_jobs = [ - mocker.Mock( - spec=InsightAsyncJob, attempt_number=1, failed=False, completed=True - ), - mocker.Mock( - spec=InsightAsyncJob, attempt_number=1, failed=False, completed=True - ), + mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False, completed=True), + mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False, completed=True), ] sub_jobs[0].get_result.return_value = [1, 2] sub_jobs[1].get_result.return_value = [3, 4] jobs[1].split_job.return_value = sub_jobs - manager = InsightAsyncJobManager( - api=api, jobs=jobs, account_id=some_config["account_ids"][0] - ) + manager = InsightAsyncJobManager(api=api, jobs=jobs, account_id=some_config["account_ids"][0]) job = next(manager.completed_jobs(), None) assert job == jobs[0] @@ -164,9 +134,7 @@ def update_job_behaviour(): job = next(manager.completed_jobs(), None) assert job is None - def test_job_failed_too_many_times( - self, api, mocker, time_mock, update_job_mock, some_config - ): + def test_job_failed_too_many_times(self, api, mocker, time_mock, update_job_mock, some_config): """Manager should fail when job failed too many times""" def update_job_behaviour(): @@ -176,16 +144,10 @@ def update_job_behaviour(): update_job_mock.side_effect = update_job_behaviour() jobs = [ - mocker.Mock( - spec=InsightAsyncJob, attempt_number=1, failed=False, completed=True - ), - mocker.Mock( - spec=InsightAsyncJob, attempt_number=1, failed=False, completed=False - ), + mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False, completed=True), + mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False, completed=False), ] - manager = InsightAsyncJobManager( - api=api, jobs=jobs, account_id=some_config["account_ids"][0] - ) + manager = InsightAsyncJobManager(api=api, jobs=jobs, account_id=some_config["account_ids"][0]) with pytest.raises( JobException, @@ -193,9 +155,7 @@ def update_job_behaviour(): ): next(manager.completed_jobs(), None) - def test_nested_job_failed_too_many_times( - self, api, mocker, time_mock, update_job_mock, some_config - ): + def test_nested_job_failed_too_many_times(self, api, mocker, time_mock, update_job_mock, some_config): """Manager should fail when a 
nested job within a ParentAsyncJob failed too many times""" def update_job_behaviour(): @@ -206,17 +166,11 @@ def update_job_behaviour(): update_job_mock.side_effect = update_job_behaviour() sub_jobs = [ - mocker.Mock( - spec=InsightAsyncJob, attempt_number=1, failed=False, completed=True - ), - mocker.Mock( - spec=InsightAsyncJob, attempt_number=1, failed=False, completed=False - ), + mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False, completed=True), + mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False, completed=False), ] jobs = [ - mocker.Mock( - spec=InsightAsyncJob, attempt_number=1, failed=False, completed=True - ), + mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False, completed=True), mocker.Mock( spec=ParentAsyncJob, _jobs=sub_jobs, @@ -225,9 +179,7 @@ def update_job_behaviour(): completed=False, ), ] - manager = InsightAsyncJobManager( - api=api, jobs=jobs, account_id=some_config["account_ids"][0] - ) + manager = InsightAsyncJobManager(api=api, jobs=jobs, account_id=some_config["account_ids"][0]) with pytest.raises(JobException): next(manager.completed_jobs(), None) diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_base_insight_streams.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_base_insight_streams.py index 612ed22ef25a..28890704b449 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_base_insight_streams.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_base_insight_streams.py @@ -36,18 +36,14 @@ def start_date_fixture(): @pytest.fixture(name="async_manager_mock") def async_manager_mock_fixture(mocker): - mock = mocker.patch( - "source_facebook_marketing.streams.base_insight_streams.InsightAsyncJobManager" - ) + mock = mocker.patch("source_facebook_marketing.streams.base_insight_streams.InsightAsyncJobManager") mock.return_value = mock return mock @pytest.fixture(name="async_job_mock") def async_job_mock_fixture(mocker): - mock = mocker.patch( - "source_facebook_marketing.streams.base_insight_streams.InsightAsyncJob" - ) + mock = mocker.patch("source_facebook_marketing.streams.base_insight_streams.InsightAsyncJob") mock.side_effect = lambda api, **kwargs: {"api": api, **kwargs} @@ -101,9 +97,7 @@ def test_read_records_all(self, mocker, api, some_config): """ job = mocker.Mock(spec=InsightAsyncJob) job.get_result.return_value = [mocker.Mock(), mocker.Mock(), mocker.Mock()] - job.interval = pendulum.Period( - pendulum.date(2010, 1, 1), pendulum.date(2010, 1, 1) - ) + job.interval = pendulum.Period(pendulum.date(2010, 1, 1), pendulum.date(2010, 1, 1)) stream = AdsInsights( api=api, account_ids=some_config["account_ids"], @@ -131,9 +125,7 @@ def test_read_records_random_order(self, mocker, api, some_config): """ job = mocker.Mock(spec=AsyncJob) job.get_result.return_value = [mocker.Mock(), mocker.Mock(), mocker.Mock()] - job.interval = pendulum.Period( - pendulum.date(2010, 1, 1), pendulum.date(2010, 1, 1) - ) + job.interval = pendulum.Period(pendulum.date(2010, 1, 1), pendulum.date(2010, 1, 1)) stream = AdsInsights( api=api, account_ids=some_config["account_ids"], @@ -258,16 +250,12 @@ def test_state(self, api, state, result_state, some_config): actual_state = stream.state result_state = state if not result_state else result_state - result_state[some_config["account_ids"][0]]["slices"] = result_state[ - some_config["account_ids"][0] - ].get("slices", set()) + 
result_state[some_config["account_ids"][0]]["slices"] = result_state[some_config["account_ids"][0]].get("slices", set()) result_state["time_increment"] = 1 assert actual_state == result_state - def test_stream_slices_no_state( - self, api, async_manager_mock, start_date, some_config - ): + def test_stream_slices_no_state(self, api, async_manager_mock, start_date, some_config): """Stream will use start_date when there is not state""" end_date = start_date + duration(weeks=2) stream = AdsInsights( @@ -279,9 +267,7 @@ def test_stream_slices_no_state( ) async_manager_mock.completed_jobs.return_value = [1, 2, 3] - slices = list( - stream.stream_slices(stream_state=None, sync_mode=SyncMode.incremental) - ) + slices = list(stream.stream_slices(stream_state=None, sync_mode=SyncMode.incremental)) assert slices == [ {"account_id": "unknown_account", "insight_job": 1}, @@ -295,9 +281,7 @@ def test_stream_slices_no_state( assert generated_jobs[0].interval.start == start_date.date() assert generated_jobs[1].interval.start == start_date.date() + duration(days=1) - def test_stream_slices_no_state_close_to_now( - self, api, async_manager_mock, recent_start_date, some_config - ): + def test_stream_slices_no_state_close_to_now(self, api, async_manager_mock, recent_start_date, some_config): """Stream will use start_date when there is not state and start_date within 28d from now""" start_date = recent_start_date end_date = pendulum.now() @@ -310,9 +294,7 @@ def test_stream_slices_no_state_close_to_now( ) async_manager_mock.completed_jobs.return_value = [1, 2, 3] - slices = list( - stream.stream_slices(stream_state=None, sync_mode=SyncMode.incremental) - ) + slices = list(stream.stream_slices(stream_state=None, sync_mode=SyncMode.incremental)) assert slices == [ {"account_id": "unknown_account", "insight_job": 1}, @@ -326,9 +308,7 @@ def test_stream_slices_no_state_close_to_now( assert generated_jobs[0].interval.start == start_date.date() assert generated_jobs[1].interval.start == start_date.date() + duration(days=1) - def test_stream_slices_with_state( - self, api, async_manager_mock, start_date, some_config - ): + def test_stream_slices_with_state(self, api, async_manager_mock, start_date, some_config): """Stream will use cursor_value from state when there is state""" end_date = start_date + duration(days=10) cursor_value = start_date + duration(days=5) @@ -342,9 +322,7 @@ def test_stream_slices_with_state( ) async_manager_mock.completed_jobs.return_value = [1, 2, 3] - slices = list( - stream.stream_slices(stream_state=state, sync_mode=SyncMode.incremental) - ) + slices = list(stream.stream_slices(stream_state=state, sync_mode=SyncMode.incremental)) assert slices == [ {"account_id": "unknown_account", "insight_job": 1}, @@ -355,16 +333,10 @@ def test_stream_slices_with_state( args, kwargs = async_manager_mock.call_args generated_jobs = list(kwargs["jobs"]) assert len(generated_jobs) == (end_date - cursor_value).days - assert generated_jobs[0].interval.start == cursor_value.date() + duration( - days=1 - ) - assert generated_jobs[1].interval.start == cursor_value.date() + duration( - days=2 - ) + assert generated_jobs[0].interval.start == cursor_value.date() + duration(days=1) + assert generated_jobs[1].interval.start == cursor_value.date() + duration(days=2) - def test_stream_slices_with_state_close_to_now( - self, api, async_manager_mock, recent_start_date, some_config - ): + def test_stream_slices_with_state_close_to_now(self, api, async_manager_mock, recent_start_date, some_config): """Stream will use 
start_date when close to now and start_date close to now""" start_date = recent_start_date end_date = pendulum.now() @@ -379,9 +351,7 @@ def test_stream_slices_with_state_close_to_now( ) async_manager_mock.completed_jobs.return_value = [1, 2, 3] - slices = list( - stream.stream_slices(stream_state=state, sync_mode=SyncMode.incremental) - ) + slices = list(stream.stream_slices(stream_state=state, sync_mode=SyncMode.incremental)) assert slices == [ {"account_id": "unknown_account", "insight_job": 1}, @@ -396,9 +366,7 @@ def test_stream_slices_with_state_close_to_now( assert generated_jobs[1].interval.start == start_date.date() + duration(days=1) @pytest.mark.parametrize("state_format", ["old_format", "new_format"]) - def test_stream_slices_with_state_and_slices( - self, api, async_manager_mock, start_date, some_config, state_format - ): + def test_stream_slices_with_state_and_slices(self, api, async_manager_mock, start_date, some_config, state_format): """Stream will use cursor_value from state, but will skip saved slices""" end_date = start_date + duration(days=10) cursor_value = start_date + duration(days=5) @@ -430,9 +398,7 @@ def test_stream_slices_with_state_and_slices( ) async_manager_mock.completed_jobs.return_value = [1, 2, 3] - slices = list( - stream.stream_slices(stream_state=state, sync_mode=SyncMode.incremental) - ) + slices = list(stream.stream_slices(stream_state=state, sync_mode=SyncMode.incremental)) assert slices == [ {"account_id": "unknown_account", "insight_job": 1}, @@ -442,15 +408,9 @@ def test_stream_slices_with_state_and_slices( async_manager_mock.assert_called_once() args, kwargs = async_manager_mock.call_args generated_jobs = list(kwargs["jobs"]) - assert ( - len(generated_jobs) == (end_date - cursor_value).days - 2 - ), "should be 2 slices short because of state" - assert generated_jobs[0].interval.start == cursor_value.date() + duration( - days=2 - ) - assert generated_jobs[1].interval.start == cursor_value.date() + duration( - days=4 - ) + assert len(generated_jobs) == (end_date - cursor_value).days - 2, "should be 2 slices short because of state" + assert generated_jobs[0].interval.start == cursor_value.date() + duration(days=2) + assert generated_jobs[1].interval.start == cursor_value.date() + duration(days=4) def test_get_json_schema(self, api, some_config): stream = AdsInsights( @@ -465,9 +425,7 @@ def test_get_json_schema(self, api, some_config): assert "device_platform" not in schema["properties"] assert "country" not in schema["properties"] - assert not ( - set(stream.fields()) - set(schema["properties"].keys()) - ), "all fields present in schema" + assert not (set(stream.fields()) - set(schema["properties"].keys())), "all fields present in schema" def test_get_json_schema_custom(self, api, some_config): stream = AdsInsights( @@ -483,9 +441,7 @@ def test_get_json_schema_custom(self, api, some_config): assert "device_platform" in schema["properties"] assert "country" in schema["properties"] - assert not ( - set(stream.fields()) - set(schema["properties"].keys()) - ), "all fields present in schema" + assert not (set(stream.fields()) - set(schema["properties"].keys())), "all fields present in schema" def test_fields(self, api, some_config): stream = AdsInsights( diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_base_streams.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_base_streams.py index 4ddd72eab91d..dd1cfdff690c 100644 --- 
a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_base_streams.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_base_streams.py @@ -112,15 +112,11 @@ def list_objects(self, **kwargs): @pytest.fixture def incremental_class_instance(api): - return ConcreteFBMarketingIncrementalStream( - api=api, account_ids=["123", "456", "789"], start_date=None, end_date=None - ) + return ConcreteFBMarketingIncrementalStream(api=api, account_ids=["123", "456", "789"], start_date=None, end_date=None) class TestFBMarketingIncrementalStreamSliceAndState: - def test_stream_slices_multiple_accounts_with_state( - self, incremental_class_instance - ): + def test_stream_slices_multiple_accounts_with_state(self, incremental_class_instance): stream_state = { "123": {"state_key": "state_value"}, "456": {"state_key": "another_state_value"}, @@ -130,14 +126,9 @@ def test_stream_slices_multiple_accounts_with_state( {"account_id": "456", "stream_state": {"state_key": "another_state_value"}}, {"account_id": "789", "stream_state": {}}, ] - assert ( - list(incremental_class_instance.stream_slices(stream_state)) - == expected_slices - ) + assert list(incremental_class_instance.stream_slices(stream_state)) == expected_slices - def test_stream_slices_multiple_accounts_empty_state( - self, incremental_class_instance - ): + def test_stream_slices_multiple_accounts_empty_state(self, incremental_class_instance): expected_slices = [ {"account_id": "123", "stream_state": {}}, {"account_id": "456", "stream_state": {}}, @@ -149,10 +140,7 @@ def test_stream_slices_single_account_with_state(self, incremental_class_instanc incremental_class_instance._account_ids = ["123"] stream_state = {"state_key": "state_value"} expected_slices = [{"account_id": "123", "stream_state": stream_state}] - assert ( - list(incremental_class_instance.stream_slices(stream_state)) - == expected_slices - ) + assert list(incremental_class_instance.stream_slices(stream_state)) == expected_slices def test_stream_slices_single_account_empty_state(self, incremental_class_instance): incremental_class_instance._account_ids = ["123"] @@ -270,7 +258,5 @@ def test_get_updated_state( # Set the instance's filter_statuses incremental_class_instance._filter_statuses = instance_filter_statuses - new_state = incremental_class_instance.get_updated_state( - current_stream_state, latest_record - ) + new_state = incremental_class_instance.get_updated_state(current_stream_state, latest_record) assert new_state == expected_state diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_client.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_client.py index 5395bab12a5c..56959b74f586 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_client.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_client.py @@ -28,11 +28,7 @@ def fb_call_rate_response_fixture(): "fbtrace_id": "this_is_fake_response", } - headers = { - "x-app-usage": json.dumps( - {"call_count": 28, "total_time": 25, "total_cputime": 25} - ) - } + headers = {"x-app-usage": json.dumps({"call_count": 28, "total_time": 25, "total_cputime": 25})} return { "json": { @@ -59,9 +55,7 @@ def fb_call_amount_data_response_fixture(): class TestBackoff: - def test_limit_reached( - self, mocker, requests_mock, api, fb_call_rate_response, account_id, some_config - ): + def test_limit_reached(self, mocker, requests_mock, api, fb_call_rate_response, 
account_id, some_config): """Error once, check that we retry and not fail""" # turn Campaigns into non batch mode to test non batch logic campaign_responses = [ @@ -111,9 +105,7 @@ def test_limit_reached( except FacebookRequestError: pytest.fail("Call rate error has not being handled") - def test_batch_limit_reached( - self, requests_mock, api, fb_call_rate_response, account_id - ): + def test_batch_limit_reached(self, requests_mock, api, fb_call_rate_response, account_id): """Error once, check that we retry and not fail""" responses = [ fb_call_rate_response, @@ -164,9 +156,7 @@ def test_batch_limit_reached( FacebookSession.GRAPH + f"/{FB_API_VERSION}/act_{account_id}/", responses, ) - requests_mock.register_uri( - "POST", FacebookSession.GRAPH + f"/{FB_API_VERSION}/", batch_responses - ) + requests_mock.register_uri("POST", FacebookSession.GRAPH + f"/{FB_API_VERSION}/", batch_responses) stream = AdCreatives(api=api, account_ids=[account_id]) records = list( @@ -244,9 +234,7 @@ def test_common_error_retry(self, error_response, requests_mock, api, account_id assert accounts == [account_data] - def test_limit_error_retry( - self, fb_call_amount_data_response, requests_mock, api, account_id - ): + def test_limit_error_retry(self, fb_call_amount_data_response, requests_mock, api, account_id): """Error every time, check limit parameter decreases by 2 times every new call""" res = requests_mock.register_uri( @@ -368,13 +356,9 @@ def test_start_date_not_provided(self, requests_mock, api, account_id): ) ) - def test_limit_error_retry_next_page( - self, fb_call_amount_data_response, requests_mock, api, account_id - ): + def test_limit_error_retry_next_page(self, fb_call_amount_data_response, requests_mock, api, account_id): """Unlike the previous test, this one tests the API call fail on the second or more page of a request.""" - base_url = ( - FacebookSession.GRAPH + f"/{FB_API_VERSION}/act_{account_id}/advideos" - ) + base_url = FacebookSession.GRAPH + f"/{FB_API_VERSION}/act_{account_id}/advideos" res = requests_mock.register_uri( "GET", diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_config_migrations.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_config_migrations.py index d72b4ce6c3e2..6742ddc2ba6c 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_config_migrations.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_config_migrations.py @@ -112,7 +112,6 @@ def revert_migration(self, config_path: str) -> None: ) def test_migrate_config(self, old_config_path, new_config_path, include_deleted): migration_instance = MigrateIncludeDeletedToStatusFilters() - original_config = load_config(old_config_path) # migrate the test_config migration_instance.migrate([CMD, "--config", old_config_path], SOURCE) # load the updated config diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_errors.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_errors.py index 46bbbbccd0d5..cf748cee6b31 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_errors.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_errors.py @@ -253,11 +253,7 @@ class TestRealErrors: }, }, "status_code": 400, - "headers": { - "x-app-usage": json.dumps( - {"call_count": 28, "total_time": 25, "total_cputime": 25} - ) - }, + "headers": {"x-app-usage": json.dumps({"call_count": 28, 
"total_time": 25, "total_cputime": 25})}, }, ), ( @@ -306,14 +302,10 @@ class TestRealErrors: ), ], ) - def test_retryable_error( - self, some_config, requests_mock, name, retryable_error_response - ): + def test_retryable_error(self, some_config, requests_mock, name, retryable_error_response): """Error once, check that we retry and not fail""" requests_mock.reset_mock() - requests_mock.register_uri( - "GET", f"{act_url}", [retryable_error_response, ad_account_response] - ) + requests_mock.register_uri("GET", f"{act_url}", [retryable_error_response, ad_account_response]) requests_mock.register_uri( "GET", f"{act_url}adcreatives", @@ -333,17 +325,13 @@ def test_retryable_error( assert ad_creative_records == ad_creative_data @pytest.mark.parametrize("name, friendly_msg, config_error_response", CONFIG_ERRORS) - def test_config_error_during_account_info_read( - self, requests_mock, name, friendly_msg, config_error_response - ): + def test_config_error_during_account_info_read(self, requests_mock, name, friendly_msg, config_error_response): """Error raised during account info read""" api = API(access_token=some_config["access_token"], page_size=100) stream = AdCreatives(api=api, account_ids=some_config["account_ids"]) - requests_mock.register_uri( - "GET", f"{act_url}", [config_error_response, ad_account_response] - ) + requests_mock.register_uri("GET", f"{act_url}", [config_error_response, ad_account_response]) try: list( stream.read_records( @@ -360,9 +348,7 @@ def test_config_error_during_account_info_read( # @pytest.mark.parametrize("name, friendly_msg, config_error_response", [CONFIG_ERRORS[-1]]) @pytest.mark.parametrize("name, friendly_msg, config_error_response", CONFIG_ERRORS) - def test_config_error_during_actual_nodes_read( - self, requests_mock, name, friendly_msg, config_error_response - ): + def test_config_error_during_actual_nodes_read(self, requests_mock, name, friendly_msg, config_error_response): """Error raised during actual nodes read""" api = API(access_token=some_config["access_token"], page_size=100) @@ -389,9 +375,7 @@ def test_config_error_during_actual_nodes_read( assert friendly_msg in error.message @pytest.mark.parametrize("name, friendly_msg, config_error_response", CONFIG_ERRORS) - def test_config_error_insights_account_info_read( - self, requests_mock, name, friendly_msg, config_error_response - ): + def test_config_error_insights_account_info_read(self, requests_mock, name, friendly_msg, config_error_response): """Error raised during actual nodes read""" api = API(access_token=some_config["access_token"], page_size=100) @@ -403,30 +387,18 @@ def test_config_error_insights_account_info_read( fields=["account_id", "account_currency"], insights_lookback_window=28, ) - requests_mock.register_uri( - "GET", f"{act_url}", [config_error_response, ad_account_response] - ) + requests_mock.register_uri("GET", f"{act_url}", [config_error_response, ad_account_response]) try: - slice = list( - stream.stream_slices(sync_mode=SyncMode.full_refresh, stream_state={}) - )[0] - list( - stream.read_records( - sync_mode=SyncMode.full_refresh, stream_slice=slice, stream_state={} - ) - ) + slice = list(stream.stream_slices(sync_mode=SyncMode.full_refresh, stream_state={}))[0] + list(stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=slice, stream_state={})) assert False except Exception as error: assert isinstance(error, AirbyteTracedException) assert error.failure_type == FailureType.config_error assert friendly_msg in error.message - @pytest.mark.parametrize( - "name, 
friendly_msg, config_error_response", [CONFIG_ERRORS[0]] - ) - def test_config_error_insights_during_actual_nodes_read( - self, requests_mock, name, friendly_msg, config_error_response - ): + @pytest.mark.parametrize("name, friendly_msg, config_error_response", [CONFIG_ERRORS[0]]) + def test_config_error_insights_during_actual_nodes_read(self, requests_mock, name, friendly_msg, config_error_response): """Error raised during actual nodes read""" api = API(access_token=some_config["access_token"], page_size=100) @@ -439,19 +411,11 @@ def test_config_error_insights_during_actual_nodes_read( insights_lookback_window=28, ) requests_mock.register_uri("GET", f"{act_url}", [ad_account_response]) - requests_mock.register_uri( - "GET", f"{act_url}insights", [config_error_response, ad_creative_response] - ) + requests_mock.register_uri("GET", f"{act_url}insights", [config_error_response, ad_creative_response]) try: - slice = list( - stream.stream_slices(sync_mode=SyncMode.full_refresh, stream_state={}) - )[0] - list( - stream.read_records( - sync_mode=SyncMode.full_refresh, stream_slice=slice, stream_state={} - ) - ) + slice = list(stream.stream_slices(sync_mode=SyncMode.full_refresh, stream_state={}))[0] + list(stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=slice, stream_state={})) assert False except Exception as error: assert isinstance(error, AirbyteTracedException) @@ -502,25 +466,17 @@ def test_adaccount_list_objects_retry(self, requests_mock, failure_response): "account_id": account_id, "business": {"id": "1", "name": "TEST"}, } - requests_mock.register_uri( - "GET", f"{base_url}me/business_users", status_code=200, json=business_user - ) + requests_mock.register_uri("GET", f"{base_url}me/business_users", status_code=200, json=business_user) assigend_users = {"account_id": account_id, "tasks": ["TASK"]} - requests_mock.register_uri( - "GET", f"{act_url}assigned_users", status_code=200, json=assigend_users - ) + requests_mock.register_uri("GET", f"{act_url}assigned_users", status_code=200, json=assigend_users) success_response = {"status_code": 200, "json": {"account_id": account_id}} - requests_mock.register_uri( - "GET", f"{act_url}", [failure_response, success_response] - ) + requests_mock.register_uri("GET", f"{act_url}", [failure_response, success_response]) record_gen = stream.read_records( sync_mode=SyncMode.full_refresh, stream_slice={"account_id": account_id}, stream_state={}, ) - assert list(record_gen) == [ - {"account_id": "unknown_account", "id": "act_unknown_account"} - ] + assert list(record_gen) == [{"account_id": "unknown_account", "id": "act_unknown_account"}] diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_source.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_source.py index 2ca1e4e6a822..202c1ce1fd67 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_source.py @@ -87,9 +87,7 @@ def test_check_connection_ok(self, config, logger_mock, fb_marketing): assert ok assert not error_msg - def test_check_connection_find_account_was_called( - self, api_find_account, config, logger_mock, fb_marketing - ): + def test_check_connection_find_account_was_called(self, api_find_account, config, logger_mock, fb_marketing): """Check if _find_account was called to validate credentials""" ok, error_msg = fb_marketing.check_connection(logger_mock, config=config) @@ -103,9 +101,7 @@ def 
test_check_connection_find_account_was_called( assert ok assert not error_msg - def test_check_connection_future_date_range( - self, api, config, logger_mock, fb_marketing - ): + def test_check_connection_future_date_range(self, api, config, logger_mock, fb_marketing): config["start_date"] = "2219-10-10T00:00:00" config["end_date"] = "2219-10-11T00:00:00" assert fb_marketing.check_connection(logger_mock, config=config) == ( @@ -113,9 +109,7 @@ def test_check_connection_future_date_range( "Date range can not be in the future.", ) - def test_check_connection_end_date_before_start_date( - self, api, config, logger_mock, fb_marketing - ): + def test_check_connection_end_date_before_start_date(self, api, config, logger_mock, fb_marketing): config["start_date"] = "2019-10-10T00:00:00" config["end_date"] = "2019-10-09T00:00:00" assert fb_marketing.check_connection(logger_mock, config=config) == ( @@ -130,9 +124,7 @@ def test_check_connection_empty_config(self, api, logger_mock, fb_marketing): assert not ok assert error_msg - def test_check_connection_config_no_start_date( - self, api, config, logger_mock, fb_marketing - ): + def test_check_connection_config_no_start_date(self, api, config, logger_mock, fb_marketing): config.pop("start_date") ok, error_msg = fb_marketing.check_connection(logger_mock, config=config) @@ -169,9 +161,7 @@ def test_get_custom_insights_streams(self, api, config, fb_marketing): config = ConnectorConfig.parse_obj(config) assert fb_marketing.get_custom_insights_streams(api, config) - def test_get_custom_insights_action_breakdowns_allow_empty( - self, api, config, fb_marketing - ): + def test_get_custom_insights_action_breakdowns_allow_empty(self, api, config, fb_marketing): config["custom_insights"] = [ { "name": "test", @@ -182,9 +172,7 @@ def test_get_custom_insights_action_breakdowns_allow_empty( ] config["action_breakdowns_allow_empty"] = False - streams = fb_marketing.get_custom_insights_streams( - api, ConnectorConfig.parse_obj(config) - ) + streams = fb_marketing.get_custom_insights_streams(api, ConnectorConfig.parse_obj(config)) assert len(streams) == 1 assert streams[0].breakdowns == ["ad_format_asset"] assert streams[0].action_breakdowns == [ @@ -194,9 +182,7 @@ def test_get_custom_insights_action_breakdowns_allow_empty( ] config["action_breakdowns_allow_empty"] = True - streams = fb_marketing.get_custom_insights_streams( - api, ConnectorConfig.parse_obj(config) - ) + streams = fb_marketing.get_custom_insights_streams(api, ConnectorConfig.parse_obj(config)) assert len(streams) == 1 assert streams[0].breakdowns == ["ad_format_asset"] assert streams[0].action_breakdowns == [] @@ -223,13 +209,9 @@ def test_read_missing_stream(self, config, api, logger_mock, fb_marketing): def test_check_config(config_gen, requests_mock, fb_marketing): - requests_mock.register_uri( - "GET", FacebookSession.GRAPH + f"/{FacebookAdsApi.API_VERSION}/act_123/", {} - ) + requests_mock.register_uri("GET", FacebookSession.GRAPH + f"/{FacebookAdsApi.API_VERSION}/act_123/", {}) - assert command_check(fb_marketing, config_gen()) == AirbyteConnectionStatus( - status=Status.SUCCEEDED, message=None - ) + assert command_check(fb_marketing, config_gen()) == AirbyteConnectionStatus(status=Status.SUCCEEDED, message=None) status = command_check(fb_marketing, config_gen(start_date="2019-99-10T00:00:00Z")) assert status.status == Status.FAILED @@ -240,9 +222,5 @@ def test_check_config(config_gen, requests_mock, fb_marketing): status = command_check(fb_marketing, config_gen(start_date=...)) assert 
status.status == Status.SUCCEEDED - assert command_check( - fb_marketing, config_gen(end_date=...) - ) == AirbyteConnectionStatus(status=Status.SUCCEEDED, message=None) - assert command_check( - fb_marketing, config_gen(end_date="") - ) == AirbyteConnectionStatus(status=Status.SUCCEEDED, message=None) + assert command_check(fb_marketing, config_gen(end_date=...)) == AirbyteConnectionStatus(status=Status.SUCCEEDED, message=None) + assert command_check(fb_marketing, config_gen(end_date="")) == AirbyteConnectionStatus(status=Status.SUCCEEDED, message=None) diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_streams.py index d300af0571b7..afe90d2b2d73 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_streams.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_streams.py @@ -23,12 +23,7 @@ def test_filter_all_statuses(api, mocker, some_config): mocker.patch.multiple(FBMarketingStream, __abstractmethods__=set()) expected = {} - assert ( - FBMarketingStream( - api=api, account_ids=some_config["account_ids"] - )._filter_all_statuses() - == expected - ) + assert FBMarketingStream(api=api, account_ids=some_config["account_ids"])._filter_all_statuses() == expected expected = { "filtering": [ @@ -76,9 +71,7 @@ def test_filter_all_statuses(api, mocker, some_config): ], ) def test_fetch_thumbnail_data_url(url, requests_mock): - requests_mock.get( - url, status_code=200, headers={"content-type": "content-type"}, content=b"" - ) + requests_mock.get(url, status_code=200, headers={"content-type": "content-type"}, content=b"") assert fetch_thumbnail_data_url(url) == "data:content-type;base64," @@ -122,9 +115,7 @@ def test_parse_call_rate_header(): ], ], ) -def test_ads_insights_breakdowns( - class_name, breakdowns, action_breakdowns, some_config -): +def test_ads_insights_breakdowns(class_name, breakdowns, action_breakdowns, some_config): kwargs = { "api": None, "account_ids": some_config["account_ids"], @@ -145,9 +136,7 @@ def test_custom_ads_insights_breakdowns(some_config): "end_date": pendulum.now(), "insights_lookback_window": 1, } - stream = AdsInsights( - breakdowns=["mmm"], action_breakdowns=["action_destination"], **kwargs - ) + stream = AdsInsights(breakdowns=["mmm"], action_breakdowns=["action_destination"], **kwargs) assert stream.breakdowns == ["mmm"] assert stream.action_breakdowns == ["action_destination"] @@ -159,12 +148,7 @@ def test_custom_ads_insights_breakdowns(some_config): "action_destination", ] - stream = AdsInsights( - breakdowns=[], - action_breakdowns=[], - action_breakdowns_allow_empty=True, - **kwargs - ) + stream = AdsInsights(breakdowns=[], action_breakdowns=[], action_breakdowns_allow_empty=True, **kwargs) assert stream.breakdowns == [] assert stream.action_breakdowns == [] diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_utils.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_utils.py index 652237fb8f91..ccde2ee1fcba 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_utils.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_utils.py @@ -23,18 +23,13 @@ "start_date", pendulum.local(2019, 1, 1), pendulum.local(2020, 3, 2), - [ - f"The start date cannot be beyond 37 months from the current date. " - f"Set start date to {pendulum.local(2020, 3, 2)}." 
-        ],
+        [f"The start date cannot be beyond 37 months from the current date. " f"Set start date to {pendulum.local(2020, 3, 2)}."],
     ),
     (
         "start_date",
         TODAY + pendulum.duration(months=1),
         TODAY,
-        [
-            f"The start date cannot be in the future. Set start date to today's date - {TODAY}."
-        ],
+        [f"The start date cannot be in the future. Set start date to today's date - {TODAY}."],
     ),
     (
         "end_date",
diff --git a/docs/integrations/sources/facebook-marketing-migrations.md b/docs/integrations/sources/facebook-marketing-migrations.md
new file mode 100644
index 000000000000..d3cfd48652a7
--- /dev/null
+++ b/docs/integrations/sources/facebook-marketing-migrations.md
@@ -0,0 +1,51 @@
+# Facebook Marketing Migration Guide
+
+## Upgrading to 2.0.0
+
+The Ads-Insights-* streams now have updated schemas.
+
+### Update Custom Insights Reports (this step can be skipped if you did not define any)
+
+1. Select **Sources** in the main navbar.
+   1. Select the Facebook Marketing Connector.
+2. Select **Retest saved source**.
+3. Remove unsupported fields from the list in the Custom Insights section (see the example at the end of this guide).
+4. Select **Test and Save**.
+
+### Refresh affected schemas and reset data
+
+1. Select **Connections** in the main navbar.
+   1. Select the connection(s) affected by the update.
+2. Select the **Replication** tab.
+   1. Select **Refresh source schema**.
+   2. Select **OK**.
+
+:::note
+Any detected schema changes will be listed for your review.
+:::
+
+3. Select **Save changes** at the bottom of the page.
+   1. Ensure the **Reset affected streams** option is checked.
+:::note
+Depending on the destination type, you may not be prompted to reset your data.
+:::
+4. Select **Save connection**.
+:::note
+This will reset the data in your destination and initiate a fresh sync.
+:::
+
+For more information on resetting your data in Airbyte, see [this page](https://docs.airbyte.com/operator-guides/reset).
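+
+### Example: pruning a Custom Insights definition
+
+A minimal sketch of what a Custom Insights entry looks like; the names below are illustrative placeholders (taken from this connector's test fixtures), not a list of fields to remove — which fields are unsupported depends on your own report definition:
+
+```python
+# Illustrative only: a Custom Insights entry as it appears in a source config.
+custom_insight = {
+    "name": "test",
+    "fields": ["account_id", "account_currency"],  # remove any field rejected by the updated schemas
+    "breakdowns": ["ad_format_asset"],
+    "action_breakdowns": [],
+}
+```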
diff --git a/docs/integrations/sources/facebook-marketing.md b/docs/integrations/sources/facebook-marketing.md index df8eafd7c64e..5ab841a4cdf9 100644 --- a/docs/integrations/sources/facebook-marketing.md +++ b/docs/integrations/sources/facebook-marketing.md @@ -190,7 +190,7 @@ The Facebook Marketing connector uses the `lookback_window` parameter to repeate ## Data type mapping | Integration Type | Airbyte Type | -| :--------------: | :----------: | +|:----------------:|:------------:| | string | string | | number | number | | array | array | @@ -200,6 +200,7 @@ The Facebook Marketing connector uses the `lookback_window` parameter to repeate | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 2.0.0 | 2024-03-01 | [35746](https://github.com/airbytehq/airbyte/pull/35746) | Update API to `v19.0` | | 1.4.2 | 2024-02-22 | [35539](https://github.com/airbytehq/airbyte/pull/35539) | Add missing config migration from `include_deleted` field | | 1.4.1 | 2024-02-21 | [35467](https://github.com/airbytehq/airbyte/pull/35467) | Fix error with incorrect state transforming in the 1.4.0 version | | 1.4.0 | 2024-02-20 | [32449](https://github.com/airbytehq/airbyte/pull/32449) | Replace "Include Deleted Campaigns, Ads, and AdSets" option in configuration with specific statuses selection per stream | From dee240a96182dac36b598970d1321b74edfcef1e Mon Sep 17 00:00:00 2001 From: Anton Karpets Date: Wed, 6 Mar 2024 11:57:13 +0200 Subject: [PATCH 096/172] =?UTF-8?q?=E2=9C=A8Source=20Amazon=20Ads:=20migra?= =?UTF-8?q?te=20source=20to=20`YamlDeclarativeSource`=20with=20custom=20`c?= =?UTF-8?q?heck=5Fconnection`=20(#35481)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../source-amazon-ads/metadata.yaml | 2 +- .../source-amazon-ads/pyproject.toml | 2 +- .../declarative_source_adapter.py | 48 +++++ .../source_amazon_ads/manifest.yaml | 168 ++++++++++++++++++ .../source_amazon_ads/run.py | 3 +- .../source_amazon_ads/source.py | 13 +- .../source_amazon_ads/spec.yaml | 165 ----------------- .../unit_tests/integrations/utils.py | 3 +- .../unit_tests/test_source.py | 59 +++--- docs/integrations/sources/amazon-ads.md | 7 +- 10 files changed, 262 insertions(+), 208 deletions(-) create mode 100644 airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/declarative_source_adapter.py create mode 100644 airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/manifest.yaml delete mode 100644 airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/spec.yaml diff --git a/airbyte-integrations/connectors/source-amazon-ads/metadata.yaml b/airbyte-integrations/connectors/source-amazon-ads/metadata.yaml index c12b2c7341c3..dc62a61bca6b 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/metadata.yaml +++ b/airbyte-integrations/connectors/source-amazon-ads/metadata.yaml @@ -13,7 +13,7 @@ data: connectorSubtype: api connectorType: source definitionId: c6b0a29e-1da9-4512-9002-7bfd0cba2246 - dockerImageTag: 4.0.3 + dockerImageTag: 4.0.4 dockerRepository: airbyte/source-amazon-ads documentationUrl: https://docs.airbyte.com/integrations/sources/amazon-ads githubIssueLabel: 
source-amazon-ads
diff --git a/airbyte-integrations/connectors/source-amazon-ads/pyproject.toml b/airbyte-integrations/connectors/source-amazon-ads/pyproject.toml
index 0d00d8b9f314..e281ad72d5ab 100644
--- a/airbyte-integrations/connectors/source-amazon-ads/pyproject.toml
+++ b/airbyte-integrations/connectors/source-amazon-ads/pyproject.toml
@@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",]
 build-backend = "poetry.core.masonry.api"
 
 [tool.poetry]
-version = "4.0.3"
+version = "4.0.4"
 name = "source-amazon-ads"
 description = "Source implementation for Amazon Ads."
 authors = [ "Airbyte <contact@airbyte.io>",]
diff --git a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/declarative_source_adapter.py b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/declarative_source_adapter.py
new file mode 100644
index 000000000000..6b46127b0b6b
--- /dev/null
+++ b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/declarative_source_adapter.py
@@ -0,0 +1,48 @@
+#
+# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
+#
+
+
+from logging import Logger
+from typing import Any, List, Mapping
+
+from airbyte_cdk.models import AirbyteConnectionStatus
+from airbyte_cdk.sources import AbstractSource
+from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource
+from airbyte_cdk.sources.streams import Stream
+from airbyte_protocol.models import ConnectorSpecification
+
+
+class DeclarativeSourceAdapter(YamlDeclarativeSource):
+    def __init__(self, source: AbstractSource) -> None:
+        self._source = source
+        super().__init__(path_to_yaml="manifest.yaml")
+        self._set_adapted_methods()
+
+    @property
+    def name(self) -> str:
+        return self._source.name
+
+    def spec(self, logger: Logger) -> ConnectorSpecification:
+        return self._source.spec(logger)
+
+    def check(self, logger: Logger, config: Mapping[str, Any]) -> AirbyteConnectionStatus:
+        return self._source.check(logger, config)
+
+    def streams(self, config: Mapping[str, Any]) -> List[Stream]:
+        return self._source.streams(config)
+
+    def _validate_source(self) -> None:
+        """Skipping manifest validation, as the manifest can be incomplete when using the adapter"""
+        return
+
+    def _set_adapted_methods(self) -> None:
+        """
+        Since the adapter is intended to smoothly migrate the connector,
+        this method determines whether each of the methods `spec`, `check`, and `streams` is declared in the manifest file;
+        if so, the source uses the manifest implementation, otherwise the method defined in the source is used
+        """
+        adapted_methods = ("spec", "check", "streams")
+        for method in adapted_methods:
+            if method in self.resolved_manifest:
+                self._source.__setattr__(method, getattr(super(), method))
diff --git a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/manifest.yaml b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/manifest.yaml
new file mode 100644
index 000000000000..5dd0d541cb34
--- /dev/null
+++ b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/manifest.yaml
@@ -0,0 +1,168 @@
+version: 0.60.1
+type: source_amazon_ads.SourceAmazonAds
+spec:
+  type: Spec
+  documentation_url: https://docs.airbyte.com/integrations/sources/amazon-ads
+  connection_specification:
+    title: Amazon Ads Spec
+    type: object
+    properties:
+      auth_type:
+        title: Auth Type
+        const: oauth2.0
+        order: 0
+        type: string
+      client_id:
+        title: Client ID
+        description:
+          The client ID of your Amazon Ads developer application. See the
+          docs
+          for more information.
+        order: 1
+        type: string
+        airbyte_secret: true
+      client_secret:
+        title: Client Secret
+        description:
+          The client secret of your Amazon Ads developer application. See
+          the docs
+          for more information.
+        airbyte_secret: true
+        order: 2
+        type: string
+      refresh_token:
+        title: Refresh Token
+        description:
+          Amazon Ads refresh token. See the docs
+          for more information on how to obtain this token.
+        airbyte_secret: true
+        order: 3
+        type: string
+      region:
+        title: Region
+        description:
+          Region to pull data from (EU/NA/FE). See docs
+          for more details.
+        enum:
+          - NA
+          - EU
+          - FE
+        type: string
+        default: NA
+        order: 4
+      start_date:
+        title: Start Date
+        description:
+          The start date for collecting reports; it should not be more than
+          60 days in the past. In YYYY-MM-DD format
+        pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
+        format: date
+        examples:
+          - "2022-10-10"
+          - "2022-10-22"
+        order: 5
+        type: string
+      profiles:
+        title: Profile IDs
+        description: 'Profile IDs you want to fetch data for. See docs for more details. Note: If Marketplace IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID.'
+        order: 6
+        type: array
+        items:
+          type: integer
+      marketplace_ids:
+        title: Marketplace IDs
+        description: "Marketplace IDs you want to fetch data for. Note: If Profile IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID."
+        order: 7
+        type: array
+        items:
+          type: string
+      state_filter:
+        title: State Filter
+        description: Reflects the state of the Display, Product, and Brand Campaign streams as enabled, paused, or archived. If you do not populate this field, it will be ignored completely.
+        items:
+          type: string
+          enum:
+            - enabled
+            - paused
+            - archived
+        type: array
+        uniqueItems: true
+        order: 8
+      look_back_window:
+        title: "Look Back Window"
+        description: "The number of days to go back in time to get the updated data from Amazon Ads"
+        examples:
+          - 3
+          - 10
+        type: "integer"
+        default: 3
+        order: 9
+      report_record_types:
+        title: Report Record Types
+        description:
+          Optional configuration which accepts an array of record type strings.
+          Leave blank for default behaviour to pull all report types.
+          Use this config option only if you want to pull specific report type(s).
+ See docs + for more details + items: + type: string + enum: + - adGroups + - asins + - asins_keywords + - asins_targets + - campaigns + - keywords + - productAds + - targets + type: array + uniqueItems: true + order: 10 + required: + - client_id + - client_secret + - refresh_token + additionalProperties: true + advanced_auth: + auth_flow_type: oauth2.0 + predicate_key: + - auth_type + predicate_value: oauth2.0 + oauth_config_specification: + oauth_user_input_from_connector_config_specification: + type: object + additionalProperties: false + properties: + region: + type: string + path_in_connector_config: + - region + complete_oauth_output_specification: + type: object + additionalProperties: true + properties: + refresh_token: + type: string + path_in_connector_config: + - refresh_token + complete_oauth_server_input_specification: + type: object + additionalProperties: true + properties: + client_id: + type: string + client_secret: + type: string + complete_oauth_server_output_specification: + type: object + additionalProperties: true + properties: + client_id: + type: string + path_in_connector_config: + - client_id + client_secret: + type: string + path_in_connector_config: + - client_secret diff --git a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/run.py b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/run.py index a8012240de66..0436d379599e 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/run.py +++ b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/run.py @@ -8,9 +8,10 @@ from airbyte_cdk.entrypoint import launch from source_amazon_ads import SourceAmazonAds from source_amazon_ads.config_migrations import MigrateStartDate +from source_amazon_ads.declarative_source_adapter import DeclarativeSourceAdapter def run(): - source = SourceAmazonAds() + source = DeclarativeSourceAdapter(source=SourceAmazonAds()) MigrateStartDate.migrate(sys.argv[1:], source) launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/source.py b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/source.py index 9d1852c33330..1eac1615faee 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/source.py +++ b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/source.py @@ -50,7 +50,7 @@ class SourceAmazonAds(AbstractSource): - def _validate_and_transform(self, config: Mapping[str, Any]): + def _validate_and_transform(self, config: Mapping[str, Any]) -> Mapping[str, Any]: start_date = config.get("start_date") if start_date: config["start_date"] = pendulum.from_format(start_date, CONFIG_DATE_FORMAT).date() @@ -69,7 +69,8 @@ def check_connection(self, logger: logging.Logger, config: Mapping[str, Any]) -> """ :param config: the user-input config object conforming to the connector's spec.json :param logger: logger object - :return Tuple[bool, any]: (True, None) if the input config can be used to connect to the API successfully, (False, error) otherwise. + :return Tuple[bool, any]: (True, None) if the input config can be used to connect to the API successfully, + (False, error) otherwise. """ try: config = self._validate_and_transform(config) @@ -78,7 +79,7 @@ def check_connection(self, logger: logging.Logger, config: Mapping[str, Any]) -> # Check connection by sending list of profiles request. Its most simple # request, not require additional parameters and usually has few data # in response body. 
- # It doesnt support pagination so there is no sense of reading single + # It doesn't support pagination so there is no sense of reading single # record, it would fetch all the data anyway. profiles_list = Profiles(config, authenticator=self._make_authenticator(config)).get_all_profiles() filtered_profiles = self._choose_profiles(config, profiles_list) @@ -89,15 +90,15 @@ def check_connection(self, logger: logging.Logger, config: Mapping[str, Any]) -> def streams(self, config: Mapping[str, Any]) -> List[Stream]: """ :param config: A Mapping of the user input configuration as defined in the connector spec. - :return list of streams for current source + :return: list of streams for current source """ config = self._validate_and_transform(config) auth = self._make_authenticator(config) stream_args = {"config": config, "authenticator": auth} # All data for individual Amazon Ads stream divided into sets of data for # each profile. Every API request except profiles has required - # paramater passed over "Amazon-Advertising-API-Scope" http header and - # should contain profile id. So every stream is dependant on Profiles + # parameter passed over "Amazon-Advertising-API-Scope" http header and + # should contain profile id. So every stream is dependent on Profiles # stream and should have information about all profiles. profiles_stream = Profiles(**stream_args) profiles_list = profiles_stream.get_all_profiles() diff --git a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/spec.yaml b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/spec.yaml deleted file mode 100644 index 0e703cb4ca3a..000000000000 --- a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/spec.yaml +++ /dev/null @@ -1,165 +0,0 @@ ---- -documentationUrl: https://docs.airbyte.com/integrations/sources/amazon-ads -connectionSpecification: - title: Amazon Ads Spec - type: object - properties: - auth_type: - title: Auth Type - const: oauth2.0 - order: 0 - type: string - client_id: - title: Client ID - description: - The client ID of your Amazon Ads developer application. See the - docs - for more information. - order: 1 - type: string - airbyte_secret: true - client_secret: - title: Client Secret - description: - The client secret of your Amazon Ads developer application. See - the docs - for more information. - airbyte_secret: true - order: 2 - type: string - refresh_token: - title: Refresh Token - description: - Amazon Ads refresh token. See the docs - for more information on how to obtain this token. - airbyte_secret: true - order: 3 - type: string - region: - title: Region - description: - Region to pull data from (EU/NA/FE). See docs - for more details. - enum: - - NA - - EU - - FE - type: string - default: NA - order: 4 - start_date: - title: Start Date - description: - The Start date for collecting reports, should not be more than - 60 days in the past. In YYYY-MM-DD format - pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" - format: date - examples: - - "2022-10-10" - - "2022-10-22" - order: 5 - type: string - profiles: - title: Profile IDs - description: 'Profile IDs you want to fetch data for. See docs for more details. Note: If Marketplace IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID.' - order: 6 - type: array - items: - type: integer - marketplace_ids: - title: Marketplace IDs - description: "Marketplace IDs you want to fetch data for. 
Note: If Profile IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID." - order: 7 - type: array - items: - type: string - state_filter: - title: State Filter - description: Reflects the state of the Display, Product, and Brand Campaign streams as enabled, paused, or archived. If you do not populate this field, it will be ignored completely. - items: - type: string - enum: - - enabled - - paused - - archived - type: array - uniqueItems: true - order: 8 - look_back_window: - title: "Look Back Window" - description: "The amount of days to go back in time to get the updated data from Amazon Ads" - examples: - - 3 - - 10 - type: "integer" - default: 3 - order: 9 - report_record_types: - title: Report Record Types - description: - Optional configuration which accepts an array of string of record types. - Leave blank for default behaviour to pull all report types. - Use this config option only if you want to pull specific report type(s). - See docs - for more details - items: - type: string - enum: - - adGroups - - asins - - asins_keywords - - asins_targets - - campaigns - - keywords - - productAds - - targets - type: array - uniqueItems: true - order: 10 - required: - - client_id - - client_secret - - refresh_token - additionalProperties: true -advanced_auth: - auth_flow_type: oauth2.0 - predicate_key: - - auth_type - predicate_value: oauth2.0 - oauth_config_specification: - oauth_user_input_from_connector_config_specification: - type: object - additionalProperties: false - properties: - region: - type: string - path_in_connector_config: - - region - complete_oauth_output_specification: - type: object - additionalProperties: true - properties: - refresh_token: - type: string - path_in_connector_config: - - refresh_token - complete_oauth_server_input_specification: - type: object - additionalProperties: true - properties: - client_id: - type: string - client_secret: - type: string - complete_oauth_server_output_specification: - type: object - additionalProperties: true - properties: - client_id: - type: string - path_in_connector_config: - - client_id - client_secret: - type: string - path_in_connector_config: - - client_secret diff --git a/airbyte-integrations/connectors/source-amazon-ads/unit_tests/integrations/utils.py b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/integrations/utils.py index 6d2828a3bf70..fafd5b37f785 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/unit_tests/integrations/utils.py +++ b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/integrations/utils.py @@ -9,6 +9,7 @@ from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read from airbyte_protocol.models import SyncMode from source_amazon_ads import SourceAmazonAds +from source_amazon_ads.declarative_source_adapter import DeclarativeSourceAdapter def read_stream( @@ -19,7 +20,7 @@ def read_stream( expecting_exception: bool = False ) -> EntrypointOutput: catalog = CatalogBuilder().with_stream(stream_name, sync_mode).build() - return read(SourceAmazonAds(), config, catalog, state, expecting_exception) + return read(DeclarativeSourceAdapter(source=SourceAmazonAds()), config, catalog, state, expecting_exception) def get_log_messages_by_log_level(logs: List[AirbyteMessage], log_level: LogLevel) -> List[str]: diff --git a/airbyte-integrations/connectors/source-amazon-ads/unit_tests/test_source.py b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/test_source.py index f3e8e9d93954..4ecc75099554 100644 --- 
a/airbyte-integrations/connectors/source-amazon-ads/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/test_source.py @@ -7,6 +7,7 @@ from airbyte_cdk.models import AirbyteConnectionStatus, AirbyteMessage, ConnectorSpecification, Status, Type from jsonschema import Draft4Validator from source_amazon_ads import SourceAmazonAds +from source_amazon_ads.declarative_source_adapter import DeclarativeSourceAdapter from source_amazon_ads.schemas import Profile from .utils import command_check, url_strip_query @@ -40,7 +41,7 @@ def ensure_additional_property_is_boolean(root): @responses.activate def test_discover(config): setup_responses() - source = SourceAmazonAds() + source = DeclarativeSourceAdapter(source=SourceAmazonAds()) catalog = source.discover(None, config) catalog = AirbyteMessage(type=Type.CATALOG, catalog=catalog).dict(exclude_unset=True) schemas = [stream["json_schema"] for stream in catalog["catalog"]["streams"]] @@ -50,7 +51,7 @@ def test_discover(config): def test_spec(): - source = SourceAmazonAds() + source = DeclarativeSourceAdapter(source=SourceAmazonAds()) spec = source.spec(None) assert isinstance(spec, ConnectorSpecification) @@ -58,7 +59,7 @@ def test_spec(): @responses.activate def test_check(config_gen): setup_responses() - source = SourceAmazonAds() + source = DeclarativeSourceAdapter(source=SourceAmazonAds()) assert command_check(source, config_gen(start_date=...)) == AirbyteConnectionStatus(status=Status.SUCCEEDED) assert len(responses.calls) == 2 @@ -89,36 +90,34 @@ def test_check(config_gen): @responses.activate def test_source_streams(config): setup_responses() - source = SourceAmazonAds() + source = DeclarativeSourceAdapter(source=SourceAmazonAds()) streams = source.streams(config) assert len(streams) == 29 actual_stream_names = {stream.name for stream in streams} - expected_stream_names = set( - [ - "profiles", - "portfolios", - "sponsored_display_campaigns", - "sponsored_product_campaigns", - "sponsored_product_ad_groups", - "sponsored_product_ad_group_suggested_keywords", - "sponsored_product_ad_group_bid_recommendations", - "sponsored_product_keywords", - "sponsored_product_negative_keywords", - "sponsored_product_campaign_negative_keywords", - "sponsored_product_ads", - "sponsored_product_targetings", - "sponsored_products_report_stream", - "sponsored_brands_campaigns", - "sponsored_brands_ad_groups", - "sponsored_brands_keywords", - "sponsored_brands_report_stream", - "attribution_report_performance_adgroup", - "attribution_report_performance_campaign", - "attribution_report_performance_creative", - "attribution_report_products", - "sponsored_display_budget_rules", - ] - ) + expected_stream_names = { + "profiles", + "portfolios", + "sponsored_display_campaigns", + "sponsored_product_campaigns", + "sponsored_product_ad_groups", + "sponsored_product_ad_group_suggested_keywords", + "sponsored_product_ad_group_bid_recommendations", + "sponsored_product_keywords", + "sponsored_product_negative_keywords", + "sponsored_product_campaign_negative_keywords", + "sponsored_product_ads", + "sponsored_product_targetings", + "sponsored_products_report_stream", + "sponsored_brands_campaigns", + "sponsored_brands_ad_groups", + "sponsored_brands_keywords", + "sponsored_brands_report_stream", + "attribution_report_performance_adgroup", + "attribution_report_performance_campaign", + "attribution_report_performance_creative", + "attribution_report_products", + "sponsored_display_budget_rules", + } assert not expected_stream_names - 
actual_stream_names diff --git a/docs/integrations/sources/amazon-ads.md b/docs/integrations/sources/amazon-ads.md index 532712344b5e..275f2d38a535 100644 --- a/docs/integrations/sources/amazon-ads.md +++ b/docs/integrations/sources/amazon-ads.md @@ -110,7 +110,8 @@ Information about expected report generation waiting time can be found [here](ht | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------| -| 4.0.3 | 2024-02-12 | [35180](https://github.com/airbytehq/airbyte/pull/35180) | Manage dependencies with Poetry. | +| 4.0.4 | 2024-02-23 | [35481](https://github.com/airbytehq/airbyte/pull/35481) | Migrate source to `YamlDeclarativeSource` with custom `check_connection` | +| 4.0.3 | 2024-02-12 | [35180](https://github.com/airbytehq/airbyte/pull/35180) | Manage dependencies with Poetry | | 4.0.2 | 2024-02-08 | [35013](https://github.com/airbytehq/airbyte/pull/35013) | Add missing field to `sponsored_display_budget_rules` stream | | 4.0.1 | 2023-12-28 | [33833](https://github.com/airbytehq/airbyte/pull/33833) | Updated oauth spec to put region, so we can choose oauth consent url based on it | | 4.0.0 | 2023-12-28 | [33817](https://github.com/airbytehq/airbyte/pull/33817) | Fix schema for streams: `SponsoredBrandsAdGroups` and `SponsoredBrandsKeywords` | @@ -120,7 +121,7 @@ Information about expected report generation waiting time can be found [here](ht | 3.3.0 | 2023-09-22 | [30679](https://github.com/airbytehq/airbyte/pull/30679) | Fix unexpected column for `SponsoredProductCampaigns` and `SponsoredBrandsKeywords` | | 3.2.0 | 2023-09-18 | [30517](https://github.com/airbytehq/airbyte/pull/30517) | Add suggested streams; fix unexpected column issue | | 3.1.2 | 2023-08-16 | [29233](https://github.com/airbytehq/airbyte/pull/29233) | Add filter for Marketplace IDs | -| 3.1.1 | 2023-08-28 | [29900](https://github.com/airbytehq/airbyte/pull/29900) | Add 404 handling for no assotiated with bid ad groups | +| 3.1.1 | 2023-08-28 | [29900](https://github.com/airbytehq/airbyte/pull/29900) | Add 404 handling for no associated with bid ad groups | | 3.1.0 | 2023-08-08 | [00000](https://github.com/airbytehq/airbyte/pull/00000) | Add `T00030` tactic support for `sponsored_display_report_stream` | | 3.0.0 | 2023-07-24 | [27868](https://github.com/airbytehq/airbyte/pull/27868) | Fix attribution report stream schemas | | 2.3.1 | 2023-07-11 | [28155](https://github.com/airbytehq/airbyte/pull/28155) | Bugfix: validation error when record values are missing | @@ -161,7 +162,7 @@ Information about expected report generation waiting time can be found [here](ht | 0.1.6 | 2022-04-20 | [11659](https://github.com/airbytehq/airbyte/pull/11659) | Add adId to products report | | 0.1.5 | 2022-04-08 | [11430](https://github.com/airbytehq/airbyte/pull/11430) | Add support OAuth2.0 | | 0.1.4 | 2022-02-21 | [10513](https://github.com/airbytehq/airbyte/pull/10513) | Increasing REPORT_WAIT_TIMEOUT for supporting report generation which takes longer time | -| 0.1.3 | 2021-12-28 | [8388](https://github.com/airbytehq/airbyte/pull/8388) | Add retry if recoverable error occured for reporting stream processing | +| 0.1.3 | 2021-12-28 | [8388](https://github.com/airbytehq/airbyte/pull/8388) | Add retry if recoverable error occurred for reporting stream processing | | 0.1.2 | 2021-10-01 | [6367](https://github.com/airbytehq/airbyte/pull/6461) | Add 
option to pull data for different regions. Add option to choose profiles we want to pull data. Add lookback | | 0.1.1 | 2021-09-22 | [6367](https://github.com/airbytehq/airbyte/pull/6367) | Add seller and vendor filters to profiles stream | | 0.1.0 | 2021-08-13 | [5023](https://github.com/airbytehq/airbyte/pull/5023) | Initial version | From 0c67a1e23fa71118cd4b1a3a32d1a7f0ea4dd912 Mon Sep 17 00:00:00 2001 From: Anton Karpets Date: Wed, 6 Mar 2024 12:20:19 +0200 Subject: [PATCH 097/172] =?UTF-8?q?=F0=9F=8F=A5Source=20Zendesk=20Support:?= =?UTF-8?q?=20fix=20expected=20records=20(#35847)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../source-zendesk-support/README.md | 2 +- .../integration_tests/expected_records.jsonl | 18 +++++++++--------- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/airbyte-integrations/connectors/source-zendesk-support/README.md b/airbyte-integrations/connectors/source-zendesk-support/README.md index a1b9fd4aabbd..79e724300a32 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/README.md +++ b/airbyte-integrations/connectors/source-zendesk-support/README.md @@ -30,7 +30,7 @@ See `sample_files/sample_config.json` for a sample config file. poetry run source-zendesk-support spec poetry run source-zendesk-support check --config secrets/config.json poetry run source-zendesk-support discover --config secrets/config.json -poetry run source-zendesk-support read --config secrets/config.json --catalog sample_files/configured_catalog.json +poetry run source-zendesk-support read --config secrets/config.json --catalog integration_tests/configured_catalog.json ``` ### Running unit tests diff --git a/airbyte-integrations/connectors/source-zendesk-support/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-zendesk-support/integration_tests/expected_records.jsonl index 5cb0a349cd6c..ae83e846ae3a 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-zendesk-support/integration_tests/expected_records.jsonl @@ -32,18 +32,18 @@ {"stream": "satisfaction_ratings", "data": {"url": "https://d3v-airbyte.zendesk.com/api/v2/satisfaction_ratings/5527212710799.json", "id": 5527212710799, "assignee_id": null, "group_id": null, "requester_id": 5527080499599, "ticket_id": 144, "score": "offered", "created_at": "2022-09-19T16:01:43Z", "updated_at": "2022-09-19T16:01:43Z", "comment": null}, "emitted_at": 1697714848279} {"stream": "sla_policies", "data": {"url": "https://d3v-airbyte.zendesk.com/api/v2/slas/policies/360001110696.json", "id": 360001110696, "title": "test police", "description": "for tests", "position": 1, "filter": {"all": [{"field": "assignee_id", "operator": "is", "value": 361089721035}], "any": []}, "policy_metrics": [{"priority": "high", "metric": "first_reply_time", "target": 61, "business_hours": false}], "created_at": "2021-07-16T11:05:31Z", "updated_at": "2021-07-16T11:05:31Z"}, "emitted_at": 1697714849344} {"stream": "sla_policies", "data": {"url": "https://d3v-airbyte.zendesk.com/api/v2/slas/policies/360001113715.json", "id": 360001113715, "title": "test police 2", "description": "test police 2", "position": 2, "filter": {"all": [{"field": "organization_id", "operator": "is", "value": 360033549136}], "any": []}, "policy_metrics": [{"priority": "high", "metric": "first_reply_time", "target": 121, "business_hours": false}], "created_at": "2021-07-16T11:06:01Z", 
"updated_at": "2021-07-16T11:06:01Z"}, "emitted_at": 1697714849345} -{"stream": "ticket_audits", "data": {"id": 8154471428239, "ticket_id": 154, "created_at": "2023-10-17T14:28:25Z", "author_id": -1, "metadata": {"system": {}, "custom": {}}, "events": [{"id": 8154455112079, "type": "Notification", "via": {"channel": "rule", "source": {"from": {"deleted": false, "title": "Notify assignee of comment update", "id": 360011363236, "revision_id": 1}, "rel": "trigger"}}, "subject": "[{{ticket.account}}] Re: {{ticket.title}}", "body": "This ticket (#{{ticket.id}}) has been updated.\n\n{{ticket.comments_formatted}}", "recipients": [360786799676]}, {"id": 8154459400847, "type": "ChatEndedEvent", "value": {"chat_id": "2310.10414779.TsxCzMf0jqtMS", "chat_started_event_id": 8154502502927, "visitor_id": "10414779-1INmCdTWKtlu4ct", "is_served": true, "tags": ["business_messaging_slack_connect_chat"]}, "attachments": []}, {"id": 8154474154895, "type": "Comment", "author_id": -1, "body": "(01:28:24) Team Airbyte: Nope", "html_body": "

(01:28:24) Team Airbyte: Nope

", "plain_body": "(01:28:24) Team Airbyte: Nope", "public": true, "attachments": [], "audit_id": 8154471428239}], "via": {"channel": "chat_transcript", "source": {"from": {}, "to": {}, "rel": null}}}, "emitted_at": 1697714855596} -{"stream": "ticket_audits", "data": {"id": 8154455109135, "ticket_id": 154, "created_at": "2023-10-17T14:28:25Z", "author_id": -1, "metadata": {"system": {}, "custom": {}}, "events": [{"id": 8154502502927, "type": "ChatStartedEvent", "value": {"chat_id": "2310.10414779.TsxCzMf0jqtMS", "visitor_id": "10414779-1INmCdTWKtlu4ct", "conversation_id": "dc3b91c917014c6095d8123a", "user_id": 8154469488271, "authenticated": false, "tags": ["business_messaging_slack_connect_chat"], "initiator": 2, "backend": "chat", "history": [{"actor_name": "Erica D'Souza", "timestamp": 1697552904528, "chat_index": 0, "actor_type": "end-user", "actor_id": "8154469488271", "type": "ChatJoin"}, {"actor_name": "Team Airbyte", "timestamp": 1697552904538, "chat_index": 1, "actor_type": "agent", "actor_id": "360786799676", "type": "ChatJoin"}, {"actor_name": "Team Airbyte", "timestamp": 1697552904542, "chat_index": 2, "actor_type": "agent", "actor_id": "360786799676", "type": "ChatMessage", "message": "Nope", "message_id": "6daa2df0-6cf9-11ee-9286-1500c615ce3a"}, {"actor_name": "Erica D'Souza", "timestamp": 1697552905076, "chat_index": 3, "actor_type": "end-user", "actor_id": "8154469488271", "type": "ChatMessageStatus", "status": "SEND_SUCCESS", "status_ts": 1697552904542, "parent_message_id": "2310.10414779.TsxCzMf0jqtMS_2", "external_message_id": "652e9a092e0208aaf19929fd"}, {"actor_name": "Erica D'Souza", "timestamp": 1697552905301, "chat_index": 4, "actor_type": "end-user", "actor_id": "8154469488271", "type": "ChatLeave", "reason": "agent_workspace_ticket_status_initiate_chat_end"}], "webpath": [], "channel": "business_messaging_slack_connect"}, "attachments": []}], "via": {"channel": "business_messaging_slack_connect", "source": {"from": {}, "to": {}, "rel": null}}}, "emitted_at": 1697714855600} -{"stream": "ticket_audits", "data": {"id": 8154502294415, "ticket_id": 154, "created_at": "2023-10-17T14:27:53Z", "author_id": -1, "metadata": {"system": {}, "custom": {}}, "events": [{"id": 8154502294543, "type": "ChatEndedEvent", "value": {"chat_id": "2310.10414779.TsxC6Q5zdSu1f", "chat_started_event_id": 8154438755727, "visitor_id": "10414779-1INmCdTWKtlu4ct", "is_served": false, "tags": ["business_messaging_slack_connect_chat"]}, "attachments": []}, {"id": 8154502294671, "type": "Comment", "author_id": -1, "body": "(01:24:53) Erica D'Souza: what is the <#C061EKCHEJZ|zendesk-chat-integration-test> channel for?\n(01:25:39) Erica D'Souza: for what purpose?\n(01:26:45) Erica D'Souza: ahh gotcha, was just making sure no one is asking for ZD chat lol", "html_body": "

(01:24:53) Erica D'Souza: what is the <#C061EKCHEJZ|zendesk-chat-integration-test> channel for?\n
(01:25:39) Erica D'Souza: for what purpose?\n
(01:26:45) Erica D'Souza: ahh gotcha, was just making sure no one is asking for ZD chat lol

", "plain_body": "(01:24:53) Erica D'Souza: what is the <#C061EKCHEJZ|zendesk-chat-integration-test> channel for?\n\n(01:25:39) Erica D'Souza: for what purpose?\n\n(01:26:45) Erica D'Souza: ahh gotcha, was just making sure no one is asking for ZD chat lol", "public": true, "attachments": [], "audit_id": 8154502294415}, {"id": 8154502294799, "type": "Change", "value": "1", "field_name": "is_public", "previous_value": "0"}, {"id": 8154502294927, "type": "Notification", "via": {"channel": "rule", "source": {"from": {"deleted": false, "title": "Notify assignee of comment update", "id": 360011363236, "revision_id": 1}, "rel": "trigger"}}, "subject": "[{{ticket.account}}] Re: {{ticket.title}}", "body": "This ticket (#{{ticket.id}}) has been updated.\n\n{{ticket.comments_formatted}}", "recipients": [360786799676]}], "via": {"channel": "chat_transcript", "source": {"from": {}, "to": {}, "rel": null}}}, "emitted_at": 1697714855601} +{"stream": "ticket_audits", "data": {"id": 8178673821967, "ticket_id": 158, "created_at": "2023-10-20T12:01:58Z", "author_id": -1, "metadata": {"system": {}, "custom": {}}, "events": [{"id": 8178673822095, "type": "Notification", "subject": "Request #{{ticket.id}}: How would you rate the support you received?", "body": "Hello {{ticket.requester.name}},\n\nWe'd love to hear what you think of our customer service. Please take a moment to answer one simple question by clicking either link below:\n\n{{satisfaction.rating_section}}\n\nHere's a reminder of what this request was about:\n\n{{ticket.comments_formatted}}\n", "recipients": [8178212241935]}, {"id": 8178673822223, "type": "Change", "value": "offered", "field_name": "satisfaction_score", "previous_value": "unoffered"}], "via": {"channel": "rule", "source": {"to": {}, "from": {"deleted": false, "title": "Request customer satisfaction rating (system automation)", "id": 360021281435}, "rel": "automation"}}}, "emitted_at": 1709714976448} +{"stream": "ticket_audits", "data": {"id": 8178567687311, "ticket_id": 159, "created_at": "2023-10-20T11:29:29Z", "author_id": 360786799676, "metadata": {"system": {"client": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/118.0.0.0 Safari/537.36", "ip_address": "162.19.235.114", "location": "Frankfurt am Main, HE, Germany", "latitude": 50.1101, "longitude": 8.6721}, "custom": {}}, "events": [{"id": 8178567687439, "type": "Change", "value": "360786799676", "field_name": "assignee_id", "previous_value": null}, {"id": 8178567687567, "type": "Change", "value": "6770788212111", "field_name": "group_id", "previous_value": null}, {"id": 8178567687695, "type": "Change", "value": "open", "field_name": "status", "previous_value": "new"}, {"id": 8178567687823, "type": "Change", "value": "4044376", "field_name": "custom_status_id", "previous_value": "4044356"}], "via": {"channel": "web", "source": {"from": {}, "to": {}, "rel": null}}}, "emitted_at": 1709714976449} +{"stream": "ticket_audits", "data": {"id": 8178427216527, "ticket_id": 159, "created_at": "2023-10-20T10:57:49Z", "author_id": 360786799676, "metadata": {"system": {"client": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/118.0.0.0 Safari/537.36", "ip_address": "162.19.235.114", "location": "Frankfurt am Main, HE, Germany", "latitude": 50.1101, "longitude": 8.6721}, "custom": {}}, "events": [{"id": 8178427216655, "type": "Comment", "author_id": 360786799676, "body": "test ticket", "html_body": "
test ticket
", "plain_body": "test ticket", "public": true, "attachments": [], "audit_id": 8178427216527}, {"id": 8178427216783, "type": "Create", "value": "360000358316", "field_name": "brand_id"}, {"id": 8178427216911, "type": "Create", "value": "8178212241935", "field_name": "requester_id"}, {"id": 8178427217039, "type": "Create", "value": "4044356", "field_name": "custom_status_id"}, {"id": 8178427217167, "type": "Create", "value": "555666", "field_name": "subject"}, {"id": 8178427217295, "type": "Create", "value": "360000084116", "field_name": "ticket_form_id"}, {"id": 8178427217423, "type": "Create", "value": null, "field_name": "priority"}, {"id": 8178427217551, "type": "Create", "value": null, "field_name": "type"}, {"id": 8178427217679, "type": "Create", "value": "new", "field_name": "status"}, {"id": 8178427217807, "type": "Notification", "via": {"channel": "rule", "source": {"from": {"deleted": false, "title": "Notify requester of new proactive ticket", "id": 360011363196, "revision_id": 3}, "rel": "trigger"}}, "subject": "{{ticket.title}}", "body": "This ticket was created on your behalf.\n\n{{ticket.comments_formatted}}\n\nTo add additional comments, reply to this email.", "recipients": [8178212241935]}, {"id": 8178427217935, "type": "Notification", "via": {"channel": "rule", "source": {"from": {"deleted": false, "title": "Notify all agents of received request", "id": 360011363296, "revision_id": 3}, "rel": "trigger"}}, "subject": "[{{ticket.account}}] {{ticket.title}}", "body": "A ticket (#{{ticket.id}}) by {{ticket.requester.name}} has been received. It is unassigned.\n\n{{ticket.comments_formatted}}", "recipients": [361089721035, 360786799676, 7282634891791]}], "via": {"channel": "web", "source": {"from": {}, "to": {"name": "Diana", "address": "valitdiana@gmail.com"}, "rel": null}}}, "emitted_at": 1709714976450} {"stream": "ticket_comments", "data": {"id": 5162146653071, "via": {"channel": "web", "source": {"from": {}, "to": {"name": "Team Airbyte", "address": "integration-test@airbyte.io"}, "rel": null}}, "via_reference_id": null, "type": "Comment", "author_id": 360786799676, "body": " 163748", "html_body": "
 163748
", "plain_body": " 163748", "public": true, "attachments": [], "audit_id": 5162146652943, "created_at": "2022-07-18T09:58:23Z", "event_type": "Comment", "ticket_id": 124, "timestamp": 1658138303}, "emitted_at": 1697714859038} {"stream": "ticket_comments", "data": {"id": 5162208963983, "via": {"channel": "web", "source": {"from": {}, "to": {}, "rel": null}}, "via_reference_id": null, "type": "Comment", "author_id": 360786799676, "body": "238473846", "html_body": "
238473846
", "plain_body": "238473846", "public": false, "attachments": [], "audit_id": 5162208963855, "created_at": "2022-07-18T10:16:53Z", "event_type": "Comment", "ticket_id": 125, "timestamp": 1658139413}, "emitted_at": 1697714859039} {"stream": "ticket_comments", "data": {"id": 5162223308559, "via": {"channel": "web", "source": {"from": {}, "to": {}, "rel": null}}, "via_reference_id": null, "type": "Comment", "author_id": 360786799676, "body": "Airbyte", "html_body": "", "plain_body": "Airbyte", "public": false, "attachments": [], "audit_id": 5162223308431, "created_at": "2022-07-18T10:25:21Z", "event_type": "Comment", "ticket_id": 125, "timestamp": 1658139921}, "emitted_at": 1697714859040} {"stream": "ticket_fields", "data": {"url": "https://d3v-airbyte.zendesk.com/api/v2/ticket_fields/360002833076.json", "id": 360002833076, "type": "subject", "title": "Subject", "raw_title": "Subject", "description": "", "raw_description": "", "position": 1, "active": true, "required": false, "collapsed_for_agents": false, "regexp_for_validation": null, "title_in_portal": "Subject", "raw_title_in_portal": "Subject", "visible_in_portal": true, "editable_in_portal": true, "required_in_portal": true, "tag": null, "created_at": "2020-12-11T18:34:05Z", "updated_at": "2020-12-11T18:34:05Z", "removable": false, "key": null, "agent_description": null}, "emitted_at": 1697714860081} {"stream": "ticket_fields", "data": {"url": "https://d3v-airbyte.zendesk.com/api/v2/ticket_fields/360002833096.json", "id": 360002833096, "type": "description", "title": "Description", "raw_title": "Description", "description": "Please enter the details of your request. A member of our support staff will respond as soon as possible.", "raw_description": "Please enter the details of your request. A member of our support staff will respond as soon as possible.", "position": 2, "active": true, "required": false, "collapsed_for_agents": false, "regexp_for_validation": null, "title_in_portal": "Description", "raw_title_in_portal": "Description", "visible_in_portal": true, "editable_in_portal": true, "required_in_portal": true, "tag": null, "created_at": "2020-12-11T18:34:05Z", "updated_at": "2020-12-11T18:34:05Z", "removable": false, "key": null, "agent_description": null}, "emitted_at": 1697714860083} {"stream": "ticket_fields", "data": {"url": "https://d3v-airbyte.zendesk.com/api/v2/ticket_fields/360002833116.json", "id": 360002833116, "type": "status", "title": "Status", "raw_title": "Status", "description": "Request status", "raw_description": "Request status", "position": 3, "active": true, "required": false, "collapsed_for_agents": false, "regexp_for_validation": null, "title_in_portal": "Status", "raw_title_in_portal": "Status", "visible_in_portal": false, "editable_in_portal": false, "required_in_portal": false, "tag": null, "created_at": "2020-12-11T18:34:05Z", "updated_at": "2020-12-11T18:34:05Z", "removable": false, "key": null, "agent_description": null, "system_field_options": [{"name": "Open", "value": "open"}, {"name": "Pending", "value": "pending"}, {"name": "Solved", "value": "solved"}], "sub_type_id": 0}, "emitted_at": 1697714860085} -{"stream": "ticket_metrics", "data": {"url": "https://d3v-airbyte.zendesk.com/api/v2/ticket_metrics/8154457562767.json", "id": 8154457562767, "ticket_id": 154, "created_at": "2023-10-17T14:24:53Z", "updated_at": "2023-10-17T14:28:25Z", "group_stations": 2, "assignee_stations": 1, "reopens": 0, "replies": 1, "assignee_updated_at": "2023-10-17T14:27:52Z", "requester_updated_at": 
"2023-10-17T14:26:45Z", "status_updated_at": "2023-11-06T15:01:40Z", "initially_assigned_at": "2023-10-17T14:26:33Z", "assigned_at": "2023-10-17T14:26:33Z", "solved_at": "2023-10-17T14:27:52Z", "latest_comment_added_at": "2023-10-17T14:28:25Z", "reply_time_in_minutes": {"calendar": 4, "business": 0}, "first_resolution_time_in_minutes": {"calendar": 3, "business": 0}, "full_resolution_time_in_minutes": {"calendar": 3, "business": 0}, "agent_wait_time_in_minutes": {"calendar": 0, "business": 0}, "requester_wait_time_in_minutes": {"calendar": 3, "business": 0}, "on_hold_time_in_minutes": {"calendar": 0, "business": 0}, "reply_time_in_seconds": {"calendar": 212}, "custom_status_updated_at": "2023-10-17T14:27:52Z"}, "emitted_at": 1699646404810} -{"stream": "ticket_metrics", "data": {"url": "https://d3v-airbyte.zendesk.com/api/v2/ticket_metrics/7283000498191.json", "id": 7283000498191, "ticket_id": 153, "created_at": "2023-06-26T11:31:48Z", "updated_at": "2023-06-26T12:13:42Z", "group_stations": 2, "assignee_stations": 2, "reopens": 0, "replies": 0, "assignee_updated_at": "2023-06-26T11:31:48Z", "requester_updated_at": "2023-06-26T11:31:48Z", "status_updated_at": "2023-06-26T11:31:48Z", "initially_assigned_at": "2023-06-26T11:31:48Z", "assigned_at": "2023-06-26T12:13:42Z", "solved_at": null, "latest_comment_added_at": "2023-06-26T11:31:48Z", "reply_time_in_minutes": {"calendar": null, "business": null}, "first_resolution_time_in_minutes": {"calendar": null, "business": null}, "full_resolution_time_in_minutes": {"calendar": null, "business": null}, "agent_wait_time_in_minutes": {"calendar": null, "business": null}, "requester_wait_time_in_minutes": {"calendar": null, "business": null}, "on_hold_time_in_minutes": {"calendar": 0, "business": 0}, "custom_status_updated_at": "2023-06-26T11:31:48Z"}, "emitted_at": 1699646404810} -{"stream": "ticket_metrics", "data": {"url": "https://d3v-airbyte.zendesk.com/api/v2/ticket_metrics/7282901696015.json", "id": 7282901696015, "ticket_id": 151, "created_at": "2023-06-26T11:09:33Z", "updated_at": "2023-06-26T12:03:38Z", "group_stations": 1, "assignee_stations": 1, "reopens": 0, "replies": 1, "assignee_updated_at": "2023-06-26T12:03:37Z", "requester_updated_at": "2023-06-26T11:09:33Z", "status_updated_at": "2023-06-26T11:09:33Z", "initially_assigned_at": "2023-06-26T11:09:33Z", "assigned_at": "2023-06-26T11:09:33Z", "solved_at": null, "latest_comment_added_at": "2023-06-26T12:03:37Z", "reply_time_in_minutes": {"calendar": 54, "business": 0}, "first_resolution_time_in_minutes": {"calendar": null, "business": null}, "full_resolution_time_in_minutes": {"calendar": null, "business": null}, "agent_wait_time_in_minutes": {"calendar": null, "business": null}, "requester_wait_time_in_minutes": {"calendar": null, "business": null}, "on_hold_time_in_minutes": {"calendar": 0, "business": 0}, "custom_status_updated_at": "2023-06-26T11:09:33Z"}, "emitted_at": 1700040843806} +{"stream": "ticket_metrics", "data": {"url": "https://d3v-airbyte.zendesk.com/api/v2/ticket_metrics/8171838264079.json", "id": 8171838264079, "ticket_id": 155, "created_at": "2023-10-19T15:22:00Z", "updated_at": "2023-10-19T15:24:05Z", "group_stations": 1, "assignee_stations": 1, "reopens": 0, "replies": 0, "assignee_updated_at": null, "requester_updated_at": "2023-10-19T15:22:32Z", "status_updated_at": "2023-10-19T15:24:05Z", "initially_assigned_at": "2023-10-19T15:24:05Z", "assigned_at": "2023-10-19T15:24:05Z", "solved_at": null, "latest_comment_added_at": "2023-10-19T15:25:58Z", 
"reply_time_in_minutes": {"calendar": null, "business": null}, "first_resolution_time_in_minutes": {"calendar": null, "business": null}, "full_resolution_time_in_minutes": {"calendar": null, "business": null}, "agent_wait_time_in_minutes": {"calendar": null, "business": null}, "requester_wait_time_in_minutes": {"calendar": 2, "business": 0}, "on_hold_time_in_minutes": {"calendar": 0, "business": 0}, "custom_status_updated_at": "2023-10-19T15:24:05Z"}, "emitted_at": 1709718678594} +{"stream": "ticket_metrics", "data": {"url": "https://d3v-airbyte.zendesk.com/api/v2/ticket_metrics/7283000498191.json", "id": 7283000498191, "ticket_id": 153, "created_at": "2023-06-26T11:31:48Z", "updated_at": "2023-06-26T12:13:42Z", "group_stations": 2, "assignee_stations": 2, "reopens": 0, "replies": 0, "assignee_updated_at": "2023-06-26T11:31:48Z", "requester_updated_at": "2023-06-26T11:31:48Z", "status_updated_at": "2023-06-26T11:31:48Z", "initially_assigned_at": "2023-06-26T11:31:48Z", "assigned_at": "2023-06-26T12:13:42Z", "solved_at": null, "latest_comment_added_at": "2023-06-26T11:31:48Z", "reply_time_in_minutes": {"calendar": null, "business": null}, "first_resolution_time_in_minutes": {"calendar": null, "business": null}, "full_resolution_time_in_minutes": {"calendar": null, "business": null}, "agent_wait_time_in_minutes": {"calendar": null, "business": null}, "requester_wait_time_in_minutes": {"calendar": null, "business": null}, "on_hold_time_in_minutes": {"calendar": 0, "business": 0}, "custom_status_updated_at": "2023-06-26T11:31:48Z"}, "emitted_at": 1709718678594} +{"stream": "ticket_metrics", "data": {"url": "https://d3v-airbyte.zendesk.com/api/v2/ticket_metrics/7282901696015.json", "id": 7282901696015, "ticket_id": 151, "created_at": "2023-06-26T11:09:33Z", "updated_at": "2023-06-26T12:03:38Z", "group_stations": 1, "assignee_stations": 1, "reopens": 0, "replies": 1, "assignee_updated_at": "2023-06-26T12:03:37Z", "requester_updated_at": "2023-06-26T11:09:33Z", "status_updated_at": "2023-06-26T11:09:33Z", "initially_assigned_at": "2023-06-26T11:09:33Z", "assigned_at": "2023-06-26T11:09:33Z", "solved_at": null, "latest_comment_added_at": "2023-06-26T12:03:37Z", "reply_time_in_minutes": {"calendar": 54, "business": 0}, "first_resolution_time_in_minutes": {"calendar": null, "business": null}, "full_resolution_time_in_minutes": {"calendar": null, "business": null}, "agent_wait_time_in_minutes": {"calendar": null, "business": null}, "requester_wait_time_in_minutes": {"calendar": null, "business": null}, "on_hold_time_in_minutes": {"calendar": 0, "business": 0}, "custom_status_updated_at": "2023-06-26T11:09:33Z"}, "emitted_at": 1709718678595} {"stream": "ticket_metric_events", "data": {"id": 4992797383183, "ticket_id": 121, "metric": "agent_work_time", "instance_id": 0, "type": "measure", "time": "2022-06-17T14:49:20Z"}, "emitted_at": 1697714863384} {"stream": "ticket_metric_events", "data": {"id": 4992797383311, "ticket_id": 121, "metric": "pausable_update_time", "instance_id": 0, "type": "measure", "time": "2022-06-17T14:49:20Z"}, "emitted_at": 1697714863386} {"stream": "ticket_metric_events", "data": {"id": 4992797383439, "ticket_id": 121, "metric": "reply_time", "instance_id": 0, "type": "measure", "time": "2022-06-17T14:49:20Z"}, "emitted_at": 1697714863386} @@ -54,9 +54,9 @@ {"stream": "tickets", "data": {"url": "https://d3v-airbyte.zendesk.com/api/v2/tickets/125.json", "id": 125, "external_id": null, "via": {"channel": "web", "source": {"from": {}, "to": {}, "rel": null}}, "created_at": 
"2022-07-18T10:16:53Z", "updated_at": "2022-07-18T10:36:02Z", "type": "question", "subject": "Ticket Test 2", "raw_subject": "Ticket Test 2", "description": "238473846", "priority": "urgent", "status": "open", "recipient": null, "requester_id": 360786799676, "submitter_id": 360786799676, "assignee_id": 361089721035, "organization_id": 360033549136, "group_id": 5059439464079, "collaborator_ids": [360786799676], "follower_ids": [360786799676], "email_cc_ids": [], "forum_topic_id": null, "problem_id": null, "has_incidents": false, "is_public": false, "due_at": null, "tags": [], "custom_fields": [], "satisfaction_rating": {"score": "unoffered"}, "sharing_agreement_ids": [], "custom_status_id": 4044376, "fields": [], "followup_ids": [], "ticket_form_id": 360000084116, "brand_id": 360000358316, "allow_channelback": false, "allow_attachments": true, "from_messaging_channel": false, "generated_timestamp": 1658140562}, "emitted_at": 1697714865824} {"stream": "topics", "data": {"id": 7253394974479, "url": "https://d3v-airbyte.zendesk.com/api/v2/help_center/community/topics/7253394974479.json", "html_url": "https://d3v-airbyte.zendesk.com/hc/en-us/community/topics/7253394974479-Feature-Requests", "name": "Feature Requests", "description": null, "position": 0, "follower_count": 1, "community_id": 7253391140495, "created_at": "2023-06-22T00:32:21Z", "updated_at": "2023-06-22T00:32:21Z", "manageable_by": "managers", "user_segment_id": null}, "emitted_at": 1697714866838} {"stream": "topics", "data": {"id": 7253351897871, "url": "https://d3v-airbyte.zendesk.com/api/v2/help_center/community/topics/7253351897871.json", "html_url": "https://d3v-airbyte.zendesk.com/hc/en-us/community/topics/7253351897871-General-Discussion", "name": "General Discussion", "description": null, "position": 0, "follower_count": 1, "community_id": 7253391140495, "created_at": "2023-06-22T00:32:20Z", "updated_at": "2023-06-22T00:32:20Z", "manageable_by": "managers", "user_segment_id": null}, "emitted_at": 1697714866839} -{"stream":"users","data":{"id":4992781783439,"url":"https://d3v-airbyte.zendesk.com/api/v2/users/4992781783439.json","name":"Caller +1 (689) 689-8023","email":null,"created_at":"2022-06-17T14:49:19Z","updated_at":"2022-06-17T14:49:19Z","time_zone":"Pacific/Noumea","iana_time_zone":"Pacific/Noumea","phone":"+16896898023","shared_phone_number":false,"photo":null,"locale_id":1,"locale":"en-US","organization_id":null,"role":"end-user","verified":true,"external_id":null,"tags":[],"alias":null,"active":true,"shared":false,"shared_agent":false,"last_login_at":null,"two_factor_auth_enabled":null,"signature":null,"details":null,"notes":null,"role_type":null,"custom_role_id":null,"moderator":false,"ticket_restriction":"requested","only_private_comments":false,"restricted_agent":true,"suspended":false,"default_group_id":null,"report_csv":false,"user_fields":{"test_display_name_checkbox_field":false,"test_display_name_decimal_field":null,"test_display_name_text_field":null}},"emitted_at":1704976960493} -{"stream":"users","data":{"id":4993467856015,"url":"https://d3v-airbyte.zendesk.com/api/v2/users/4993467856015.json","name":"Caller +1 (912) 
420-0314","email":null,"created_at":"2022-06-17T19:52:38Z","updated_at":"2022-06-17T19:52:38Z","time_zone":"Pacific/Noumea","iana_time_zone":"Pacific/Noumea","phone":"+19124200314","shared_phone_number":false,"photo":null,"locale_id":1,"locale":"en-US","organization_id":null,"role":"end-user","verified":true,"external_id":null,"tags":[],"alias":null,"active":true,"shared":false,"shared_agent":false,"last_login_at":null,"two_factor_auth_enabled":null,"signature":null,"details":null,"notes":null,"role_type":null,"custom_role_id":null,"moderator":false,"ticket_restriction":"requested","only_private_comments":false,"restricted_agent":true,"suspended":false,"default_group_id":null,"report_csv":false,"user_fields":{"test_display_name_checkbox_field":false,"test_display_name_decimal_field":null,"test_display_name_text_field":null}},"emitted_at":1704976960494} -{"stream":"users","data":{"id":5137812260495,"url":"https://d3v-airbyte.zendesk.com/api/v2/users/5137812260495.json","name":"Caller +1 (607) 210-9549","email":null,"created_at":"2022-07-13T14:34:04Z","updated_at":"2022-07-13T14:34:04Z","time_zone":"Pacific/Noumea","iana_time_zone":"Pacific/Noumea","phone":"+16072109549","shared_phone_number":false,"photo":null,"locale_id":1,"locale":"en-US","organization_id":null,"role":"end-user","verified":true,"external_id":null,"tags":[],"alias":null,"active":true,"shared":false,"shared_agent":false,"last_login_at":null,"two_factor_auth_enabled":null,"signature":null,"details":null,"notes":null,"role_type":null,"custom_role_id":null,"moderator":false,"ticket_restriction":"requested","only_private_comments":false,"restricted_agent":true,"suspended":false,"default_group_id":null,"report_csv":false,"user_fields":{"test_display_name_checkbox_field":false,"test_display_name_decimal_field":null,"test_display_name_text_field":null}},"emitted_at":1704976960494} +{"stream": "users", "data": {"id": 4992781783439, "url": "https://d3v-airbyte.zendesk.com/api/v2/users/4992781783439.json", "name": "Caller +1 (689) 689-8023", "email": null, "created_at": "2022-06-17T14:49:19Z", "updated_at": "2022-06-17T14:49:19Z", "time_zone": "Pacific/Noumea", "iana_time_zone": "Pacific/Noumea", "phone": "+16896898023", "shared_phone_number": false, "photo": null, "locale_id": 1, "locale": "en-US", "organization_id": null, "role": "end-user", "verified": true, "external_id": null, "tags": [], "alias": null, "active": true, "shared": false, "shared_agent": false, "last_login_at": null, "two_factor_auth_enabled": null, "signature": null, "details": null, "notes": null, "role_type": null, "custom_role_id": null, "moderator": false, "ticket_restriction": "requested", "only_private_comments": false, "restricted_agent": true, "suspended": false, "default_group_id": null, "report_csv": false, "user_fields": {"test_display_name_checkbox_field": false, "test_display_name_decimal_field": null, "test_display_name_text_field": null}}, "emitted_at": 1709714606348} +{"stream": "users", "data": {"id": 4993467856015, "url": "https://d3v-airbyte.zendesk.com/api/v2/users/4993467856015.json", "name": "Caller +1 (912) 420-0314", "email": null, "created_at": "2022-06-17T19:52:38Z", "updated_at": "2022-06-17T19:52:38Z", "time_zone": "Pacific/Noumea", "iana_time_zone": "Pacific/Noumea", "phone": "+19124200314", "shared_phone_number": false, "photo": null, "locale_id": 1, "locale": "en-US", "organization_id": null, "role": "end-user", "verified": true, "external_id": null, "tags": [], "alias": null, "active": true, "shared": false, "shared_agent": false, 
"last_login_at": null, "two_factor_auth_enabled": null, "signature": null, "details": null, "notes": null, "role_type": null, "custom_role_id": null, "moderator": false, "ticket_restriction": "requested", "only_private_comments": false, "restricted_agent": true, "suspended": false, "default_group_id": null, "report_csv": false, "user_fields": {"test_display_name_checkbox_field": false, "test_display_name_decimal_field": null, "test_display_name_text_field": null}}, "emitted_at": 1709714606349} +{"stream": "users", "data": {"id": 5137812260495, "url": "https://d3v-airbyte.zendesk.com/api/v2/users/5137812260495.json", "name": "Caller +1 (607) 210-9549", "email": null, "created_at": "2022-07-13T14:34:04Z", "updated_at": "2022-07-13T14:34:04Z", "time_zone": "Pacific/Noumea", "iana_time_zone": "Pacific/Noumea", "phone": "+16072109549", "shared_phone_number": false, "photo": null, "locale_id": 1, "locale": "en-US", "organization_id": null, "role": "end-user", "verified": true, "external_id": null, "tags": [], "alias": null, "active": true, "shared": false, "shared_agent": false, "last_login_at": null, "two_factor_auth_enabled": null, "signature": null, "details": null, "notes": null, "role_type": null, "custom_role_id": null, "moderator": false, "ticket_restriction": "requested", "only_private_comments": false, "restricted_agent": true, "suspended": false, "default_group_id": null, "report_csv": false, "user_fields": {"test_display_name_checkbox_field": false, "test_display_name_decimal_field": null, "test_display_name_text_field": null}}, "emitted_at": 1709714606349} {"stream": "brands", "data": {"url": "https://d3v-airbyte.zendesk.com/api/v2/brands/360000358316.json", "id": 360000358316, "name": "Airbyte", "brand_url": "https://d3v-airbyte.zendesk.com", "subdomain": "d3v-airbyte", "host_mapping": null, "has_help_center": true, "help_center_state": "enabled", "active": true, "default": true, "is_deleted": false, "logo": null, "ticket_form_ids": [360000084116], "signature_template": "{{agent.signature}}", "created_at": "2020-12-11T18:34:04Z", "updated_at": "2020-12-11T18:34:09Z"}, "emitted_at": 1697714873604} {"stream": "custom_roles", "data": {"id": 360000210636, "name": "Advisor", "description": "Can automate ticket workflows, manage channels and make private comments on tickets", "role_type": 0, "created_at": "2020-12-11T18:34:36Z", "updated_at": "2020-12-11T18:34:36Z", "configuration": {"chat_access": true, "end_user_list_access": "full", "forum_access_restricted_content": false, "light_agent": false, "manage_business_rules": true, "manage_dynamic_content": false, "manage_extensions_and_channels": true, "manage_facebook": true, "moderate_forums": false, "side_conversation_create": true, "ticket_access": "within-groups", "ticket_comment_access": "none", "ticket_deletion": false, "ticket_tag_editing": true, "twitter_search_access": false, "view_deleted_tickets": false, "voice_access": true, "group_access": false, "organization_editing": false, "organization_notes_editing": false, "assign_tickets_to_any_group": false, "end_user_profile_access": "readonly", "explore_access": "readonly", "forum_access": "readonly", "macro_access": "full", "report_access": "none", "ticket_editing": true, "ticket_merge": false, "user_view_access": "full", "view_access": "full", "voice_dashboard_access": false, "manage_automations": true, "manage_contextual_workspaces": false, "manage_organization_fields": false, "manage_skills": true, "manage_slas": true, "manage_suspended_tickets": false, "manage_ticket_fields": 
false, "manage_ticket_forms": false, "manage_triggers": true, "manage_user_fields": false, "ticket_redaction": false, "manage_roles": "none", "manage_deletion_schedules": "none", "manage_groups": false, "manage_group_memberships": false, "manage_organizations": false, "view_reduced_count": false, "view_filter_tickets": true, "manage_macro_content_suggestions": false, "read_macro_content_suggestions": false, "custom_objects": {}}, "team_member_count": 1}, "emitted_at": 1698749854337} {"stream": "custom_roles", "data": {"id": 360000210596, "name": "Staff", "description": "Can edit tickets within their groups", "role_type": 0, "created_at": "2020-12-11T18:34:36Z", "updated_at": "2020-12-11T18:34:36Z", "configuration": {"chat_access": true, "end_user_list_access": "full", "forum_access_restricted_content": false, "light_agent": false, "manage_business_rules": false, "manage_dynamic_content": false, "manage_extensions_and_channels": false, "manage_facebook": false, "moderate_forums": false, "side_conversation_create": true, "ticket_access": "within-groups", "ticket_comment_access": "public", "ticket_deletion": false, "ticket_tag_editing": false, "twitter_search_access": false, "view_deleted_tickets": false, "voice_access": true, "group_access": false, "organization_editing": false, "organization_notes_editing": false, "assign_tickets_to_any_group": false, "end_user_profile_access": "readonly", "explore_access": "readonly", "forum_access": "readonly", "macro_access": "manage-personal", "report_access": "readonly", "ticket_editing": true, "ticket_merge": false, "user_view_access": "manage-personal", "view_access": "manage-personal", "voice_dashboard_access": false, "manage_automations": false, "manage_contextual_workspaces": false, "manage_organization_fields": false, "manage_skills": false, "manage_slas": false, "manage_suspended_tickets": false, "manage_ticket_fields": false, "manage_ticket_forms": false, "manage_triggers": false, "manage_user_fields": false, "ticket_redaction": false, "manage_roles": "none", "manage_deletion_schedules": "none", "manage_groups": false, "manage_group_memberships": false, "manage_organizations": false, "view_reduced_count": false, "view_filter_tickets": true, "manage_macro_content_suggestions": false, "read_macro_content_suggestions": false, "custom_objects": {}}, "team_member_count": 1}, "emitted_at": 1698749854338} From 84adf58e30e843802aab4f66e910f6af4e6d7040 Mon Sep 17 00:00:00 2001 From: Danylo Jablonski <150933663+DanyloGL@users.noreply.github.com> Date: Wed, 6 Mar 2024 14:52:53 +0200 Subject: [PATCH 098/172] Fix tags in metadata files for migrated community connectors (#35725) --- airbyte-integrations/connectors/source-braintree/metadata.yaml | 2 +- airbyte-integrations/connectors/source-everhour/metadata.yaml | 2 +- airbyte-integrations/connectors/source-klaus-api/metadata.yaml | 2 +- airbyte-integrations/connectors/source-plaid/metadata.yaml | 2 +- airbyte-integrations/connectors/source-unleash/metadata.yaml | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/airbyte-integrations/connectors/source-braintree/metadata.yaml b/airbyte-integrations/connectors/source-braintree/metadata.yaml index 5d948eee41b6..bf601f6f13a6 100644 --- a/airbyte-integrations/connectors/source-braintree/metadata.yaml +++ b/airbyte-integrations/connectors/source-braintree/metadata.yaml @@ -24,5 +24,5 @@ data: releaseStage: alpha supportLevel: community tags: - - language:python + - language:low-code metadataSpecVersion: "1.0" diff --git 
a/airbyte-integrations/connectors/source-everhour/metadata.yaml b/airbyte-integrations/connectors/source-everhour/metadata.yaml index edc9128f054f..c4615c4ce9ce 100644 --- a/airbyte-integrations/connectors/source-everhour/metadata.yaml +++ b/airbyte-integrations/connectors/source-everhour/metadata.yaml @@ -23,7 +23,7 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/everhour tags: - - language:python + - language:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-klaus-api/metadata.yaml b/airbyte-integrations/connectors/source-klaus-api/metadata.yaml index 2f4dbc01d8cf..7903adf4b734 100644 --- a/airbyte-integrations/connectors/source-klaus-api/metadata.yaml +++ b/airbyte-integrations/connectors/source-klaus-api/metadata.yaml @@ -27,5 +27,5 @@ data: ql: 300 sl: 100 tags: - - language:python + - language:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-plaid/metadata.yaml b/airbyte-integrations/connectors/source-plaid/metadata.yaml index a8407c8ad2b1..f15195710ac3 100644 --- a/airbyte-integrations/connectors/source-plaid/metadata.yaml +++ b/airbyte-integrations/connectors/source-plaid/metadata.yaml @@ -20,7 +20,7 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/plaid tags: - - language:python + - language:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-unleash/metadata.yaml b/airbyte-integrations/connectors/source-unleash/metadata.yaml index e3b6c2cf07a4..9fe4ffaea750 100644 --- a/airbyte-integrations/connectors/source-unleash/metadata.yaml +++ b/airbyte-integrations/connectors/source-unleash/metadata.yaml @@ -21,7 +21,7 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/unleash tags: - - language:python + - language:low-code ab_internal: sl: 100 ql: 100 From 04943c92580ddf2e84a9065ed61fb76b300a0ee2 Mon Sep 17 00:00:00 2001 From: Anton Karpets Date: Wed, 6 Mar 2024 15:06:27 +0200 Subject: [PATCH 099/172] =?UTF-8?q?=F0=9F=93=9DSource=20Amazon=20Seller=20?= =?UTF-8?q?Partner:=20update=20supportLevel=20to=20certified=20(#35848)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../connectors/source-amazon-seller-partner/metadata.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/metadata.yaml b/airbyte-integrations/connectors/source-amazon-seller-partner/metadata.yaml index 0dc1a4afd26e..f0d564039fd4 100644 --- a/airbyte-integrations/connectors/source-amazon-seller-partner/metadata.yaml +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/metadata.yaml @@ -55,7 +55,7 @@ data: Stream `GET_FBA_STORAGE_FEE_CHARGES_DATA` schema has been updated to match Amazon Seller Partner. Users will need to refresh the source schema and reset this stream after upgrading. 
upgradeDeadline: "2024-03-11" - supportLevel: community + supportLevel: certified tags: - language:python metadataSpecVersion: "1.0" From 1571dbda8f4cb665dfea3fe65cdb7d554ef48c2a Mon Sep 17 00:00:00 2001 From: Catherine Noll Date: Wed, 6 Mar 2024 08:18:38 -0500 Subject: [PATCH 100/172] live-tests: debug mode and initial regression tests framework (#35624) Co-authored-by: alafanechere Co-authored-by: Augustin --- .github/workflows/airbyte-ci-tests.yml | 1 + .gitignore | 1 + airbyte-ci/connectors/live-tests/README.md | 108 ++ airbyte-ci/connectors/live-tests/poetry.lock | 1061 +++++++++++++++++ .../connectors/live-tests/pyproject.toml | 46 + .../live-tests/src/live_tests/__init__.py | 1 + .../live-tests/src/live_tests/cli.py | 13 + .../src/live_tests/commons/__init__.py | 1 + .../live_tests/commons/backends/__init__.py | 6 + .../commons/backends/base_backend.py | 16 + .../commons/backends/file_backend.py | 104 ++ .../live_tests/commons/connector_runner.py | 287 +++++ .../src/live_tests/commons/models.py | 235 ++++ .../src/live_tests/commons/utils.py | 37 + .../src/live_tests/debug/__init__.py | 8 + .../live-tests/src/live_tests/debug/cli.py | 95 ++ .../connectors/live-tests/tests/__init__.py | 1 + .../live-tests/tests/backends/__init__.py | 0 .../tests/backends/test_file_backend.py | 72 ++ airbyte-ci/connectors/pipelines/README.md | 3 +- .../pipelines/airbyte_ci/test/__init__.py | 1 + .../connectors/pipelines/pyproject.toml | 2 +- 22 files changed, 2097 insertions(+), 2 deletions(-) create mode 100644 airbyte-ci/connectors/live-tests/README.md create mode 100644 airbyte-ci/connectors/live-tests/poetry.lock create mode 100644 airbyte-ci/connectors/live-tests/pyproject.toml create mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/__init__.py create mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/cli.py create mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/commons/__init__.py create mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/__init__.py create mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/base_backend.py create mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/file_backend.py create mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/commons/connector_runner.py create mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/commons/models.py create mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/commons/utils.py create mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/debug/__init__.py create mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/debug/cli.py create mode 100644 airbyte-ci/connectors/live-tests/tests/__init__.py create mode 100644 airbyte-ci/connectors/live-tests/tests/backends/__init__.py create mode 100644 airbyte-ci/connectors/live-tests/tests/backends/test_file_backend.py diff --git a/.github/workflows/airbyte-ci-tests.yml b/.github/workflows/airbyte-ci-tests.yml index 37240d132791..a3b2461b07fe 100644 --- a/.github/workflows/airbyte-ci-tests.yml +++ b/.github/workflows/airbyte-ci-tests.yml @@ -38,6 +38,7 @@ jobs: - airbyte-ci/connectors/connector_ops/** - airbyte-ci/connectors/connectors_qa/** - airbyte-ci/connectors/ci_credentials/** + - airbyte-ci/connectors/live-tests/** - airbyte-ci/connectors/metadata_service/lib/** - airbyte-ci/connectors/metadata_service/orchestrator/** - airbyte-integrations/bases/connector-acceptance-test/** diff --git a/.gitignore b/.gitignore index bc6841f7b081..1c085c1c381d 100644 
--- a/.gitignore
+++ b/.gitignore
@@ -16,6 +16,7 @@ static_checker_reports/
 # Logs
 acceptance_tests_logs/
 airbyte_ci_logs/
+live_tests_debug_reports/
 
 # Secrets
 secrets
diff --git a/airbyte-ci/connectors/live-tests/README.md b/airbyte-ci/connectors/live-tests/README.md
new file mode 100644
index 000000000000..6e2999617724
--- /dev/null
+++ b/airbyte-ci/connectors/live-tests/README.md
@@ -0,0 +1,108 @@
+# Connector Live Testing
+
+This project contains utilities for running connector tests against live data.
+
+## Requirements
+* `docker`
+* `Python ^3.10`
+* `pipx`
+* `poetry`
+
+## Install
+```bash
+# From airbyte-ci/connectors/live-tests
+pipx install .
+# To install in editable mode for development
+pipx install . --force --editable
+```
+
+## Commands
+
+### `debug`
+
+```
+Usage: live-tests debug [OPTIONS] COMMAND
+
+  Run a specific command on one or multiple connectors and persist the
+  outputs to local storage.
+
+Options:
+  -c, --connector-image TEXT      Docker image name of the connector to debug
+                                  (e.g. `source-faker:latest`, `source-
+                                  faker:dev`)  [required]
+  -o, --output-directory DIRECTORY
+                                  Directory in which connector output and test
+                                  results should be stored.
+                                  Defaults to the current directory.
+  --config-path FILE              Path to the connector config.
+  --catalog-path FILE             Path to the connector catalog.
+  --state-path FILE               Path to the connector state.
+  -hc, --http-cache               Use the HTTP cache for the connector.
+  --help                          Show this message and exit.
+```
+
+This command runs any of the following connector commands against one or multiple connector images.
+
+**Available connector commands:**
+* `spec`
+* `check`
+* `discover`
+* `read` or `read_with_state` (requires a `--state-path` to be passed; see the sketch at the end of this README)
+
+It will write artifacts to an output directory:
+* `stdout.log`: The collected standard output following the command execution
+* `stderr.log`: The collected standard error following the command execution
+* `http_dump.mitm`: An `mitmproxy` http stream log. Can be consumed with `mitmweb` (version `9.0.1`) for debugging.
+
+#### Example
+Let's run `debug` to check the output of `read` on two different versions of the same connector:
+
+```bash
+live-tests debug read \
+--connector-image=airbyte/source-pokeapi:dev \
+--connector-image=airbyte/source-pokeapi:latest \
+--config-path=poke_config.json \
+--catalog-path=configured_catalog.json
+```
+
+It will store the results in a `live_tests_debug_reports` directory under the current working directory:
+
+```
+live_tests_debug_reports
+└── 1709547771
+    └── source-pokeapi
+        └── read
+            ├── dev
+            │   ├── airbyte_messages
+            │   │   ├── logs.jsonl
+            │   │   ├── pokemon_records.jsonl
+            │   │   └── traces.jsonl
+            │   ├── http_dump.mitm
+            │   ├── stderr.log
+            │   └── stdout.log
+            └── latest
+                ├── airbyte_messages
+                │   ├── logs.jsonl
+                │   ├── pokemon_records.jsonl
+                │   └── traces.jsonl
+                ├── http_dump.mitm
+                ├── stderr.log
+                └── stdout.log
+
+```
+
+##### Consuming `http_dump.mitm`
+You can install [`mitmproxy`](https://mitmproxy.org/):
+```bash
+pipx install mitmproxy
+```
+
+And run:
+```bash
+mitmweb --rfile=http_dump.mitm
+```
+
+## Changelog
+
+### 0.1.0
+Implement initial primitives and a `debug` command to run connector commands and persist the outputs to local storage.
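+
+To close the loop on the command list above, here is a minimal sketch of the stateful `read_with_state` variant. The image tag and the `poke_config.json`, `configured_catalog.json`, and `state.json` file names are hypothetical placeholders; `state.json` is assumed to hold state saved from a previous sync, and only flags listed in the `debug` options above are used:
+
+```bash
+# Hypothetical invocation of the stateful read variant.
+# `read_with_state` requires --state-path; all file names are placeholders.
+live-tests debug read_with_state \
+--connector-image=airbyte/source-pokeapi:dev \
+--config-path=poke_config.json \
+--catalog-path=configured_catalog.json \
+--state-path=state.json
+```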
diff --git a/airbyte-ci/connectors/live-tests/poetry.lock b/airbyte-ci/connectors/live-tests/poetry.lock new file mode 100644 index 000000000000..29dab322799d --- /dev/null +++ b/airbyte-ci/connectors/live-tests/poetry.lock @@ -0,0 +1,1061 @@ +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. + +[[package]] +name = "airbyte-protocol-models" +version = "0.6.0" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.6.0-py3-none-any.whl", hash = "sha256:dda91403c9731ecbadffaf05dbe8d24f0d318a189d26fcb727627291837a085c"}, + {file = "airbyte_protocol_models-0.6.0.tar.gz", hash = "sha256:84a0bb0fbedc777f8066295960461ab4a8ab6af63985c21c39bb589569786bc2"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "anyio" +version = "4.3.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, + {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] + +[[package]] +name = "asyncclick" +version = "8.1.7.1" +description = "Composable command line interface toolkit, async version" +optional = false +python-versions = ">=3.7" +files = [ + {file = "asyncclick-8.1.7.1-py3-none-any.whl", hash = "sha256:e0fea5f0223ac45cfc26153cc80a58cc65fc077ac8de79be49248c918e8c3422"}, + {file = "asyncclick-8.1.7.1.tar.gz", hash = "sha256:a47b61258a689212cf9463fbf3b4cc52d05bfd03185f6ead2315fc03fd17ef75"}, +] + +[package.dependencies] +anyio = "*" +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + 
{file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "beartype" +version = "0.17.2" +description = "Unbearably fast runtime type checking in pure Python." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "beartype-0.17.2-py3-none-any.whl", hash = "sha256:c22b21e1f785cfcf5c4d3d13070f532b6243a3ad67e68d2298ff08d539847dce"}, + {file = "beartype-0.17.2.tar.gz", hash = "sha256:e911e1ae7de4bccd15745f7643609d8732f64de5c2fb844e89cbbed1c5a8d495"}, +] + +[package.extras] +all = ["typing-extensions (>=3.10.0.0)"] +dev = ["autoapi (>=0.9.0)", "coverage (>=5.5)", "equinox", "mypy (>=0.800)", "numpy", "pandera", "pydata-sphinx-theme (<=0.7.2)", "pytest (>=4.0.0)", "sphinx", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)", "tox (>=3.20.1)", "typing-extensions (>=3.10.0.0)"] +doc-rtd = ["autoapi (>=0.9.0)", "pydata-sphinx-theme (<=0.7.2)", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)"] +test-tox = ["equinox", "mypy (>=0.800)", "numpy", "pandera", "pytest (>=4.0.0)", "sphinx", "typing-extensions (>=3.10.0.0)"] +test-tox-coverage = ["coverage (>=5.5)"] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "dagger-io" +version = "0.9.6" +description = "A client package for running Dagger pipelines in Python." +optional = false +python-versions = ">=3.10" +files = [ + {file = "dagger_io-0.9.6-py3-none-any.whl", hash = "sha256:e2f1e4bbc252071a314fa5b0bad11a910433a9ee043972b716f6fcc5f9fc8236"}, + {file = "dagger_io-0.9.6.tar.gz", hash = "sha256:147b5a33c44d17f602a4121679893655e91308beb8c46a466afed39cf40f789b"}, +] + +[package.dependencies] +anyio = ">=3.6.2" +beartype = ">=0.11.0" +cattrs = ">=22.2.0" +gql = ">=3.4.0" +graphql-core = ">=3.2.3" +httpx = ">=0.23.1" +platformdirs = ">=2.6.2" +rich = ">=10.11.0" +typing-extensions = ">=4.8.0" + +[[package]] +name = "docker" +version = "6.1.3" +description = "A Python library for the Docker Engine API." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "docker-6.1.3-py3-none-any.whl", hash = "sha256:aecd2277b8bf8e506e484f6ab7aec39abe0038e29fa4a6d3ba86c3fe01844ed9"}, + {file = "docker-6.1.3.tar.gz", hash = "sha256:aa6d17830045ba5ef0168d5eaa34d37beeb113948c413affe1d5991fc11f9a20"}, +] + +[package.dependencies] +packaging = ">=14.0" +pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} +requests = ">=2.26.0" +urllib3 = ">=1.26.0" +websocket-client = ">=0.32.0" + +[package.extras] +ssh = ["paramiko (>=2.4.3)"] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "gql" +version = "3.5.0" +description = "GraphQL client for Python" +optional = false +python-versions = "*" +files = [ + {file = "gql-3.5.0-py2.py3-none-any.whl", hash = "sha256:70dda5694a5b194a8441f077aa5fb70cc94e4ec08016117523f013680901ecb7"}, + {file = "gql-3.5.0.tar.gz", hash = "sha256:ccb9c5db543682b28f577069950488218ed65d4ac70bb03b6929aaadaf636de9"}, +] + +[package.dependencies] +anyio = ">=3.0,<5" +backoff = ">=1.11.1,<3.0" +graphql-core = ">=3.2,<3.3" +yarl = ">=1.6,<2.0" + +[package.extras] +aiohttp = ["aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)"] +all = ["aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)", "botocore (>=1.21,<2)", "httpx (>=0.23.1,<1)", "requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)", "websockets (>=10,<12)"] +botocore = ["botocore (>=1.21,<2)"] +dev = ["aiofiles", "aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)", "black (==22.3.0)", "botocore (>=1.21,<2)", "check-manifest (>=0.42,<1)", "flake8 (==3.8.1)", "httpx (>=0.23.1,<1)", "isort (==4.3.21)", "mock (==4.0.2)", "mypy (==0.910)", "parse (==1.15.0)", "pytest (==7.4.2)", "pytest-asyncio (==0.21.1)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)", "sphinx (>=5.3.0,<6)", "sphinx-argparse (==0.2.5)", "sphinx-rtd-theme (>=0.4,<1)", "types-aiofiles", "types-mock", "types-requests", "vcrpy (==4.4.0)", "websockets (>=10,<12)"] +httpx = ["httpx (>=0.23.1,<1)"] +requests = ["requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)"] +test = ["aiofiles", "aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)", "botocore (>=1.21,<2)", "httpx (>=0.23.1,<1)", "mock (==4.0.2)", "parse (==1.15.0)", "pytest (==7.4.2)", "pytest-asyncio (==0.21.1)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)", "vcrpy (==4.4.0)", "websockets (>=10,<12)"] +test-no-transport = ["aiofiles", "mock (==4.0.2)", "parse (==1.15.0)", "pytest (==7.4.2)", "pytest-asyncio (==0.21.1)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "vcrpy (==4.4.0)"] +websockets = ["websockets (>=10,<12)"] + +[[package]] +name = "graphql-core" +version = "3.2.3" +description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL." 
+optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "graphql-core-3.2.3.tar.gz", hash = "sha256:06d2aad0ac723e35b1cb47885d3e5c45e956a53bc1b209a9fc5369007fe46676"}, + {file = "graphql_core-3.2.3-py3-none-any.whl", hash = "sha256:5766780452bd5ec8ba133f8bf287dc92713e3868ddd83aee4faab9fc3e303dc3"}, +] + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httpcore" +version = "1.0.4" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.4-py3-none-any.whl", hash = "sha256:ac418c1db41bade2ad53ae2f3834a3a0f5ae76b56cf5aa497d2d033384fc7d73"}, + {file = "httpcore-1.0.4.tar.gz", hash = "sha256:cb2839ccfcba0d2d3c1131d3c3e26dfc327326fbe7a5dc0dbfe9f6c9151bb022"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<0.25.0)"] + +[[package]] +name = "httpx" +version = "0.27.0" +description = "The next generation HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, + {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "multidict" +version = "6.0.5" +description = "multidict implementation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, + {file = 
"multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, + {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, + {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, + {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, + {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, + {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, + {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, + {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, + {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, + {file = 
"multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, + {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, + {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, + {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, + {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, + {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, + {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, +] + +[[package]] +name = "mypy" +version = "1.8.0" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, + {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, + {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, + {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, + {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, + {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, + {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, + {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, + {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, + {file = 
"mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, + {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, + {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, + {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, + {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, + {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, + {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, + {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, + {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pydantic" +version = "1.10.14" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, + {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, + {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, + {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, + {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, + {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, + {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, + {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, + {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pydash" +version = "7.0.7" +description = "The 
kitchen sink of Python utility libraries for doing \"stuff\" in a functional way. Based on the Lo-Dash Javascript library." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydash-7.0.7-py3-none-any.whl", hash = "sha256:c3c5b54eec0a562e0080d6f82a14ad4d5090229847b7e554235b5c1558c745e1"}, + {file = "pydash-7.0.7.tar.gz", hash = "sha256:cc935d5ac72dd41fb4515bdf982e7c864c8b5eeea16caffbab1936b849aaa49a"}, +] + +[package.dependencies] +typing-extensions = ">=3.10,<4.6.0 || >4.6.0" + +[package.extras] +dev = ["black", "build", "coverage", "docformatter", "flake8", "flake8-black", "flake8-bugbear", "flake8-isort", "furo", "invoke", "isort", "mypy", "pylint", "pytest", "pytest-cov", "pytest-mypy-testing", "sphinx", "sphinx-autodoc-typehints", "tox", "twine", "wheel"] + +[[package]] +name = "pygments" +version = "2.17.2" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, +] + +[package.extras] +plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pytest" +version = "8.0.2" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.0.2-py3-none-any.whl", hash = "sha256:edfaaef32ce5172d5466b5127b42e0d6d35ebbe4453f0e3505d96afd93f6b096"}, + {file = "pytest-8.0.2.tar.gz", hash = "sha256:d4051d623a2e0b7e51960ba963193b09ce6daeb9759a451844a21e4ddedfc1bd"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.3.0,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "0.23.5" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-asyncio-0.23.5.tar.gz", hash = "sha256:3a048872a9c4ba14c3e90cc1aa20cbc2def7d01c7c8db3777ec281ba9c057675"}, + {file = "pytest_asyncio-0.23.5-py3-none-any.whl", hash = "sha256:4e7093259ba018d58ede7d5315131d21923a60f8a6e9ee266ce1589685c89eac"}, +] + +[package.dependencies] +pytest = ">=7.0.0,<9" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + +[[package]] +name = "pywin32" +version = "306" +description = "Python for Window Extensions" +optional = false +python-versions = "*" +files = [ + {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, + {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, + {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, + {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, + {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = 
"sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, + {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, + {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, + {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, + {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, + {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, + {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, + {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, + {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, + {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rich" +version = "13.7.1" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, + {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "ruff" +version = "0.3.0" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.3.0-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:7deb528029bacf845bdbb3dbb2927d8ef9b4356a5e731b10eef171e3f0a85944"}, + {file = "ruff-0.3.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:e1e0d4381ca88fb2b73ea0766008e703f33f460295de658f5467f6f229658c19"}, + {file = "ruff-0.3.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f7dbba46e2827dfcb0f0cc55fba8e96ba7c8700e0a866eb8cef7d1d66c25dcb"}, + {file = "ruff-0.3.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:23dbb808e2f1d68eeadd5f655485e235c102ac6f12ad31505804edced2a5ae77"}, + {file = "ruff-0.3.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ef655c51f41d5fa879f98e40c90072b567c666a7114fa2d9fe004dffba00932"}, + {file = "ruff-0.3.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:d0d3d7ef3d4f06433d592e5f7d813314a34601e6c5be8481cccb7fa760aa243e"}, + {file = "ruff-0.3.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b08b356d06a792e49a12074b62222f9d4ea2a11dca9da9f68163b28c71bf1dd4"}, + {file = "ruff-0.3.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9343690f95710f8cf251bee1013bf43030072b9f8d012fbed6ad702ef70d360a"}, + {file = "ruff-0.3.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1f3ed501a42f60f4dedb7805fa8d4534e78b4e196f536bac926f805f0743d49"}, + {file = "ruff-0.3.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:cc30a9053ff2f1ffb505a585797c23434d5f6c838bacfe206c0e6cf38c921a1e"}, + {file = "ruff-0.3.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:5da894a29ec018a8293d3d17c797e73b374773943e8369cfc50495573d396933"}, + {file = "ruff-0.3.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:755c22536d7f1889be25f2baf6fedd019d0c51d079e8417d4441159f3bcd30c2"}, + {file = "ruff-0.3.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:dd73fe7f4c28d317855da6a7bc4aa29a1500320818dd8f27df95f70a01b8171f"}, + {file = "ruff-0.3.0-py3-none-win32.whl", hash = "sha256:19eacceb4c9406f6c41af806418a26fdb23120dfe53583df76d1401c92b7c14b"}, + {file = "ruff-0.3.0-py3-none-win_amd64.whl", hash = "sha256:128265876c1d703e5f5e5a4543bd8be47c73a9ba223fd3989d4aa87dd06f312f"}, + {file = "ruff-0.3.0-py3-none-win_arm64.whl", hash = "sha256:e3a4a6d46aef0a84b74fcd201a4401ea9a6cd85614f6a9435f2d33dd8cefbf83"}, + {file = "ruff-0.3.0.tar.gz", hash = "sha256:0886184ba2618d815067cf43e005388967b67ab9c80df52b32ec1152ab49f53a"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "types-cachetools" +version = "5.3.0.7" +description = "Typing stubs for cachetools" +optional = false +python-versions = ">=3.7" +files 
= [ + {file = "types-cachetools-5.3.0.7.tar.gz", hash = "sha256:27c982cdb9cf3fead8b0089ee6b895715ecc99dac90ec29e2cab56eb1aaf4199"}, + {file = "types_cachetools-5.3.0.7-py3-none-any.whl", hash = "sha256:98c069dc7fc087b1b061703369c80751b0a0fc561f6fb072b554e5eee23773a0"}, +] + +[[package]] +name = "typing-extensions" +version = "4.10.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, +] + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "websocket-client" +version = "1.7.0" +description = "WebSocket client for Python with low level API options" +optional = false +python-versions = ">=3.8" +files = [ + {file = "websocket-client-1.7.0.tar.gz", hash = "sha256:10e511ea3a8c744631d3bd77e61eb17ed09304c413ad42cf6ddfa4c7787e8fe6"}, + {file = "websocket_client-1.7.0-py3-none-any.whl", hash = "sha256:f4c3d22fec12a2461427a29957ff07d35098ee2d976d3ba244e688b8b4057588"}, +] + +[package.extras] +docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + +[[package]] +name = "yarl" +version = "1.9.4" +description = "Yet another URL library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, + {file = 
"yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, + {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, + {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, + {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, + {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, + {file = 
"yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, + {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, + {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, + {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, + {file 
= "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, + {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, + {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, + {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, + {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, + {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, + {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, + {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + +[metadata] +lock-version = "2.0" +python-versions = "^3.10" +content-hash = "da1743991debf2a5c9565ffd3494ebbf6f55b36f9279fd74e9697262e813d2db" diff --git a/airbyte-ci/connectors/live-tests/pyproject.toml b/airbyte-ci/connectors/live-tests/pyproject.toml new file mode 100644 index 000000000000..d88aed385a92 --- /dev/null +++ b/airbyte-ci/connectors/live-tests/pyproject.toml @@ -0,0 +1,46 @@ +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +name = "live-tests" +version = "0.1.0" +description = "Contains utilities for testing connectors against live data." +authors = ["Airbyte "] +license = "MIT" +homepage = "https://github.com/airbytehq/airbyte" +readme = "README.md" +packages = [ + { include = "live_tests", from = "src" }, +] + +[tool.poetry.dependencies] +python = "^3.10" +airbyte-protocol-models = "<1.0.0" +cachetools = "~=5.3.3" +dagger-io = "==0.9.6" +pydantic = "*" +pytest = "~=8.0.2" +pytest-asyncio = "~=0.23.5" +pydash = "~=7.0.7" +docker = ">=6,<7" +asyncclick = "^8.1.7.1" + +[tool.poetry.scripts] +live-tests = "live_tests.cli:live_tests" + +[tool.poetry.group.dev.dependencies] +ruff = "^0.3.0" +mypy = "^1.8.0" +types-cachetools = "^5.3.0.7" + +[tool.poe.tasks] +test = "pytest tests" +lint = "ruff check src" +type_check = "mypy src" + +[tool.airbyte_ci] +poe_tasks = ["test", "lint", "type_check"] + +[tool.pytest.ini_options] +pythonpath = ["src"] diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/__init__.py b/airbyte-ci/connectors/live-tests/src/live_tests/__init__.py new file mode 100644 index 000000000000..f70ecfc3a89e --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/__init__.py @@ -0,0 +1 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/cli.py b/airbyte-ci/connectors/live-tests/src/live_tests/cli.py new file mode 100644 index 000000000000..15a6ec2fe925 --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/cli.py @@ -0,0 +1,13 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +import asyncclick as click +from live_tests.debug.cli import debug_cmd + + +@click.group() +@click.pass_context +async def live_tests(ctx): + pass + + +live_tests.add_command(debug_cmd) diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/__init__.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/__init__.py new file mode 100644 index 000000000000..f70ecfc3a89e --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/commons/__init__.py @@ -0,0 +1 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/__init__.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/__init__.py new file mode 100644 index 000000000000..9a1b7d627ed3 --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/__init__.py @@ -0,0 +1,6 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from .base_backend import BaseBackend +from .file_backend import FileBackend + +__all__ = ["BaseBackend", "FileBackend"] diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/base_backend.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/base_backend.py new file mode 100644 index 000000000000..f6005120c216 --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/base_backend.py @@ -0,0 +1,16 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from abc import ABC, abstractmethod +from typing import Iterable + +from airbyte_protocol.models import AirbyteMessage # type: ignore + + +class BaseBackend(ABC): + """ + Interface to be shared between the file backend and the database backend(s) + """ + + @abstractmethod + async def write(self, airbyte_messages: Iterable[AirbyteMessage]) -> None: + ... diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/file_backend.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/file_backend.py new file mode 100644 index 000000000000..5588322aac2c --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/file_backend.py @@ -0,0 +1,104 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
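+ +# File-based backend: each Airbyte message is appended to a .jsonl file chosen
+# by its message type; records and state messages are additionally split per
+# stream, and an LRU cache bounds the number of concurrently open file descriptors.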
+ +import json +from pathlib import Path +from typing import Iterable, TextIO, Tuple + +import pydash +from airbyte_protocol.models import AirbyteMessage # type: ignore +from airbyte_protocol.models import Type as AirbyteMessageType +from cachetools import LRUCache, cached +from live_tests.commons.backends.base_backend import BaseBackend + + +class FileDescriptorLRUCache(LRUCache): + def popitem(self): + filepath, fd = LRUCache.popitem(self) + fd.close() # Close the file descriptor when it's evicted from the cache + return filepath, fd + + +class FileBackend(BaseBackend): + RELATIVE_CATALOGS_PATH = "catalog.jsonl" + RELATIVE_CONNECTION_STATUS_PATH = "connection_status.jsonl" + RELATIVE_RECORDS_PATH = "records.jsonl" + RELATIVE_SPECS_PATH = "spec.jsonl" + RELATIVE_STATES_PATH = "states.jsonl" + RELATIVE_TRACES_PATH = "traces.jsonl" + RELATIVE_LOGS_PATH = "logs.jsonl" + RELATIVE_CONTROLS_PATH = "controls.jsonl" + RECORD_PATHS_TO_POP = ["emitted_at"] + CACHE = FileDescriptorLRUCache(maxsize=250) + + def __init__(self, output_directory: Path): + self._output_directory = output_directory + + async def write(self, airbyte_messages: Iterable[AirbyteMessage]): + """ + Write AirbyteMessages to the appropriate file. + + Catalogs, connection status messages, specs, trace messages, logs, and control messages are all written to their + own file (e.g. "catalog.jsonl", "spec.jsonl"). + + Records and state messages are further subdivided, with one file per stream (e.g. "my_stream_records.jsonl", + "my_stream_states.jsonl"). Streams with global state are stored in a "_global_states.jsonl" file. + + We use an LRU cache here to manage open file objects, in order to limit the number of concurrently open file + descriptors. This mitigates the risk of hitting limits on the number of open file descriptors, particularly for + connections with a high number of streams. The cache is designed to automatically close files upon eviction. 
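+ + For example, a stream named "users" gets "users_records.jsonl" and "users_states.jsonl" alongside the shared "catalog.jsonl", "spec.jsonl", etc.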
+ """ + + @cached(cache=self.CACHE) + def _open_file(path: Path) -> TextIO: + return open(path, "a") + + try: + for _message in airbyte_messages: + if not isinstance(_message, AirbyteMessage): + continue + filepath, message = self._get_filepath_and_message(_message) + _open_file(self._output_directory / filepath).write(f"{message}\n") + finally: + for f in self.CACHE.values(): + f.close() + + def _get_filepath_and_message(self, message: AirbyteMessage) -> Tuple[str, str]: + if message.type == AirbyteMessageType.CATALOG: + return self.RELATIVE_CATALOGS_PATH, message.catalog.json() + + elif message.type == AirbyteMessageType.CONNECTION_STATUS: + return self.RELATIVE_CONNECTION_STATUS_PATH, message.connectionStatus.json() + + elif message.type == AirbyteMessageType.RECORD: + record = json.loads(message.record.json()) + # TODO: once we have a comparator and/or database backend implemented we can remove this + for key_path in self.RECORD_PATHS_TO_POP: + pydash.objects.unset(record, key_path) + return f"{message.record.stream}_{self.RELATIVE_RECORDS_PATH}", json.dumps(record) + + elif message.type == AirbyteMessageType.SPEC: + return self.RELATIVE_SPECS_PATH, message.spec.json() + + elif message.type == AirbyteMessageType.STATE: + if message.state.stream and message.state.stream.stream_descriptor: + stream_name = message.state.stream.stream_descriptor.name + stream_namespace = message.state.stream.stream_descriptor.namespace + filepath = ( + f"{stream_name}_{stream_namespace}_{self.RELATIVE_STATES_PATH}" + if stream_namespace + else f"{stream_name}_{self.RELATIVE_STATES_PATH}" + ) + else: + filepath = f"_global_{self.RELATIVE_STATES_PATH}" + return filepath, message.state.json() + + elif message.type == AirbyteMessageType.TRACE: + return self.RELATIVE_TRACES_PATH, message.trace.json() + + elif message.type == AirbyteMessageType.LOG: + return self.RELATIVE_LOGS_PATH, message.log.json() + + elif message.type == AirbyteMessageType.CONTROL: + return self.RELATIVE_CONTROLS_PATH, message.control.json() + + raise NotImplementedError(f"No handling for AirbyteMessage type {message.type} has been implemented. This is unexpected.") diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/connector_runner.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/connector_runner.py new file mode 100644 index 000000000000..66b5dcc61e57 --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/commons/connector_runner.py @@ -0,0 +1,287 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +import hashlib +import json +import logging +import os +import uuid +from pathlib import Path +from typing import Dict, List, Optional + +import dagger +import docker # type: ignore +import pytest +from airbyte_protocol.models import ConfiguredAirbyteCatalog # type: ignore +from live_tests.commons.models import Command, ConnectorUnderTest, ExecutionResult, SecretDict + + +async def get_container_from_id(dagger_client: dagger.Client, container_id: str) -> dagger.Container: + """Get a dagger container from its id. + Please remind that container id are not persistent and can change between Dagger sessions. 
+ + Args: + dagger_client (dagger.Client): The dagger client to use to import the connector image + container_id (str): The id of the container to load + """ + try: + return await dagger_client.container(id=dagger.ContainerID(container_id)) + except dagger.DaggerError as e: + pytest.exit(f"Failed to load connector container: {e}") + + +async def get_container_from_tarball_path(dagger_client: dagger.Client, tarball_path: Path): + if not tarball_path.exists(): + pytest.exit(f"Connector image tarball {tarball_path} does not exist") + container_under_test_tar_file = ( + dagger_client.host().directory(str(tarball_path.parent), include=tarball_path.name).file(tarball_path.name) + ) + try: + return await dagger_client.container().import_(container_under_test_tar_file) + except dagger.DaggerError as e: + pytest.exit(f"Failed to import connector image from tarball: {e}") + + +async def get_container_from_local_image(dagger_client: dagger.Client, local_image_name: str) -> Optional[dagger.Container]: + """Get a dagger container from a local image. + It uses the Docker Python client to export the image to a tarball and then imports it into dagger. + + Args: + dagger_client (dagger.Client): The dagger client to use to import the connector image + local_image_name (str): The name of the local image to import + + Returns: + Optional[dagger.Container]: The dagger container for the local image or None if the image does not exist + """ + docker_client = docker.from_env() + + try: + image = docker_client.images.get(local_image_name) + except docker.errors.ImageNotFound: + return None + + image_digest = image.id.replace("sha256:", "") + tarball_path = Path(f"/tmp/{image_digest}.tar") + if not tarball_path.exists(): + logging.info(f"Exporting local connector image {local_image_name} to tarball {tarball_path}") + with open(tarball_path, "wb") as f: + for chunk in image.save(named=True): + f.write(chunk) + return await get_container_from_tarball_path(dagger_client, tarball_path) + + +async def get_container_from_dockerhub_image(dagger_client: dagger.Client, dockerhub_image_name: str) -> dagger.Container: + """Get a dagger container from a dockerhub image. + + Args: + dagger_client (dagger.Client): The dagger client to use to import the connector image + dockerhub_image_name (str): The name of the dockerhub image to import + + Returns: + dagger.Container: The dagger container for the dockerhub image + """ + try: + return await dagger_client.container().from_(dockerhub_image_name) + except dagger.DaggerError as e: + pytest.exit(f"Failed to import connector image from DockerHub: {e}") + + +async def get_connector_container(dagger_client: dagger.Client, image_name_with_tag: str) -> dagger.Container: + """Get a dagger container for the connector image to test. 
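+ + The container is resolved in this order: a container id persisted in /tmp/container_id.txt, a tarball referenced by the CONNECTOR_UNDER_TEST_IMAGE_TAR_PATH env var, a local Docker image, and finally a pull from DockerHub.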
+ + Args: + dagger_client (dagger.Client): The dagger client to use to import the connector image + image_name_with_tag (str): The docker image name and tag of the connector image to test + + Returns: + dagger.Container: The dagger container for the connector image to test + """ + # If a container_id.txt file is available, we'll use it to load the connector container + # We use a txt file as container ids can be too long to be passed as env vars + # It's used for dagger-in-dagger use case with airbyte-ci, when the connector container is built via an upstream dagger operation + connector_container_id_path = Path("/tmp/container_id.txt") + if connector_container_id_path.exists(): + # The file contains a container id we can use to load the connector container + return await get_container_from_id(dagger_client, connector_container_id_path.read_text()) + + # If the CONNECTOR_UNDER_TEST_IMAGE_TAR_PATH env var is set, we'll use it to import the connector image from the tarball + if connector_image_tarball_path := os.environ.get("CONNECTOR_UNDER_TEST_IMAGE_TAR_PATH"): + tarball_path = Path(connector_image_tarball_path) + return await get_container_from_tarball_path(dagger_client, tarball_path) + + # Let's try to load the connector container from a local image + if connector_container := await get_container_from_local_image(dagger_client, image_name_with_tag): + return connector_container + + # If we get here, we'll try to pull the connector image from DockerHub + return await get_container_from_dockerhub_image(dagger_client, image_name_with_tag) + + +class ConnectorRunner: + IN_CONTAINER_CONFIG_PATH = "/data/config.json" + IN_CONTAINER_CATALOG_PATH = "/data/catalog.json" + IN_CONTAINER_STATE_PATH = "/data/state.json" + MITMPROXY_IMAGE = "mitmproxy/mitmproxy:9.0.1" + HTTP_DUMP_FILE_NAME = "http_dump.mitm" + + def __init__( + self, + dagger_client: dagger.Client, + connector_under_test: ConnectorUnderTest, + command: Command, + config: Optional[SecretDict] = None, + catalog: Optional[ConfiguredAirbyteCatalog] = None, + state: Optional[Dict] = None, + environment_variables: Optional[Dict] = None, + enable_http_cache: bool = True, + ): + self.dagger_client = dagger_client + self.connector_under_test = connector_under_test + self.command = command + self.config = config + self.catalog = catalog + self.state = state + self.environment_variables = environment_variables if environment_variables else {} + self.enable_http_cache = enable_http_cache + self.full_command: List[str] = self._get_full_command(command) + + @property + def _connector_under_test_container(self) -> dagger.Container: + return self.connector_under_test.container + + def _get_full_command(self, command: Command): + if command is Command.SPEC: + return ["spec"] + elif command is Command.CHECK: + return ["check", "--config", self.IN_CONTAINER_CONFIG_PATH] + elif command is Command.DISCOVER: + return ["discover", "--config", self.IN_CONTAINER_CONFIG_PATH] + elif command is Command.READ: + return [ + "read", + "--config", + self.IN_CONTAINER_CONFIG_PATH, + "--catalog", + self.IN_CONTAINER_CATALOG_PATH, + ] + elif command is Command.READ_WITH_STATE: + return [ + "read", + "--config", + self.IN_CONTAINER_CONFIG_PATH, + "--catalog", + self.IN_CONTAINER_CATALOG_PATH, + "--state", + self.IN_CONTAINER_STATE_PATH, + ] + else: + raise NotImplementedError(f"The connector runner does not support the {command} command") + + async def get_container_env_variable_value(self, name: str) -> Optional[str]: + return await 
self._connector_under_test_container.env_variable(name) + + async def get_container_label(self, label: str): + return await self._connector_under_test_container.label(label) + + async def get_container_entrypoint(self): + entrypoint = await self._connector_under_test_container.entrypoint() + return " ".join(entrypoint) + + async def run( + self, + raise_on_container_error: bool = True, + ) -> ExecutionResult: + container = self._connector_under_test_container + # Do not cache downstream dagger layers + container = container.with_env_variable("CACHEBUSTER", str(uuid.uuid4())) + for env_var_name, env_var_value in self.environment_variables.items(): + container = container.with_env_variable(env_var_name, env_var_value) + if self.config: + container = container.with_new_file(self.IN_CONTAINER_CONFIG_PATH, contents=json.dumps(dict(self.config))) + if self.state: + container = container.with_new_file(self.IN_CONTAINER_STATE_PATH, contents=json.dumps(self.state)) + if self.catalog: + container = container.with_new_file(self.IN_CONTAINER_CATALOG_PATH, contents=self.catalog.json()) + if self.enable_http_cache: + container = await self._bind_connector_container_to_proxy(container) + executed_container = await container.with_exec(self.full_command).sync() + + return ExecutionResult( + stdout=await executed_container.stdout(), + stderr=await executed_container.stderr(), + executed_container=executed_container, + http_dump=await self._retrieve_http_dump() if self.enable_http_cache else None, + ) + + def _get_http_dumps_cache_volume(self) -> dagger.CacheVolume: + config_data = self.config.data if self.config else None + proxy_cache_key = hashlib.md5((self.connector_under_test.name + str(config_data)).encode("utf-8")).hexdigest() + return self.dagger_client.cache_volume(f"{self.MITMPROXY_IMAGE}{proxy_cache_key}") + + def _get_mitmproxy_dir_cache(self) -> dagger.CacheVolume: + return self.dagger_client.cache_volume(self.MITMPROXY_IMAGE) + + async def _get_proxy_container( + self, + ) -> dagger.Container: + proxy_container = ( + self.dagger_client.container() + .from_(self.MITMPROXY_IMAGE) + .with_exec(["mkdir", "-p", "/home/mitmproxy/.mitmproxy"], skip_entrypoint=True) + .with_mounted_cache("/dumps", self._get_http_dumps_cache_volume()) + .with_mounted_cache("/home/mitmproxy/.mitmproxy", self._get_mitmproxy_dir_cache()) + ) + previous_dump_files = ( + await proxy_container.with_env_variable("CACHEBUSTER", str(uuid.uuid4())) + .with_exec(["ls", "/dumps"], skip_entrypoint=True) + .stdout() + ).splitlines() + if self.HTTP_DUMP_FILE_NAME in previous_dump_files: + command = [ + "mitmweb", + "--server-replay", + f"/dumps/{self.HTTP_DUMP_FILE_NAME}", + ] + else: + command = [ + "mitmweb", + "--save-stream-file", + f"/dumps/{self.HTTP_DUMP_FILE_NAME}", + ] + + return proxy_container.with_exec(command) + + async def _bind_connector_container_to_proxy(self, container: dagger.Container): + proxy_srv = await self._get_proxy_container() + proxy_host, proxy_port = "proxy_server", 8080 + cert_path_in_volume = "/mitmproxy_dir/mitmproxy-ca.pem" + requests_cert_path = "/usr/local/lib/python3.9/site-packages/certifi/cacert.pem" + ca_certificate_path = "/usr/local/share/ca-certificates/mitmproxy.crt" + + return ( + container.with_service_binding(proxy_host, proxy_srv.with_exposed_port(proxy_port).as_service()) + .with_mounted_cache("/mitmproxy_dir", self._get_mitmproxy_dir_cache()) + .with_exec(["cp", cert_path_in_volume, requests_cert_path], skip_entrypoint=True) + .with_exec(["cp", cert_path_in_volume, 
ca_certificate_path], skip_entrypoint=True) + .with_env_variable("REQUESTS_CA_BUNDLE", requests_cert_path) + .with_exec(["update-ca-certificates"], skip_entrypoint=True) + .with_env_variable("http_proxy", f"{proxy_host}:{proxy_port}") + .with_env_variable("https_proxy", f"{proxy_host}:{proxy_port}") + ) + + async def _retrieve_http_dump(self) -> dagger.File: + return await ( + self.dagger_client.container() + .from_("alpine:latest") + .with_mounted_cache("/dumps", self._get_http_dumps_cache_volume()) + .with_exec(["mkdir", "/to_export"]) + .with_exec( + [ + "cp", + "-r", + f"/dumps/{self.HTTP_DUMP_FILE_NAME}", + f"/to_export/{self.HTTP_DUMP_FILE_NAME}", + ] + ) + .file(f"/to_export/{self.HTTP_DUMP_FILE_NAME}") + ) diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/models.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/models.py new file mode 100644 index 000000000000..0b9bbd58eda5 --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/commons/models.py @@ -0,0 +1,235 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +import time +from dataclasses import dataclass, field +from enum import Enum +from pathlib import Path +from typing import Dict, List, Optional, Tuple + +import _collections_abc +import dagger +from airbyte_protocol.models import AirbyteMessage # type: ignore +from airbyte_protocol.models import ConfiguredAirbyteCatalog # type: ignore +from live_tests.commons.backends import FileBackend +from pydantic import ValidationError + + +class UserDict(_collections_abc.MutableMapping): + # Start by filling-out the abstract methods + def __init__(self, dict=None, /, **kwargs): + self.data = {} + if dict is not None: + self.update(dict) + if kwargs: + self.update(kwargs) + + def __len__(self): + return len(self.data) + + def __getitem__(self, key): + if key in self.data: + return self.data[key] + if hasattr(self.__class__, "__missing__"): + return self.__class__.__missing__(self, key) + raise KeyError(key) + + def __setitem__(self, key, item): + self.data[key] = item + + def __delitem__(self, key): + del self.data[key] + + def __iter__(self): + return iter(self.data) + + # Modify __contains__ to work correctly when __missing__ is present + def __contains__(self, key): + return key in self.data + + # Now, add the methods in dicts but not in MutableMapping + def __repr__(self): + return repr(self.data) + + def __or__(self, other): + if isinstance(other, UserDict): + return self.__class__(self.data | other.data) + if isinstance(other, dict): + return self.__class__(self.data | other) + return NotImplemented + + def __ror__(self, other): + if isinstance(other, UserDict): + return self.__class__(other.data | self.data) + if isinstance(other, dict): + return self.__class__(other | self.data) + return NotImplemented + + def __ior__(self, other): + if isinstance(other, UserDict): + self.data |= other.data + else: + self.data |= other + return self + + def __copy__(self): + inst = self.__class__.__new__(self.__class__) + inst.__dict__.update(self.__dict__) + # Create a copy and avoid triggering descriptors + inst.__dict__["data"] = self.__dict__["data"].copy() + return inst + + def copy(self): + if self.__class__ is UserDict: + return UserDict(self.data.copy()) + import copy + + data = self.data + try: + self.data = {} + c = copy.copy(self) + finally: + self.data = data + c.update(self) + return c + + @classmethod + def fromkeys(cls, iterable, value=None): + d = cls() + for key in iterable: + d[key] = value + return d + + +class 
SecretDict(UserDict): + def __str__(self) -> str: + return f"{self.__class__.__name__}(******)" + + def __repr__(self) -> str: + return str(self) + + +class Command(Enum): + CHECK = "check" + DISCOVER = "discover" + READ = "read" + READ_WITH_STATE = "read-with-state" + SPEC = "spec" + + +@dataclass +class ConnectorUnderTest: + image_name: str + container: dagger.Container + + @property + def name(self): + return self.image_name.replace("airbyte/", "").split(":")[0] + + @property + def version(self): + return self.image_name.replace("airbyte/", "").split(":")[1] + + +@dataclass +class ExecutionInputs: + connector_under_test: ConnectorUnderTest + command: Command + config: Optional[SecretDict] = None + catalog: Optional[ConfiguredAirbyteCatalog] = None + state: Optional[Dict] = None + environment_variables: Optional[Dict] = None + enable_http_cache: bool = True + + def to_dict(self) -> dict: + return { + "connector_under_test": self.connector_under_test, + "command": self.command, + "config": self.config, + "catalog": self.catalog, + "state": self.state, + "environment_variables": self.environment_variables, + "enable_http_cache": self.enable_http_cache, + } + + def raise_if_missing_attr_for_command(self, attribute: str): + if getattr(self, attribute) is None: + raise ValueError(f"We need a {attribute} to run the {self.command.value} command") + + def __post_init__(self): + if self.command is Command.CHECK: + self.raise_if_missing_attr_for_command("config") + if self.command is Command.DISCOVER: + self.raise_if_missing_attr_for_command("config") + if self.command is Command.READ: + self.raise_if_missing_attr_for_command("config") + self.raise_if_missing_attr_for_command("catalog") + if self.command is Command.READ_WITH_STATE: + self.raise_if_missing_attr_for_command("config") + self.raise_if_missing_attr_for_command("catalog") + self.raise_if_missing_attr_for_command("state") + + +@dataclass +class ExecutionResult: + stdout: str + stderr: str + executed_container: dagger.Container + http_dump: Optional[dagger.File] + airbyte_messages: List[AirbyteMessage] = field(default_factory=list) + airbyte_messages_parsing_errors: List[Tuple[Exception, str]] = field(default_factory=list) + + def __post_init__(self): + self.airbyte_messages, self.airbyte_messages_parsing_errors = self.parse_airbyte_messages_from_command_output(self.stdout) + + @staticmethod + def parse_airbyte_messages_from_command_output( + command_output: str, + ) -> Tuple[List[AirbyteMessage], List[Tuple[Exception, str]]]: + airbyte_messages: List[AirbyteMessage] = [] + parsing_errors: List[Tuple[Exception, str]] = [] + for line in command_output.splitlines(): + try: + airbyte_messages.append(AirbyteMessage.parse_raw(line)) + except ValidationError as e: + parsing_errors.append((e, line)) + return airbyte_messages, parsing_errors + + +@dataclass +class ExecutionReport: + execution_inputs: ExecutionInputs + execution_result: ExecutionResult + created_at: int = field(default_factory=lambda: int(time.time())) + + @property + def report_dir(self) -> str: + return f"{self.created_at}/{self.execution_inputs.connector_under_test.name}/{self.execution_inputs.command.value}/{self.execution_inputs.connector_under_test.version}/" + + @property + def stdout_filename(self): + return "stdout.log" + + @property + def stderr_filename(self): + return "stderr.log" + + @property + def http_dump_filename(self): + return "http_dump.mitm" + + async def save_to_disk(self, output_dir: Path) -> None: + final_dir = output_dir / self.report_dir + 
final_dir.mkdir(parents=True, exist_ok=True) + stdout_file_path = final_dir / self.stdout_filename + stdout_file_path.write_text(self.execution_result.stdout) + + stderr_file_path = final_dir / self.stderr_filename + stderr_file_path.write_text(self.execution_result.stderr) + if self.execution_result.http_dump: + http_dump_file_path = final_dir / self.http_dump_filename + await self.execution_result.http_dump.export(str(http_dump_file_path.resolve())) + # TODO merge ExecutionReport.save_to_disk and Backend.write? + # Make the backend used here customizable + airbyte_messages_dir = final_dir / "airbyte_messages" + airbyte_messages_dir.mkdir(parents=True, exist_ok=True) + await FileBackend(airbyte_messages_dir).write(self.execution_result.airbyte_messages) diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/utils.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/utils.py new file mode 100644 index 000000000000..4b62defb821a --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/commons/utils.py @@ -0,0 +1,37 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +import json +from pathlib import Path +from typing import Dict, List, Optional + +import dagger +from live_tests.commons.connector_runner import SecretDict, get_connector_container +from live_tests.commons.models import ConnectorUnderTest + + +def get_connector_config(path: Optional[str | Path]) -> Optional[SecretDict]: + if path is None: + return None + return SecretDict(_read_json(path)) + + +def get_state(path: Optional[str | Path]) -> Optional[Dict]: + if path is None: + return None + return _read_json(path) + + +def _read_json(path: Path | str) -> Dict: + with open(str(path), "r") as file: + contents = file.read() + return json.loads(contents) + + +async def get_connector_under_test(dagger_client: dagger.Client, connector_image_name: str) -> ConnectorUnderTest: + dagger_container = await get_connector_container(dagger_client, connector_image_name) + return ConnectorUnderTest(connector_image_name, dagger_container) + + +def sh_dash_c(lines: List[str]) -> List[str]: + """Wrap a sequence of commands in a shell invocation for safe usage of dagger Container's with_exec method.""" + return ["sh", "-c", " && ".join(["set -o xtrace"] + lines)] diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/debug/__init__.py b/airbyte-ci/connectors/live-tests/src/live_tests/debug/__init__.py new file mode 100644 index 000000000000..33baac2d2613 --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/debug/__init__.py @@ -0,0 +1,8 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +import dagger +import os +import sys + +DAGGER_EXEC_TIMEOUT = dagger.Timeout(int(os.environ.get("DAGGER_EXEC_TIMEOUT", "3600"))) # One hour by default +DAGGER_CONFIG = dagger.Config(timeout=DAGGER_EXEC_TIMEOUT, log_output=sys.stderr) diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/debug/cli.py b/airbyte-ci/connectors/live-tests/src/live_tests/debug/cli.py new file mode 100644 index 000000000000..9dc7fba9cfbc --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/debug/cli.py @@ -0,0 +1,95 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
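+ +# Implements the `debug` command: each connector image is resolved to a container,
+# the requested command is executed through a ConnectorRunner inside a shared Dagger
+# session, and the resulting ExecutionReport (stdout, stderr, parsed Airbyte messages,
+# optional HTTP dump) is persisted under the output directory.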
+ +import time +from pathlib import Path +from typing import List, Optional + +import asyncclick as click +import dagger +from airbyte_protocol.models import ConfiguredAirbyteCatalog # type: ignore +from live_tests.commons.connector_runner import ConnectorRunner +from live_tests.commons.models import Command, ExecutionInputs, ExecutionReport +from live_tests.commons.utils import get_connector_config, get_connector_under_test, get_state +from live_tests.debug import DAGGER_CONFIG + + +@click.command( + "debug", + help="Run a specific command on one or multiple connectors and persist the outputs to local storage.", +) +@click.argument( + "command", + type=click.Choice([c.value for c in Command]), + callback=lambda _, __, value: Command(value), +) +@click.option( + "-c", + "--connector-image", + "connector_images", + help="Docker image name of the connector to debug (e.g. `airbyte/source-faker:latest`, `airbyte/source-faker:dev`)", + multiple=True, + type=str, + required=True, +) +@click.option( + "-o", + "--output-directory", + help="Directory in which connector output and test results should be stored. Defaults to live_tests_debug_reports in the current directory.", + default=Path("live_tests_debug_reports"), + type=click.Path(file_okay=False, dir_okay=True, resolve_path=True, path_type=Path), +) +@click.option( + "--config-path", + help="Path to the connector config.", + type=click.Path(exists=True, file_okay=True, dir_okay=False, resolve_path=True, path_type=Path), +) +@click.option( + "--catalog-path", + help="Path to the connector catalog.", + type=click.Path(exists=True, file_okay=True, dir_okay=False, resolve_path=True, path_type=Path), +) +@click.option( + "--state-path", + help="Path to the connector state.", + type=click.Path(exists=True, file_okay=True, dir_okay=False, resolve_path=True, path_type=Path), +) +@click.option( + "-hc", + "--http-cache", + "enable_http_cache", + help="Use the HTTP cache for the connector.", + default=True, + is_flag=True, + type=bool, +) +# TODO add an option to pass env vars to the connector +@click.pass_context +async def debug_cmd( + ctx: click.Context, + command: Command, + connector_images: List[str], + output_directory: Path, + config_path: Optional[str], + catalog_path: Optional[str], + state_path: Optional[str], + enable_http_cache: bool, +): + output_directory.mkdir(parents=True, exist_ok=True) + debug_session_start_time = int(time.time()) + async with dagger.Connection(config=DAGGER_CONFIG) as dagger_client: + for connector_image in connector_images: + try: + execution_inputs = ExecutionInputs( + connector_under_test=await get_connector_under_test(dagger_client, connector_image), + command=command, + config=get_connector_config(config_path), + catalog=ConfiguredAirbyteCatalog.parse_file(catalog_path) if catalog_path else None, + state=get_state(state_path) if state_path else None, + environment_variables=None, + enable_http_cache=enable_http_cache, + ) + except ValueError as e: + raise click.UsageError(str(e)) + execution_result = await ConnectorRunner(dagger_client, **execution_inputs.to_dict()).run() + execution_report = ExecutionReport(execution_inputs, execution_result, created_at=debug_session_start_time) + await execution_report.save_to_disk(output_directory) diff --git a/airbyte-ci/connectors/live-tests/tests/__init__.py b/airbyte-ci/connectors/live-tests/tests/__init__.py new file mode 100644 index 000000000000..f70ecfc3a89e --- /dev/null +++ b/airbyte-ci/connectors/live-tests/tests/__init__.py @@ -0,0 +1 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights 
reserved. diff --git a/airbyte-ci/connectors/live-tests/tests/backends/__init__.py b/airbyte-ci/connectors/live-tests/tests/backends/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/airbyte-ci/connectors/live-tests/tests/backends/test_file_backend.py b/airbyte-ci/connectors/live-tests/tests/backends/test_file_backend.py new file mode 100644 index 000000000000..0de07435efb4 --- /dev/null +++ b/airbyte-ci/connectors/live-tests/tests/backends/test_file_backend.py @@ -0,0 +1,72 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from pathlib import Path + +import pytest +from airbyte_protocol.models import ( + AirbyteCatalog, + AirbyteConnectionStatus, + AirbyteMessage, + AirbyteRecordMessage, + AirbyteStateMessage, + ConnectorSpecification, + Status, +) +from airbyte_protocol.models import Type as AirbyteMessageType +from live_tests.commons.backends import FileBackend + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "messages, expected_writes", + [ + ( + [ + AirbyteMessage(type=AirbyteMessageType.CATALOG, catalog=AirbyteCatalog(streams=[])), + AirbyteMessage( + type=AirbyteMessageType.CONNECTION_STATUS, + connectionStatus=AirbyteConnectionStatus(status=Status.SUCCEEDED), + ), + AirbyteMessage( + type=AirbyteMessageType.RECORD, + record=AirbyteRecordMessage(stream="test_stream", data={}, emitted_at=123456789), + ), + AirbyteMessage( + type=AirbyteMessageType.SPEC, + spec=ConnectorSpecification(connectionSpecification={}), + ), + AirbyteMessage( + type=AirbyteMessageType.STATE, + state=AirbyteStateMessage(data={"test": "value"}), + ), + ], + [ + ("catalog.jsonl", '{"streams": []}\n'), + ( + "connection_status.jsonl", + '{"status": "SUCCEEDED", "message": null}\n', + ), + ( + "test_stream_records.jsonl", + '{"namespace": null, "stream": "test_stream", "data": {}, "meta": null}\n', + ), + ( + "spec.jsonl", + '{"documentationUrl": null, "changelogUrl": null, "connectionSpecification": {}, "supportsIncremental": null, "supportsNormalization": false, "supportsDBT": false, "supported_destination_sync_modes": null, "advanced_auth": null, "protocol_version": null}\n', + ), + ( + "_global_states.jsonl", + '{"type": null, "stream": null, "global_": null, "data": {"test": "value"}, "sourceStats": null, "destinationStats": null}\n', + ), + ], + ), + ], +) +async def test_write(tmp_path, messages, expected_writes): + backend = FileBackend(tmp_path) + await backend.write(messages) + for expected_file, expected_content in expected_writes: + expected_path = Path(tmp_path / expected_file) + assert expected_path.exists() + content = expected_path.read_text() + assert content == expected_content diff --git a/airbyte-ci/connectors/pipelines/README.md b/airbyte-ci/connectors/pipelines/README.md index 8bd2d171491b..eff96c3c6856 100644 --- a/airbyte-ci/connectors/pipelines/README.md +++ b/airbyte-ci/connectors/pipelines/README.md @@ -644,10 +644,11 @@ E.G.: running Poe tasks on the modified internal packages of the current branch: | Version | PR | Description | | ------- | ---------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------- | +| 4.5.1 | [#35786](https://github.com/airbytehq/airbyte/pull/35786) | Declare `live_tests` as an internal poetry package. 
| | 4.5.0 | [#35784](https://github.com/airbytehq/airbyte/pull/35784) | Format command supports kotlin | | 4.4.0 | [#35317](https://github.com/airbytehq/airbyte/pull/35317) | Augment java connector reports to include full logs and junit test results | | 4.3.2 | [#35536](https://github.com/airbytehq/airbyte/pull/35536) | Make QA checks run correctly on `*-strict-encrypt` connectors. | -| 4.3.1 | [#35437](https://github.com/airbytehq/airbyte/pull/35437) | Do not run QA checks on publish, just MetadataValidation. | +| 4.3.1 | [#35437](https://github.com/airbytehq/airbyte/pull/35437) | Do not run QA checks on publish, just MetadataValidation. | | 4.3.0 | [#35438](https://github.com/airbytehq/airbyte/pull/35438) | Optionally disable telemetry with environment variable. | | 4.2.4 | [#35325](https://github.com/airbytehq/airbyte/pull/35325) | Use `connectors_qa` for QA checks and remove redundant checks. | | 4.2.3 | [#35322](https://github.com/airbytehq/airbyte/pull/35322) | Declare `connectors_qa` as an internal package for testing. | diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/__init__.py index 882d61c8f7c7..43bcee98b388 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/__init__.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/__init__.py @@ -12,6 +12,7 @@ "airbyte-ci/connectors/connector_ops", "airbyte-ci/connectors/connectors_qa", "airbyte-ci/connectors/ci_credentials", + "airbyte-ci/connectors/live-tests", "airbyte-ci/connectors/metadata_service/lib", "airbyte-ci/connectors/metadata_service/orchestrator", "airbyte-integrations/bases/connector-acceptance-test" diff --git a/airbyte-ci/connectors/pipelines/pyproject.toml b/airbyte-ci/connectors/pipelines/pyproject.toml index 629a40b57e4c..2fbb1c46ce1d 100644 --- a/airbyte-ci/connectors/pipelines/pyproject.toml +++ b/airbyte-ci/connectors/pipelines/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pipelines" -version = "4.5.0" +version = "4.5.1" description = "Packaged maintained by the connector operations team to perform CI for connectors' pipelines" authors = ["Airbyte "] From bba545e7430c4251a3bcfd2fd7582041a15b05cb Mon Sep 17 00:00:00 2001 From: Catherine Noll Date: Wed, 6 Mar 2024 08:19:51 -0500 Subject: [PATCH 101/172] Source s3 cache connection (#35808) --- .../connectors/source-s3/metadata.yaml | 2 +- .../connectors/source-s3/poetry.lock | 115 +++++++++--------- .../connectors/source-s3/pyproject.toml | 2 +- .../source-s3/source_s3/v4/stream_reader.py | 30 ++--- docs/integrations/sources/s3.md | 1 + 5 files changed, 76 insertions(+), 74 deletions(-) diff --git a/airbyte-integrations/connectors/source-s3/metadata.yaml b/airbyte-integrations/connectors/source-s3/metadata.yaml index 2b3b204f87be..cd1690914b5b 100644 --- a/airbyte-integrations/connectors/source-s3/metadata.yaml +++ b/airbyte-integrations/connectors/source-s3/metadata.yaml @@ -10,7 +10,7 @@ data: connectorSubtype: file connectorType: source definitionId: 69589781-7828-43c5-9f63-8925b1c1ccc2 - dockerImageTag: 4.5.7 + dockerImageTag: 4.5.8 dockerRepository: airbyte/source-s3 documentationUrl: https://docs.airbyte.com/integrations/sources/s3 githubIssueLabel: source-s3 diff --git a/airbyte-integrations/connectors/source-s3/poetry.lock b/airbyte-integrations/connectors/source-s3/poetry.lock index b8b2e27ae350..eca34b4323ca 100644 --- a/airbyte-integrations/connectors/source-s3/poetry.lock +++ 
b/airbyte-integrations/connectors/source-s3/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "airbyte-cdk" -version = "0.67.0" +version = "0.67.1" description = "A framework for writing Airbyte Connectors." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.67.0.tar.gz", hash = "sha256:cbbff1b3895c89313764a721870bb293a396c74bad8dd6e5c36a0c3b0a2f6a10"}, - {file = "airbyte_cdk-0.67.0-py3-none-any.whl", hash = "sha256:2082c859536a2450c03b89dba1bbdab21bad314fbf5ef6d2e86fefc4ba935373"}, + {file = "airbyte-cdk-0.67.1.tar.gz", hash = "sha256:3f82be93ae6f574c70d7ad5352d34f9235e86bd74c0db14a0aa7d246f3a403c2"}, + {file = "airbyte_cdk-0.67.1-py3-none-any.whl", hash = "sha256:b1de0f004441a2ae6e2928e55f7ac31bd160af30e928ffda90eb75b5e3c56bf3"}, ] [package.dependencies] @@ -140,17 +140,17 @@ lxml = ["lxml"] [[package]] name = "boto3" -version = "1.34.48" +version = "1.34.55" description = "The AWS SDK for Python" optional = false python-versions = ">= 3.8" files = [ - {file = "boto3-1.34.48-py3-none-any.whl", hash = "sha256:adc785ff05aec9fc93f82d507420b320203cd4fd011c67eb369b3aa2b5aeb298"}, - {file = "boto3-1.34.48.tar.gz", hash = "sha256:f9873c3f03de546d7297475c6acd771840c385521caadb8c121a1ac38bc59cd4"}, + {file = "boto3-1.34.55-py3-none-any.whl", hash = "sha256:ee2c96e8a4a741ecb3380e0a406baa67bfea6186be99b75bdeca3e1b5044c088"}, + {file = "boto3-1.34.55.tar.gz", hash = "sha256:9a6d59e035fac4366dbdaf909c4f66fc817dfbec044fa71564dcf036ad46bb19"}, ] [package.dependencies] -botocore = ">=1.34.48,<1.35.0" +botocore = ">=1.34.55,<1.35.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -159,13 +159,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.34.48" +version = "1.34.55" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">= 3.8" files = [ - {file = "botocore-1.34.48-py3-none-any.whl", hash = "sha256:f3e1c84fa75fd6921dfbfb4b2f803bcc424b9b866982fe80e08edbd26ca9861c"}, - {file = "botocore-1.34.48.tar.gz", hash = "sha256:eabdde36309274b76bb79ae9bdfa10c1fd91a2c9b3343cfa15b8a91f8e1ec224"}, + {file = "botocore-1.34.55-py3-none-any.whl", hash = "sha256:07044c3cbfb86d0ecb9c56d887b8ad63a72eff0e4f6ab329cf335f1fd867ea0b"}, + {file = "botocore-1.34.55.tar.gz", hash = "sha256:bb333e3845bfe65600f36bf92d09668306e224fa9f4e4f87b77f6957192ae59f"}, ] [package.dependencies] @@ -192,13 +192,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -438,43 +438,43 @@ files = [ [[package]] name = "cryptography" -version = "42.0.4" +version = "42.0.5" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.4-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:ffc73996c4fca3d2b6c1c8c12bfd3ad00def8621da24f547626bf06441400449"}, - {file = "cryptography-42.0.4-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:db4b65b02f59035037fde0998974d84244a64c3265bdef32a827ab9b63d61b18"}, - {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad9c385ba8ee025bb0d856714f71d7840020fe176ae0229de618f14dae7a6e2"}, - {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69b22ab6506a3fe483d67d1ed878e1602bdd5912a134e6202c1ec672233241c1"}, - {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e09469a2cec88fb7b078e16d4adec594414397e8879a4341c6ace96013463d5b"}, - {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3e970a2119507d0b104f0a8e281521ad28fc26f2820687b3436b8c9a5fcf20d1"}, - {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:e53dc41cda40b248ebc40b83b31516487f7db95ab8ceac1f042626bc43a2f992"}, - {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c3a5cbc620e1e17009f30dd34cb0d85c987afd21c41a74352d1719be33380885"}, - {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6bfadd884e7280df24d26f2186e4e07556a05d37393b0f220a840b083dc6a824"}, - {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:01911714117642a3f1792c7f376db572aadadbafcd8d75bb527166009c9f1d1b"}, - {file = "cryptography-42.0.4-cp37-abi3-win32.whl", hash = "sha256:fb0cef872d8193e487fc6bdb08559c3aa41b659a7d9be48b2e10747f47863925"}, - {file = "cryptography-42.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:c1f25b252d2c87088abc8bbc4f1ecbf7c919e05508a7e8628e6875c40bc70923"}, - {file = "cryptography-42.0.4-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:15a1fb843c48b4a604663fa30af60818cd28f895572386e5f9b8a665874c26e7"}, - {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1327f280c824ff7885bdeef8578f74690e9079267c1c8bd7dc5cc5aa065ae52"}, - {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ffb03d419edcab93b4b19c22ee80c007fb2d708429cecebf1dd3258956a563a"}, - {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:1df6fcbf60560d2113b5ed90f072dc0b108d64750d4cbd46a21ec882c7aefce9"}, - {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:44a64043f743485925d3bcac548d05df0f9bb445c5fcca6681889c7c3ab12764"}, - {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:3c6048f217533d89f2f8f4f0fe3044bf0b2090453b7b73d0b77db47b80af8dff"}, - {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6d0fbe73728c44ca3a241eff9aefe6496ab2656d6e7a4ea2459865f2e8613257"}, - {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:887623fe0d70f48ab3f5e4dbf234986b1329a64c066d719432d0698522749929"}, - {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ce8613beaffc7c14f091497346ef117c1798c202b01153a8cc7b8e2ebaaf41c0"}, - {file = "cryptography-42.0.4-cp39-abi3-win32.whl", hash = "sha256:810bcf151caefc03e51a3d61e53335cd5c7316c0a105cc695f0959f2c638b129"}, - {file = "cryptography-42.0.4-cp39-abi3-win_amd64.whl", hash = 
"sha256:a0298bdc6e98ca21382afe914c642620370ce0470a01e1bef6dd9b5354c36854"}, - {file = "cryptography-42.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f8907fcf57392cd917892ae83708761c6ff3c37a8e835d7246ff0ad251d9298"}, - {file = "cryptography-42.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:12d341bd42cdb7d4937b0cabbdf2a94f949413ac4504904d0cdbdce4a22cbf88"}, - {file = "cryptography-42.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1cdcdbd117681c88d717437ada72bdd5be9de117f96e3f4d50dab3f59fd9ab20"}, - {file = "cryptography-42.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0e89f7b84f421c56e7ff69f11c441ebda73b8a8e6488d322ef71746224c20fce"}, - {file = "cryptography-42.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f1e85a178384bf19e36779d91ff35c7617c885da487d689b05c1366f9933ad74"}, - {file = "cryptography-42.0.4-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d2a27aca5597c8a71abbe10209184e1a8e91c1fd470b5070a2ea60cafec35bcd"}, - {file = "cryptography-42.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4e36685cb634af55e0677d435d425043967ac2f3790ec652b2b88ad03b85c27b"}, - {file = "cryptography-42.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f47be41843200f7faec0683ad751e5ef11b9a56a220d57f300376cd8aba81660"}, - {file = "cryptography-42.0.4.tar.gz", hash = "sha256:831a4b37accef30cccd34fcb916a5d7b5be3cbbe27268a02832c3e450aea39cb"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, + {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, + {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, + {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, + {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, + {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, + {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, ] [package.dependencies] @@ -985,22 +985,21 @@ files = [ [[package]] name = "marshmallow" -version = "3.20.2" +version = "3.21.1" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
optional = false python-versions = ">=3.8" files = [ - {file = "marshmallow-3.20.2-py3-none-any.whl", hash = "sha256:c21d4b98fee747c130e6bc8f45c4b3199ea66bc00c12ee1f639f0aeca034d5e9"}, - {file = "marshmallow-3.20.2.tar.gz", hash = "sha256:4c1daff273513dc5eb24b219a8035559dc573c8f322558ef85f5438ddd1236dd"}, + {file = "marshmallow-3.21.1-py3-none-any.whl", hash = "sha256:f085493f79efb0644f270a9bf2892843142d80d7174bbbd2f3713f2a589dc633"}, + {file = "marshmallow-3.21.1.tar.gz", hash = "sha256:4e65e9e0d80fc9e609574b9983cf32579f305c718afb30d7233ab818571768c3"}, ] [package.dependencies] packaging = ">=17.0" [package.extras] -dev = ["pre-commit (>=2.4,<4.0)", "pytest", "pytz", "simplejson", "tox"] -docs = ["alabaster (==0.7.15)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"] -lint = ["pre-commit (>=2.4,<4.0)"] +dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] +docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==4.0.0)", "sphinx-version-warning (==1.1.2)"] tests = ["pytest", "pytz", "simplejson"] [[package]] @@ -1614,13 +1613,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -2261,13 +2260,13 @@ telegram = ["requests"] [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] diff --git a/airbyte-integrations/connectors/source-s3/pyproject.toml b/airbyte-integrations/connectors/source-s3/pyproject.toml index 3127d3b94c76..11331a7e084e 100644 --- a/airbyte-integrations/connectors/source-s3/pyproject.toml +++ b/airbyte-integrations/connectors/source-s3/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "4.5.7" +version = "4.5.8" name = "source-s3" description = "Source implementation for S3." 
authors = [ "Airbyte ",] diff --git a/airbyte-integrations/connectors/source-s3/source_s3/v4/stream_reader.py b/airbyte-integrations/connectors/source-s3/source_s3/v4/stream_reader.py index c3e415df7b25..38d63bd1b220 100644 --- a/airbyte-integrations/connectors/source-s3/source_s3/v4/stream_reader.py +++ b/airbyte-integrations/connectors/source-s3/source_s3/v4/stream_reader.py @@ -30,6 +30,7 @@ class SourceS3StreamReader(AbstractFileBasedStreamReader): def __init__(self): super().__init__() + self._s3_client = None @property def config(self) -> Config: @@ -56,23 +57,24 @@ def s3_client(self) -> BaseClient: # list or read files. raise ValueError("Source config is missing; cannot create the S3 client.") - client_kv_args = _get_s3_compatible_client_args(self.config) if self.config.endpoint else {} + if self._s3_client is None: + client_kv_args = _get_s3_compatible_client_args(self.config) if self.config.endpoint else {} - # Set the region_name if it's provided in the config - if self.config.region_name: - client_kv_args["region_name"] = self.config.region_name + # Set the region_name if it's provided in the config + if self.config.region_name: + client_kv_args["region_name"] = self.config.region_name - if self.config.role_arn: - _s3_client = self._get_iam_s3_client(client_kv_args) - else: - _s3_client = boto3.client( - "s3", - aws_access_key_id=self.config.aws_access_key_id, - aws_secret_access_key=self.config.aws_secret_access_key, - **client_kv_args, - ) + if self.config.role_arn: + self._s3_client = self._get_iam_s3_client(client_kv_args) + else: + self._s3_client = boto3.client( + "s3", + aws_access_key_id=self.config.aws_access_key_id, + aws_secret_access_key=self.config.aws_secret_access_key, + **client_kv_args, + ) - return _s3_client + return self._s3_client def _get_iam_s3_client(self, client_kv_args: dict) -> BaseClient: """ diff --git a/docs/integrations/sources/s3.md b/docs/integrations/sources/s3.md index 1586348a10d8..a29fdca0de26 100644 --- a/docs/integrations/sources/s3.md +++ b/docs/integrations/sources/s3.md @@ -264,6 +264,7 @@ To perform the text extraction from PDF and Docx files, the connector uses the [ | Version | Date | Pull Request | Subject | |:--------|:-----------|:----------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------| +| 4.5.8 | 2024-03-04 | [35808](https://github.com/airbytehq/airbyte/pull/35808) | Use cached AWS client | | 4.5.7 | 2024-02-23 | [34895](https://github.com/airbytehq/airbyte/pull/34895) | Run incremental syncs with concurrency | | 4.5.6 | 2024-02-21 | [35246](https://github.com/airbytehq/airbyte/pull/35246) | Fixes bug that occurred when creating CSV streams with tab delimiter. 
| | 4.5.5 | 2024-02-18 | [35392](https://github.com/airbytehq/airbyte/pull/35392) | Add support filtering by start date | From 4a2350d6e02261b4db6421acefd5c39eca25ea41 Mon Sep 17 00:00:00 2001 From: Maxime Carbonneau-Leclerc <3360483+maxi297@users.noreply.github.com> Date: Wed, 6 Mar 2024 09:13:35 -0500 Subject: [PATCH 102/172] Attempt to fix Connector Builder release (#35851) --- .github/workflows/publish-cdk-command-manually.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish-cdk-command-manually.yml b/.github/workflows/publish-cdk-command-manually.yml index c9b1e36ecdc9..a6b9ae789772 100644 --- a/.github/workflows/publish-cdk-command-manually.yml +++ b/.github/workflows/publish-cdk-command-manually.yml @@ -300,7 +300,7 @@ jobs: pip-compile --upgrade - name: Create Pull Request id: create-pull-request - uses: peter-evans/create-pull-request@v4 + uses: peter-evans/create-pull-request@v6 with: token: ${{ secrets.GH_PAT_MAINTENANCE_OCTAVIA }} commit-message: Updating CDK version following release From 671ed4cdb1a214ad03703fff683572fe75424b46 Mon Sep 17 00:00:00 2001 From: "Aaron (\"AJ\") Steers" Date: Wed, 6 Mar 2024 08:22:46 -0800 Subject: [PATCH 103/172] Source File: Add Python 3.11 support (#35800) --- .../connectors/source-file/metadata.yaml | 2 +- .../connectors/source-file/poetry.lock | 79 +++++++++---------- .../connectors/source-file/pyproject.toml | 4 +- docs/integrations/sources/file.md | 27 ++++--- 4 files changed, 56 insertions(+), 56 deletions(-) diff --git a/airbyte-integrations/connectors/source-file/metadata.yaml b/airbyte-integrations/connectors/source-file/metadata.yaml index 3f58d1298337..d5b483270262 100644 --- a/airbyte-integrations/connectors/source-file/metadata.yaml +++ b/airbyte-integrations/connectors/source-file/metadata.yaml @@ -10,7 +10,7 @@ data: connectorSubtype: file connectorType: source definitionId: 778daa7c-feaf-4db6-96f3-70fd645acc77 - dockerImageTag: 0.4.0 + dockerImageTag: 0.4.1 dockerRepository: airbyte/source-file documentationUrl: https://docs.airbyte.com/integrations/sources/file githubIssueLabel: source-file diff --git a/airbyte-integrations/connectors/source-file/poetry.lock b/airbyte-integrations/connectors/source-file/poetry.lock index 976dee468500..e59ccf50645d 100644 --- a/airbyte-integrations/connectors/source-file/poetry.lock +++ b/airbyte-integrations/connectors/source-file/poetry.lock @@ -1797,37 +1797,47 @@ files = [ [[package]] name = "pyarrow" -version = "9.0.0" +version = "14.0.2" description = "Python library for Apache Arrow" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pyarrow-9.0.0-cp310-cp310-macosx_10_13_universal2.whl", hash = "sha256:767cafb14278165ad539a2918c14c1b73cf20689747c21375c38e3fe62884902"}, - {file = "pyarrow-9.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0238998dc692efcb4e41ae74738d7c1234723271ccf520bd8312dca07d49ef8d"}, - {file = "pyarrow-9.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:55328348b9139c2b47450d512d716c2248fd58e2f04e2fc23a65e18726666d42"}, - {file = "pyarrow-9.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fc856628acd8d281652c15b6268ec7f27ebcb015abbe99d9baad17f02adc51f1"}, - {file = "pyarrow-9.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29eb3e086e2b26202f3a4678316b93cfb15d0e2ba20f3ec12db8fd9cc07cde63"}, - {file = "pyarrow-9.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2e753f8fcf07d8e3a0efa0c8bd51fef5c90281ffd4c5637c08ce42cd0ac297de"}, - {file = "pyarrow-9.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:3eef8a981f45d89de403e81fb83b8119c20824caddf1404274e41a5d66c73806"}, - {file = "pyarrow-9.0.0-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:7fa56cbd415cef912677270b8e41baad70cde04c6d8a8336eeb2aba85aa93706"}, - {file = "pyarrow-9.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f8c46bde1030d704e2796182286d1c56846552c50a39ad5bf5a20c0d8159fc35"}, - {file = "pyarrow-9.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ad430cee28ebc4d6661fc7315747c7a18ae2a74e67498dcb039e1c762a2fb67"}, - {file = "pyarrow-9.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a60bb291a964f63b2717fb1b28f6615ffab7e8585322bfb8a6738e6b321282"}, - {file = "pyarrow-9.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9cef618159567d5f62040f2b79b1c7b38e3885f4ffad0ec97cd2d86f88b67cef"}, - {file = "pyarrow-9.0.0-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:5526a3bfb404ff6d31d62ea582cf2466c7378a474a99ee04d1a9b05de5264541"}, - {file = "pyarrow-9.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:da3e0f319509a5881867effd7024099fb06950a0768dad0d6873668bb88cfaba"}, - {file = "pyarrow-9.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c715eca2092273dcccf6f08437371e04d112f9354245ba2fbe6c801879450b7"}, - {file = "pyarrow-9.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f11a645a41ee531c3a5edda45dea07c42267f52571f818d388971d33fc7e2d4a"}, - {file = "pyarrow-9.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5b390bdcfb8c5b900ef543f911cdfec63e88524fafbcc15f83767202a4a2491"}, - {file = "pyarrow-9.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:d9eb04db626fa24fdfb83c00f76679ca0d98728cdbaa0481b6402bf793a290c0"}, - {file = "pyarrow-9.0.0-cp39-cp39-macosx_10_13_universal2.whl", hash = "sha256:4eebdab05afa23d5d5274b24c1cbeb1ba017d67c280f7d39fd8a8f18cbad2ec9"}, - {file = "pyarrow-9.0.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:02b820ecd1da02012092c180447de449fc688d0c3f9ff8526ca301cdd60dacd0"}, - {file = "pyarrow-9.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:92f3977e901db1ef5cba30d6cc1d7942b8d94b910c60f89013e8f7bb86a86eef"}, - {file = "pyarrow-9.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f241bd488c2705df930eedfe304ada71191dcf67d6b98ceda0cc934fd2a8388e"}, - {file = "pyarrow-9.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c5a073a930c632058461547e0bc572da1e724b17b6b9eb31a97da13f50cb6e0"}, - {file = "pyarrow-9.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f59bcd5217a3ae1e17870792f82b2ff92df9f3862996e2c78e156c13e56ff62e"}, - {file = "pyarrow-9.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:fe2ce795fa1d95e4e940fe5661c3c58aee7181c730f65ac5dd8794a77228de59"}, - {file = "pyarrow-9.0.0.tar.gz", hash = "sha256:7fb02bebc13ab55573d1ae9bb5002a6d20ba767bf8569b52fce5301d42495ab7"}, + {file = "pyarrow-14.0.2-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:ba9fe808596c5dbd08b3aeffe901e5f81095baaa28e7d5118e01354c64f22807"}, + {file = "pyarrow-14.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:22a768987a16bb46220cef490c56c671993fbee8fd0475febac0b3e16b00a10e"}, + {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dbba05e98f247f17e64303eb876f4a80fcd32f73c7e9ad975a83834d81f3fda"}, + {file = 
"pyarrow-14.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a898d134d00b1eca04998e9d286e19653f9d0fcb99587310cd10270907452a6b"}, + {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:87e879323f256cb04267bb365add7208f302df942eb943c93a9dfeb8f44840b1"}, + {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:76fc257559404ea5f1306ea9a3ff0541bf996ff3f7b9209fc517b5e83811fa8e"}, + {file = "pyarrow-14.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0c4a18e00f3a32398a7f31da47fefcd7a927545b396e1f15d0c85c2f2c778cd"}, + {file = "pyarrow-14.0.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:87482af32e5a0c0cce2d12eb3c039dd1d853bd905b04f3f953f147c7a196915b"}, + {file = "pyarrow-14.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:059bd8f12a70519e46cd64e1ba40e97eae55e0cbe1695edd95384653d7626b23"}, + {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f16111f9ab27e60b391c5f6d197510e3ad6654e73857b4e394861fc79c37200"}, + {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06ff1264fe4448e8d02073f5ce45a9f934c0f3db0a04460d0b01ff28befc3696"}, + {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:6dd4f4b472ccf4042f1eab77e6c8bce574543f54d2135c7e396f413046397d5a"}, + {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:32356bfb58b36059773f49e4e214996888eeea3a08893e7dbde44753799b2a02"}, + {file = "pyarrow-14.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:52809ee69d4dbf2241c0e4366d949ba035cbcf48409bf404f071f624ed313a2b"}, + {file = "pyarrow-14.0.2-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:c87824a5ac52be210d32906c715f4ed7053d0180c1060ae3ff9b7e560f53f944"}, + {file = "pyarrow-14.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a25eb2421a58e861f6ca91f43339d215476f4fe159eca603c55950c14f378cc5"}, + {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c1da70d668af5620b8ba0a23f229030a4cd6c5f24a616a146f30d2386fec422"}, + {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cc61593c8e66194c7cdfae594503e91b926a228fba40b5cf25cc593563bcd07"}, + {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:78ea56f62fb7c0ae8ecb9afdd7893e3a7dbeb0b04106f5c08dbb23f9c0157591"}, + {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:37c233ddbce0c67a76c0985612fef27c0c92aef9413cf5aa56952f359fcb7379"}, + {file = "pyarrow-14.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:e4b123ad0f6add92de898214d404e488167b87b5dd86e9a434126bc2b7a5578d"}, + {file = "pyarrow-14.0.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:e354fba8490de258be7687f341bc04aba181fc8aa1f71e4584f9890d9cb2dec2"}, + {file = "pyarrow-14.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:20e003a23a13da963f43e2b432483fdd8c38dc8882cd145f09f21792e1cf22a1"}, + {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc0de7575e841f1595ac07e5bc631084fd06ca8b03c0f2ecece733d23cd5102a"}, + {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66e986dc859712acb0bd45601229021f3ffcdfc49044b64c6d071aaf4fa49e98"}, + {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:f7d029f20ef56673a9730766023459ece397a05001f4e4d13805111d7c2108c0"}, + {file = 
"pyarrow-14.0.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:209bac546942b0d8edc8debda248364f7f668e4aad4741bae58e67d40e5fcf75"}, + {file = "pyarrow-14.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:1e6987c5274fb87d66bb36816afb6f65707546b3c45c44c28e3c4133c010a881"}, + {file = "pyarrow-14.0.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:a01d0052d2a294a5f56cc1862933014e696aa08cc7b620e8c0cce5a5d362e976"}, + {file = "pyarrow-14.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a51fee3a7db4d37f8cda3ea96f32530620d43b0489d169b285d774da48ca9785"}, + {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64df2bf1ef2ef14cee531e2dfe03dd924017650ffaa6f9513d7a1bb291e59c15"}, + {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c0fa3bfdb0305ffe09810f9d3e2e50a2787e3a07063001dcd7adae0cee3601a"}, + {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c65bf4fd06584f058420238bc47a316e80dda01ec0dfb3044594128a6c2db794"}, + {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:63ac901baec9369d6aae1cbe6cca11178fb018a8d45068aaf5bb54f94804a866"}, + {file = "pyarrow-14.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:75ee0efe7a87a687ae303d63037d08a48ef9ea0127064df18267252cfe2e9541"}, + {file = "pyarrow-14.0.2.tar.gz", hash = "sha256:36cef6ba12b499d864d1def3e990f97949e0b79400d08b7cf74504ffbd3eb025"}, ] [package.dependencies] @@ -2096,7 +2106,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2104,16 +2113,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", 
hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -2130,7 +2131,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2138,7 +2138,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, 
@@ -2638,4 +2637,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "0ffa93b245e7c41287cebbcfc45ceda1e8a715e878c7263f66b4c0a0dae33244" +content-hash = "97d01109bc5d8b65e2ddbc2e85f378e0b8eeda49dc2344a79d9956167c0b1be6" diff --git a/airbyte-integrations/connectors/source-file/pyproject.toml b/airbyte-integrations/connectors/source-file/pyproject.toml index 3e1f83564b84..aaa803392508 100644 --- a/airbyte-integrations/connectors/source-file/pyproject.toml +++ b/airbyte-integrations/connectors/source-file/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "0.4.0" +version = "0.4.1" name = "source-file" description = "Source implementation for File" authors = [ "Airbyte ",] @@ -26,7 +26,7 @@ airbyte-cdk = "==0.51.41" paramiko = "==2.11.0" xlrd = "==2.0.1" boto3 = "==1.21.21" -pyarrow = "==9.0.0" +pyarrow = "14.0.2" s3fs = "==2022.7.1" lxml = "==4.9.1" gcsfs = "==2022.7.1" diff --git a/docs/integrations/sources/file.md b/docs/integrations/sources/file.md index aa9acea4fe9c..30da0439cb6b 100644 --- a/docs/integrations/sources/file.md +++ b/docs/integrations/sources/file.md @@ -31,7 +31,7 @@ This page contains the setup guide and reference information for the Files sourc Set this to active if you want to add the [User-Agent header](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent) to requests (inactive by default). #### GCS: Google Cloud Storage -- `Service Account JSON` (Required for **private** buckets) +- `Service Account JSON` (Required for **private** buckets) To access **private** buckets stored on Google Cloud, this connector requires a service account JSON credentials file with the appropriate permissions. A detailed breakdown of this topic can be found at the [Google Cloud service accounts page](https://cloud.google.com/iam/docs/service-accounts). Please generate the "credentials.json" file and copy its content to this field, ensuring it is in JSON format. **If you are accessing publicly available data**, this field is not required. @@ -39,9 +39,9 @@ To access **private** buckets stored on Google Cloud, this connector requires a - `AWS Access Key ID` (Required for **private** buckets) - `AWS Secret Access Key` (Required for **private** buckets) -To access **private** buckets stored on AWS S3, this connector requires valid credentials with the necessary permissions. To access these keys, refer to the +To access **private** buckets stored on AWS S3, this connector requires valid credentials with the necessary permissions. To access these keys, refer to the [AWS IAM documentation](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_access-keys.html). -More information on setting permissions in AWS can be found +More information on setting permissions in AWS can be found [here](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies.html). **If you are accessing publicly available data**, these fields are not required. #### AzBlob: Azure Blob Storage @@ -53,18 +53,18 @@ This is the globally unique name of the storage account that the desired blob si - `SAS Token`: [Find more information here](https://learn.microsoft.com/en-us/azure/storage/common/storage-sas-overview). - `Shared Key`: [Find more information here](https://learn.microsoft.com/en-us/rest/api/storageservices/authorize-with-shared-key). 
- + #### SSH: Secure Shell / SCP: Secure Copy Protocol / SFTP: Secure File Transfer Protocol - `Host` (Required) - + Enter the _hostname_ or _IP address_ of the remote server where the file transfer will take place. - `User` (Required) - + Enter the _username_ associated with your account on the remote server. -- `Password` (Optional) - + **If required by the remote server**, enter the _password_ associated with your user account. Otherwise, leave this field blank. -- `Port` (Optional) +- `Password` (Optional) + +- `Port` (Optional) Specify the _port number_ to use for the connection. The default port is usually 22. However, if your remote server uses a non-standard port, you can enter the appropriate port number here. @@ -72,14 +72,14 @@ Specify the _port number_ to use for the connection. The default port is usually #### Local Filesystem (Airbyte Open Source only) - `Storage` -:::caution +:::caution Currently, the local storage URL for reading must start with the local mount "/local/". ::: -Please note that if you are replicating data from a locally stored file on Windows OS, you will need to open the `.env` file in your local Airbyte root folder and change the values for: -- `LOCAL_ROOT` +Please note that if you are replicating data from a locally stored file on Windows OS, you will need to open the `.env` file in your local Airbyte root folder and change the values for: +- `LOCAL_ROOT` - `LOCAL_DOCKER_MOUNT` -- `HACK_LOCAL_ROOT_PARENT` +- `HACK_LOCAL_ROOT_PARENT` Please set these to an existing absolute path on your machine. Colons in the path need to be replaced with a double forward slash, `//`. `LOCAL_ROOT` & `LOCAL_DOCKER_MOUNT` should be set to the same value, and `HACK_LOCAL_ROOT_PARENT` should be set to their parent directory. @@ -218,6 +218,7 @@ In order to read large files from a remote location, this connector uses the [sm | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:--------------------------------------------------------------------------------------------------------| +| 0.4.1 | 2024-03-04 | [35800](https://github.com/airbytehq/airbyte/pull/35800) | Add PyAirbyte support on Python 3.11 | | 0.4.0 | 2024-02-15 | [32354](https://github.com/airbytehq/airbyte/pull/32354) | Add Zip File Support | | 0.3.17 | 2024-02-13 | [34678](https://github.com/airbytehq/airbyte/pull/34678) | Add Fixed-Width File Support | | 0.3.16 | 2024-02-12 | [35186](https://github.com/airbytehq/airbyte/pull/35186) | Manage dependencies with Poetry | From 5f0d9c37ecf63d73af61e62da5ebc1fe63ff993e Mon Sep 17 00:00:00 2001 From: Catherine Noll Date: Wed, 6 Mar 2024 12:00:19 -0500 Subject: [PATCH 104/172] Source S3: upgrade dependencies to avoid transformers transitive dependency (#35857) --- .../connectors/source-s3/metadata.yaml | 2 +- .../connectors/source-s3/poetry.lock | 275 +++++++++--------- .../connectors/source-s3/pyproject.toml | 2 +- docs/integrations/sources/s3.md | 1 + 4 files changed, 146 insertions(+), 134 deletions(-) diff --git a/airbyte-integrations/connectors/source-s3/metadata.yaml b/airbyte-integrations/connectors/source-s3/metadata.yaml index cd1690914b5b..d7602ad78871 100644 --- a/airbyte-integrations/connectors/source-s3/metadata.yaml +++ b/airbyte-integrations/connectors/source-s3/metadata.yaml @@ -10,7 +10,7 @@ data: connectorSubtype: file connectorType: source definitionId: 69589781-7828-43c5-9f63-8925b1c1ccc2 - dockerImageTag: 4.5.8 + dockerImageTag: 4.5.9 dockerRepository: airbyte/source-s3
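The Host, User, Password, and Port options documented in the file.md section above map onto an ordinary SSH/SFTP session. As a rough illustration only, this is not the connector's actual code path, though paramiko is the SSH library pinned in its pyproject.toml, and every connection value below is a placeholder:

import paramiko

client = paramiko.SSHClient()
# Demo-only host key policy; a production setup should verify known hosts.
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect(hostname="sftp.example.com", port=22, username="demo", password="secret")

sftp = client.open_sftp()
sftp.get("/remote/path/data.csv", "data.csv")  # copy the remote file locally
sftp.close()
client.close()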
documentationUrl: https://docs.airbyte.com/integrations/sources/s3 githubIssueLabel: source-s3 diff --git a/airbyte-integrations/connectors/source-s3/poetry.lock b/airbyte-integrations/connectors/source-s3/poetry.lock index eca34b4323ca..d2c5a06995b7 100644 --- a/airbyte-integrations/connectors/source-s3/poetry.lock +++ b/airbyte-integrations/connectors/source-s3/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "airbyte-cdk" -version = "0.67.1" +version = "0.68.2" description = "A framework for writing Airbyte Connectors." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.67.1.tar.gz", hash = "sha256:3f82be93ae6f574c70d7ad5352d34f9235e86bd74c0db14a0aa7d246f3a403c2"}, - {file = "airbyte_cdk-0.67.1-py3-none-any.whl", hash = "sha256:b1de0f004441a2ae6e2928e55f7ac31bd160af30e928ffda90eb75b5e3c56bf3"}, + {file = "airbyte-cdk-0.68.2.tar.gz", hash = "sha256:04c7557e72a2b2da6ffc8abc5196f16f2c5764738284931856c9210dd2d11998"}, + {file = "airbyte_cdk-0.68.2-py3-none-any.whl", hash = "sha256:bad36c9d9a6755fe5ec2d130fa779bdf7a9248abbc8736fa4da1f35d4a97cc8e"}, ] [package.dependencies] @@ -28,7 +28,7 @@ markdown = {version = "*", optional = true, markers = "extra == \"file-based\""} pdf2image = {version = "1.16.3", optional = true, markers = "extra == \"file-based\""} "pdfminer.six" = {version = "20221105", optional = true, markers = "extra == \"file-based\""} pendulum = "<3.0.0" -pyarrow = {version = "12.0.1", optional = true, markers = "extra == \"file-based\""} +pyarrow = {version = ">=15.0.0,<15.1.0", optional = true, markers = "extra == \"file-based\""} pydantic = ">=1.10.8,<2.0.0" pyrate-limiter = ">=3.1.0,<3.2.0" pytesseract = {version = "0.3.10", optional = true, markers = "extra == \"file-based\""} @@ -44,8 +44,8 @@ unstructured = [ wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", 
"openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] @@ -140,17 +140,17 @@ lxml = ["lxml"] [[package]] name = "boto3" -version = "1.34.55" +version = "1.34.56" description = "The AWS SDK for Python" optional = false python-versions = ">= 3.8" files = [ - {file = "boto3-1.34.55-py3-none-any.whl", hash = "sha256:ee2c96e8a4a741ecb3380e0a406baa67bfea6186be99b75bdeca3e1b5044c088"}, - {file = "boto3-1.34.55.tar.gz", hash = "sha256:9a6d59e035fac4366dbdaf909c4f66fc817dfbec044fa71564dcf036ad46bb19"}, + {file = "boto3-1.34.56-py3-none-any.whl", hash = "sha256:300888f0c1b6f32f27f85a9aa876f50f46514ec619647af7e4d20db74d339714"}, + {file = "boto3-1.34.56.tar.gz", hash = "sha256:b26928f9a21cf3649cea20a59061340f3294c6e7785ceb6e1a953eb8010dc3ba"}, ] [package.dependencies] -botocore = ">=1.34.55,<1.35.0" +botocore = ">=1.34.56,<1.35.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -159,13 +159,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.34.55" +version = "1.34.56" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">= 3.8" files = [ - {file = "botocore-1.34.55-py3-none-any.whl", hash = "sha256:07044c3cbfb86d0ecb9c56d887b8ad63a72eff0e4f6ab329cf335f1fd867ea0b"}, - {file = "botocore-1.34.55.tar.gz", hash = "sha256:bb333e3845bfe65600f36bf92d09668306e224fa9f4e4f87b77f6957192ae59f"}, + {file = "botocore-1.34.56-py3-none-any.whl", hash = "sha256:fff66e22a5589c2d58fba57d1d95c334ce771895e831f80365f6cff6453285ec"}, + {file = "botocore-1.34.56.tar.gz", hash = "sha256:bffeb71ab21d47d4ecf947d9bdb2fbd1b0bbd0c27742cea7cf0b77b701c41d9f"}, ] [package.dependencies] @@ -1401,40 +1401,51 @@ files = [ [[package]] name = "pyarrow" -version = "12.0.1" +version = "15.0.0" description = "Python library for Apache Arrow" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pyarrow-12.0.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:6d288029a94a9bb5407ceebdd7110ba398a00412c5b0155ee9813a40d246c5df"}, - {file = "pyarrow-12.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345e1828efdbd9aa4d4de7d5676778aba384a2c3add896d995b23d368e60e5af"}, - {file = "pyarrow-12.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d6009fdf8986332b2169314da482baed47ac053311c8934ac6651e614deacd6"}, - {file = "pyarrow-12.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d3c4cbbf81e6dd23fe921bc91dc4619ea3b79bc58ef10bce0f49bdafb103daf"}, - {file = "pyarrow-12.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:cdacf515ec276709ac8042c7d9bd5be83b4f5f39c6c037a17a60d7ebfd92c890"}, - {file = "pyarrow-12.0.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:749be7fd2ff260683f9cc739cb862fb11be376de965a2a8ccbf2693b098db6c7"}, - {file = "pyarrow-12.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6895b5fb74289d055c43db3af0de6e16b07586c45763cb5e558d38b86a91e3a7"}, - {file = "pyarrow-12.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1887bdae17ec3b4c046fcf19951e71b6a619f39fa674f9881216173566c8f718"}, - {file = "pyarrow-12.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2c9cb8eeabbadf5fcfc3d1ddea616c7ce893db2ce4dcef0ac13b099ad7ca082"}, - {file = "pyarrow-12.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:ce4aebdf412bd0eeb800d8e47db854f9f9f7e2f5a0220440acf219ddfddd4f63"}, - {file = "pyarrow-12.0.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = 
"sha256:e0d8730c7f6e893f6db5d5b86eda42c0a130842d101992b581e2138e4d5663d3"}, - {file = "pyarrow-12.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43364daec02f69fec89d2315f7fbfbeec956e0d991cbbef471681bd77875c40f"}, - {file = "pyarrow-12.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:051f9f5ccf585f12d7de836e50965b3c235542cc896959320d9776ab93f3b33d"}, - {file = "pyarrow-12.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:be2757e9275875d2a9c6e6052ac7957fbbfc7bc7370e4a036a9b893e96fedaba"}, - {file = "pyarrow-12.0.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:cf812306d66f40f69e684300f7af5111c11f6e0d89d6b733e05a3de44961529d"}, - {file = "pyarrow-12.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:459a1c0ed2d68671188b2118c63bac91eaef6fc150c77ddd8a583e3c795737bf"}, - {file = "pyarrow-12.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85e705e33eaf666bbe508a16fd5ba27ca061e177916b7a317ba5a51bee43384c"}, - {file = "pyarrow-12.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9120c3eb2b1f6f516a3b7a9714ed860882d9ef98c4b17edcdc91d95b7528db60"}, - {file = "pyarrow-12.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:c780f4dc40460015d80fcd6a6140de80b615349ed68ef9adb653fe351778c9b3"}, - {file = "pyarrow-12.0.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:a3c63124fc26bf5f95f508f5d04e1ece8cc23a8b0af2a1e6ab2b1ec3fdc91b24"}, - {file = "pyarrow-12.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b13329f79fa4472324f8d32dc1b1216616d09bd1e77cfb13104dec5463632c36"}, - {file = "pyarrow-12.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb656150d3d12ec1396f6dde542db1675a95c0cc8366d507347b0beed96e87ca"}, - {file = "pyarrow-12.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6251e38470da97a5b2e00de5c6a049149f7b2bd62f12fa5dbb9ac674119ba71a"}, - {file = "pyarrow-12.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:3de26da901216149ce086920547dfff5cd22818c9eab67ebc41e863a5883bac7"}, - {file = "pyarrow-12.0.1.tar.gz", hash = "sha256:cce317fc96e5b71107bf1f9f184d5e54e2bd14bbf3f9a3d62819961f0af86fec"}, + {file = "pyarrow-15.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:0a524532fd6dd482edaa563b686d754c70417c2f72742a8c990b322d4c03a15d"}, + {file = "pyarrow-15.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:60a6bdb314affa9c2e0d5dddf3d9cbb9ef4a8dddaa68669975287d47ece67642"}, + {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:66958fd1771a4d4b754cd385835e66a3ef6b12611e001d4e5edfcef5f30391e2"}, + {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f500956a49aadd907eaa21d4fff75f73954605eaa41f61cb94fb008cf2e00c6"}, + {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6f87d9c4f09e049c2cade559643424da84c43a35068f2a1c4653dc5b1408a929"}, + {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:85239b9f93278e130d86c0e6bb455dcb66fc3fd891398b9d45ace8799a871a1e"}, + {file = "pyarrow-15.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b8d43e31ca16aa6e12402fcb1e14352d0d809de70edd185c7650fe80e0769e3"}, + {file = "pyarrow-15.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:fa7cd198280dbd0c988df525e50e35b5d16873e2cdae2aaaa6363cdb64e3eec5"}, + {file = "pyarrow-15.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8780b1a29d3c8b21ba6b191305a2a607de2e30dab399776ff0aa09131e266340"}, 
+ {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe0ec198ccc680f6c92723fadcb97b74f07c45ff3fdec9dd765deb04955ccf19"}, + {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036a7209c235588c2f07477fe75c07e6caced9b7b61bb897c8d4e52c4b5f9555"}, + {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2bd8a0e5296797faf9a3294e9fa2dc67aa7f10ae2207920dbebb785c77e9dbe5"}, + {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e8ebed6053dbe76883a822d4e8da36860f479d55a762bd9e70d8494aed87113e"}, + {file = "pyarrow-15.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:17d53a9d1b2b5bd7d5e4cd84d018e2a45bc9baaa68f7e6e3ebed45649900ba99"}, + {file = "pyarrow-15.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9950a9c9df24090d3d558b43b97753b8f5867fb8e521f29876aa021c52fda351"}, + {file = "pyarrow-15.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:003d680b5e422d0204e7287bb3fa775b332b3fce2996aa69e9adea23f5c8f970"}, + {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f75fce89dad10c95f4bf590b765e3ae98bcc5ba9f6ce75adb828a334e26a3d40"}, + {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca9cb0039923bec49b4fe23803807e4ef39576a2bec59c32b11296464623dc2"}, + {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9ed5a78ed29d171d0acc26a305a4b7f83c122d54ff5270810ac23c75813585e4"}, + {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:6eda9e117f0402dfcd3cd6ec9bfee89ac5071c48fc83a84f3075b60efa96747f"}, + {file = "pyarrow-15.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a3a6180c0e8f2727e6f1b1c87c72d3254cac909e609f35f22532e4115461177"}, + {file = "pyarrow-15.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:19a8918045993349b207de72d4576af0191beef03ea655d8bdb13762f0cd6eac"}, + {file = "pyarrow-15.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0ec076b32bacb6666e8813a22e6e5a7ef1314c8069d4ff345efa6246bc38593"}, + {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5db1769e5d0a77eb92344c7382d6543bea1164cca3704f84aa44e26c67e320fb"}, + {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2617e3bf9df2a00020dd1c1c6dce5cc343d979efe10bc401c0632b0eef6ef5b"}, + {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:d31c1d45060180131caf10f0f698e3a782db333a422038bf7fe01dace18b3a31"}, + {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:c8c287d1d479de8269398b34282e206844abb3208224dbdd7166d580804674b7"}, + {file = "pyarrow-15.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:07eb7f07dc9ecbb8dace0f58f009d3a29ee58682fcdc91337dfeb51ea618a75b"}, + {file = "pyarrow-15.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:47af7036f64fce990bb8a5948c04722e4e3ea3e13b1007ef52dfe0aa8f23cf7f"}, + {file = "pyarrow-15.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93768ccfff85cf044c418bfeeafce9a8bb0cee091bd8fd19011aff91e58de540"}, + {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6ee87fd6892700960d90abb7b17a72a5abb3b64ee0fe8db6c782bcc2d0dc0b4"}, + {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:001fca027738c5f6be0b7a3159cc7ba16a5c52486db18160909a0831b063c4e4"}, + {file = 
"pyarrow-15.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:d1c48648f64aec09accf44140dccb92f4f94394b8d79976c426a5b79b11d4fa7"}, + {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:972a0141be402bb18e3201448c8ae62958c9c7923dfaa3b3d4530c835ac81aed"}, + {file = "pyarrow-15.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:f01fc5cf49081426429127aa2d427d9d98e1cb94a32cb961d583a70b7c4504e6"}, + {file = "pyarrow-15.0.0.tar.gz", hash = "sha256:876858f549d540898f927eba4ef77cd549ad8d24baa3207cf1b72e5788b50e83"}, ] [package.dependencies] -numpy = ">=1.16.6" +numpy = ">=1.16.6,<2" [[package]] name = "pycparser" @@ -1844,101 +1855,101 @@ files = [ [[package]] name = "rapidfuzz" -version = "3.6.1" +version = "3.6.2" description = "rapid fuzzy string matching" optional = false python-versions = ">=3.8" files = [ - {file = "rapidfuzz-3.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ac434fc71edda30d45db4a92ba5e7a42c7405e1a54cb4ec01d03cc668c6dcd40"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2a791168e119cfddf4b5a40470620c872812042f0621e6a293983a2d52372db0"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5a2f3e9df346145c2be94e4d9eeffb82fab0cbfee85bd4a06810e834fe7c03fa"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23de71e7f05518b0bbeef55d67b5dbce3bcd3e2c81e7e533051a2e9401354eb0"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d056e342989248d2bdd67f1955bb7c3b0ecfa239d8f67a8dfe6477b30872c607"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01835d02acd5d95c1071e1da1bb27fe213c84a013b899aba96380ca9962364bc"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed0f712e0bb5fea327e92aec8a937afd07ba8de4c529735d82e4c4124c10d5a0"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96cd19934f76a1264e8ecfed9d9f5291fde04ecb667faef5f33bdbfd95fe2d1f"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e06c4242a1354cf9d48ee01f6f4e6e19c511d50bb1e8d7d20bcadbb83a2aea90"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d73dcfe789d37c6c8b108bf1e203e027714a239e50ad55572ced3c004424ed3b"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:06e98ff000e2619e7cfe552d086815671ed09b6899408c2c1b5103658261f6f3"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:08b6fb47dd889c69fbc0b915d782aaed43e025df6979b6b7f92084ba55edd526"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a1788ebb5f5b655a15777e654ea433d198f593230277e74d51a2a1e29a986283"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-win32.whl", hash = "sha256:c65f92881753aa1098c77818e2b04a95048f30edbe9c3094dc3707d67df4598b"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:4243a9c35667a349788461aae6471efde8d8800175b7db5148a6ab929628047f"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-win_arm64.whl", hash = "sha256:f59d19078cc332dbdf3b7b210852ba1f5db8c0a2cd8cc4c0ed84cc00c76e6802"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fbc07e2e4ac696497c5f66ec35c21ddab3fc7a406640bffed64c26ab2f7ce6d6"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:40cced1a8852652813f30fb5d4b8f9b237112a0bbaeebb0f4cc3611502556764"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:82300e5f8945d601c2daaaac139d5524d7c1fdf719aa799a9439927739917460"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edf97c321fd641fea2793abce0e48fa4f91f3c202092672f8b5b4e781960b891"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7420e801b00dee4a344ae2ee10e837d603461eb180e41d063699fb7efe08faf0"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:060bd7277dc794279fa95522af355034a29c90b42adcb7aa1da358fc839cdb11"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7e3375e4f2bfec77f907680328e4cd16cc64e137c84b1886d547ab340ba6928"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a490cd645ef9d8524090551016f05f052e416c8adb2d8b85d35c9baa9d0428ab"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2e03038bfa66d2d7cffa05d81c2f18fd6acbb25e7e3c068d52bb7469e07ff382"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2b19795b26b979c845dba407fe79d66975d520947b74a8ab6cee1d22686f7967"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:064c1d66c40b3a0f488db1f319a6e75616b2e5fe5430a59f93a9a5e40a656d15"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3c772d04fb0ebeece3109d91f6122b1503023086a9591a0b63d6ee7326bd73d9"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:841eafba6913c4dfd53045835545ba01a41e9644e60920c65b89c8f7e60c00a9"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-win32.whl", hash = "sha256:266dd630f12696ea7119f31d8b8e4959ef45ee2cbedae54417d71ae6f47b9848"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:d79aec8aeee02ab55d0ddb33cea3ecd7b69813a48e423c966a26d7aab025cdfe"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-win_arm64.whl", hash = "sha256:484759b5dbc5559e76fefaa9170147d1254468f555fd9649aea3bad46162a88b"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b2ef4c0fd3256e357b70591ffb9e8ed1d439fb1f481ba03016e751a55261d7c1"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:588c4b20fa2fae79d60a4e438cf7133d6773915df3cc0a7f1351da19eb90f720"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7142ee354e9c06e29a2636b9bbcb592bb00600a88f02aa5e70e4f230347b373e"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1dfc557c0454ad22382373ec1b7df530b4bbd974335efe97a04caec936f2956a"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:03f73b381bdeccb331a12c3c60f1e41943931461cdb52987f2ecf46bfc22f50d"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b0ccc2ec1781c7e5370d96aef0573dd1f97335343e4982bdb3a44c133e27786"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da3e8c9f7e64bb17faefda085ff6862ecb3ad8b79b0f618a6cf4452028aa2222"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fde9b14302a31af7bdafbf5cfbb100201ba21519be2b9dedcf4f1048e4fbe65d"}, - {file = 
"rapidfuzz-3.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1a23eee225dfb21c07f25c9fcf23eb055d0056b48e740fe241cbb4b22284379"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e49b9575d16c56c696bc7b06a06bf0c3d4ef01e89137b3ddd4e2ce709af9fe06"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:0a9fc714b8c290261669f22808913aad49553b686115ad0ee999d1cb3df0cd66"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:a3ee4f8f076aa92184e80308fc1a079ac356b99c39408fa422bbd00145be9854"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f056ba42fd2f32e06b2c2ba2443594873cfccc0c90c8b6327904fc2ddf6d5799"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-win32.whl", hash = "sha256:5d82b9651e3d34b23e4e8e201ecd3477c2baa17b638979deeabbb585bcb8ba74"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:dad55a514868dae4543ca48c4e1fc0fac704ead038dafedf8f1fc0cc263746c1"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-win_arm64.whl", hash = "sha256:3c84294f4470fcabd7830795d754d808133329e0a81d62fcc2e65886164be83b"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e19d519386e9db4a5335a4b29f25b8183a1c3f78cecb4c9c3112e7f86470e37f"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01eb03cd880a294d1bf1a583fdd00b87169b9cc9c9f52587411506658c864d73"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:be368573255f8fbb0125a78330a1a40c65e9ba3c5ad129a426ff4289099bfb41"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3e5af946f419c30f5cb98b69d40997fe8580efe78fc83c2f0f25b60d0e56efb"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f382f7ffe384ce34345e1c0b2065451267d3453cadde78946fbd99a59f0cc23c"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be156f51f3a4f369e758505ed4ae64ea88900dcb2f89d5aabb5752676d3f3d7e"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1936d134b6c513fbe934aeb668b0fee1ffd4729a3c9d8d373f3e404fbb0ce8a0"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12ff8eaf4a9399eb2bebd838f16e2d1ded0955230283b07376d68947bbc2d33d"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae598a172e3a95df3383634589660d6b170cc1336fe7578115c584a99e0ba64d"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cd4ba4c18b149da11e7f1b3584813159f189dc20833709de5f3df8b1342a9759"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:0402f1629e91a4b2e4aee68043a30191e5e1b7cd2aa8dacf50b1a1bcf6b7d3ab"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:1e12319c6b304cd4c32d5db00b7a1e36bdc66179c44c5707f6faa5a889a317c0"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0bbfae35ce4de4c574b386c43c78a0be176eeddfdae148cb2136f4605bebab89"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-win32.whl", hash = "sha256:7fec74c234d3097612ea80f2a80c60720eec34947066d33d34dc07a3092e8105"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:a553cc1a80d97459d587529cc43a4c7c5ecf835f572b671107692fe9eddf3e24"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:757dfd7392ec6346bd004f8826afb3bf01d18a723c97cbe9958c733ab1a51791"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2963f4a3f763870a16ee076796be31a4a0958fbae133dbc43fc55c3968564cf5"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d2f0274595cc5b2b929c80d4e71b35041104b577e118cf789b3fe0a77b37a4c5"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f211e366e026de110a4246801d43a907cd1a10948082f47e8a4e6da76fef52"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a59472b43879012b90989603aa5a6937a869a72723b1bf2ff1a0d1edee2cc8e6"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a03863714fa6936f90caa7b4b50ea59ea32bb498cc91f74dc25485b3f8fccfe9"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd95b6b7bfb1584f806db89e1e0c8dbb9d25a30a4683880c195cc7f197eaf0c"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7183157edf0c982c0b8592686535c8b3e107f13904b36d85219c77be5cefd0d8"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ad9d74ef7c619b5b0577e909582a1928d93e07d271af18ba43e428dc3512c2a1"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b53137d81e770c82189e07a8f32722d9e4260f13a0aec9914029206ead38cac3"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:49b9ed2472394d306d5dc967a7de48b0aab599016aa4477127b20c2ed982dbf9"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:dec307b57ec2d5054d77d03ee4f654afcd2c18aee00c48014cb70bfed79597d6"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4381023fa1ff32fd5076f5d8321249a9aa62128eb3f21d7ee6a55373e672b261"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-win32.whl", hash = "sha256:8d7a072f10ee57c8413c8ab9593086d42aaff6ee65df4aa6663eecdb7c398dca"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:ebcfb5bfd0a733514352cfc94224faad8791e576a80ffe2fd40b2177bf0e7198"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-win_arm64.whl", hash = "sha256:1c47d592e447738744905c18dda47ed155620204714e6df20eb1941bb1ba315e"}, - {file = "rapidfuzz-3.6.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:eef8b346ab331bec12bbc83ac75641249e6167fab3d84d8f5ca37fd8e6c7a08c"}, - {file = "rapidfuzz-3.6.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53251e256017e2b87f7000aee0353ba42392c442ae0bafd0f6b948593d3f68c6"}, - {file = "rapidfuzz-3.6.1-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6dede83a6b903e3ebcd7e8137e7ff46907ce9316e9d7e7f917d7e7cdc570ee05"}, - {file = "rapidfuzz-3.6.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e4da90e4c2b444d0a171d7444ea10152e07e95972bb40b834a13bdd6de1110c"}, - {file = "rapidfuzz-3.6.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:ca3dfcf74f2b6962f411c33dd95b0adf3901266e770da6281bc96bb5a8b20de9"}, - {file = "rapidfuzz-3.6.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bcc957c0a8bde8007f1a8a413a632a1a409890f31f73fe764ef4eac55f59ca87"}, - {file = "rapidfuzz-3.6.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:692c9a50bea7a8537442834f9bc6b7d29d8729a5b6379df17c31b6ab4df948c2"}, - {file = 
"rapidfuzz-3.6.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c23ceaea27e790ddd35ef88b84cf9d721806ca366199a76fd47cfc0457a81b"}, - {file = "rapidfuzz-3.6.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b155e67fff215c09f130555002e42f7517d0ea72cbd58050abb83cb7c880cec"}, - {file = "rapidfuzz-3.6.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3028ee8ecc48250607fa8a0adce37b56275ec3b1acaccd84aee1f68487c8557b"}, - {file = "rapidfuzz-3.6.1.tar.gz", hash = "sha256:35660bee3ce1204872574fa041c7ad7ec5175b3053a4cb6e181463fc07013de7"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a5637e6bf11b15b5aff6ee818c76bdec99ad208511b78985e6209ba648a6e3ee"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:380586664f2f63807050ddb95e7702888b4f0b425abf17655940c411f39287ad"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3168ff565d4b8c239cf11fb604dd2507d30e9bcaac76a4077c0ac23cf2c866ed"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be69f7fd46b5c6467fe5e2fd4cff3816b0c03048eed8a4becb9a73e6000960e7"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cbd5894f23fdf5697499cf759523639838ac822bd1600e343fdce7313baa02ae"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85a5b6e026393fe39fb61146b9c17c5af66fffbe1410e992c4bb06d9ec327bd3"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab269adfc64480f209e99f253391a10735edd5c09046e04899adab5fb132f20e"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35aeac852bca06023d6bbd50c1fc504ca5a9a3613d5e75a140f0be7601fa34ef"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e706f302c6a3ae0d74edd0d6ace46aee1ae07c563b436ccf5ff04db2b3571e60"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bec353f022011e6e5cd28ccb8700fbd2a33918197af0d4e0abb3c3f4845cc864"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ef3925daaa93eed20401012e219f569ff0c039ed5bf4ce2d3737b4f75d441622"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:6ee98d88ae9ccc77ff61992ed33b2496478def5dc0da55c9a9aa06fcb725a352"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:423c7c588b09d618601097b7a0017dfcb91132a2076bef29023c5f3cd2dc3de1"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-win32.whl", hash = "sha256:c17c5efee347a40a6f4c1eec59e3d7d1e22f7613a97f8b8a07733ef723483a04"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-win_amd64.whl", hash = "sha256:4209816626d8d6ff8ae7dc248061c6059e618b70c6e6f6e4d7444ae3740b2b85"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-win_arm64.whl", hash = "sha256:1c54d3c85e522d3ac9ee39415f183c8fa184c4f87e7e5a37938f15a6d50e853a"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e06f6d270112f5db001f1cba5a97e1a48aee3d3dbdcbea3ec027c230462dbf9b"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:080cb71b50cb6aff11d1c6aeb157f273e2da0b2bdb3f9d7b01257e49e69a8576"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a7895e04a22d6515bc91a850e0831f2405547605aa311d1ffec51e4818abc3c1"}, + {file = 
"rapidfuzz-3.6.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd82f9838519136b7083dd1e3149ee80344521f3dc37f744f227505ff0883efb"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a945567c2b0b6e069454c9782d5234b0b6795718adf7a9f868bd3144afa6a023"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:673ba2c343644805acdae1cb949c6a4de71aa2f62a998978551ebea59603af3f"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9d457c89bac1471442002e70551e8268e639b3870b4a4521eae363c07253be87"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:495c0d8e14e6f12520eb7fc71b9ba9fcaafb47fc23a654e6e89b6c7985ec0020"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6d67b649bf3e1b1722d04eca44d37919aef88305ce7ad05564502d013cf550fd"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e48dde8ca83d11daa00900cf6a5d281a1297aef9b7bfa73801af6e8822be5019"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:824cc381cf81cbf8d158f6935664ec2a69e6ac3b1d39fa201988bf81a257f775"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:1dfe4c24957474ce0ac75d886387e30e292b4be39228a6d71f76de414dc187db"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d57b98013b802621bbc8b12a46bfc9d36ac552ab51ca207f7ce167ad46adabeb"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-win32.whl", hash = "sha256:9a07dffac439223b4f1025dbfc68f4445a3460a859309c9858c2a3fa29617cdc"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-win_amd64.whl", hash = "sha256:95a49c6b8bf1229743ae585dd5b7d57f0d15a7eb6e826866d5c9965ba958503c"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-win_arm64.whl", hash = "sha256:af7c19ec86e11488539380d3db1755be5d561a3c0e7b04ff9d07abd7f9a8e9d8"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:de8adc12161bf282c60f12dc9233bb31632f71d446a010fe7469a69b8153427f"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:337e357f693130c4c6be740652542b260e36f622c59e01fa33d58f1d2750c930"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6468f8bc8c3c50604f43631550ef9cfec873515dba5023ca34d461be94669fc8"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74c6773b11445b5e5cf93ca383171cd0ac0cdeafea11a7b2a5688f8bf8d813e6"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1507fc5769aa109dda4de3a15f822a0f6a03e18d627bd0ba3ddbb253cf70e07"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:617949a70150e6fffdaed19253dd49f7a53528411dc8bf7663d499ba21e0f61e"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f8b77779174b1b40aa70827692571ab457061897846255ad7d5d559e2edb1932"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80e51b22a7da83f9c87a97e92df07ed0612c74c35496590255f4b5d5b4212dfe"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3ae7c86914cb6673e97e187ba431b9c4cf4177d9ae77f8a1e5b2ba9a5628839e"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-musllinux_1_1_i686.whl", hash = 
"sha256:ddc380ffaa90f204cc9ddcb779114b9ab6f015246d549de9d47871a97ef9f18a"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:3c1dc078ef371fce09f9f3eec2ca4eaa2a8cd412ec53941015b4f39f14d34407"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:9a74102fc5a2534fe91f7507838623e1f3a149d8e05648389c42bb42e14b1c3f"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:48e1eaea8fcd522fca7f04f0480663f0f0cfb77957092cce60a93f4462864996"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-win32.whl", hash = "sha256:66b008bf2972740cd2dda5d382eb8bdb87265cd88198e71c7797bdc0d1f79d20"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-win_amd64.whl", hash = "sha256:87ac3a87f2251ae2e95fc9478ca5c759de6d141d04c84d3fec9f9cdcfc167b33"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-win_arm64.whl", hash = "sha256:b593cc51aed887e93b78c2f94dfae9008be2b23d17afd3b1f1d3eb3913b58f26"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:7d830bc7a9b586a374147ec60b08b1f9ae5996b43f75cc514f37faef3866b519"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dbee7f5ff11872b76505cbd87c814abc823e8757f11c69062eb3b25130a283da"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:28c011fb31f2c3f82f503aedd6097d3d3854e574e327a119a3b7eb2cf90b79ca"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cda81d0e0ce0c13abfa46b24e10c1e85f9c6acb628f0a9a948f5779f9c2076a2"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c279928651ce0e9e5220dcb25a00cc53b65e592a0861336a38299bcdca3a596"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:35bd4bc9c40e6994c5d6edea4b9319388b4d9711c13c66d543bb4c37624b4184"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d07899506a5a8760448d9df036d528b55a554bf571714173635c79eef4a86e58"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb2e51d01b9c6d6954a3e055c57a80d4685b4fc82719db5519fc153566bcd6bb"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:153d065e353371cc0aeff32b99999a5758266a64e958d1364189367c1c9f6814"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4edcceebb85ebfa49a3ddcde20ad891d36c08dc0fd592efdab0e7d313a4e36af"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3549123fca5bb817341025f98e8e49ca99f84596c7c4f92b658f8e5836040d4a"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:84c1032ae42628465b7a5cc35249906061e18a8193c9c27cbd2db54e9823a9a6"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9bcc91ebd8fc69a6bd3b5711c8250f5f4e70606b4da75ef415f57ad209978205"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-win32.whl", hash = "sha256:f3a70f341c4c111bad910d2df69c78577a98af140319a996af24c9385939335d"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-win_amd64.whl", hash = "sha256:354ad5fe655beb7b279390cb58334903931c5452ecbad1b1666ffb06786498e2"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1b86b93d93020c2b3edc1665d75c8855784845fc0a739b312c26c3a4bf0c80d5"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:28243086ed0e50808bb56632e5442c457241646aeafafd501ac87901f40a3237"}, + {file = 
"rapidfuzz-3.6.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ed52461ae5a9ea4c400d38e2649c74a413f1a6d8fb8308b66f1fbd122514732f"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a46220f86a5f9cb016af31525e0d0865cad437d02239aa0d8aed2ab8bff1f1c"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:81a630ed2fc3ec5fc7400eb66bab1f87e282b4d47f0abe3e48c6634dfa13b5e4"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d8e5a437b9089df6242a718d9c31ab1742989e9400a0977af012ef483b63b4c2"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16270b5529de83b7bae7457e952e4d9cf3fbf029a837dd32d415bb9e0eb8e599"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5378c04102c7f084cde30a100154fa6d7e2baf0d51a6bdd2f912545559c1fb35"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7f18397c8d6a65fc0b288d2fc29bc7baeea6ba91eeb95163a3cd98f23cd3bc85"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2acd2514defce81e6ff4bbff50252d5e7df8e85a731442c4b83e44c86cf1c916"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:1df2faf80201952e252413b6fac6f3e146080dcebb87bb1bb722508e67558ed8"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6440ed0b3007c1c9286b0b88fe2ab2d9e83edd60cd62293b3dfabb732b4e8a30"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4fcfa23b5553b27f4016df77c53172ea743454cf12c28cfa7c35a309a2be93b3"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-win32.whl", hash = "sha256:2d580d937146e803c8e5e1b87916cab8d6f84013b6392713e201efcda335c7d8"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-win_amd64.whl", hash = "sha256:fe2a68be734e8e88af23385c68d6467e15818b6b1df1cbfebf7bff577226c957"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-win_arm64.whl", hash = "sha256:6478f7803efebf5f644d0b758439c5b25728550fdfbb19783d150004c46a75a9"}, + {file = "rapidfuzz-3.6.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:36ce7b68a7b90b787cdd73480a68d2f1ca63c31a3a9d5a79a8736f978e1e9344"}, + {file = "rapidfuzz-3.6.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53597fd72a9340bcdd80d3620f4957c2b92f9b569313b969a3abdaffd193aae6"}, + {file = "rapidfuzz-3.6.2-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4f6de745fe6ce46a422d353ee10599013631d7d714a36d025f164b2d4e8c000"}, + {file = "rapidfuzz-3.6.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62df2136068e2515ed8beb01756381ff62c29384d785e3bf46e3111d4ea3ba1e"}, + {file = "rapidfuzz-3.6.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7382c90170f60c846c81a07ddd80bb2e8c43c8383754486fa37f67391a571897"}, + {file = "rapidfuzz-3.6.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f31314fd2e2f3dc3e519e6f93669462ce7953df2def1c344aa8f5345976d0eb2"}, + {file = "rapidfuzz-3.6.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:012221629d54d3bee954148247f711eb86d4d390b589ebfe03172ea0b37a7531"}, + {file = "rapidfuzz-3.6.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d41dd59a70decfce6595315367a2fea2af660d92a9d144acc6479030501014d7"}, + {file = "rapidfuzz-3.6.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7f9fa14136a5b0cba1ec42531f7c3e0b0d3edb7fd6bc5e5ae7b498541f3855ab"}, + {file = "rapidfuzz-3.6.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:259364199cbfeca33b1af369fc7951f71717aa285184a3fa5a7b1772da1b89db"}, + {file = "rapidfuzz-3.6.2.tar.gz", hash = "sha256:cf911e792ab0c431694c9bf2648afabfd92099103f2e31492893e078ddca5e1a"}, ] [package.extras] diff --git a/airbyte-integrations/connectors/source-s3/pyproject.toml b/airbyte-integrations/connectors/source-s3/pyproject.toml index 11331a7e084e..44c319fbf268 100644 --- a/airbyte-integrations/connectors/source-s3/pyproject.toml +++ b/airbyte-integrations/connectors/source-s3/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "4.5.8" +version = "4.5.9" name = "source-s3" description = "Source implementation for S3." authors = [ "Airbyte ",] diff --git a/docs/integrations/sources/s3.md b/docs/integrations/sources/s3.md index a29fdca0de26..7ff0f2899b57 100644 --- a/docs/integrations/sources/s3.md +++ b/docs/integrations/sources/s3.md @@ -264,6 +264,7 @@ To perform the text extraction from PDF and Docx files, the connector uses the [ | Version | Date | Pull Request | Subject | |:--------|:-----------|:----------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------| +| 4.5.9 | 2024-03-06 | [35857](https://github.com/airbytehq/airbyte/pull/35857) | Bump poetry.lock to upgrade transitive dependency | | 4.5.8 | 2024-03-04 | [35808](https://github.com/airbytehq/airbyte/pull/35808) | Use cached AWS client | | 4.5.7 | 2024-02-23 | [34895](https://github.com/airbytehq/airbyte/pull/34895) | Run incremental syncs with concurrency | | 4.5.6 | 2024-02-21 | [35246](https://github.com/airbytehq/airbyte/pull/35246) | Fixes bug that occurred when creating CSV streams with tab delimiter. | From cdf80f3bfa571edf2ad9683ae58a956a95161b96 Mon Sep 17 00:00:00 2001 From: Stephane Geneix <147216312+stephane-airbyte@users.noreply.github.com> Date: Wed, 6 Mar 2024 09:01:37 -0800 Subject: [PATCH 105/172] get source-mssql CI to green (#35721) TheAbstractMssqlSourceDatatypeTest was wrong for reals. CdcMssqlSourceTest and CdcMssqlSslSourceTest no longer use exclusive containers. tests that change the SQLServer agent state are now in their own test class (which needs to use an exclusive container) Besides that, the core of the problem can be grouped in 2 issues: a) some tests are failing to enable CDC for tables. This is due to a timing issue. We moved that logic into its own function that will try for a total of 240seconds before giving up. b) some tests are failing when trying to read the minLsn. There is a 1sec wait implemented in the production code. Instead we introduce a busy loop that will wait for a total of 240seconds for records to appear in the CDC table before giving up. That function is implemented in test code. Unfortunately, for both cases, we sometimes needed to wait while in the middle of a function implemented in the CDK. 
We introduced a few hooks in the parent PR that are implemented in this PR for the source-mssql tests, and use the functions described above --- .../connectors/source-mssql/build.gradle | 2 +- .../connectors/source-mssql/gradle.properties | 3 +- .../connectors/source-mssql/metadata.yaml | 2 +- .../source/mssql/MssqlCdcHelper.java | 3 + .../source/mssql/MssqlCdcStateHandler.java | 2 +- .../source/mssql/MssqlCdcTargetPosition.java | 20 +- .../source/mssql/MssqlSource.java | 2 + .../mssql/cdc/MssqlDebeziumStateUtil.java | 2 +- .../MssqlInitialSyncStateIterator.java | 2 +- .../AbstractMssqlSourceDatatypeTest.java | 15 +- .../AbstractSshMssqlSourceAcceptanceTest.java | 9 +- .../mssql/CdcMssqlSourceAcceptanceTest.java | 15 +- .../mssql/CdcMssqlSourceDatatypeTest.java | 49 +---- .../source/mssql/CdcMssqlSourceTest.java | 125 +++++------ .../source/mssql/CdcMssqlSslSourceTest.java | 20 +- .../source/mssql/CdcStateCompressionTest.java | 44 ++-- .../source/mssql/MssqlAgentStateTest.java | 115 ++++++++++ .../source/mssql/MsSQLContainerFactory.java | 6 +- .../source/mssql/MsSQLTestDatabase.java | 205 ++++++++++++++---- docs/integrations/sources/mssql.md | 3 +- 20 files changed, 401 insertions(+), 243 deletions(-) create mode 100644 airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/MssqlAgentStateTest.java diff --git a/airbyte-integrations/connectors/source-mssql/build.gradle b/airbyte-integrations/connectors/source-mssql/build.gradle index f89ed694a0d9..627f938cba08 100644 --- a/airbyte-integrations/connectors/source-mssql/build.gradle +++ b/airbyte-integrations/connectors/source-mssql/build.gradle @@ -3,7 +3,7 @@ plugins { } airbyteJavaConnector { - cdkVersionRequired = '0.23.14' + cdkVersionRequired = '0.23.15' features = ['db-sources'] useLocalCdk = false } diff --git a/airbyte-integrations/connectors/source-mssql/gradle.properties b/airbyte-integrations/connectors/source-mssql/gradle.properties index 8ef098d20b92..9e4d90aa6508 100644 --- a/airbyte-integrations/connectors/source-mssql/gradle.properties +++ b/airbyte-integrations/connectors/source-mssql/gradle.properties @@ -1 +1,2 @@ -testExecutionConcurrency=-1 \ No newline at end of file +testExecutionConcurrency=-1 +JunitMethodExecutionTimeout=5 m \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-mssql/metadata.yaml b/airbyte-integrations/connectors/source-mssql/metadata.yaml index 430fca185fe0..4ae4c38dfbaa 100644 --- a/airbyte-integrations/connectors/source-mssql/metadata.yaml +++ b/airbyte-integrations/connectors/source-mssql/metadata.yaml @@ -9,7 +9,7 @@ data: connectorSubtype: database connectorType: source definitionId: b5ea17b1-f170-46dc-bc31-cc744ca984c1 - dockerImageTag: 3.7.5 + dockerImageTag: 3.7.6 dockerRepository: airbyte/source-mssql documentationUrl: https://docs.airbyte.com/integrations/sources/mssql githubIssueLabel: source-mssql diff --git a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlCdcHelper.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlCdcHelper.java index 86ebc7a0537d..bc5e62e80093 100644 --- a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlCdcHelper.java +++ b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlCdcHelper.java @@ -100,6 +100,7 @@ public static Properties getDebeziumProperties(final JdbcDatabase database, fina 
final String sslMethod = sslConfig.get("ssl_method").asText(); if ("unencrypted".equals(sslMethod)) { props.setProperty("database.encrypt", "false"); + props.setProperty("driver.trustServerCertificate", "true"); } else if ("encrypted_trust_server_certificate".equals(sslMethod)) { props.setProperty("driver.encrypt", "true"); props.setProperty("driver.trustServerCertificate", "true"); @@ -118,6 +119,8 @@ public static Properties getDebeziumProperties(final JdbcDatabase database, fina props.setProperty("database.hostNameInCertificate", dbConfig.get("hostNameInCertificate").asText()); } } + } else { + props.setProperty("driver.trustServerCertificate", "true"); } return props; diff --git a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlCdcStateHandler.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlCdcStateHandler.java index 709c1bc12690..0af1ea2873d7 100644 --- a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlCdcStateHandler.java +++ b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlCdcStateHandler.java @@ -46,7 +46,7 @@ public AirbyteMessage saveState(final Map offset, final SchemaHi final JsonNode asJson = Jsons.jsonNode(state); - LOGGER.info("debezium state: {}", asJson); + LOGGER.info("debezium state offset: {}", Jsons.jsonNode(offset)); final CdcState cdcState = new CdcState().withState(asJson); stateManager.getCdcStateManager().setCdcState(cdcState); diff --git a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlCdcTargetPosition.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlCdcTargetPosition.java index 123459f386da..fda25c5bad37 100644 --- a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlCdcTargetPosition.java +++ b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlCdcTargetPosition.java @@ -15,7 +15,6 @@ import io.debezium.connector.sqlserver.Lsn; import java.io.IOException; import java.sql.SQLException; -import java.time.Duration; import java.util.List; import java.util.Map; import java.util.Optional; @@ -26,8 +25,6 @@ public class MssqlCdcTargetPosition implements CdcTargetPosition { private static final Logger LOGGER = LoggerFactory.getLogger(MssqlCdcTargetPosition.class); - public static final Duration MAX_LSN_QUERY_DELAY = Duration.ZERO; - public static final Duration MAX_LSN_QUERY_DELAY_TEST = Duration.ofSeconds(1); public final Lsn targetLsn; public MssqlCdcTargetPosition(final Lsn targetLsn) { @@ -87,27 +84,24 @@ public static MssqlCdcTargetPosition getTargetPosition(final JdbcDatabase databa // a chance to catch up. This is important in tests, where reads might occur in quick succession // which might leave the CT tables (which Debezium consumes) in a stale state. final JsonNode sourceConfig = database.getSourceConfig(); - final Duration delay = (sourceConfig != null && sourceConfig.has("is_test") && sourceConfig.get("is_test").asBoolean()) - ? 
MAX_LSN_QUERY_DELAY_TEST - : MAX_LSN_QUERY_DELAY; final String maxLsnQuery = """ USE [%s]; - WAITFOR DELAY '%02d:%02d:%02d'; SELECT sys.fn_cdc_get_max_lsn() AS max_lsn; - """.formatted(dbName, delay.toHours(), delay.toMinutesPart(), delay.toSecondsPart()); + """.formatted(dbName); // Query the high-water mark. final List jsonNodes = database.bufferedResultSetQuery( connection -> connection.createStatement().executeQuery(maxLsnQuery), JdbcUtils.getDefaultSourceOperations()::rowToJson); Preconditions.checkState(jsonNodes.size() == 1); + + final Lsn maxLsn; if (jsonNodes.get(0).get("max_lsn") != null) { - final Lsn maxLsn = Lsn.valueOf(jsonNodes.get(0).get("max_lsn").binaryValue()); - LOGGER.info("identified target lsn: " + maxLsn); - return new MssqlCdcTargetPosition(maxLsn); + maxLsn = Lsn.valueOf(jsonNodes.get(0).get("max_lsn").binaryValue()); } else { - throw new RuntimeException("SQL returned max LSN as null, this might be because the SQL Server Agent is not running. " + - "Please enable the Agent and try again (https://docs.microsoft.com/en-us/sql/ssms/agent/start-stop-or-pause-the-sql-server-agent-service)"); + maxLsn = Lsn.NULL; } + LOGGER.info("identified target lsn: " + maxLsn); + return new MssqlCdcTargetPosition(maxLsn); } catch (final SQLException | IOException e) { throw new RuntimeException(e); } diff --git a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlSource.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlSource.java index f7de282d5e8c..d441acc1de58 100644 --- a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlSource.java +++ b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlSource.java @@ -224,6 +224,8 @@ public JsonNode toDatabaseConfig(final JsonNode mssqlConfig) { if (mssqlConfig.has("ssl_method")) { readSsl(mssqlConfig, additionalParameters); + } else { + additionalParameters.add("trustServerCertificate=true"); } if (!additionalParameters.isEmpty()) { diff --git a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/cdc/MssqlDebeziumStateUtil.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/cdc/MssqlDebeziumStateUtil.java index f998fb5113bf..8f55fa18be99 100644 --- a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/cdc/MssqlDebeziumStateUtil.java +++ b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/cdc/MssqlDebeziumStateUtil.java @@ -125,7 +125,7 @@ public JsonNode constructInitialDebeziumState(final Properties properties, assert Objects.nonNull(schemaHistory.schema()); final JsonNode asJson = serialize(offset, schemaHistory); - LOGGER.info("Initial Debezium state constructed: {}", asJson); + LOGGER.info("Initial Debezium state constructed. 
offset={}", Jsons.jsonNode(offset)); if (asJson.get(MssqlCdcStateConstants.MSSQL_DB_HISTORY).asText().isBlank()) { throw new RuntimeException("Schema history snapshot returned empty history."); diff --git a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialSyncStateIterator.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialSyncStateIterator.java index 0fe6a872f75b..b96b8a705dee 100644 --- a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialSyncStateIterator.java +++ b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/initialsync/MssqlInitialSyncStateIterator.java @@ -92,7 +92,7 @@ protected AirbyteMessage computeNext() { } else if (!hasEmittedFinalState) { hasEmittedFinalState = true; final AirbyteStateMessage finalStateMessage = stateManager.createFinalStateMessage(pair, streamStateForIncrementalRun); - LOGGER.info("Finished initial sync of stream {}, Emitting final state, state is {}", pair, finalStateMessage); + LOGGER.info("Finished initial sync of stream {}, Emitting final state.", pair); return new AirbyteMessage() .withType(Type.STATE) .withState(finalStateMessage); diff --git a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/AbstractMssqlSourceDatatypeTest.java b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/AbstractMssqlSourceDatatypeTest.java index 32c42ebea52c..c8b22931a464 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/AbstractMssqlSourceDatatypeTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/AbstractMssqlSourceDatatypeTest.java @@ -123,14 +123,13 @@ protected void initTests() { .createTablePatternSql(CREATE_TABLE_SQL) .build()); - addDataTypeTestData( - TestDataHolder.builder() - .sourceType("real") - .airbyteType(JsonSchemaType.NUMBER) - .addInsertValues("'123'", "'1234567890.1234567'", "null") - .addExpectedValues("123.0", "1.23456794E9", null) - .createTablePatternSql(CREATE_TABLE_SQL) - .build()); + addDataTypeTestData(TestDataHolder.builder() + .sourceType("real") + .airbyteType(JsonSchemaType.NUMBER) + .addInsertValues("'123'", "'1234567890.1234567'", "null") + .addExpectedValues("123.0", "1.234568E9", null) + .createTablePatternSql(CREATE_TABLE_SQL) + .build()); addDataTypeTestData( TestDataHolder.builder() diff --git a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/AbstractSshMssqlSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/AbstractSshMssqlSourceAcceptanceTest.java index 0ebbcb72c7dc..b5d1c5468e90 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/AbstractSshMssqlSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/AbstractSshMssqlSourceAcceptanceTest.java @@ -32,9 +32,13 @@ import java.util.HashMap; import java.util.List; import org.jooq.SQLDialect; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public abstract class 
AbstractSshMssqlSourceAcceptanceTest extends SourceAcceptanceTest { + static private final Logger LOGGER = LoggerFactory.getLogger(AbstractSshMssqlSourceAcceptanceTest.class); + private static final String STREAM_NAME = "dbo.id_and_name"; private static final String STREAM_NAME2 = "dbo.starships"; @@ -69,7 +73,6 @@ private void populateDatabaseTestData() throws Exception { JdbcUtils.PORT_LIST_KEY, (CheckedFunction, Exception>) mangledConfig -> getDatabaseFromConfig(mangledConfig) .query(ctx -> { - ctx.fetch("ALTER DATABASE %s SET AUTO_CLOSE OFF WITH NO_WAIT;", testdb.getDatabaseName()); ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200), born DATETIMEOFFSET(7));"); ctx.fetch("INSERT INTO id_and_name (id, name, born) VALUES " + "(1, 'picard', '2124-03-04T01:01:01Z'), " + @@ -88,14 +91,16 @@ private static Database getDatabaseFromConfig(final JsonNode config) { String.format(DatabaseDriver.MSSQLSERVER.getUrlFormatString(), config.get(JdbcUtils.HOST_KEY).asText(), config.get(JdbcUtils.PORT_KEY).asInt(), - config.get(JdbcUtils.DATABASE_KEY).asText()), + config.get(JdbcUtils.DATABASE_KEY).asText()) + ";encrypt=false;trustServerCertificate=true", SQLDialect.DEFAULT)); } @Override protected void setupEnvironment(final TestDestinationEnv environment) throws Exception { testdb = MsSQLTestDatabase.in(BaseImage.MSSQL_2017, ContainerModifier.NETWORK); + LOGGER.info("starting bastion"); bastion.initAndStartBastion(testdb.getContainer().getNetwork()); + LOGGER.info("bastion started"); populateDatabaseTestData(); } diff --git a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceAcceptanceTest.java index 671dc8e31634..152c36614141 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceAcceptanceTest.java @@ -99,12 +99,6 @@ protected JsonNode getState() { @Override protected void setupEnvironment(final TestDestinationEnv environment) { testdb = MsSQLTestDatabase.in(BaseImage.MSSQL_2022, ContainerModifier.AGENT); - final var enableCdcSqlFmt = """ - EXEC sys.sp_cdc_enable_table - \t@source_schema = N'%s', - \t@source_name = N'%s', - \t@role_name = N'%s', - \t@supports_net_changes = 0"""; testdb .withWaitUntilAgentRunning() .withCdc() @@ -115,17 +109,16 @@ protected void setupEnvironment(final TestDestinationEnv environment) { .with("INSERT INTO %s.%s (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');", SCHEMA_NAME, STREAM_NAME) .with("INSERT INTO %s.%s (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');", SCHEMA_NAME, STREAM_NAME2) // enable cdc on tables for designated role - .with(enableCdcSqlFmt, SCHEMA_NAME, STREAM_NAME, CDC_ROLE_NAME) - .with(enableCdcSqlFmt, SCHEMA_NAME, STREAM_NAME2, CDC_ROLE_NAME) - .withShortenedCapturePollingInterval() - .withWaitUntilMaxLsnAvailable() + .withCdcForTable(SCHEMA_NAME, STREAM_NAME, CDC_ROLE_NAME) + .withCdcForTable(SCHEMA_NAME, STREAM_NAME2, CDC_ROLE_NAME) // revoke user permissions .with("REVOKE ALL FROM %s CASCADE;", testdb.getUserName()) .with("EXEC sp_msforeachtable \"REVOKE ALL ON '?' 
TO %s;\"", testdb.getUserName()) // grant user permissions .with("EXEC sp_addrolemember N'%s', N'%s';", "db_datareader", testdb.getUserName()) .with("GRANT SELECT ON SCHEMA :: [cdc] TO %s", testdb.getUserName()) - .with("EXEC sp_addrolemember N'%s', N'%s';", CDC_ROLE_NAME, testdb.getUserName()); + .with("EXEC sp_addrolemember N'%s', N'%s';", CDC_ROLE_NAME, testdb.getUserName()) + .withWaitUntilMaxLsnAvailable(); } @Override diff --git a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceDatatypeTest.java b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceDatatypeTest.java index adfa26005af3..62eec21314cb 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceDatatypeTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceDatatypeTest.java @@ -6,7 +6,6 @@ import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.cdk.db.Database; -import io.airbyte.cdk.integrations.standardtest.source.TestDestinationEnv; import io.airbyte.integrations.source.mssql.MsSQLTestDatabase.BaseImage; import io.airbyte.integrations.source.mssql.MsSQLTestDatabase.ContainerModifier; @@ -27,46 +26,18 @@ protected Database setupDatabase() { return testdb.getDatabase(); } - @Override - protected void setupEnvironment(final TestDestinationEnv environment) throws Exception { - super.setupEnvironment(environment); - enableCdcOnAllTables(); + protected void createTables() throws Exception { + super.createTables(); + for (var test : testDataHolders) { + testdb.withCdcForTable(test.getNameSpace(), test.getNameWithTestPrefix(), null); + } } - private void enableCdcOnAllTables() { - testdb.with(""" - DECLARE @TableName VARCHAR(100) - DECLARE @TableSchema VARCHAR(100) - DECLARE CDC_Cursor CURSOR FOR - SELECT * FROM ( - SELECT Name,SCHEMA_NAME(schema_id) AS TableSchema - FROM sys.objects - WHERE type = 'u' - AND is_ms_shipped <> 1 - ) CDC - OPEN CDC_Cursor - FETCH NEXT FROM CDC_Cursor INTO @TableName,@TableSchema - WHILE @@FETCH_STATUS = 0 - BEGIN - DECLARE @SQL NVARCHAR(1000) - DECLARE @CDC_Status TINYINT - SET @CDC_Status=(SELECT COUNT(*) - FROM cdc.change_tables - WHERE Source_object_id = OBJECT_ID(@TableSchema+'.'+@TableName)) - --IF CDC is not enabled on Table, Enable CDC - IF @CDC_Status <> 1 - BEGIN - SET @SQL='EXEC sys.sp_cdc_enable_table - @source_schema = '''+@TableSchema+''', - @source_name = ''' + @TableName - + ''', - @role_name = null;' - EXEC sp_executesql @SQL - END - FETCH NEXT FROM CDC_Cursor INTO @TableName,@TableSchema - END - CLOSE CDC_Cursor - DEALLOCATE CDC_Cursor"""); + protected void populateTables() throws Exception { + super.populateTables(); + for (var test : testDataHolders) { + testdb.waitForCdcRecords(test.getNameSpace(), test.getNameWithTestPrefix(), test.getValues().size()); + } } @Override diff --git a/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceTest.java b/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceTest.java index ad63ac5b8558..3ef4988cbe07 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceTest.java +++ 
b/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceTest.java @@ -35,9 +35,12 @@ import io.airbyte.cdk.db.jdbc.streaming.AdaptiveStreamingQueryConfig; import io.airbyte.cdk.integrations.JdbcConnector; import io.airbyte.cdk.integrations.debezium.CdcSourceTest; +import io.airbyte.cdk.integrations.debezium.CdcTargetPosition; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.util.AutoCloseableIterator; import io.airbyte.commons.util.AutoCloseableIterators; +import io.airbyte.integrations.source.mssql.MsSQLTestDatabase.BaseImage; +import io.airbyte.integrations.source.mssql.MsSQLTestDatabase.ContainerModifier; import io.airbyte.integrations.source.mssql.cdc.MssqlDebeziumStateUtil; import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; import io.airbyte.protocol.models.v0.AirbyteGlobalState; @@ -49,6 +52,7 @@ import io.airbyte.protocol.models.v0.AirbyteStreamState; import io.airbyte.protocol.models.v0.SyncMode; import io.debezium.connector.sqlserver.Lsn; +import java.sql.SQLException; import java.time.Duration; import java.util.Collections; import java.util.List; @@ -60,54 +64,33 @@ import java.util.concurrent.Executors; import java.util.stream.Collectors; import javax.sql.DataSource; -import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestInstance; import org.junit.jupiter.api.TestInstance.Lifecycle; -import org.testcontainers.containers.MSSQLServerContainer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @TestInstance(Lifecycle.PER_CLASS) public class CdcMssqlSourceTest extends CdcSourceTest { + private static final Logger LOGGER = LoggerFactory.getLogger(CdcSourceTest.class); + static private final String CDC_ROLE_NAME = "cdc_selector"; static private final String TEST_USER_NAME_PREFIX = "cdc_test_user"; - // Deliberately do not share this test container, as we're going to mutate the global SQL Server - // state. - protected final MSSQLServerContainer privateContainer; - private DataSource testDataSource; - CdcMssqlSourceTest() { - this.privateContainer = createContainer(); - } - - protected MSSQLServerContainer createContainer() { - return new MsSQLContainerFactory().exclusive( - MsSQLTestDatabase.BaseImage.MSSQL_2022.reference, - MsSQLTestDatabase.ContainerModifier.AGENT.methodName); - } - - @AfterAll - void afterAll() { - privateContainer.close(); - } - protected final String testUserName() { return testdb.withNamespace(TEST_USER_NAME_PREFIX); } @Override protected MsSQLTestDatabase createTestDatabase() { - final var testdb = new MsSQLTestDatabase(privateContainer); - return testdb - .withConnectionProperty("encrypt", "false") - .withConnectionProperty("databaseName", testdb.getDatabaseName()) - .initialized() + return MsSQLTestDatabase.in(BaseImage.MSSQL_2022, ContainerModifier.AGENT) .withWaitUntilAgentRunning() .withCdc(); } @@ -134,19 +117,12 @@ protected JsonNode config() { @Override @BeforeEach protected void setup() { - super.setup(); - + testdb = createTestDatabase(); + createTables(); // Enables cdc on MODELS_SCHEMA.MODELS_STREAM_NAME, giving CDC_ROLE_NAME select access. 
- final var enableCdcSqlFmt = """ - EXEC sys.sp_cdc_enable_table - \t@source_schema = N'%s', - \t@source_name = N'%s', - \t@role_name = N'%s', - \t@supports_net_changes = 0"""; testdb - .with(enableCdcSqlFmt, modelsSchema(), MODELS_STREAM_NAME, CDC_ROLE_NAME) - .with(enableCdcSqlFmt, randomSchema(), RANDOM_TABLE_NAME, CDC_ROLE_NAME) - .withShortenedCapturePollingInterval(); + .withCdcForTable(modelsSchema(), MODELS_STREAM_NAME, CDC_ROLE_NAME) + .withCdcForTable(randomSchema(), RANDOM_TABLE_NAME, CDC_ROLE_NAME); // Create a test user to be used by the source, with proper permissions. testdb @@ -162,16 +138,24 @@ protected void setup() { .with("USE [%s]", testdb.getDatabaseName()) .with("EXEC sp_addrolemember N'%s', N'%s';", CDC_ROLE_NAME, testUserName()); + populateTables(); + waitForCdcRecords(); testDataSource = createTestDataSource(); } + public void waitForCdcRecords() { + testdb.waitForCdcRecords(modelsSchema(), MODELS_STREAM_NAME, MODEL_RECORDS.size()); + testdb.waitForCdcRecords(randomSchema(), RANDOM_TABLE_NAME, MODEL_RECORDS_RANDOM.size()); + + } + protected DataSource createTestDataSource() { return DataSourceFactory.create( testUserName(), testdb.getPassword(), testdb.getDatabaseDriver().getDriverClassName(), testdb.getJdbcUrl(), - Map.of("encrypt", "false"), + Map.of("encrypt", "false", "trustServerCertificate", "true"), JdbcConnector.CONNECT_TIMEOUT_DEFAULT); } @@ -299,41 +283,6 @@ void testAssertCdcSchemaQueryable() { () -> source().assertCdcSchemaQueryable(config(), testDatabase())); } - @Test - void testAssertSqlServerAgentRunning() { - testdb.withAgentStopped().withWaitUntilAgentStopped(); - // assert expected failure if sql server agent stopped - assertThrows(RuntimeException.class, () -> source().assertSqlServerAgentRunning(testDatabase())); - // assert success if sql server agent running - testdb.withAgentStarted().withWaitUntilAgentRunning(); - assertDoesNotThrow(() -> source().assertSqlServerAgentRunning(testDatabase())); - } - - // Ensure the CDC check operations are included when CDC is enabled - // todo: make this better by checking the returned checkOperations from source.getCheckOperations - @Test - void testCdcCheckOperations() throws Exception { - // assertCdcEnabledInDb - testdb.withoutCdc(); - AirbyteConnectionStatus status = source().check(config()); - assertEquals(status.getStatus(), AirbyteConnectionStatus.Status.FAILED); - testdb.withCdc(); - // assertCdcSchemaQueryable - testdb.with("REVOKE SELECT ON SCHEMA :: [cdc] TO %s", testUserName()); - status = source().check(config()); - assertEquals(status.getStatus(), AirbyteConnectionStatus.Status.FAILED); - testdb.with("GRANT SELECT ON SCHEMA :: [cdc] TO %s", testUserName()); - - // assertSqlServerAgentRunning - - testdb.withAgentStopped().withWaitUntilAgentStopped(); - status = source().check(config()); - assertEquals(status.getStatus(), AirbyteConnectionStatus.Status.FAILED); - testdb.withAgentStarted().withWaitUntilAgentRunning(); - status = source().check(config()); - assertEquals(status.getStatus(), AirbyteConnectionStatus.Status.FAILED); - } - @Test void testCdcCheckOperationsWithDot() throws Exception { final String dbNameWithDot = testdb.getDatabaseName().replace("_", "."); @@ -347,7 +296,7 @@ void testCdcCheckOperationsWithDot() throws Exception { // todo: check LSN returned is actually the max LSN // todo: check we fail as expected under certain conditions @Test - void testGetTargetPosition() { + void testGetTargetPosition() throws Exception { // check that getTargetPosition returns higher Lsn after 
inserting new row testdb.withWaitUntilMaxLsnAvailable(); final Lsn firstLsn = MssqlCdcTargetPosition.getTargetPosition(testDatabase(), testdb.getDatabaseName()).targetLsn; @@ -478,4 +427,32 @@ private void assertStateTypes(final List stateMessages, fin } } + @Override + protected void compareTargetPositionFromTheRecordsWithTargetPostionGeneratedBeforeSync(final CdcTargetPosition targetPosition, + final AirbyteRecordMessage record) { + // The LSN from records should be either equal or grater than the position value before the sync + // started. + // Since we're using shared containers, the current LSN can move forward without any data + // modifications + // (INSERT, UPDATE, DELETE) in the current DB + assert targetPosition instanceof MssqlCdcTargetPosition; + assertTrue(extractPosition(record.getData()).targetLsn.compareTo(((MssqlCdcTargetPosition) targetPosition).targetLsn) >= 0); + } + + protected void waitForCdcRecords(String schemaName, String tableName, int recordCount) + throws Exception { + testdb.waitForCdcRecords(schemaName, tableName, recordCount); + } + + protected void deleteCommand(final String streamName) { + String selectCountSql = "SELECT COUNT(*) FROM %s.%s".formatted(modelsSchema(), streamName); + try { + int rowCount = testdb.query(ctx -> ctx.fetch(selectCountSql)).get(0).get(0, Integer.class); + LOGGER.info("deleting all {} rows from table {}.{}", rowCount, modelsSchema(), streamName); + super.deleteCommand(streamName); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + } diff --git a/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcMssqlSslSourceTest.java b/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcMssqlSslSourceTest.java index f0f869eb686f..c16ac61805d0 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcMssqlSslSourceTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcMssqlSslSourceTest.java @@ -10,34 +10,22 @@ import io.airbyte.cdk.db.factory.DataSourceFactory; import io.airbyte.cdk.db.jdbc.JdbcUtils; import io.airbyte.cdk.integrations.JdbcConnector; +import io.airbyte.integrations.source.mssql.MsSQLTestDatabase.BaseImage; import io.airbyte.integrations.source.mssql.MsSQLTestDatabase.CertificateKey; +import io.airbyte.integrations.source.mssql.MsSQLTestDatabase.ContainerModifier; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.Map; import javax.sql.DataSource; import org.junit.jupiter.api.TestInstance; -import org.testcontainers.containers.MSSQLServerContainer; @TestInstance(TestInstance.Lifecycle.PER_CLASS) public class CdcMssqlSslSourceTest extends CdcMssqlSourceTest { - @Override - protected MSSQLServerContainer createContainer() { - return new MsSQLContainerFactory().exclusive( - MsSQLTestDatabase.BaseImage.MSSQL_2022.reference, - MsSQLTestDatabase.ContainerModifier.AGENT.methodName, - MsSQLTestDatabase.ContainerModifier.WITH_SSL_CERTIFICATES.methodName); - } - @Override final protected MsSQLTestDatabase createTestDatabase() { - final var testdb = new MsSQLTestDatabase(privateContainer); - return testdb - .withConnectionProperty("encrypt", "true") - .withConnectionProperty("databaseName", testdb.getDatabaseName()) - .withConnectionProperty("trustServerCertificate", "true") - .initialized() - .withWaitUntilAgentRunning() + final var testdb = 
MsSQLTestDatabase.in(BaseImage.MSSQL_2022, ContainerModifier.AGENT, ContainerModifier.WITH_SSL_CERTIFICATES); + return testdb.withWaitUntilAgentRunning() .withCdc(); } diff --git a/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcStateCompressionTest.java b/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcStateCompressionTest.java index 293189b6683a..3701e8237ff5 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcStateCompressionTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcStateCompressionTest.java @@ -56,28 +56,27 @@ public class CdcStateCompressionTest { private MsSQLTestDatabase testdb; @BeforeEach - public void setup() { + public void setup() throws Exception { testdb = MsSQLTestDatabase.in(MsSQLTestDatabase.BaseImage.MSSQL_2022, MsSQLTestDatabase.ContainerModifier.AGENT) .withWaitUntilAgentRunning() .withCdc(); // Create a test schema and a bunch of test tables with CDC enabled. // Insert one row in each table so that they're not empty. - final var enableCdcSqlFmt = """ - EXEC sys.sp_cdc_enable_table - \t@source_schema = N'%s', - \t@source_name = N'test_table_%d', - \t@role_name = N'%s', - \t@supports_net_changes = 0, - \t@capture_instance = N'capture_instance_%d_%d' - """; testdb.with("CREATE SCHEMA %s;", TEST_SCHEMA); for (int i = 0; i < TEST_TABLES; i++) { + String tableName = "test_table_%d".formatted(i); + String cdcInstanceName = "capture_instance_%d_%d".formatted(i, 1); testdb - .with("CREATE TABLE %s.test_table_%d (id INT IDENTITY(1,1) PRIMARY KEY);", TEST_SCHEMA, i) - .with(enableCdcSqlFmt, TEST_SCHEMA, i, CDC_ROLE_NAME, i, 1) - .withShortenedCapturePollingInterval() - .with("INSERT INTO %s.test_table_%d DEFAULT VALUES", TEST_SCHEMA, i); + .with("CREATE TABLE %s.%s (id INT IDENTITY(1,1) PRIMARY KEY);", TEST_SCHEMA, tableName) + .withCdcForTable(TEST_SCHEMA, tableName, CDC_ROLE_NAME, cdcInstanceName) + .with("INSERT INTO %s.%s DEFAULT VALUES", TEST_SCHEMA, tableName); + } + + for (int i = 0; i < TEST_TABLES; i++) { + String tableName = "test_table_%d".formatted(i); + String cdcInstanceName = "capture_instance_%d_%d".formatted(i, 1); + testdb.waitForCdcRecords(TEST_SCHEMA, tableName, cdcInstanceName, 1); } // Create a test user to be used by the source, with proper permissions. @@ -97,15 +96,13 @@ public void setup() { // We do this by adding lots of columns with long names, // then migrating to a new CDC capture instance for each table. // This is admittedly somewhat awkward and perhaps could be improved. - final var disableCdcSqlFmt = """ - EXEC sys.sp_cdc_disable_table - \t@source_schema = N'%s', - \t@source_name = N'test_table_%d', - \t@capture_instance = N'capture_instance_%d_%d' - """; + for (int i = 0; i < TEST_TABLES; i++) { + String tableName = "test_table_%d".formatted(i); + String cdcInstanceName = "capture_instance_%d_%d".formatted(i, 2); + String oldCdcInstanceName = "capture_instance_%d_%d".formatted(i, 1); final var sb = new StringBuilder(); - sb.append("ALTER TABLE ").append(TEST_SCHEMA).append(".test_table_").append(i).append(" ADD"); + sb.append("ALTER TABLE ").append(TEST_SCHEMA).append(".").append(tableName).append(" ADD"); for (int j = 0; j < ADDED_COLUMNS; j++) { sb.append((j > 0) ? 
", " : " ") .append("rather_long_column_name_________________________________________________________________________________________").append(j) @@ -113,9 +110,8 @@ public void setup() { } testdb .with(sb.toString()) - .with(enableCdcSqlFmt, TEST_SCHEMA, i, CDC_ROLE_NAME, i, 2) - .with(disableCdcSqlFmt, TEST_SCHEMA, i, i, 1) - .withShortenedCapturePollingInterval(); + .withCdcForTable(TEST_SCHEMA, tableName, CDC_ROLE_NAME, cdcInstanceName) + .withCdcDisabledForTable(TEST_SCHEMA, tableName, oldCdcInstanceName); } } @@ -164,7 +160,7 @@ private String testUserName() { } /** - * This test is similar in principle to {@link CdcMysqlSourceTest.testCompressedSchemaHistory}. + * This test is similar in principle to CdcMysqlSourceTest.testCompressedSchemaHistory. */ @Test public void testCompressedSchemaHistory() throws Exception { diff --git a/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/MssqlAgentStateTest.java b/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/MssqlAgentStateTest.java new file mode 100644 index 000000000000..89f3ea5a8969 --- /dev/null +++ b/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/MssqlAgentStateTest.java @@ -0,0 +1,115 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.source.mssql; + +import static io.airbyte.cdk.integrations.debezium.DebeziumIteratorConstants.SYNC_CHECKPOINT_RECORDS_PROPERTY; +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.cdk.db.factory.DataSourceFactory; +import io.airbyte.cdk.db.jdbc.DefaultJdbcDatabase; +import io.airbyte.cdk.db.jdbc.JdbcDatabase; +import io.airbyte.cdk.db.jdbc.JdbcUtils; +import io.airbyte.cdk.integrations.JdbcConnector; +import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; +import java.util.Map; +import javax.sql.DataSource; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.testcontainers.containers.MSSQLServerContainer; + +public class MssqlAgentStateTest { + + private static MsSQLTestDatabase testdb; + private static DataSource testDataSource; + private static MSSQLServerContainer privateContainer; + + @BeforeAll + public static void setup() { + privateContainer = new MsSQLContainerFactory().exclusive( + MsSQLTestDatabase.BaseImage.MSSQL_2022.reference, + MsSQLTestDatabase.ContainerModifier.AGENT); + testdb = new MsSQLTestDatabase(privateContainer); + testdb + .withConnectionProperty("encrypt", "false") + .withConnectionProperty("trustServerCertificate", "true") + .withConnectionProperty("databaseName", testdb.getDatabaseName()) + .initialized() + .withWaitUntilAgentRunning() + .withCdc(); + testDataSource = DataSourceFactory.create( + testdb.getUserName(), + testdb.getPassword(), + testdb.getDatabaseDriver().getDriverClassName(), + testdb.getJdbcUrl(), + Map.of("encrypt", "false", "trustServerCertificate", "true"), + JdbcConnector.CONNECT_TIMEOUT_DEFAULT); + } + + @AfterAll + static void tearDown() { + privateContainer.close(); + } + + protected MssqlSource source() { + return new MssqlSource(); + } + + private JdbcDatabase testDatabase() { + return new DefaultJdbcDatabase(testDataSource); + } + + protected JsonNode config() { + 
return testdb.configBuilder() + .withHostAndPort() + .withDatabase() + .with(JdbcUtils.USERNAME_KEY, testdb.getUserName()) + .with(JdbcUtils.PASSWORD_KEY, testdb.getPassword()) + .withCdcReplication() + .withoutSsl() + .with(SYNC_CHECKPOINT_RECORDS_PROPERTY, 1) + .build(); + } + + @Test + void testAssertSqlServerAgentRunning() throws Exception { + testdb.withAgentStopped().withWaitUntilAgentStopped(); + // assert expected failure if sql server agent stopped + assertThrows(RuntimeException.class, + () -> source().assertSqlServerAgentRunning(testDatabase())); + // assert success if sql server agent running + testdb.withAgentStarted().withWaitUntilAgentRunning(); + assertDoesNotThrow(() -> source().assertSqlServerAgentRunning(testDatabase())); + } + + // Ensure the CDC check operations are included when CDC is enabled + // todo: make this better by checking the returned checkOperations from source.getCheckOperations + @Test + void testCdcCheckOperations() throws Exception { + // assertCdcEnabledInDb + testdb.withoutCdc(); + AirbyteConnectionStatus status = source().check(config()); + assertEquals(status.getStatus(), AirbyteConnectionStatus.Status.FAILED); + testdb.withCdc(); + // assertCdcSchemaQueryable + testdb.with("REVOKE SELECT ON SCHEMA :: [cdc] TO %s", testdb.getUserName()); + status = source().check(config()); + assertEquals(status.getStatus(), AirbyteConnectionStatus.Status.FAILED); + testdb.with("GRANT SELECT ON SCHEMA :: [cdc] TO %s", testdb.getUserName()); + + // assertSqlServerAgentRunning + + testdb.withAgentStopped().withWaitUntilAgentStopped(); + status = source().check(config()); + assertEquals(status.getStatus(), AirbyteConnectionStatus.Status.FAILED); + testdb.withAgentStarted().withWaitUntilAgentRunning(); + status = source().check(config()); + assertEquals(status.getStatus(), AirbyteConnectionStatus.Status.SUCCEEDED); + } + +} diff --git a/airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSQLContainerFactory.java b/airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSQLContainerFactory.java index 22bd3b2bbf6a..98477dcf47d2 100644 --- a/airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSQLContainerFactory.java +++ b/airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSQLContainerFactory.java @@ -23,15 +23,15 @@ protected MSSQLServerContainer createNewContainer(DockerImageName imageName) /** * Create a new network and bind it to the container. */ - public void withNetwork(MSSQLServerContainer container) { + public static void withNetwork(MSSQLServerContainer container) { container.withNetwork(Network.newNetwork()); } - public void withAgent(MSSQLServerContainer container) { + public static void withAgent(MSSQLServerContainer container) { container.addEnv("MSSQL_AGENT_ENABLED", "True"); } - public void withSslCertificates(MSSQLServerContainer container) { + public static void withSslCertificates(MSSQLServerContainer container) { // yes, this is uglier than sin. The reason why I'm doing this is because there's no command to // reload a SqlServer config. So I need to create all the necessary files before I start the // SQL server. Hence this horror
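The withSslCertificates modifier above has to lay certificate files down before SQL Server boots, because the server only reads its TLS configuration at startup and offers no reload command. As a rough, hypothetical sketch of the same idea (image tag and host paths are illustrative, not taken from this patch), Testcontainers can stage files into the container before it starts:

```java
import org.testcontainers.containers.MSSQLServerContainer;
import org.testcontainers.utility.MountableFile;

public class SslStagingSketch {

  public static void main(final String[] args) {
    // Stage pre-generated certificate files into the container filesystem
    // before startup, so sqlservr finds them when it reads its TLS config.
    try (MSSQLServerContainer<?> container = new MSSQLServerContainer<>("mcr.microsoft.com/mssql/server:2022-latest")
        .acceptLicense()
        .withCopyFileToContainer(MountableFile.forHostPath("certs/server.crt"), "/tmp/certs/server.crt")
        .withCopyFileToContainer(MountableFile.forHostPath("certs/server.key"), "/tmp/certs/server.key")) {
      container.start(); // the files are already in place when SQL Server boots
    }
  }
}
```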
diff --git a/airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSQLTestDatabase.java b/airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSQLTestDatabase.java index d2cef1e979c3..698992f1ffaa 100644 --- a/airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSQLTestDatabase.java +++ b/airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSQLTestDatabase.java @@ -7,19 +7,24 @@ import static io.airbyte.integrations.source.mssql.MsSqlSpecConstants.INVALID_CDC_CURSOR_POSITION_PROPERTY; import static io.airbyte.integrations.source.mssql.MsSqlSpecConstants.RESYNC_DATA_OPTION; +import com.google.common.collect.Sets; import io.airbyte.cdk.db.factory.DatabaseDriver; import io.airbyte.cdk.db.jdbc.JdbcUtils; +import io.airbyte.cdk.testutils.ContainerFactory.NamedContainerModifier; import io.airbyte.cdk.testutils.TestDatabase; import io.debezium.connector.sqlserver.Lsn; import java.io.IOException; import java.io.UncheckedIOException; import java.sql.SQLException; -import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.function.Consumer; import java.util.stream.Collectors; import java.util.stream.Stream; import org.jooq.SQLDialect; +import org.jooq.exception.DataAccessException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.testcontainers.containers.MSSQLServerContainer; @@ -28,7 +33,9 @@ public class MsSQLTestDatabase extends TestDatabase, MsS static private final Logger LOGGER = LoggerFactory.getLogger(MsSQLTestDatabase.class); - static public final int MAX_RETRIES = 60; + // empirically, 240 is enough. If you feel like you need to increase it, you're probably missing a + // check somewhere + static public final int MAX_RETRIES = 240; public enum BaseImage { @@ -44,42 +51,112 @@ public enum BaseImage { } - public enum ContainerModifier { + public enum ContainerModifier implements NamedContainerModifier<MSSQLServerContainer<?>> { - NETWORK("withNetwork"), - AGENT("withAgent"), - WITH_SSL_CERTIFICATES("withSslCertificates"), + NETWORK(MsSQLContainerFactory::withNetwork), + AGENT(MsSQLContainerFactory::withAgent), + WITH_SSL_CERTIFICATES(MsSQLContainerFactory::withSslCertificates), ; - public final String methodName; + public final Consumer<MSSQLServerContainer<?>> modifier; - ContainerModifier(final String methodName) { - this.methodName = methodName; + ContainerModifier(final Consumer<MSSQLServerContainer<?>> modifier) { + this.modifier = modifier; + } + + @Override + public Consumer<MSSQLServerContainer<?>> modifier() { + return modifier; + } }
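The rewritten ContainerModifier enum replaces reflection on method-name strings with constants that carry a Consumer applied to the container at build time. A minimal self-contained sketch of that pattern, with hypothetical names standing in for the CDK and Testcontainers types:

```java
import java.util.function.Consumer;

public class ModifierEnumSketch {

  // Stand-in for the CDK's NamedContainerModifier interface.
  interface NamedModifier<T> {

    Consumer<T> modifier();

  }

  // Each constant wraps the mutation to apply, so callers pass enum values
  // instead of method-name strings resolved via reflection.
  enum Greeting implements NamedModifier<StringBuilder> {

    POLITE(sb -> sb.insert(0, "please ")),
    LOUD(sb -> sb.append("!!"));

    private final Consumer<StringBuilder> modifier;

    Greeting(final Consumer<StringBuilder> modifier) {
      this.modifier = modifier;
    }

    @Override
    public Consumer<StringBuilder> modifier() {
      return modifier;
    }

  }

  public static void main(final String[] args) {
    final StringBuilder target = new StringBuilder("hello");
    for (final Greeting g : Greeting.values()) {
      g.modifier().accept(target); // apply each modifier in declaration order
    }
    System.out.println(target); // please hello!!
  }
}
```

The compiler now checks every modifier reference, where the string-based lookup could only fail at runtime.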
- @SuppressWarnings("deprecation") - static public MsSQLTestDatabase in(final BaseImage imageName, final ContainerModifier... methods) { - final String[] methodNames = Stream.of(methods).map(im -> im.methodName).toList().toArray(new String[0]); - final var container = new MsSQLContainerFactory().shared(imageName.reference, methodNames); - final var testdb = new MsSQLTestDatabase(container); + static public MsSQLTestDatabase in(final BaseImage imageName, final ContainerModifier... modifiers) { + final var container = new MsSQLContainerFactory().shared(imageName.reference, modifiers); + final MsSQLTestDatabase testdb = new MsSQLTestDatabase(container); return testdb .withConnectionProperty("encrypt", "false") + .withConnectionProperty("trustServerCertificate", "true") .withConnectionProperty("databaseName", testdb.getDatabaseName()) .initialized(); } public MsSQLTestDatabase(final MSSQLServerContainer container) { super(container); + LOGGER.info("creating new database. databaseId=" + this.databaseId + ", databaseName=" + getDatabaseName()); } public MsSQLTestDatabase withCdc() { - return with("EXEC sys.sp_cdc_enable_db;"); + LOGGER.info("enabling CDC on database {} with id {}", getDatabaseName(), databaseId); + with("EXEC sys.sp_cdc_enable_db;"); + LOGGER.info("CDC enabled on database {} with id {}", getDatabaseName(), databaseId); + return this; } + private static final String RETRYABLE_CDC_TABLE_ENABLEMENT_ERROR_CONTENT = + "The error returned was 14258: 'Cannot perform this operation while SQLServerAgent is starting. Try again later.'"; + private static final String ENABLE_CDC_SQL_FMT = """ + EXEC sys.sp_cdc_enable_table + \t@source_schema = N'%s', + \t@source_name = N'%s', + \t@role_name = %s, + \t@supports_net_changes = 0, + \t@capture_instance = N'%s'"""; + private final Set<String> CDC_INSTANCE_NAMES = Sets.newConcurrentHashSet(); + + public MsSQLTestDatabase withCdcForTable(String schemaName, String tableName, String roleName) { + return withCdcForTable(schemaName, tableName, roleName, "%s_%s".formatted(schemaName, tableName)); + } + + public MsSQLTestDatabase withCdcForTable(String schemaName, String tableName, String roleName, String instanceName) { + LOGGER.info(formatLogLine("enabling CDC for table {}.{} and role {}, instance {}"), schemaName, tableName, roleName, instanceName); + String sqlRoleName = roleName == null ? "NULL" : "N'%s'".formatted(roleName); + for (int tryCount = 0; tryCount < MAX_RETRIES; tryCount++) { + try { + Thread.sleep(1_000); + synchronized (getContainer()) { + LOGGER.info(formatLogLine("Trying to enable CDC for table {}.{} and role {}, instance {}, try {}/{}"), schemaName, tableName, roleName, + instanceName, tryCount, MAX_RETRIES); + with(ENABLE_CDC_SQL_FMT.formatted(schemaName, tableName, sqlRoleName, instanceName)); + } + CDC_INSTANCE_NAMES.add(instanceName); + return withShortenedCapturePollingInterval(); + } catch (DataAccessException e) { + if (!e.getMessage().contains(RETRYABLE_CDC_TABLE_ENABLEMENT_ERROR_CONTENT)) { + throw e; + } + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + throw new RuntimeException(formatLogLine("failed to enable CDC for table %s.%s within %d seconds").formatted(schemaName, tableName, MAX_RETRIES)); + } + + private static final String DISABLE_CDC_SQL_FMT = """ + EXEC sys.sp_cdc_disable_table + \t@source_schema = N'%s', + \t@source_name = N'%s', + \t@capture_instance = N'%s' + """; + + public MsSQLTestDatabase withCdcDisabledForTable(String schemaName, String tableName, String instanceName) { + LOGGER.info(formatLogLine("disabling CDC for table {}.{}, instance {}"), schemaName, tableName, instanceName); + if (!CDC_INSTANCE_NAMES.remove(instanceName)) { + throw new RuntimeException(formatLogLine("CDC was not enabled for instance ") + instanceName); + } + synchronized (getContainer()) { + return with(DISABLE_CDC_SQL_FMT.formatted(schemaName, tableName, instanceName)); + } + } + + private static final String DISABLE_CDC_SQL = "EXEC sys.sp_cdc_disable_db;"; + public MsSQLTestDatabase withoutCdc() { - return with("EXEC sys.sp_cdc_disable_db;"); + CDC_INSTANCE_NAMES.clear(); + synchronized (getContainer()) { + return with(DISABLE_CDC_SQL); + } } public MsSQLTestDatabase withAgentStarted() { @@ -100,50 +177,87 @@ public MsSQLTestDatabase withWaitUntilAgentStopped() { return self(); } + public MsSQLTestDatabase waitForCdcRecords(String schemaName, String tableName, int recordCount) { + return waitForCdcRecords(schemaName, tableName, "%s_%s".formatted(schemaName, tableName), recordCount); + } + + public MsSQLTestDatabase waitForCdcRecords(String schemaName, String tableName, String cdcInstanceName, int recordCount) { + if (!CDC_INSTANCE_NAMES.contains(cdcInstanceName)) { + throw new RuntimeException("CDC is not enabled on instance %s".formatted(cdcInstanceName)); + } + String sql = "SELECT count(*) FROM cdc.%s_ct".formatted(cdcInstanceName); + int actualRecordCount = 0; + for (int tryCount = 0; tryCount < MAX_RETRIES; tryCount++) { + LOGGER.info(formatLogLine("fetching the number of CDC records for {}.{}, instance {}"), schemaName, tableName, cdcInstanceName); + try { + Thread.sleep(1_000); + actualRecordCount = query(ctx -> ctx.fetch(sql)).get(0).get(0, Integer.class); + } catch (SQLException | DataAccessException e) { + actualRecordCount = 0; + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + LOGGER.info(formatLogLine("Found {} CDC records for {}.{} in instance {}. Expecting {}. Trying again ({}/{})"), actualRecordCount, schemaName, + tableName, cdcInstanceName, + recordCount, tryCount, MAX_RETRIES); + if (actualRecordCount >= recordCount) { + LOGGER.info(formatLogLine("found {} records after {} tries!"), actualRecordCount, tryCount); + return self(); + } + } + throw new RuntimeException(formatLogLine( + "failed to find %d records after %d seconds. Only found %d!").formatted(recordCount, MAX_RETRIES, actualRecordCount)); + }
+ + private boolean shortenedPollingIntervalEnabled = false; + public MsSQLTestDatabase withShortenedCapturePollingInterval() { - return with("EXEC sys.sp_cdc_change_job @job_type = 'capture', @pollinginterval = %d;", - MssqlCdcTargetPosition.MAX_LSN_QUERY_DELAY_TEST.toSeconds()); + if (!shortenedPollingIntervalEnabled) { + synchronized (getContainer()) { + shortenedPollingIntervalEnabled = true; + with("EXEC sys.sp_cdc_change_job @job_type = 'capture', @pollinginterval = 1;"); + } + } + return this; } private void waitForAgentState(final boolean running) { final String expectedValue = running ? "Running." : "Stopped."; - LOGGER.debug("Waiting for SQLServerAgent state to change to '{}'.", expectedValue); + LOGGER.info(formatLogLine("Waiting for SQLServerAgent state to change to '{}'."), expectedValue); for (int i = 0; i < MAX_RETRIES; i++) { try { + Thread.sleep(1_000); final var r = query(ctx -> ctx.fetch("EXEC master.dbo.xp_servicecontrol 'QueryState', N'SQLServerAGENT';").get(0)); if (expectedValue.equalsIgnoreCase(r.getValue(0).toString())) { - LOGGER.debug("SQLServerAgent state is '{}', as expected.", expectedValue); + LOGGER.info(formatLogLine("SQLServerAgent state is '{}', as expected."), expectedValue); return; } - LOGGER.debug("Retrying, SQLServerAgent state {} does not match expected '{}'.", r, expectedValue); + LOGGER.info(formatLogLine("Retrying, SQLServerAgent state {} does not match expected '{}'."), r, expectedValue); } catch (final SQLException e) { - LOGGER.debug("Retrying agent state query after catching exception {}.", e.getMessage()); - } - try { - Thread.sleep(1_000); // Wait one second between retries. - } catch (final InterruptedException e) { + LOGGER.info(formatLogLine("Retrying agent state query after catching exception {}."), e.getMessage()); + } catch (InterruptedException e) { throw new RuntimeException(e); } } - throw new RuntimeException("Exhausted retry attempts while polling for agent state"); + throw new RuntimeException(formatLogLine("Exhausted retry attempts while polling for agent state")); } + public static final String MAX_LSN_QUERY = "SELECT sys.fn_cdc_get_max_lsn();"; + public MsSQLTestDatabase withWaitUntilMaxLsnAvailable() { - LOGGER.debug("Waiting for max LSN to become available for database {}.", getDatabaseName()); + LOGGER.info(formatLogLine("Waiting for max LSN to become available for database {}."), getDatabaseName()); for (int i = 0; i < MAX_RETRIES; i++) { try { - final var maxLSN = query(ctx -> ctx.fetch("SELECT sys.fn_cdc_get_max_lsn();").get(0).get(0, byte[].class)); + Thread.sleep(1_000); + final var maxLSN = query(ctx -> ctx.fetch(MAX_LSN_QUERY).get(0).get(0, byte[].class)); if (maxLSN != null) { - LOGGER.debug("Max LSN available for database {}: {}", getDatabaseName(), Lsn.valueOf(maxLSN)); + LOGGER.info(formatLogLine("Max LSN available for database {}: {}"), getDatabaseName(), Lsn.valueOf(maxLSN)); return self(); } - LOGGER.debug("Retrying, max LSN still not available for database {}.", getDatabaseName()); + LOGGER.info(formatLogLine("Retrying, max LSN still not available for database {}."), getDatabaseName()); } catch (final SQLException e) { - LOGGER.warn("Retrying max LSN query after catching exception {}", e.getMessage()); - } - try { - Thread.sleep(1_000); // Wait one second between retries.
- } catch (final InterruptedException e) { + LOGGER.info(formatLogLine("Retrying max LSN query after catching exception {}"), e.getMessage()); + } catch (InterruptedException e) { throw new RuntimeException(e); } } @@ -224,23 +338,22 @@ public static enum CertificateKey { } - private Map cachedCerts; + private volatile Map cachedCerts = new ConcurrentHashMap<>(); - public synchronized String getCertificate(final CertificateKey certificateKey) { - if (cachedCerts == null) { - final Map cachedCerts = new HashMap<>(); + public String getCertificate(final CertificateKey certificateKey) { + if (!cachedCerts.containsKey(certificateKey)) { + final String certificate; try { - for (final CertificateKey key : CertificateKey.values()) { - final String command = "cat /tmp/certs/" + key.name().toLowerCase() + ".crt"; - final String certificate = getContainer().execInContainer("bash", "-c", command).getStdout().trim(); - cachedCerts.put(key, certificate); - } + final String command = "cat /tmp/certs/" + certificateKey.name().toLowerCase() + ".crt"; + certificate = getContainer().execInContainer("bash", "-c", command).getStdout().trim(); } catch (final IOException e) { throw new UncheckedIOException(e); } catch (final InterruptedException e) { throw new RuntimeException(e); } - this.cachedCerts = cachedCerts; + synchronized (cachedCerts) { + this.cachedCerts.put(certificateKey, certificate); + } } return cachedCerts.get(certificateKey); } diff --git a/docs/integrations/sources/mssql.md b/docs/integrations/sources/mssql.md index 4d377bfb1cf5..2480f9fb2e3f 100644 --- a/docs/integrations/sources/mssql.md +++ b/docs/integrations/sources/mssql.md @@ -342,7 +342,8 @@ WHERE actor_definition_id ='b5ea17b1-f170-46dc-bc31-cc744ca984c1' AND (configura | Version | Date | Pull Request | Subject | |:--------|:-----------|:------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------| -| 3.7.5 | 2024-02-29 | [35739](https://github.com/airbytehq/airbyte/pull/35739) | Allow configuring the queue size used for cdc events. | +| 3.7.6 | 2024-03-04 | [35721](https://github.com/airbytehq/airbyte/pull/35721) | Fix tests | +| 3.7.5 | 2024-02-29 | [35739](https://github.com/airbytehq/airbyte/pull/35739) | Allow configuring the queue size used for cdc events. | | 3.7.4 | 2024-02-26 | [35566](https://github.com/airbytehq/airbyte/pull/35566) | Add config to throw an error on invalid CDC position. | | 3.7.3 | 2024-02-23 | [35596](https://github.com/airbytehq/airbyte/pull/35596) | Fix a logger issue | | 3.7.2 | 2024-02-21 | [35368](https://github.com/airbytehq/airbyte/pull/35368) | Change query syntax to make it compatible with Azure SQL Managed Instance. 
| From 83cfcbfe01f1dc324ed1957c9a4577737073550c Mon Sep 17 00:00:00 2001 From: Catherine Noll Date: Wed, 6 Mar 2024 12:46:19 -0500 Subject: [PATCH 106/172] Source Microsoft Onedrive: Bump poetry.lock to upgrade transitive dependency (#35858) --- .../source-microsoft-onedrive/metadata.yaml | 2 +- .../source-microsoft-onedrive/poetry.lock | 196 +++++++++--------- .../source-microsoft-onedrive/pyproject.toml | 2 +- .../sources/microsoft-onedrive.md | 1 + 4 files changed, 101 insertions(+), 100 deletions(-) diff --git a/airbyte-integrations/connectors/source-microsoft-onedrive/metadata.yaml b/airbyte-integrations/connectors/source-microsoft-onedrive/metadata.yaml index 209d5dab6938..9f72cb715140 100644 --- a/airbyte-integrations/connectors/source-microsoft-onedrive/metadata.yaml +++ b/airbyte-integrations/connectors/source-microsoft-onedrive/metadata.yaml @@ -20,7 +20,7 @@ data: connectorSubtype: api connectorType: source definitionId: 01d1c685-fd4a-4837-8f4c-93fe5a0d2188 - dockerImageTag: 0.1.7 + dockerImageTag: 0.1.8 dockerRepository: airbyte/source-microsoft-onedrive githubIssueLabel: source-microsoft-onedrive icon: microsoft-onedrive.svg diff --git a/airbyte-integrations/connectors/source-microsoft-onedrive/poetry.lock b/airbyte-integrations/connectors/source-microsoft-onedrive/poetry.lock index 5dacbf85016b..f9e32cc272b6 100644 --- a/airbyte-integrations/connectors/source-microsoft-onedrive/poetry.lock +++ b/airbyte-integrations/connectors/source-microsoft-onedrive/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "airbyte-cdk" @@ -898,13 +898,13 @@ files = [ [[package]] name = "marshmallow" -version = "3.21.0" +version = "3.21.1" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
optional = false python-versions = ">=3.8" files = [ - {file = "marshmallow-3.21.0-py3-none-any.whl", hash = "sha256:e7997f83571c7fd476042c2c188e4ee8a78900ca5e74bd9c8097afa56624e9bd"}, - {file = "marshmallow-3.21.0.tar.gz", hash = "sha256:20f53be28c6e374a711a16165fb22a8dc6003e3f7cda1285e3ca777b9193885b"}, + {file = "marshmallow-3.21.1-py3-none-any.whl", hash = "sha256:f085493f79efb0644f270a9bf2892843142d80d7174bbbd2f3713f2a589dc633"}, + {file = "marshmallow-3.21.1.tar.gz", hash = "sha256:4e65e9e0d80fc9e609574b9983cf32579f305c718afb30d7233ab818571768c3"}, ] [package.dependencies] @@ -1474,13 +1474,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.9.0" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.9.0.tar.gz", hash = "sha256:78e73e19c63f5b20ffa567001531680d939dc042bf7850431877645523c66709"}, - {file = "python_dateutil-2.9.0-py2.py3-none-any.whl", hash = "sha256:cbf2f1da5e6083ac2fbfd4da39a25f34312230110440f424a14c7558bb85d82e"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -1625,101 +1625,101 @@ files = [ [[package]] name = "rapidfuzz" -version = "3.6.1" +version = "3.6.2" description = "rapid fuzzy string matching" optional = false python-versions = ">=3.8" files = [ - {file = "rapidfuzz-3.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ac434fc71edda30d45db4a92ba5e7a42c7405e1a54cb4ec01d03cc668c6dcd40"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2a791168e119cfddf4b5a40470620c872812042f0621e6a293983a2d52372db0"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5a2f3e9df346145c2be94e4d9eeffb82fab0cbfee85bd4a06810e834fe7c03fa"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23de71e7f05518b0bbeef55d67b5dbce3bcd3e2c81e7e533051a2e9401354eb0"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d056e342989248d2bdd67f1955bb7c3b0ecfa239d8f67a8dfe6477b30872c607"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01835d02acd5d95c1071e1da1bb27fe213c84a013b899aba96380ca9962364bc"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed0f712e0bb5fea327e92aec8a937afd07ba8de4c529735d82e4c4124c10d5a0"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96cd19934f76a1264e8ecfed9d9f5291fde04ecb667faef5f33bdbfd95fe2d1f"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e06c4242a1354cf9d48ee01f6f4e6e19c511d50bb1e8d7d20bcadbb83a2aea90"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d73dcfe789d37c6c8b108bf1e203e027714a239e50ad55572ced3c004424ed3b"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:06e98ff000e2619e7cfe552d086815671ed09b6899408c2c1b5103658261f6f3"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:08b6fb47dd889c69fbc0b915d782aaed43e025df6979b6b7f92084ba55edd526"}, - {file = 
"rapidfuzz-3.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a1788ebb5f5b655a15777e654ea433d198f593230277e74d51a2a1e29a986283"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-win32.whl", hash = "sha256:c65f92881753aa1098c77818e2b04a95048f30edbe9c3094dc3707d67df4598b"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:4243a9c35667a349788461aae6471efde8d8800175b7db5148a6ab929628047f"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-win_arm64.whl", hash = "sha256:f59d19078cc332dbdf3b7b210852ba1f5db8c0a2cd8cc4c0ed84cc00c76e6802"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fbc07e2e4ac696497c5f66ec35c21ddab3fc7a406640bffed64c26ab2f7ce6d6"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:40cced1a8852652813f30fb5d4b8f9b237112a0bbaeebb0f4cc3611502556764"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:82300e5f8945d601c2daaaac139d5524d7c1fdf719aa799a9439927739917460"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edf97c321fd641fea2793abce0e48fa4f91f3c202092672f8b5b4e781960b891"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7420e801b00dee4a344ae2ee10e837d603461eb180e41d063699fb7efe08faf0"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:060bd7277dc794279fa95522af355034a29c90b42adcb7aa1da358fc839cdb11"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7e3375e4f2bfec77f907680328e4cd16cc64e137c84b1886d547ab340ba6928"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a490cd645ef9d8524090551016f05f052e416c8adb2d8b85d35c9baa9d0428ab"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2e03038bfa66d2d7cffa05d81c2f18fd6acbb25e7e3c068d52bb7469e07ff382"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2b19795b26b979c845dba407fe79d66975d520947b74a8ab6cee1d22686f7967"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:064c1d66c40b3a0f488db1f319a6e75616b2e5fe5430a59f93a9a5e40a656d15"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3c772d04fb0ebeece3109d91f6122b1503023086a9591a0b63d6ee7326bd73d9"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:841eafba6913c4dfd53045835545ba01a41e9644e60920c65b89c8f7e60c00a9"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-win32.whl", hash = "sha256:266dd630f12696ea7119f31d8b8e4959ef45ee2cbedae54417d71ae6f47b9848"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:d79aec8aeee02ab55d0ddb33cea3ecd7b69813a48e423c966a26d7aab025cdfe"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-win_arm64.whl", hash = "sha256:484759b5dbc5559e76fefaa9170147d1254468f555fd9649aea3bad46162a88b"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b2ef4c0fd3256e357b70591ffb9e8ed1d439fb1f481ba03016e751a55261d7c1"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:588c4b20fa2fae79d60a4e438cf7133d6773915df3cc0a7f1351da19eb90f720"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7142ee354e9c06e29a2636b9bbcb592bb00600a88f02aa5e70e4f230347b373e"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1dfc557c0454ad22382373ec1b7df530b4bbd974335efe97a04caec936f2956a"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:03f73b381bdeccb331a12c3c60f1e41943931461cdb52987f2ecf46bfc22f50d"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b0ccc2ec1781c7e5370d96aef0573dd1f97335343e4982bdb3a44c133e27786"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da3e8c9f7e64bb17faefda085ff6862ecb3ad8b79b0f618a6cf4452028aa2222"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fde9b14302a31af7bdafbf5cfbb100201ba21519be2b9dedcf4f1048e4fbe65d"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1a23eee225dfb21c07f25c9fcf23eb055d0056b48e740fe241cbb4b22284379"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e49b9575d16c56c696bc7b06a06bf0c3d4ef01e89137b3ddd4e2ce709af9fe06"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:0a9fc714b8c290261669f22808913aad49553b686115ad0ee999d1cb3df0cd66"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:a3ee4f8f076aa92184e80308fc1a079ac356b99c39408fa422bbd00145be9854"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f056ba42fd2f32e06b2c2ba2443594873cfccc0c90c8b6327904fc2ddf6d5799"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-win32.whl", hash = "sha256:5d82b9651e3d34b23e4e8e201ecd3477c2baa17b638979deeabbb585bcb8ba74"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:dad55a514868dae4543ca48c4e1fc0fac704ead038dafedf8f1fc0cc263746c1"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-win_arm64.whl", hash = "sha256:3c84294f4470fcabd7830795d754d808133329e0a81d62fcc2e65886164be83b"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e19d519386e9db4a5335a4b29f25b8183a1c3f78cecb4c9c3112e7f86470e37f"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01eb03cd880a294d1bf1a583fdd00b87169b9cc9c9f52587411506658c864d73"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:be368573255f8fbb0125a78330a1a40c65e9ba3c5ad129a426ff4289099bfb41"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3e5af946f419c30f5cb98b69d40997fe8580efe78fc83c2f0f25b60d0e56efb"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f382f7ffe384ce34345e1c0b2065451267d3453cadde78946fbd99a59f0cc23c"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be156f51f3a4f369e758505ed4ae64ea88900dcb2f89d5aabb5752676d3f3d7e"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1936d134b6c513fbe934aeb668b0fee1ffd4729a3c9d8d373f3e404fbb0ce8a0"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12ff8eaf4a9399eb2bebd838f16e2d1ded0955230283b07376d68947bbc2d33d"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae598a172e3a95df3383634589660d6b170cc1336fe7578115c584a99e0ba64d"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cd4ba4c18b149da11e7f1b3584813159f189dc20833709de5f3df8b1342a9759"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash 
= "sha256:0402f1629e91a4b2e4aee68043a30191e5e1b7cd2aa8dacf50b1a1bcf6b7d3ab"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:1e12319c6b304cd4c32d5db00b7a1e36bdc66179c44c5707f6faa5a889a317c0"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0bbfae35ce4de4c574b386c43c78a0be176eeddfdae148cb2136f4605bebab89"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-win32.whl", hash = "sha256:7fec74c234d3097612ea80f2a80c60720eec34947066d33d34dc07a3092e8105"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:a553cc1a80d97459d587529cc43a4c7c5ecf835f572b671107692fe9eddf3e24"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:757dfd7392ec6346bd004f8826afb3bf01d18a723c97cbe9958c733ab1a51791"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2963f4a3f763870a16ee076796be31a4a0958fbae133dbc43fc55c3968564cf5"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d2f0274595cc5b2b929c80d4e71b35041104b577e118cf789b3fe0a77b37a4c5"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f211e366e026de110a4246801d43a907cd1a10948082f47e8a4e6da76fef52"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a59472b43879012b90989603aa5a6937a869a72723b1bf2ff1a0d1edee2cc8e6"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a03863714fa6936f90caa7b4b50ea59ea32bb498cc91f74dc25485b3f8fccfe9"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd95b6b7bfb1584f806db89e1e0c8dbb9d25a30a4683880c195cc7f197eaf0c"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7183157edf0c982c0b8592686535c8b3e107f13904b36d85219c77be5cefd0d8"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ad9d74ef7c619b5b0577e909582a1928d93e07d271af18ba43e428dc3512c2a1"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b53137d81e770c82189e07a8f32722d9e4260f13a0aec9914029206ead38cac3"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:49b9ed2472394d306d5dc967a7de48b0aab599016aa4477127b20c2ed982dbf9"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:dec307b57ec2d5054d77d03ee4f654afcd2c18aee00c48014cb70bfed79597d6"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4381023fa1ff32fd5076f5d8321249a9aa62128eb3f21d7ee6a55373e672b261"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-win32.whl", hash = "sha256:8d7a072f10ee57c8413c8ab9593086d42aaff6ee65df4aa6663eecdb7c398dca"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:ebcfb5bfd0a733514352cfc94224faad8791e576a80ffe2fd40b2177bf0e7198"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-win_arm64.whl", hash = "sha256:1c47d592e447738744905c18dda47ed155620204714e6df20eb1941bb1ba315e"}, - {file = "rapidfuzz-3.6.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:eef8b346ab331bec12bbc83ac75641249e6167fab3d84d8f5ca37fd8e6c7a08c"}, - {file = "rapidfuzz-3.6.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53251e256017e2b87f7000aee0353ba42392c442ae0bafd0f6b948593d3f68c6"}, - {file = "rapidfuzz-3.6.1-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:6dede83a6b903e3ebcd7e8137e7ff46907ce9316e9d7e7f917d7e7cdc570ee05"}, - {file = "rapidfuzz-3.6.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e4da90e4c2b444d0a171d7444ea10152e07e95972bb40b834a13bdd6de1110c"}, - {file = "rapidfuzz-3.6.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:ca3dfcf74f2b6962f411c33dd95b0adf3901266e770da6281bc96bb5a8b20de9"}, - {file = "rapidfuzz-3.6.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bcc957c0a8bde8007f1a8a413a632a1a409890f31f73fe764ef4eac55f59ca87"}, - {file = "rapidfuzz-3.6.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:692c9a50bea7a8537442834f9bc6b7d29d8729a5b6379df17c31b6ab4df948c2"}, - {file = "rapidfuzz-3.6.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c23ceaea27e790ddd35ef88b84cf9d721806ca366199a76fd47cfc0457a81b"}, - {file = "rapidfuzz-3.6.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b155e67fff215c09f130555002e42f7517d0ea72cbd58050abb83cb7c880cec"}, - {file = "rapidfuzz-3.6.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3028ee8ecc48250607fa8a0adce37b56275ec3b1acaccd84aee1f68487c8557b"}, - {file = "rapidfuzz-3.6.1.tar.gz", hash = "sha256:35660bee3ce1204872574fa041c7ad7ec5175b3053a4cb6e181463fc07013de7"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a5637e6bf11b15b5aff6ee818c76bdec99ad208511b78985e6209ba648a6e3ee"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:380586664f2f63807050ddb95e7702888b4f0b425abf17655940c411f39287ad"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3168ff565d4b8c239cf11fb604dd2507d30e9bcaac76a4077c0ac23cf2c866ed"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be69f7fd46b5c6467fe5e2fd4cff3816b0c03048eed8a4becb9a73e6000960e7"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cbd5894f23fdf5697499cf759523639838ac822bd1600e343fdce7313baa02ae"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85a5b6e026393fe39fb61146b9c17c5af66fffbe1410e992c4bb06d9ec327bd3"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab269adfc64480f209e99f253391a10735edd5c09046e04899adab5fb132f20e"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35aeac852bca06023d6bbd50c1fc504ca5a9a3613d5e75a140f0be7601fa34ef"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e706f302c6a3ae0d74edd0d6ace46aee1ae07c563b436ccf5ff04db2b3571e60"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bec353f022011e6e5cd28ccb8700fbd2a33918197af0d4e0abb3c3f4845cc864"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ef3925daaa93eed20401012e219f569ff0c039ed5bf4ce2d3737b4f75d441622"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:6ee98d88ae9ccc77ff61992ed33b2496478def5dc0da55c9a9aa06fcb725a352"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:423c7c588b09d618601097b7a0017dfcb91132a2076bef29023c5f3cd2dc3de1"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-win32.whl", hash = "sha256:c17c5efee347a40a6f4c1eec59e3d7d1e22f7613a97f8b8a07733ef723483a04"}, + {file = 
"rapidfuzz-3.6.2-cp310-cp310-win_amd64.whl", hash = "sha256:4209816626d8d6ff8ae7dc248061c6059e618b70c6e6f6e4d7444ae3740b2b85"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-win_arm64.whl", hash = "sha256:1c54d3c85e522d3ac9ee39415f183c8fa184c4f87e7e5a37938f15a6d50e853a"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e06f6d270112f5db001f1cba5a97e1a48aee3d3dbdcbea3ec027c230462dbf9b"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:080cb71b50cb6aff11d1c6aeb157f273e2da0b2bdb3f9d7b01257e49e69a8576"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a7895e04a22d6515bc91a850e0831f2405547605aa311d1ffec51e4818abc3c1"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd82f9838519136b7083dd1e3149ee80344521f3dc37f744f227505ff0883efb"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a945567c2b0b6e069454c9782d5234b0b6795718adf7a9f868bd3144afa6a023"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:673ba2c343644805acdae1cb949c6a4de71aa2f62a998978551ebea59603af3f"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9d457c89bac1471442002e70551e8268e639b3870b4a4521eae363c07253be87"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:495c0d8e14e6f12520eb7fc71b9ba9fcaafb47fc23a654e6e89b6c7985ec0020"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6d67b649bf3e1b1722d04eca44d37919aef88305ce7ad05564502d013cf550fd"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e48dde8ca83d11daa00900cf6a5d281a1297aef9b7bfa73801af6e8822be5019"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:824cc381cf81cbf8d158f6935664ec2a69e6ac3b1d39fa201988bf81a257f775"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:1dfe4c24957474ce0ac75d886387e30e292b4be39228a6d71f76de414dc187db"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d57b98013b802621bbc8b12a46bfc9d36ac552ab51ca207f7ce167ad46adabeb"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-win32.whl", hash = "sha256:9a07dffac439223b4f1025dbfc68f4445a3460a859309c9858c2a3fa29617cdc"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-win_amd64.whl", hash = "sha256:95a49c6b8bf1229743ae585dd5b7d57f0d15a7eb6e826866d5c9965ba958503c"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-win_arm64.whl", hash = "sha256:af7c19ec86e11488539380d3db1755be5d561a3c0e7b04ff9d07abd7f9a8e9d8"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:de8adc12161bf282c60f12dc9233bb31632f71d446a010fe7469a69b8153427f"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:337e357f693130c4c6be740652542b260e36f622c59e01fa33d58f1d2750c930"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6468f8bc8c3c50604f43631550ef9cfec873515dba5023ca34d461be94669fc8"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74c6773b11445b5e5cf93ca383171cd0ac0cdeafea11a7b2a5688f8bf8d813e6"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1507fc5769aa109dda4de3a15f822a0f6a03e18d627bd0ba3ddbb253cf70e07"}, + {file = 
"rapidfuzz-3.6.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:617949a70150e6fffdaed19253dd49f7a53528411dc8bf7663d499ba21e0f61e"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f8b77779174b1b40aa70827692571ab457061897846255ad7d5d559e2edb1932"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80e51b22a7da83f9c87a97e92df07ed0612c74c35496590255f4b5d5b4212dfe"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3ae7c86914cb6673e97e187ba431b9c4cf4177d9ae77f8a1e5b2ba9a5628839e"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ddc380ffaa90f204cc9ddcb779114b9ab6f015246d549de9d47871a97ef9f18a"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:3c1dc078ef371fce09f9f3eec2ca4eaa2a8cd412ec53941015b4f39f14d34407"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:9a74102fc5a2534fe91f7507838623e1f3a149d8e05648389c42bb42e14b1c3f"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:48e1eaea8fcd522fca7f04f0480663f0f0cfb77957092cce60a93f4462864996"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-win32.whl", hash = "sha256:66b008bf2972740cd2dda5d382eb8bdb87265cd88198e71c7797bdc0d1f79d20"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-win_amd64.whl", hash = "sha256:87ac3a87f2251ae2e95fc9478ca5c759de6d141d04c84d3fec9f9cdcfc167b33"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-win_arm64.whl", hash = "sha256:b593cc51aed887e93b78c2f94dfae9008be2b23d17afd3b1f1d3eb3913b58f26"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:7d830bc7a9b586a374147ec60b08b1f9ae5996b43f75cc514f37faef3866b519"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dbee7f5ff11872b76505cbd87c814abc823e8757f11c69062eb3b25130a283da"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:28c011fb31f2c3f82f503aedd6097d3d3854e574e327a119a3b7eb2cf90b79ca"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cda81d0e0ce0c13abfa46b24e10c1e85f9c6acb628f0a9a948f5779f9c2076a2"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c279928651ce0e9e5220dcb25a00cc53b65e592a0861336a38299bcdca3a596"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:35bd4bc9c40e6994c5d6edea4b9319388b4d9711c13c66d543bb4c37624b4184"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d07899506a5a8760448d9df036d528b55a554bf571714173635c79eef4a86e58"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb2e51d01b9c6d6954a3e055c57a80d4685b4fc82719db5519fc153566bcd6bb"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:153d065e353371cc0aeff32b99999a5758266a64e958d1364189367c1c9f6814"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4edcceebb85ebfa49a3ddcde20ad891d36c08dc0fd592efdab0e7d313a4e36af"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3549123fca5bb817341025f98e8e49ca99f84596c7c4f92b658f8e5836040d4a"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:84c1032ae42628465b7a5cc35249906061e18a8193c9c27cbd2db54e9823a9a6"}, + {file = 
"rapidfuzz-3.6.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9bcc91ebd8fc69a6bd3b5711c8250f5f4e70606b4da75ef415f57ad209978205"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-win32.whl", hash = "sha256:f3a70f341c4c111bad910d2df69c78577a98af140319a996af24c9385939335d"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-win_amd64.whl", hash = "sha256:354ad5fe655beb7b279390cb58334903931c5452ecbad1b1666ffb06786498e2"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1b86b93d93020c2b3edc1665d75c8855784845fc0a739b312c26c3a4bf0c80d5"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:28243086ed0e50808bb56632e5442c457241646aeafafd501ac87901f40a3237"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ed52461ae5a9ea4c400d38e2649c74a413f1a6d8fb8308b66f1fbd122514732f"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a46220f86a5f9cb016af31525e0d0865cad437d02239aa0d8aed2ab8bff1f1c"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:81a630ed2fc3ec5fc7400eb66bab1f87e282b4d47f0abe3e48c6634dfa13b5e4"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d8e5a437b9089df6242a718d9c31ab1742989e9400a0977af012ef483b63b4c2"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16270b5529de83b7bae7457e952e4d9cf3fbf029a837dd32d415bb9e0eb8e599"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5378c04102c7f084cde30a100154fa6d7e2baf0d51a6bdd2f912545559c1fb35"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7f18397c8d6a65fc0b288d2fc29bc7baeea6ba91eeb95163a3cd98f23cd3bc85"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2acd2514defce81e6ff4bbff50252d5e7df8e85a731442c4b83e44c86cf1c916"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:1df2faf80201952e252413b6fac6f3e146080dcebb87bb1bb722508e67558ed8"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6440ed0b3007c1c9286b0b88fe2ab2d9e83edd60cd62293b3dfabb732b4e8a30"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4fcfa23b5553b27f4016df77c53172ea743454cf12c28cfa7c35a309a2be93b3"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-win32.whl", hash = "sha256:2d580d937146e803c8e5e1b87916cab8d6f84013b6392713e201efcda335c7d8"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-win_amd64.whl", hash = "sha256:fe2a68be734e8e88af23385c68d6467e15818b6b1df1cbfebf7bff577226c957"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-win_arm64.whl", hash = "sha256:6478f7803efebf5f644d0b758439c5b25728550fdfbb19783d150004c46a75a9"}, + {file = "rapidfuzz-3.6.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:36ce7b68a7b90b787cdd73480a68d2f1ca63c31a3a9d5a79a8736f978e1e9344"}, + {file = "rapidfuzz-3.6.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53597fd72a9340bcdd80d3620f4957c2b92f9b569313b969a3abdaffd193aae6"}, + {file = "rapidfuzz-3.6.2-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4f6de745fe6ce46a422d353ee10599013631d7d714a36d025f164b2d4e8c000"}, + {file = "rapidfuzz-3.6.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62df2136068e2515ed8beb01756381ff62c29384d785e3bf46e3111d4ea3ba1e"}, + {file = 
"rapidfuzz-3.6.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7382c90170f60c846c81a07ddd80bb2e8c43c8383754486fa37f67391a571897"}, + {file = "rapidfuzz-3.6.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f31314fd2e2f3dc3e519e6f93669462ce7953df2def1c344aa8f5345976d0eb2"}, + {file = "rapidfuzz-3.6.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:012221629d54d3bee954148247f711eb86d4d390b589ebfe03172ea0b37a7531"}, + {file = "rapidfuzz-3.6.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d41dd59a70decfce6595315367a2fea2af660d92a9d144acc6479030501014d7"}, + {file = "rapidfuzz-3.6.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f9fa14136a5b0cba1ec42531f7c3e0b0d3edb7fd6bc5e5ae7b498541f3855ab"}, + {file = "rapidfuzz-3.6.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:259364199cbfeca33b1af369fc7951f71717aa285184a3fa5a7b1772da1b89db"}, + {file = "rapidfuzz-3.6.2.tar.gz", hash = "sha256:cf911e792ab0c431694c9bf2648afabfd92099103f2e31492893e078ddca5e1a"}, ] [package.extras] diff --git a/airbyte-integrations/connectors/source-microsoft-onedrive/pyproject.toml b/airbyte-integrations/connectors/source-microsoft-onedrive/pyproject.toml index 97cf3c7f5189..234d789c1ec4 100644 --- a/airbyte-integrations/connectors/source-microsoft-onedrive/pyproject.toml +++ b/airbyte-integrations/connectors/source-microsoft-onedrive/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "0.1.7" +version = "0.1.8" name = "source-microsoft-onedrive" description = "Source implementation for Microsoft OneDrive." authors = [ "Airbyte ",] diff --git a/docs/integrations/sources/microsoft-onedrive.md b/docs/integrations/sources/microsoft-onedrive.md index 099761b28b4c..4c7ce624213e 100644 --- a/docs/integrations/sources/microsoft-onedrive.md +++ b/docs/integrations/sources/microsoft-onedrive.md @@ -121,6 +121,7 @@ The connector is restricted by normal Microsoft Graph [requests limitation](http | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:------------------------------------------------------------------------| +| 0.1.8 | 2024-03-06 | [35858](https://github.com/airbytehq/airbyte/pull/35858) | Bump poetry.lock to upgrade transitive dependency | | 0.1.7 | 2024-03-0q | [34936](https://github.com/airbytehq/airbyte/pull/34936) | Enable in Cloud | | 0.1.6 | 2024-02-06 | [34936](https://github.com/airbytehq/airbyte/pull/34936) | Bump CDK version to avoid missing SyncMode errors | | 0.1.5 | 2024-01-30 | [34681](https://github.com/airbytehq/airbyte/pull/34681) | Unpin CDK version to make compatible with the Concurrent CDK | From 9ce9217660ba8477bdb4fe47806bad00a23b8017 Mon Sep 17 00:00:00 2001 From: Rodi Reich Zilberman <867491+rodireich@users.noreply.github.com> Date: Wed, 6 Mar 2024 09:47:50 -0800 Subject: [PATCH 107/172] make query safer for servers that are case sensitive (#35816) --- airbyte-integrations/connectors/source-mssql/metadata.yaml | 2 +- .../integrations/source/mssql/cdc/MssqlDebeziumStateUtil.java | 2 +- docs/integrations/sources/mssql.md | 1 + 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/airbyte-integrations/connectors/source-mssql/metadata.yaml b/airbyte-integrations/connectors/source-mssql/metadata.yaml index 4ae4c38dfbaa..e9aaafc6a40d 100644 --- a/airbyte-integrations/connectors/source-mssql/metadata.yaml +++ 
b/airbyte-integrations/connectors/source-mssql/metadata.yaml @@ -9,7 +9,7 @@ data: connectorSubtype: database connectorType: source definitionId: b5ea17b1-f170-46dc-bc31-cc744ca984c1 - dockerImageTag: 3.7.6 + dockerImageTag: 3.7.7 dockerRepository: airbyte/source-mssql documentationUrl: https://docs.airbyte.com/integrations/sources/mssql githubIssueLabel: source-mssql diff --git a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/cdc/MssqlDebeziumStateUtil.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/cdc/MssqlDebeziumStateUtil.java index 8f55fa18be99..e2a4064d7210 100644 --- a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/cdc/MssqlDebeziumStateUtil.java +++ b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/cdc/MssqlDebeziumStateUtil.java @@ -62,7 +62,7 @@ public class MssqlDebeziumStateUtil implements DebeziumStateUtil { Set @res = 1 ELSE Set @res = 0 - select @res as [included], @MIN_LSN as [min], @MAX_LSN as [max] + select @res as [included], @min_lsn as [min], @max_lsn as [max] """; private static final Logger LOGGER = LoggerFactory.getLogger(MssqlDebeziumStateUtil.class); diff --git a/docs/integrations/sources/mssql.md b/docs/integrations/sources/mssql.md index 2480f9fb2e3f..db10beb279bd 100644 --- a/docs/integrations/sources/mssql.md +++ b/docs/integrations/sources/mssql.md @@ -342,6 +342,7 @@ WHERE actor_definition_id ='b5ea17b1-f170-46dc-bc31-cc744ca984c1' AND (configura | Version | Date | Pull Request | Subject | |:--------|:-----------|:------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------| +| 3.7.7 | 2024-03-06 | [35816](https://github.com/airbytehq/airbyte/pull/35816) | Fix query that was failing on a case sensitive server. | | 3.7.6 | 2024-03-04 | [35721](https://github.com/airbytehq/airbyte/pull/35721) | Fix tests | | 3.7.5 | 2024-02-29 | [35739](https://github.com/airbytehq/airbyte/pull/35739) | Allow configuring the queue size used for cdc events. | | 3.7.4 | 2024-02-26 | [35566](https://github.com/airbytehq/airbyte/pull/35566) | Add config to throw an error on invalid CDC position. 
| From da79f6ee5b3e77f7f34f56dd638fd0d3aa22e9c1 Mon Sep 17 00:00:00 2001 From: Edward Gao Date: Wed, 6 Mar 2024 10:00:54 -0800 Subject: [PATCH 108/172] Destination Snowflake: Write extracted_at in UTC (#35308) Signed-off-by: Gireesh Sreepathi Co-authored-by: Gireesh Sreepathi --- .../destination-snowflake/build.gradle | 3 +- .../destination-snowflake/metadata.yaml | 2 +- .../SnowflakeInternalStagingDestination.java | 25 +- .../SnowflakeDestinationHandler.java | 107 ++- .../SnowflakeSqlGenerator.java | 40 +- .../migrations/SnowflakeState.kt | 19 + .../AbstractSnowflakeTypingDedupingTest.java | 33 +- .../SnowflakeSqlGeneratorIntegrationTest.java | 905 +++++++++++++++++- ...at_sync1_expectedrecords_dedup_final.jsonl | 5 + ...tracted_at_sync1_expectedrecords_raw.jsonl | 6 + ...orchange_expectedrecords_dedup_final.jsonl | 6 +- ...rsorchange_expectedrecords_dedup_raw.jsonl | 8 +- .../sync1_expectedrecords_dedup_final.jsonl | 8 +- .../sync1_expectedrecords_dedup_final2.jsonl | 2 +- ...sync1_expectedrecords_nondedup_final.jsonl | 10 +- .../dat/sync1_expectedrecords_raw.jsonl | 10 +- .../dat/sync1_expectedrecords_raw2.jsonl | 2 +- ...ectedrecords_incremental_dedup_final.jsonl | 4 +- ...xpectedrecords_incremental_dedup_raw.jsonl | 14 +- ...ctedrecords_fullrefresh_append_final.jsonl | 16 +- ...drecords_fullrefresh_overwrite_final.jsonl | 6 +- ...tedrecords_fullrefresh_overwrite_raw.jsonl | 6 +- ...ectedrecords_incremental_dedup_final.jsonl | 6 +- ...ctedrecords_incremental_dedup_final2.jsonl | 2 +- ...ds_incremental_dedup_final_mixed_tzs.jsonl | 4 + .../dat/sync2_expectedrecords_raw.jsonl | 16 +- .../dat/sync2_expectedrecords_raw2.jsonl | 4 +- .../sync2_expectedrecords_raw_mixed_tzs.jsonl | 10 + docs/integrations/destinations/snowflake.md | 1 + 29 files changed, 1165 insertions(+), 115 deletions(-) create mode 100644 airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/typing_deduping/migrations/SnowflakeState.kt create mode 100644 airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/ltz_extracted_at_sync1_expectedrecords_dedup_final.jsonl create mode 100644 airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/ltz_extracted_at_sync1_expectedrecords_raw.jsonl create mode 100644 airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final_mixed_tzs.jsonl create mode 100644 airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_raw_mixed_tzs.jsonl diff --git a/airbyte-integrations/connectors/destination-snowflake/build.gradle b/airbyte-integrations/connectors/destination-snowflake/build.gradle index b84e054c0609..4cc747506746 100644 --- a/airbyte-integrations/connectors/destination-snowflake/build.gradle +++ b/airbyte-integrations/connectors/destination-snowflake/build.gradle @@ -1,9 +1,10 @@ plugins { id 'airbyte-java-connector' + id 'org.jetbrains.kotlin.jvm' version '1.9.22' } airbyteJavaConnector { - cdkVersionRequired = '0.23.2' + cdkVersionRequired = '0.23.11' features = ['db-destinations', 's3-destinations', 'typing-deduping'] useLocalCdk = false } diff --git a/airbyte-integrations/connectors/destination-snowflake/metadata.yaml b/airbyte-integrations/connectors/destination-snowflake/metadata.yaml index d39c5a8c9669..fc0c46bd82cc 100644 --- a/airbyte-integrations/connectors/destination-snowflake/metadata.yaml +++ 
b/airbyte-integrations/connectors/destination-snowflake/metadata.yaml @@ -5,7 +5,7 @@ data: connectorSubtype: database connectorType: destination definitionId: 424892c4-daac-4491-b35d-c6688ba547ba - dockerImageTag: 3.5.14 + dockerImageTag: 3.6.0 dockerRepository: airbyte/destination-snowflake documentationUrl: https://docs.airbyte.com/integrations/destinations/snowflake githubIssueLabel: destination-snowflake diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingDestination.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingDestination.java index 253212ecf628..29eb9175e988 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingDestination.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingDestination.java @@ -9,6 +9,7 @@ import io.airbyte.cdk.db.jdbc.JdbcDatabase; import io.airbyte.cdk.db.jdbc.JdbcUtils; import io.airbyte.cdk.integrations.base.Destination; +import io.airbyte.cdk.integrations.base.JavaBaseConstants; import io.airbyte.cdk.integrations.base.SerializedAirbyteMessageConsumer; import io.airbyte.cdk.integrations.base.TypingAndDedupingFlag; import io.airbyte.cdk.integrations.destination.NamingConventionTransformer; @@ -23,15 +24,18 @@ import io.airbyte.integrations.base.destination.typing_deduping.ParsedCatalog; import io.airbyte.integrations.base.destination.typing_deduping.TypeAndDedupeOperationValve; import io.airbyte.integrations.base.destination.typing_deduping.TyperDeduper; +import io.airbyte.integrations.base.destination.typing_deduping.migrators.Migration; import io.airbyte.integrations.destination.snowflake.typing_deduping.SnowflakeDestinationHandler; import io.airbyte.integrations.destination.snowflake.typing_deduping.SnowflakeSqlGenerator; import io.airbyte.integrations.destination.snowflake.typing_deduping.SnowflakeV1V2Migrator; import io.airbyte.integrations.destination.snowflake.typing_deduping.SnowflakeV2TableMigrator; +import io.airbyte.integrations.destination.snowflake.typing_deduping.migrations.SnowflakeState; import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; import io.airbyte.protocol.models.v0.AirbyteMessage; import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; import java.util.Collections; +import java.util.List; import java.util.Map; import java.util.Optional; import java.util.UUID; @@ -131,7 +135,7 @@ protected JdbcSqlGenerator getSqlGenerator() { } @Override - protected JdbcDestinationHandler getDestinationHandler(String databaseName, JdbcDatabase database) { + protected JdbcDestinationHandler getDestinationHandler(String databaseName, JdbcDatabase database, String rawTableSchema) { throw new UnsupportedOperationException("Snowflake does not yet use the native JDBC DV2 interface"); } @@ -151,22 +155,33 @@ public SerializedAirbyteMessageConsumer getSerializedMessageConsumer(final JsonN final TyperDeduper typerDeduper; final JdbcDatabase database = getDatabase(getDataSource(config)); final String databaseName = config.get(JdbcUtils.DATABASE_KEY).asText(); - final SnowflakeDestinationHandler snowflakeDestinationHandler = new SnowflakeDestinationHandler(databaseName, database); + final String 
rawTableSchemaName; final CatalogParser catalogParser; if (TypingAndDedupingFlag.getRawNamespaceOverride(RAW_SCHEMA_OVERRIDE).isPresent()) { - catalogParser = new CatalogParser(sqlGenerator, TypingAndDedupingFlag.getRawNamespaceOverride(RAW_SCHEMA_OVERRIDE).get()); + rawTableSchemaName = TypingAndDedupingFlag.getRawNamespaceOverride(RAW_SCHEMA_OVERRIDE).get(); + catalogParser = new CatalogParser(sqlGenerator, rawTableSchemaName); } else { + rawTableSchemaName = JavaBaseConstants.DEFAULT_AIRBYTE_INTERNAL_NAMESPACE; catalogParser = new CatalogParser(sqlGenerator); } + final SnowflakeDestinationHandler snowflakeDestinationHandler = new SnowflakeDestinationHandler(databaseName, database, rawTableSchemaName); parsedCatalog = catalogParser.parseCatalog(catalog); final SnowflakeV1V2Migrator migrator = new SnowflakeV1V2Migrator(getNamingResolver(), database, databaseName); final SnowflakeV2TableMigrator v2TableMigrator = new SnowflakeV2TableMigrator(database, databaseName, sqlGenerator, snowflakeDestinationHandler); final boolean disableTypeDedupe = config.has(DISABLE_TYPE_DEDUPE) && config.get(DISABLE_TYPE_DEDUPE).asBoolean(false); + final List<Migration<SnowflakeState>> migrations = List.of(); if (disableTypeDedupe) { - typerDeduper = new NoOpTyperDeduperWithV1V2Migrations(sqlGenerator, snowflakeDestinationHandler, parsedCatalog, migrator, v2TableMigrator); + typerDeduper = + new NoOpTyperDeduperWithV1V2Migrations<>(sqlGenerator, snowflakeDestinationHandler, parsedCatalog, migrator, v2TableMigrator, migrations); } else { typerDeduper = - new DefaultTyperDeduper(sqlGenerator, snowflakeDestinationHandler, parsedCatalog, migrator, v2TableMigrator); + new DefaultTyperDeduper<>( + sqlGenerator, + snowflakeDestinationHandler, + parsedCatalog, + migrator, + v2TableMigrator, + migrations); } return StagingConsumerFactory.builder( diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeDestinationHandler.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeDestinationHandler.java index 5bfeb5d6b25e..61b500ffccdf 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeDestinationHandler.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeDestinationHandler.java @@ -14,19 +14,22 @@ import io.airbyte.cdk.integrations.destination.jdbc.ColumnDefinition; import io.airbyte.cdk.integrations.destination.jdbc.TableDefinition; import io.airbyte.cdk.integrations.destination.jdbc.typing_deduping.JdbcDestinationHandler; +import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.base.destination.typing_deduping.AirbyteProtocolType; import io.airbyte.integrations.base.destination.typing_deduping.AirbyteType; import io.airbyte.integrations.base.destination.typing_deduping.Array; import io.airbyte.integrations.base.destination.typing_deduping.ColumnId; -import io.airbyte.integrations.base.destination.typing_deduping.DestinationInitialState; -import io.airbyte.integrations.base.destination.typing_deduping.DestinationInitialStateImpl; -import io.airbyte.integrations.base.destination.typing_deduping.InitialRawTableState; +import io.airbyte.integrations.base.destination.typing_deduping.DestinationInitialStatus; +import
io.airbyte.integrations.base.destination.typing_deduping.InitialRawTableStatus; import io.airbyte.integrations.base.destination.typing_deduping.Sql; import io.airbyte.integrations.base.destination.typing_deduping.StreamConfig; import io.airbyte.integrations.base.destination.typing_deduping.StreamId; import io.airbyte.integrations.base.destination.typing_deduping.Struct; import io.airbyte.integrations.base.destination.typing_deduping.Union; import io.airbyte.integrations.base.destination.typing_deduping.UnsupportedOneOf; +import io.airbyte.integrations.destination.snowflake.typing_deduping.migrations.SnowflakeState; +import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; +import io.airbyte.protocol.models.v0.DestinationSyncMode; import java.sql.ResultSet; import java.sql.SQLException; import java.time.Instant; @@ -40,10 +43,11 @@ import java.util.stream.Collectors; import net.snowflake.client.jdbc.SnowflakeSQLException; import org.apache.commons.text.StringSubstitutor; +import org.jooq.SQLDialect; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class SnowflakeDestinationHandler extends JdbcDestinationHandler { +public class SnowflakeDestinationHandler extends JdbcDestinationHandler<SnowflakeState> { private static final Logger LOGGER = LoggerFactory.getLogger(SnowflakeDestinationHandler.class); public static final String EXCEPTION_COMMON_PREFIX = "JavaScript execution error: Uncaught Execution of multiple statements failed on statement"; @@ -51,9 +55,11 @@ public class SnowflakeDestinationHandler extends JdbcDestinationHandler { private final String databaseName; private final JdbcDatabase database; - public SnowflakeDestinationHandler(final String databaseName, final JdbcDatabase database) { - super(databaseName, database); - this.databaseName = databaseName; + public SnowflakeDestinationHandler(final String databaseName, final JdbcDatabase database, final String rawTableSchema) { + // Postgres is close enough to Snowflake SQL for our purposes. + super(databaseName, database, rawTableSchema, SQLDialect.POSTGRES); + // We don't quote the database name in any queries, so just upcase it.
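+    // (Snowflake folds unquoted identifiers to uppercase, so the uppercased name is the one
+    // that the INFORMATION_SCHEMA lookups in this class will actually match.)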
+ this.databaseName = databaseName.toUpperCase(); this.database = database; } @@ -107,7 +113,7 @@ AND table_schema IN (%s) AND table_name IN (%s) """.formatted(paramHolder, paramHolder); final String[] bindValues = new String[streamIds.size() * 2 + 1]; - bindValues[0] = databaseName.toUpperCase(); + bindValues[0] = databaseName; System.arraycopy(namespaces, 0, bindValues, 1, namespaces.length); System.arraycopy(names, 0, bindValues, namespaces.length + 1, names.length); final List results = database.queryJsons(query, bindValues); @@ -120,14 +126,18 @@ AND table_name IN (%s) return tableRowCounts; } - public InitialRawTableState getInitialRawTableState(final StreamId id) throws Exception { + private InitialRawTableStatus getInitialRawTableState(final StreamId id, final DestinationSyncMode destinationSyncMode) throws Exception { + // Short-circuit for overwrite, table will be truncated anyway + if (destinationSyncMode == DestinationSyncMode.OVERWRITE) { + return new InitialRawTableStatus(false, false, Optional.empty()); + } final ResultSet tables = database.getMetaData().getTables( databaseName, id.rawNamespace(), id.rawName(), null); if (!tables.next()) { - return new InitialRawTableState(false, Optional.empty()); + return new InitialRawTableStatus(false, false, Optional.empty()); } // Snowflake timestamps have nanosecond precision, so decrement by 1ns // And use two explicit queries because COALESCE doesn't short-circuit. @@ -136,33 +146,55 @@ public InitialRawTableState getInitialRawTableState(final StreamId id) throws Ex conn -> conn.createStatement().executeQuery(new StringSubstitutor(Map.of( "raw_table", id.rawTableId(SnowflakeSqlGenerator.QUOTE))).replace( """ - SELECT to_varchar( - TIMESTAMPADD(NANOSECOND, -1, MIN("_airbyte_extracted_at")), - 'YYYY-MM-DDTHH24:MI:SS.FF9TZH:TZM' - ) AS MIN_TIMESTAMP - FROM ${raw_table} - WHERE "_airbyte_loaded_at" IS NULL + WITH MIN_TS AS ( + SELECT TIMESTAMPADD(NANOSECOND, -1, + MIN(TIMESTAMPADD( + HOUR, + EXTRACT(timezone_hour from "_airbyte_extracted_at"), + TIMESTAMPADD( + MINUTE, + EXTRACT(timezone_minute from "_airbyte_extracted_at"), + CONVERT_TIMEZONE('UTC', "_airbyte_extracted_at") + ) + ))) AS MIN_TIMESTAMP + FROM ${raw_table} + WHERE "_airbyte_loaded_at" IS NULL + ) SELECT TO_VARCHAR(MIN_TIMESTAMP,'YYYY-MM-DDTHH24:MI:SS.FF9TZH:TZM') as MIN_TIMESTAMP_UTC from MIN_TS; """)), // The query will always return exactly one record, so use .get(0) - record -> record.getString("MIN_TIMESTAMP")).get(0)); + record -> record.getString("MIN_TIMESTAMP_UTC")).get(0)); if (minUnloadedTimestamp.isPresent()) { - return new InitialRawTableState(true, minUnloadedTimestamp.map(Instant::parse)); + return new InitialRawTableStatus(true, true, minUnloadedTimestamp.map(Instant::parse)); } // If there are no unloaded raw records, then we can safely skip all existing raw records. // This second query just finds the newest raw record. + + // This is _technically_ wrong, because during the DST transition we might select + // the wrong max timestamp. We _should_ do the UTC conversion inside the CTE, but that's a lot + // of work for a very small edge case. + // We released the fix to write extracted_at in UTC before DST changed, so this is fine. 
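+    // A hedged sketch of the same recovery in plain Java (names are illustrative, not part of
+    // this class): the old connector kept the UTC wall-clock but stamped it with the session
+    // offset, so re-reading the wall-clock as UTC restores the original instant.
+    //   OffsetDateTime stored = OffsetDateTime.parse("1970-01-01T00:00:01-08:00");
+    //   Instant recovered = stored.toLocalDateTime().toInstant(ZoneOffset.UTC);
+    //   // recovered == 1970-01-01T00:00:01Z, which is what the TIMESTAMPADD/EXTRACT/
+    //   // CONVERT_TIMEZONE expression above computes in SQL.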
final Optional<String> maxTimestamp = Optional.ofNullable(database.queryStrings( conn -> conn.createStatement().executeQuery(new StringSubstitutor(Map.of( "raw_table", id.rawTableId(SnowflakeSqlGenerator.QUOTE))).replace( """ - SELECT to_varchar( - MAX("_airbyte_extracted_at"), - 'YYYY-MM-DDTHH24:MI:SS.FF9TZH:TZM' - ) AS MIN_TIMESTAMP - FROM ${raw_table} + WITH MAX_TS AS ( + SELECT MAX("_airbyte_extracted_at") + AS MAX_TIMESTAMP + FROM ${raw_table} + ) SELECT TO_VARCHAR( + TIMESTAMPADD( + HOUR, + EXTRACT(timezone_hour from MAX_TIMESTAMP), + TIMESTAMPADD( + MINUTE, + EXTRACT(timezone_minute from MAX_TIMESTAMP), + CONVERT_TIMEZONE('UTC', MAX_TIMESTAMP) + ) + ),'YYYY-MM-DDTHH24:MI:SS.FF9TZH:TZM') as MAX_TIMESTAMP_UTC from MAX_TS; """)), - record -> record.getString("MIN_TIMESTAMP")).get(0)); - return new InitialRawTableState(false, maxTimestamp.map(Instant::parse)); + record -> record.getString("MAX_TIMESTAMP_UTC")).get(0)); + return new InitialRawTableStatus(true, false, maxTimestamp.map(Instant::parse)); } @Override @@ -171,7 +203,7 @@ public void execute(final Sql sql) throws Exception { final UUID queryId = UUID.randomUUID(); for (final String transaction : transactions) { final UUID transactionId = UUID.randomUUID(); - LOGGER.debug("Executing sql {}-{}: {}", queryId, transactionId, transaction); + LOGGER.info("Executing sql {}-{}: {}", queryId, transactionId, transaction); final long startTime = System.currentTimeMillis(); try { @@ -190,7 +222,7 @@ public void execute(final Sql sql) throws Exception { throw new RuntimeException(trimmedMessage, e); } - LOGGER.debug("Sql {}-{} completed in {} ms", queryId, transactionId, System.currentTimeMillis() - startTime); + LOGGER.info("Sql {}-{} completed in {} ms", queryId, transactionId, System.currentTimeMillis() - startTime); } } @@ -250,7 +282,9 @@ protected boolean existingSchemaMatchesStreamConfig(final StreamConfig stream, f } @Override - public List<DestinationInitialState> gatherInitialState(List<StreamConfig> streamConfigs) throws Exception { + public List<DestinationInitialStatus<SnowflakeState>> gatherInitialState(List<StreamConfig> streamConfigs) throws Exception { + final Map<AirbyteStreamNameNamespacePair, SnowflakeState> destinationStates = super.getAllDestinationStates(); + List<StreamId> streamIds = streamConfigs.stream().map(StreamConfig::id).toList(); final LinkedHashMap<String, LinkedHashMap<String, TableDefinition>> existingTables = findExistingTables(database, databaseName, streamIds); final LinkedHashMap<String, LinkedHashMap<String, Integer>> tableRowCounts = getFinalTableRowCount(streamIds); @@ -267,8 +301,15 @@ public List gatherInitialState(List strea isSchemaMismatch = !existingSchemaMatchesStreamConfig(streamConfig, existingTable); isFinalTableEmpty = hasRowCount && tableRowCounts.get(namespace).get(name) == 0; } - final InitialRawTableState initialRawTableState = getInitialRawTableState(streamConfig.id()); - return new DestinationInitialStateImpl(streamConfig, isFinalTablePresent, initialRawTableState, isSchemaMismatch, isFinalTableEmpty); + final InitialRawTableStatus initialRawTableState = getInitialRawTableState(streamConfig.id(), streamConfig.destinationSyncMode()); + final SnowflakeState destinationState = destinationStates.getOrDefault(streamConfig.id().asPair(), toDestinationState(Jsons.emptyObject())); + return new DestinationInitialStatus<>( + streamConfig, + isFinalTablePresent, + initialRawTableState, + isSchemaMismatch, + isFinalTableEmpty, + destinationState); } catch (Exception e) { throw new RuntimeException(e); } @@ -290,6 +331,12 @@ protected String toJdbcTypeName(AirbyteType airbyteType) { }; } + @Override + protected SnowflakeState toDestinationState(JsonNode json) { + return new SnowflakeState( + json.hasNonNull("needsSoftReset") &&
json.get("needsSoftReset").asBoolean()); + } + private String toJdbcTypeName(final AirbyteProtocolType airbyteProtocolType) { return switch (airbyteProtocolType) { case STRING -> "TEXT"; diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGenerator.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGenerator.java index 37b0bdaefff8..9c87733e6611 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGenerator.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGenerator.java @@ -142,7 +142,7 @@ public Sql updateTable(final StreamConfig stream, dedupFinalTable = dedupFinalTable(stream.id(), finalSuffix, stream.primaryKey(), stream.cursor()); cdcDeletes = cdcDeletes(stream, finalSuffix); } - final String commitRawTable = commitRawTable(stream.id(), minRawTimestamp); + final String commitRawTable = commitRawTable(stream.id()); return transactionally(insertNewRecords, dedupFinalTable, cdcDeletes, commitRawTable); } @@ -227,6 +227,21 @@ WHEN TYPEOF(${expression}) != 'ARRAY' } } + private static String airbyteExtractedAtUtcForced(final String sqlExpression) { + return new StringSubstitutor(Map.of("expression", sqlExpression)).replace( + """ + TIMESTAMPADD( + HOUR, + EXTRACT(timezone_hour from ${expression}), + TIMESTAMPADD( + MINUTE, + EXTRACT(timezone_minute from ${expression}), + CONVERT_TIMEZONE('UTC', ${expression}) + ) + ) + """); + } + @VisibleForTesting String insertNewRecords(final StreamConfig stream, final String finalSuffix, @@ -297,14 +312,15 @@ AND TYPEOF("_airbyte_data":"_ab_cdc_deleted_at") NOT IN ('NULL', 'NULL_VALUE') "extractedAtCondition", extractedAtCondition, "column_list", columnList, "pk_list", pkList, - "cursor_order_clause", cursorOrderClause)).replace( + "cursor_order_clause", cursorOrderClause, + "airbyte_extracted_at_utc", airbyteExtractedAtUtcForced("\"_airbyte_extracted_at\""))).replace( """ WITH intermediate_data AS ( SELECT ${column_casts} ARRAY_CONSTRUCT_COMPACT(${column_errors}) as "_airbyte_cast_errors", "_airbyte_raw_id", - "_airbyte_extracted_at" + ${airbyte_extracted_at_utc} as "_airbyte_extracted_at" FROM ${raw_table_id} WHERE ( "_airbyte_loaded_at" IS NULL @@ -332,14 +348,15 @@ WITH intermediate_data AS ( "column_casts", columnCasts, "column_errors", columnErrors, "extractedAtCondition", extractedAtCondition, - "column_list", columnList)).replace( + "column_list", columnList, + "airbyte_extracted_at_utc", airbyteExtractedAtUtcForced("\"_airbyte_extracted_at\""))).replace( """ WITH intermediate_data AS ( SELECT ${column_casts} ARRAY_CONSTRUCT_COMPACT(${column_errors}) as "_airbyte_cast_errors", "_airbyte_raw_id", - "_airbyte_extracted_at" + ${airbyte_extracted_at_utc} as "_airbyte_extracted_at" FROM ${raw_table_id} WHERE "_airbyte_loaded_at" IS NULL @@ -356,7 +373,7 @@ WITH intermediate_data AS ( private static String buildExtractedAtCondition(final Optional minRawTimestamp) { return minRawTimestamp - .map(ts -> " AND \"_airbyte_extracted_at\" > '" + ts + "'") + .map(ts -> " AND " + airbyteExtractedAtUtcForced("\"_airbyte_extracted_at\"") + " > '" + ts + "'") .orElse(""); } @@ -373,13 +390,14 @@ String dedupFinalTable(final StreamId id, return new 
StringSubstitutor(Map.of( "final_table_id", id.finalTableId(QUOTE, finalSuffix.toUpperCase()), "pk_list", pkList, - "cursor_order_clause", cursorOrderClause)).replace( + "cursor_order_clause", cursorOrderClause, + "airbyte_extracted_at_utc", airbyteExtractedAtUtcForced("\"_AIRBYTE_EXTRACTED_AT\""))).replace( """ DELETE FROM ${final_table_id} WHERE "_AIRBYTE_RAW_ID" IN ( SELECT "_AIRBYTE_RAW_ID" FROM ( SELECT "_AIRBYTE_RAW_ID", row_number() OVER ( - PARTITION BY ${pk_list} ORDER BY ${cursor_order_clause} "_AIRBYTE_EXTRACTED_AT" DESC + PARTITION BY ${pk_list} ORDER BY ${cursor_order_clause} ${airbyte_extracted_at_utc} DESC ) as row_number FROM ${final_table_id} ) WHERE row_number != 1 @@ -406,15 +424,13 @@ private String cdcDeletes(final StreamConfig stream, final String finalSuffix) { } @VisibleForTesting - String commitRawTable(final StreamId id, final Optional minRawTimestamp) { + String commitRawTable(final StreamId id) { return new StringSubstitutor(Map.of( - "raw_table_id", id.rawTableId(QUOTE), - "extractedAtCondition", buildExtractedAtCondition(minRawTimestamp))).replace( + "raw_table_id", id.rawTableId(QUOTE))).replace( """ UPDATE ${raw_table_id} SET "_airbyte_loaded_at" = CURRENT_TIMESTAMP() WHERE "_airbyte_loaded_at" IS NULL - ${extractedAtCondition} ;"""); } diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/typing_deduping/migrations/SnowflakeState.kt b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/typing_deduping/migrations/SnowflakeState.kt new file mode 100644 index 000000000000..d6648acb142b --- /dev/null +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/typing_deduping/migrations/SnowflakeState.kt @@ -0,0 +1,19 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.snowflake.typing_deduping.migrations + +import io.airbyte.integrations.base.destination.typing_deduping.migrators.MinimumDestinationState + +// Note the nonnullable fields. Even though the underlying storage medium (a JSON blob) supports +// nullability, we don't want to deal with that in our codebase. 
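+// A hedged usage sketch (values illustrative; this mirrors how the typer-deduper consumes the
+// state defined just below):
+//   val clean = SnowflakeState(needsSoftReset = false)
+//   val flagged: SnowflakeState = clean.withSoftReset(true) // data-class copy
+//   check(flagged.needsSoftReset())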
+data class SnowflakeState(val needsSoftReset: Boolean) : MinimumDestinationState { + override fun needsSoftReset(): Boolean { + return needsSoftReset + } + + override fun <T : MinimumDestinationState> withSoftReset(needsSoftReset: Boolean): T { + return copy(needsSoftReset = needsSoftReset) as T + } +} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/typing_deduping/AbstractSnowflakeTypingDedupingTest.java b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/typing_deduping/AbstractSnowflakeTypingDedupingTest.java index 2c502d1c1ac9..de6f4f849868 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/typing_deduping/AbstractSnowflakeTypingDedupingTest.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/typing_deduping/AbstractSnowflakeTypingDedupingTest.java @@ -149,7 +149,7 @@ public void testFinalTableUppercasingMigration_append() throws Exception { runSync(catalog, messages2); - final List<JsonNode> expectedRawRecords2 = readRecords("dat/sync2_expectedrecords_raw.jsonl"); + final List<JsonNode> expectedRawRecords2 = readRecords("dat/sync2_expectedrecords_raw_mixed_tzs.jsonl"); final List<JsonNode> expectedFinalRecords2 = readRecords("dat/sync2_expectedrecords_fullrefresh_append_final.jsonl"); verifySyncResult(expectedRawRecords2, expectedFinalRecords2, disableFinalTableComparison()); } finally { @@ -218,6 +218,37 @@ public void testRemovingPKNonNullIndexes() throws Exception { assertEquals(1, dumpFinalTableRecords(streamNamespace, streamName).toArray().length); } + @Test + public void testExtractedAtUtcTimezoneMigration() throws Exception { + final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(List.of( + new ConfiguredAirbyteStream() + .withSyncMode(SyncMode.INCREMENTAL) + .withDestinationSyncMode(DestinationSyncMode.APPEND_DEDUP) + .withPrimaryKey(List.of(List.of("id1"), List.of("id2"))) + .withCursorField(List.of("updated_at")) + .withStream(new AirbyteStream() + .withNamespace(streamNamespace) + .withName(streamName) + .withJsonSchema(SCHEMA)))); + + // First sync + final List<AirbyteMessage> messages1 = readMessages("dat/sync1_messages.jsonl"); + runSync(catalog, messages1, "airbyte/destination-snowflake:3.5.11"); + + final List<JsonNode> expectedRawRecords1 = readRecords("dat/ltz_extracted_at_sync1_expectedrecords_raw.jsonl"); + final List<JsonNode> expectedFinalRecords1 = readRecords("dat/ltz_extracted_at_sync1_expectedrecords_dedup_final.jsonl"); + verifySyncResult(expectedRawRecords1, expectedFinalRecords1, disableFinalTableComparison()); + + // Second sync + final List<AirbyteMessage> messages2 = readMessages("dat/sync2_messages.jsonl"); + + runSync(catalog, messages2); + + final List<JsonNode> expectedRawRecords2 = readRecords("dat/sync2_expectedrecords_raw_mixed_tzs.jsonl"); + final List<JsonNode> expectedFinalRecords2 = readRecords("dat/sync2_expectedrecords_incremental_dedup_final_mixed_tzs.jsonl"); + verifySyncResult(expectedRawRecords2, expectedFinalRecords2, disableFinalTableComparison()); + } + private String getDefaultSchema() { return getConfig().get("schema").asText(); } diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGeneratorIntegrationTest.java
b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGeneratorIntegrationTest.java index bf204e1909d7..7277f5991957 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGeneratorIntegrationTest.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGeneratorIntegrationTest.java @@ -22,13 +22,15 @@ import io.airbyte.commons.io.IOs; import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.base.destination.typing_deduping.BaseSqlGeneratorIntegrationTest; -import io.airbyte.integrations.base.destination.typing_deduping.DestinationInitialState; +import io.airbyte.integrations.base.destination.typing_deduping.DestinationInitialStatus; import io.airbyte.integrations.base.destination.typing_deduping.Sql; import io.airbyte.integrations.base.destination.typing_deduping.StreamId; +import io.airbyte.integrations.base.destination.typing_deduping.TypeAndDedupeTransaction; import io.airbyte.integrations.destination.snowflake.OssCloudEnvVarConsts; import io.airbyte.integrations.destination.snowflake.SnowflakeDatabase; import io.airbyte.integrations.destination.snowflake.SnowflakeTestSourceOperations; import io.airbyte.integrations.destination.snowflake.SnowflakeTestUtils; +import io.airbyte.integrations.destination.snowflake.typing_deduping.migrations.SnowflakeState; import java.nio.file.Path; import java.sql.SQLException; import java.util.Arrays; @@ -43,9 +45,10 @@ import org.apache.commons.text.StringSubstitutor; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; -public class SnowflakeSqlGeneratorIntegrationTest extends BaseSqlGeneratorIntegrationTest { +public class SnowflakeSqlGeneratorIntegrationTest extends BaseSqlGeneratorIntegrationTest<SnowflakeState> { private static String databaseName; private static JdbcDatabase database; @@ -71,7 +74,7 @@ protected SnowflakeSqlGenerator getSqlGenerator() { @Override protected SnowflakeDestinationHandler getDestinationHandler() { - return new SnowflakeDestinationHandler(databaseName, database); + return new SnowflakeDestinationHandler(databaseName, database, namespace.toUpperCase()); } @Override @@ -387,6 +390,13 @@ record -> record.get(JavaBaseConstants.COLUMN_NAME_AB_RAW_ID).asText(), }); } + @Disabled("We removed the optimization to only set the loaded_at column for new records after certain _extracted_at") + @Test + @Override + public void ignoreOldRawRecords() throws Exception { + super.ignoreOldRawRecords(); + } + /** * Verify that the final table does not include NON-NULL PKs (after * https://github.com/airbytehq/airbyte/pull/31082) @@ -412,9 +422,9 @@ public void ensurePKsAreIndexedUnique() throws Exception { // should be OK with new tables destinationHandler.execute(createTable); - List<DestinationInitialState> initialStates = destinationHandler.gatherInitialState(List.of(incrementalDedupStream)); + List<DestinationInitialStatus<SnowflakeState>> initialStates = destinationHandler.gatherInitialState(List.of(incrementalDedupStream)); assertEquals(1, initialStates.size()); - assertFalse(initialStates.get(0).isSchemaMismatch()); + assertFalse(initialStates.getFirst().isSchemaMismatch()); destinationHandler.execute(Sql.of("DROP TABLE " + streamId.finalTableId(""))); // Hack the create query to add NOT
NULLs to emulate the old behavior @@ -431,4 +441,889 @@ public void ensurePKsAreIndexedUnique() throws Exception { assertTrue(initialStates.get(0).isSchemaMismatch()); } + @Test + public void dst_test_oldSyncRunsThroughTransition_thenNewSyncRuns_dedup() throws Exception { + this.createRawTable(this.streamId); + this.createFinalTable(this.incrementalDedupStream, ""); + this.insertRawTableRecords(this.streamId, List.of( + // 2 records written by a sync running on the old version of snowflake + Jsons.deserialize(""" + { + "_airbyte_raw_id": "pre-dst local tz 1", + "_airbyte_extracted_at": "2024-03-10T02:00:00-08:00", + "_airbyte_data": { + "id1": 1, + "id2": 100, + "string": "Alice00" + } + } + """), + Jsons.deserialize(""" + { + "_airbyte_raw_id": "post-dst local tz 2", + "_airbyte_extracted_at": "2024-03-10T02:01:00-07:00", + "_airbyte_data": { + "id1": 2, + "id2": 100, + "string": "Bob00" + } + } + """), + // and 2 records that got successfully loaded. + Jsons.deserialize(""" + { + "_airbyte_raw_id": "pre-dst local tz 3", + "_airbyte_extracted_at": "2024-03-10T02:00:00-08:00", + "_airbyte_loaded_at": "1970-01-01T00:00:00Z", + "_airbyte_data": { + "id1": 3, + "id2": 100, + "string": "Charlie00" + } + } + """), + Jsons.deserialize(""" + { + "_airbyte_raw_id": "post-dst local tz 4", + "_airbyte_extracted_at": "2024-03-10T02:01:00-07:00", + "_airbyte_loaded_at": "1970-01-01T00:00:00Z", + "_airbyte_data": { + "id1": 4, + "id2": 100, + "string": "Dave00" + } + } + """))); + this.insertFinalTableRecords(false, this.streamId, "", List.of( + Jsons.deserialize(""" + { + "_airbyte_raw_id": "pre-dst local tz 3", + "_airbyte_extracted_at": "2024-03-10T02:00:00-08:00", + "_airbyte_meta": {"errors": []}, + "id1": 3, + "id2": 100, + "string": "Charlie00" + } + """), + Jsons.deserialize(""" + { + "_airbyte_raw_id": "post-dst local tz 4", + "_airbyte_extracted_at": "2024-03-10T02:01:00-07:00", + "_airbyte_meta": {"errors": []}, + "id1": 4, + "id2": 100, + "string": "Dave00" + } + """))); + // Gather initial state at the start of our updated sync + DestinationInitialStatus initialState = + this.destinationHandler.gatherInitialState(List.of(this.incrementalDedupStream)).getFirst(); + this.insertRawTableRecords(this.streamId, List.of( + // insert raw records with updates + Jsons.deserialize(""" + { + "_airbyte_raw_id": "post-dst utc 1", + "_airbyte_extracted_at": "2024-03-10T02:02:00Z", + "_airbyte_data": { + "id1": 1, + "id2": 100, + "string": "Alice01" + } + } + """), + Jsons.deserialize(""" + { + "_airbyte_raw_id": "post-dst utc 2", + "_airbyte_extracted_at": "2024-03-10T02:02:00Z", + "_airbyte_data": { + "id1": 2, + "id2": 100, + "string": "Bob01" + } + } + """), + Jsons.deserialize(""" + { + "_airbyte_raw_id": "post-dst utc 3", + "_airbyte_extracted_at": "2024-03-10T02:02:00Z", + "_airbyte_data": { + "id1": 3, + "id2": 100, + "string": "Charlie01" + } + } + """), + Jsons.deserialize(""" + { + "_airbyte_raw_id": "post-dst utc 4", + "_airbyte_extracted_at": "2024-03-10T02:02:00Z", + "_airbyte_data": { + "id1": 4, + "id2": 100, + "string": "Dave01" + } + } + """))); + + TypeAndDedupeTransaction.executeTypeAndDedupe(this.generator, this.destinationHandler, this.incrementalDedupStream, + initialState.initialRawTableStatus().maxProcessedTimestamp(), ""); + + DIFFER.diffFinalTableRecords( + List.of( + Jsons.deserialize(""" + { + "_AIRBYTE_RAW_ID": "post-dst utc 1", + "_AIRBYTE_EXTRACTED_AT": "2024-03-10T02:02:00.000000000Z", + "_AIRBYTE_META": {"errors": []}, + "ID1": 1, + "ID2": 100, + "STRING": "Alice01" + } + 
"""), + Jsons.deserialize(""" + { + "_AIRBYTE_RAW_ID": "post-dst utc 2", + "_AIRBYTE_EXTRACTED_AT": "2024-03-10T02:02:00.000000000Z", + "_AIRBYTE_META": {"errors": []}, + "ID1": 2, + "ID2": 100, + "STRING": "Bob01" + } + """), + Jsons.deserialize(""" + { + "_AIRBYTE_RAW_ID": "post-dst utc 3", + "_AIRBYTE_EXTRACTED_AT": "2024-03-10T02:02:00.000000000Z", + "_AIRBYTE_META": {"errors": []}, + "ID1": 3, + "ID2": 100, + "STRING": "Charlie01" + } + """), + Jsons.deserialize(""" + { + "_AIRBYTE_RAW_ID": "post-dst utc 4", + "_AIRBYTE_EXTRACTED_AT": "2024-03-10T02:02:00.000000000Z", + "_AIRBYTE_META": {"errors": []}, + "ID1": 4, + "ID2": 100, + "STRING": "Dave01" + } + """)), + this.dumpFinalTableRecords(this.streamId, "")); + } + + @Test + public void dst_test_oldSyncRunsBeforeTransition_thenNewSyncRunsThroughTransition_dedup() throws Exception { + this.createRawTable(this.streamId); + this.createFinalTable(this.incrementalDedupStream, ""); + this.insertRawTableRecords(this.streamId, List.of( + // record written by a sync running on the old version of snowflake + Jsons.deserialize(""" + { + "_airbyte_raw_id": "pre-dst local tz 1", + "_airbyte_extracted_at": "2024-03-10T01:59:00-08:00", + "_airbyte_data": { + "id1": 1, + "id2": 100, + "string": "Alice00" + } + } + """))); + // Gather initial state at the start of our updated sync + DestinationInitialStatus initialState = + this.destinationHandler.gatherInitialState(List.of(this.incrementalDedupStream)).getFirst(); + this.insertRawTableRecords(this.streamId, List.of( + // update the record twice + // this never really happens, but verify that it works + Jsons.deserialize(""" + { + "_airbyte_raw_id": "pre-dst utc 1", + "_airbyte_extracted_at": "2024-03-10T02:00:00Z", + "_airbyte_data": { + "id1": 1, + "id2": 100, + "string": "Alice01" + } + } + """), + Jsons.deserialize(""" + { + "_airbyte_raw_id": "post-dst utc 1", + "_airbyte_extracted_at": "2024-03-10T02:01:00Z", + "_airbyte_data": { + "id1": 1, + "id2": 100, + "string": "Alice02" + } + } + """))); + + TypeAndDedupeTransaction.executeTypeAndDedupe(this.generator, this.destinationHandler, this.incrementalDedupStream, + initialState.initialRawTableStatus().maxProcessedTimestamp(), ""); + + DIFFER.diffFinalTableRecords( + List.of( + Jsons.deserialize(""" + { + "_AIRBYTE_RAW_ID": "post-dst utc 1", + "_AIRBYTE_EXTRACTED_AT": "2024-03-10T02:01:00.000000000Z", + "_AIRBYTE_META": {"errors": []}, + "ID1": 1, + "ID2": 100, + "STRING": "Alice02" + } + """)), + this.dumpFinalTableRecords(this.streamId, "")); + } + + @Test + public void dst_test_oldSyncRunsBeforeTransition_thenNewSyncRunsBeforeTransition_thenNewSyncRunsThroughTransition_dedup() throws Exception { + this.createRawTable(this.streamId); + this.createFinalTable(this.incrementalDedupStream, ""); + this.insertRawTableRecords(this.streamId, List.of( + // records written by a sync running on the old version of snowflake + Jsons.deserialize(""" + { + "_airbyte_raw_id": "pre-dst local tz 1", + "_airbyte_extracted_at": "2024-03-10T01:59:00-08:00", + "_airbyte_data": { + "id1": 1, + "id2": 100, + "string": "Alice00" + } + } + """), + Jsons.deserialize(""" + { + "_airbyte_raw_id": "pre-dst local tz 2", + "_airbyte_extracted_at": "2024-03-10T01:59:00-08:00", + "_airbyte_data": { + "id1": 2, + "id2": 100, + "string": "Bob00" + } + } + """))); + + // Gather initial state at the start of our first new sync + DestinationInitialStatus initialState = + this.destinationHandler.gatherInitialState(List.of(this.incrementalDedupStream)).getFirst(); + 
this.insertRawTableRecords(this.streamId, List.of( + // update the records + Jsons.deserialize(""" + { + "_airbyte_raw_id": "pre-dst utc 1", + "_airbyte_extracted_at": "2024-03-10T02:00:00Z", + "_airbyte_data": { + "id1": 1, + "id2": 100, + "string": "Alice01" + } + } + """), + Jsons.deserialize(""" + { + "_airbyte_raw_id": "pre-dst utc 2", + "_airbyte_extracted_at": "2024-03-10T02:00:00Z", + "_airbyte_data": { + "id1": 2, + "id2": 100, + "string": "Bob01" + } + } + """))); + + TypeAndDedupeTransaction.executeTypeAndDedupe(this.generator, this.destinationHandler, this.incrementalDedupStream, + initialState.initialRawTableStatus().maxProcessedTimestamp(), ""); + + DIFFER.diffFinalTableRecords( + List.of( + Jsons.deserialize(""" + { + "_AIRBYTE_RAW_ID": "pre-dst utc 1", + "_AIRBYTE_EXTRACTED_AT": "2024-03-10T02:00:00.000000000Z", + "_AIRBYTE_META": {"errors": []}, + "ID1": 1, + "ID2": 100, + "STRING": "Alice01" + } + """), + Jsons.deserialize(""" + { + "_AIRBYTE_RAW_ID": "pre-dst utc 2", + "_AIRBYTE_EXTRACTED_AT": "2024-03-10T02:00:00.000000000Z", + "_AIRBYTE_META": {"errors": []}, + "ID1": 2, + "ID2": 100, + "STRING": "Bob01" + } + """)), + this.dumpFinalTableRecords(this.streamId, "")); + + // Gather initial state at the start of our second new sync + DestinationInitialStatus initialState2 = + this.destinationHandler.gatherInitialState(List.of(this.incrementalDedupStream)).getFirst(); + this.insertRawTableRecords(this.streamId, List.of( + // update the records again + Jsons.deserialize(""" + { + "_airbyte_raw_id": "post-dst utc 1", + "_airbyte_extracted_at": "2024-03-10T02:01:00Z", + "_airbyte_data": { + "id1": 1, + "id2": 100, + "string": "Alice02" + } + } + """), + Jsons.deserialize(""" + { + "_airbyte_raw_id": "post-dst utc 2", + "_airbyte_extracted_at": "2024-03-10T02:01:00Z", + "_airbyte_data": { + "id1": 2, + "id2": 100, + "string": "Bob02" + } + } + """))); + + TypeAndDedupeTransaction.executeTypeAndDedupe(this.generator, this.destinationHandler, this.incrementalDedupStream, + initialState2.initialRawTableStatus().maxProcessedTimestamp(), ""); + + DIFFER.diffFinalTableRecords( + List.of( + Jsons.deserialize(""" + { + "_AIRBYTE_RAW_ID": "post-dst utc 1", + "_AIRBYTE_EXTRACTED_AT": "2024-03-10T02:01:00.000000000Z", + "_AIRBYTE_META": {"errors": []}, + "ID1": 1, + "ID2": 100, + "STRING": "Alice02" + } + """), + Jsons.deserialize(""" + { + "_AIRBYTE_RAW_ID": "post-dst utc 2", + "_AIRBYTE_EXTRACTED_AT": "2024-03-10T02:01:00.000000000Z", + "_AIRBYTE_META": {"errors": []}, + "ID1": 2, + "ID2": 100, + "STRING": "Bob02" + } + """)), + this.dumpFinalTableRecords(this.streamId, "")); + } + + @Test + public void dst_test_oldSyncRunsThroughTransition_thenNewSyncRuns_append() throws Exception { + this.createRawTable(this.streamId); + this.createFinalTable(this.incrementalAppendStream, ""); + this.insertRawTableRecords(this.streamId, List.of( + // 2 records written by a sync running on the old version of snowflake + Jsons.deserialize(""" + { + "_airbyte_raw_id": "pre-dst local tz 1", + "_airbyte_extracted_at": "2024-03-10T02:00:00-08:00", + "_airbyte_data": { + "id1": 1, + "id2": 100, + "string": "Alice00" + } + } + """), + Jsons.deserialize(""" + { + "_airbyte_raw_id": "post-dst local tz 2", + "_airbyte_extracted_at": "2024-03-10T02:01:00-07:00", + "_airbyte_data": { + "id1": 2, + "id2": 100, + "string": "Bob00" + } + } + """), + // and 2 records that got successfully loaded with local TZ. 
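+        // (Their final-table copies below keep the -08:00/-07:00 offsets: rows that were already
+        // typed are never rewritten, so only records typed after this fix come out as UTC.)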
+ Jsons.deserialize(""" + { + "_airbyte_raw_id": "pre-dst local tz 3", + "_airbyte_extracted_at": "2024-03-10T02:00:00-08:00", + "_airbyte_loaded_at": "1970-01-01T00:00:00Z", + "_airbyte_data": { + "id1": 3, + "id2": 100, + "string": "Charlie00" + } + } + """), + Jsons.deserialize(""" + { + "_airbyte_raw_id": "post-dst local tz 4", + "_airbyte_extracted_at": "2024-03-10T02:01:00-07:00", + "_airbyte_loaded_at": "1970-01-01T00:00:00Z", + "_airbyte_data": { + "id1": 4, + "id2": 100, + "string": "Dave00" + } + } + """))); + this.insertFinalTableRecords(false, this.streamId, "", List.of( + Jsons.deserialize(""" + { + "_airbyte_raw_id": "pre-dst local tz 3", + "_airbyte_extracted_at": "2024-03-10T02:00:00-08:00", + "_airbyte_meta": {"errors": []}, + "id1": 3, + "id2": 100, + "string": "Charlie00" + } + """), + Jsons.deserialize(""" + { + "_airbyte_raw_id": "post-dst local tz 4", + "_airbyte_extracted_at": "2024-03-10T02:01:00-07:00", + "_airbyte_meta": {"errors": []}, + "id1": 4, + "id2": 100, + "string": "Dave00" + } + """))); + // Gather initial state at the start of our updated sync + DestinationInitialStatus initialState = + this.destinationHandler.gatherInitialState(List.of(this.incrementalAppendStream)).getFirst(); + this.insertRawTableRecords(this.streamId, List.of( + // insert raw records with updates + Jsons.deserialize(""" + { + "_airbyte_raw_id": "post-dst utc 1", + "_airbyte_extracted_at": "2024-03-10T02:02:00Z", + "_airbyte_data": { + "id1": 1, + "id2": 100, + "string": "Alice01" + } + } + """), + Jsons.deserialize(""" + { + "_airbyte_raw_id": "post-dst utc 2", + "_airbyte_extracted_at": "2024-03-10T02:02:00Z", + "_airbyte_data": { + "id1": 2, + "id2": 100, + "string": "Bob01" + } + } + """), + Jsons.deserialize(""" + { + "_airbyte_raw_id": "post-dst utc 3", + "_airbyte_extracted_at": "2024-03-10T02:02:00Z", + "_airbyte_data": { + "id1": 3, + "id2": 100, + "string": "Charlie01" + } + } + """), + Jsons.deserialize(""" + { + "_airbyte_raw_id": "post-dst utc 4", + "_airbyte_extracted_at": "2024-03-10T02:02:00Z", + "_airbyte_data": { + "id1": 4, + "id2": 100, + "string": "Dave01" + } + } + """))); + + TypeAndDedupeTransaction.executeTypeAndDedupe(this.generator, this.destinationHandler, this.incrementalAppendStream, + initialState.initialRawTableStatus().maxProcessedTimestamp(), ""); + + DIFFER.diffFinalTableRecords( + List.of( + Jsons.deserialize(""" + { + "_AIRBYTE_RAW_ID": "pre-dst local tz 1", + "_AIRBYTE_EXTRACTED_AT": "2024-03-10T02:00:00.000000000Z", + "_AIRBYTE_META": {"errors": []}, + "ID1": 1, + "ID2": 100, + "STRING": "Alice00" + } + """), + Jsons.deserialize(""" + { + "_AIRBYTE_RAW_ID": "post-dst utc 1", + "_AIRBYTE_EXTRACTED_AT": "2024-03-10T02:02:00.000000000Z", + "_AIRBYTE_META": {"errors": []}, + "ID1": 1, + "ID2": 100, + "STRING": "Alice01" + } + """), + Jsons.deserialize(""" + { + "_AIRBYTE_RAW_ID": "post-dst local tz 2", + "_AIRBYTE_EXTRACTED_AT": "2024-03-10T02:01:00.000000000Z", + "_AIRBYTE_META": { + "errors": [] + }, + "ID1": 2, + "ID2": 100, + "STRING": "Bob00" + }"""), + Jsons.deserialize(""" + { + "_AIRBYTE_RAW_ID": "post-dst utc 2", + "_AIRBYTE_EXTRACTED_AT": "2024-03-10T02:02:00.000000000Z", + "_AIRBYTE_META": {"errors": []}, + "ID1": 2, + "ID2": 100, + "STRING": "Bob01" + } + """), + // note local TZ here. This record was loaded by an older version of the connector. 
+ Jsons.deserialize(""" + { + "_AIRBYTE_RAW_ID": "pre-dst local tz 3", + "_AIRBYTE_EXTRACTED_AT": "2024-03-10T02:00:00.000000000-08:00", + "_AIRBYTE_META": { + "errors": [] + }, + "ID1": 3, + "ID2": 100, + "STRING": "Charlie00" + }"""), + Jsons.deserialize(""" + { + "_AIRBYTE_RAW_ID": "post-dst utc 3", + "_AIRBYTE_EXTRACTED_AT": "2024-03-10T02:02:00.000000000Z", + "_AIRBYTE_META": {"errors": []}, + "ID1": 3, + "ID2": 100, + "STRING": "Charlie01" + } + """), + // note local TZ here. This record was loaded by an older version of the connector. + Jsons.deserialize(""" + { + "_AIRBYTE_RAW_ID": "post-dst local tz 4", + "_AIRBYTE_EXTRACTED_AT": "2024-03-10T02:01:00.000000000-07:00", + "_AIRBYTE_META": { + "errors": [] + }, + "ID1": 4, + "ID2": 100, + "STRING": "Dave00" + }"""), + Jsons.deserialize(""" + { + "_AIRBYTE_RAW_ID": "post-dst utc 4", + "_AIRBYTE_EXTRACTED_AT": "2024-03-10T02:02:00.000000000Z", + "_AIRBYTE_META": {"errors": []}, + "ID1": 4, + "ID2": 100, + "STRING": "Dave01" + } + """)), + this.dumpFinalTableRecords(this.streamId, "")); + } + + @Test + public void dst_test_oldSyncRunsBeforeTransition_thenNewSyncRunsThroughTransition_append() throws Exception { + this.createRawTable(this.streamId); + this.createFinalTable(this.incrementalAppendStream, ""); + this.insertRawTableRecords(this.streamId, List.of( + // record written by a sync running on the old version of snowflake + Jsons.deserialize(""" + { + "_airbyte_raw_id": "pre-dst local tz 1", + "_airbyte_extracted_at": "2024-03-10T01:59:00-08:00", + "_airbyte_data": { + "id1": 1, + "id2": 100, + "string": "Alice00" + } + } + """))); + // Gather initial state at the start of our updated sync + DestinationInitialStatus initialState = + this.destinationHandler.gatherInitialState(List.of(this.incrementalAppendStream)).getFirst(); + this.insertRawTableRecords(this.streamId, List.of( + // update the record twice + // this never really happens, but verify that it works + Jsons.deserialize(""" + { + "_airbyte_raw_id": "pre-dst utc 1", + "_airbyte_extracted_at": "2024-03-10T02:00:00Z", + "_airbyte_data": { + "id1": 1, + "id2": 100, + "string": "Alice01" + } + } + """), + Jsons.deserialize(""" + { + "_airbyte_raw_id": "post-dst utc 1", + "_airbyte_extracted_at": "2024-03-10T02:01:00Z", + "_airbyte_data": { + "id1": 1, + "id2": 100, + "string": "Alice02" + } + } + """))); + + TypeAndDedupeTransaction.executeTypeAndDedupe(this.generator, this.destinationHandler, this.incrementalAppendStream, + initialState.initialRawTableStatus().maxProcessedTimestamp(), ""); + + DIFFER.diffFinalTableRecords( + List.of( + Jsons.deserialize(""" + { + "_AIRBYTE_RAW_ID": "pre-dst local tz 1", + "_AIRBYTE_EXTRACTED_AT": "2024-03-10T01:59:00.000000000Z", + "_AIRBYTE_META": { + "errors": [] + }, + "ID1": 1, + "ID2": 100, + "STRING": "Alice00" + }"""), + Jsons.deserialize(""" + { + "_AIRBYTE_RAW_ID": "pre-dst utc 1", + "_AIRBYTE_EXTRACTED_AT": "2024-03-10T02:00:00.000000000Z", + "_AIRBYTE_META": { + "errors": [] + }, + "ID1": 1, + "ID2": 100, + "STRING": "Alice01" + }"""), + Jsons.deserialize(""" + { + "_AIRBYTE_RAW_ID": "post-dst utc 1", + "_AIRBYTE_EXTRACTED_AT": "2024-03-10T02:01:00.000000000Z", + "_AIRBYTE_META": {"errors": []}, + "ID1": 1, + "ID2": 100, + "STRING": "Alice02" + } + """)), + this.dumpFinalTableRecords(this.streamId, "")); + } + + @Test + public void dst_test_oldSyncRunsBeforeTransition_thenNewSyncRunsBeforeTransition_thenNewSyncRunsThroughTransition_append() throws Exception { + this.createRawTable(this.streamId); + 
this.createFinalTable(this.incrementalAppendStream, ""); + this.insertRawTableRecords(this.streamId, List.of( + // records written by a sync running on the old version of snowflake + Jsons.deserialize(""" + { + "_airbyte_raw_id": "pre-dst local tz 1", + "_airbyte_extracted_at": "2024-03-10T01:59:00-08:00", + "_airbyte_data": { + "id1": 1, + "id2": 100, + "string": "Alice00" + } + } + """), + Jsons.deserialize(""" + { + "_airbyte_raw_id": "pre-dst local tz 2", + "_airbyte_extracted_at": "2024-03-10T01:59:00-08:00", + "_airbyte_data": { + "id1": 2, + "id2": 100, + "string": "Bob00" + } + } + """))); + + // Gather initial state at the start of our first new sync + DestinationInitialStatus initialState = + this.destinationHandler.gatherInitialState(List.of(this.incrementalAppendStream)).getFirst(); + this.insertRawTableRecords(this.streamId, List.of( + // update the records + Jsons.deserialize(""" + { + "_airbyte_raw_id": "pre-dst utc 1", + "_airbyte_extracted_at": "2024-03-10T02:00:00Z", + "_airbyte_data": { + "id1": 1, + "id2": 100, + "string": "Alice01" + } + } + """), + Jsons.deserialize(""" + { + "_airbyte_raw_id": "pre-dst utc 2", + "_airbyte_extracted_at": "2024-03-10T02:00:00Z", + "_airbyte_data": { + "id1": 2, + "id2": 100, + "string": "Bob01" + } + } + """))); + + TypeAndDedupeTransaction.executeTypeAndDedupe(this.generator, this.destinationHandler, this.incrementalAppendStream, + initialState.initialRawTableStatus().maxProcessedTimestamp(), ""); + + DIFFER.diffFinalTableRecords( + List.of( + Jsons.deserialize(""" + { + "_AIRBYTE_RAW_ID": "pre-dst local tz 1", + "_AIRBYTE_EXTRACTED_AT": "2024-03-10T01:59:00.000000000Z", + "_AIRBYTE_META": { + "errors": [] + }, + "ID1": 1, + "ID2": 100, + "STRING": "Alice00" + }"""), + Jsons.deserialize(""" + { + "_AIRBYTE_RAW_ID": "pre-dst utc 1", + "_AIRBYTE_EXTRACTED_AT": "2024-03-10T02:00:00.000000000Z", + "_AIRBYTE_META": {"errors": []}, + "ID1": 1, + "ID2": 100, + "STRING": "Alice01" + } + """), + Jsons.deserialize(""" + { + "_AIRBYTE_RAW_ID": "pre-dst local tz 2", + "_AIRBYTE_EXTRACTED_AT": "2024-03-10T01:59:00.000000000Z", + "_AIRBYTE_META": { + "errors": [] + }, + "ID1": 2, + "ID2": 100, + "STRING": "Bob00" + }"""), + Jsons.deserialize(""" + { + "_AIRBYTE_RAW_ID": "pre-dst utc 2", + "_AIRBYTE_EXTRACTED_AT": "2024-03-10T02:00:00.000000000Z", + "_AIRBYTE_META": {"errors": []}, + "ID1": 2, + "ID2": 100, + "STRING": "Bob01" + } + """)), + this.dumpFinalTableRecords(this.streamId, "")); + + // Gather initial state at the start of our second new sync + DestinationInitialStatus initialState2 = + this.destinationHandler.gatherInitialState(List.of(this.incrementalAppendStream)).getFirst(); + this.insertRawTableRecords(this.streamId, List.of( + // update the records again + Jsons.deserialize(""" + { + "_airbyte_raw_id": "post-dst utc 1", + "_airbyte_extracted_at": "2024-03-10T02:01:00Z", + "_airbyte_data": { + "id1": 1, + "id2": 100, + "string": "Alice02" + } + } + """), + Jsons.deserialize(""" + { + "_airbyte_raw_id": "post-dst utc 2", + "_airbyte_extracted_at": "2024-03-10T02:01:00Z", + "_airbyte_data": { + "id1": 2, + "id2": 100, + "string": "Bob02" + } + } + """))); + + TypeAndDedupeTransaction.executeTypeAndDedupe(this.generator, this.destinationHandler, this.incrementalAppendStream, + initialState2.initialRawTableStatus().maxProcessedTimestamp(), ""); + + DIFFER.diffFinalTableRecords( + List.of( + Jsons.deserialize(""" + { + "_AIRBYTE_RAW_ID": "pre-dst local tz 1", + "_AIRBYTE_EXTRACTED_AT": "2024-03-10T01:59:00.000000000Z", + "_AIRBYTE_META": { 
+ "errors": [] + }, + "ID1": 1, + "ID2": 100, + "STRING": "Alice00" + }"""), + Jsons.deserialize(""" + { + "_AIRBYTE_RAW_ID": "pre-dst utc 1", + "_AIRBYTE_EXTRACTED_AT": "2024-03-10T02:00:00.000000000Z", + "_AIRBYTE_META": { + "errors": [] + }, + "ID1": 1, + "ID2": 100, + "STRING": "Alice01" + }"""), + Jsons.deserialize(""" + { + "_AIRBYTE_RAW_ID": "post-dst utc 1", + "_AIRBYTE_EXTRACTED_AT": "2024-03-10T02:01:00.000000000Z", + "_AIRBYTE_META": {"errors": []}, + "ID1": 1, + "ID2": 100, + "STRING": "Alice02" + } + """), + Jsons.deserialize(""" + { + "_AIRBYTE_RAW_ID": "pre-dst local tz 2", + "_AIRBYTE_EXTRACTED_AT": "2024-03-10T01:59:00.000000000Z", + "_AIRBYTE_META": { + "errors": [] + }, + "ID1": 2, + "ID2": 100, + "STRING": "Bob00" + }"""), + Jsons.deserialize(""" + { + "_AIRBYTE_RAW_ID": "pre-dst utc 2", + "_AIRBYTE_EXTRACTED_AT": "2024-03-10T02:00:00.000000000Z", + "_AIRBYTE_META": { + "errors": [] + }, + "ID1": 2, + "ID2": 100, + "STRING": "Bob01" + }"""), + Jsons.deserialize(""" + { + "_AIRBYTE_RAW_ID": "post-dst utc 2", + "_AIRBYTE_EXTRACTED_AT": "2024-03-10T02:01:00.000000000Z", + "_AIRBYTE_META": {"errors": []}, + "ID1": 2, + "ID2": 100, + "STRING": "Bob02" + } + """)), + this.dumpFinalTableRecords(this.streamId, "")); + } + } diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/ltz_extracted_at_sync1_expectedrecords_dedup_final.jsonl b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/ltz_extracted_at_sync1_expectedrecords_dedup_final.jsonl new file mode 100644 index 000000000000..cb50cd6fcc31 --- /dev/null +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/ltz_extracted_at_sync1_expectedrecords_dedup_final.jsonl @@ -0,0 +1,5 @@ +// Note the -08:00 offset in extracted_at. +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000-08:00", "_AIRBYTE_META": {"errors":[]}, "ID1": 1, "ID2": 200, "UPDATED_AT": "2000-01-01T00:01:00.000000000Z", "NAME": "Alice", "ADDRESS": {"city": "Los Angeles", "state": "CA"}} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000-08:00", "_AIRBYTE_META": {"errors":[]}, "ID1": 1, "ID2": 201, "UPDATED_AT": "2000-01-01T00:02:00.000000000Z", "NAME": "Bob", "ADDRESS": {"city": "Boston", "state": "MA"}} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000-08:00", "_AIRBYTE_META": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "ID1": 2, "ID2": 200, "UPDATED_AT": "2000-01-01T00:03:00.000000000Z", "NAME": "Charlie"} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000-08:00", "_AIRBYTE_META": {"errors":[]}, "ID1": 3, "ID2": 200, "UPDATED_AT": "2000-01-01T00:04:00.000000000Z", "NAME": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/ltz_extracted_at_sync1_expectedrecords_raw.jsonl b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/ltz_extracted_at_sync1_expectedrecords_raw.jsonl new file mode 100644 index 000000000000..6849b1072a0b --- /dev/null +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/ltz_extracted_at_sync1_expectedrecords_raw.jsonl @@ -0,0 +1,6 @@ +// Note the -08:00 offset in extracted_at. 
+{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00Z", "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"}} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_final.jsonl b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_final.jsonl index 7c9e93b21705..9672e61c9678 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_final.jsonl +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_final.jsonl @@ -1,3 +1,3 @@ -{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000-08:00", "_AIRBYTE_META": {"errors":[]}, "ID1": 1, "ID2": 200, "OLD_CURSOR": 1, "NAME": "Alice", "ADDRESS": {"city": "Los Angeles", "state": "CA"}} -{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000-08:00", "_AIRBYTE_META": {"errors":[]}, "ID1": 1, "ID2": 201, "OLD_CURSOR": 2, "NAME": "Bob", "ADDRESS": {"city": "Boston", "state": "MA"}} -{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000-08:00", "_AIRBYTE_META": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "ID1": 2, "ID2": 200, "OLD_CURSOR": 3, "NAME": "Charlie"} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000Z", "_AIRBYTE_META": {"errors":[]}, "ID1": 1, "ID2": 200, "OLD_CURSOR": 1, "NAME": "Alice", "ADDRESS": {"city": "Los Angeles", "state": "CA"}} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000Z", "_AIRBYTE_META": {"errors":[]}, "ID1": 1, "ID2": 201, "OLD_CURSOR": 2, "NAME": "Bob", "ADDRESS": {"city": "Boston", "state": "MA"}} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000Z", "_AIRBYTE_META": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "ID1": 2, "ID2": 200, "OLD_CURSOR": 3, "NAME": "Charlie"} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_raw.jsonl b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_raw.jsonl index fcf596ac0380..2f2b22731087 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_raw.jsonl +++ 
b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync1_cursorchange_expectedrecords_dedup_raw.jsonl @@ -1,4 +1,4 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 0, "_ab_cdc_deleted_at": null, "name" :"Alice", "address": {"city": "San Francisco", "state": "CA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 1, "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 201, "old_cursor": 2, "name": "Bob", "address": {"city": "Boston", "state": "MA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 2, "id2": 200, "old_cursor": 3, "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 0, "_ab_cdc_deleted_at": null, "name" :"Alice", "address": {"city": "San Francisco", "state": "CA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 1, "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "old_cursor": 2, "name": "Bob", "address": {"city": "Boston", "state": "MA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "old_cursor": 3, "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final.jsonl b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final.jsonl index 136fa8a99003..0338cae59ac4 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final.jsonl +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final.jsonl @@ -1,5 +1,5 @@ // Keep the Alice record with more recent UPDATED_AT -{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000-08:00", "_AIRBYTE_META": {"errors":[]}, "ID1": 1, "ID2": 200, "UPDATED_AT": "2000-01-01T00:01:00.000000000Z", "NAME": "Alice", "ADDRESS": {"city": "Los Angeles", "state": "CA"}} -{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000-08:00", "_AIRBYTE_META": {"errors":[]}, "ID1": 1, "ID2": 201, "UPDATED_AT": "2000-01-01T00:02:00.000000000Z", "NAME": "Bob", "ADDRESS": {"city": "Boston", "state": "MA"}} -{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000-08:00", "_AIRBYTE_META": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "ID1": 2, "ID2": 200, "UPDATED_AT": "2000-01-01T00:03:00.000000000Z", "NAME": "Charlie"} -{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000-08:00", "_AIRBYTE_META": {"errors":[]}, "ID1": 3, "ID2": 200, "UPDATED_AT": "2000-01-01T00:04:00.000000000Z", "NAME": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000Z", "_AIRBYTE_META": {"errors":[]}, "ID1": 1, "ID2": 200, "UPDATED_AT": 
"2000-01-01T00:01:00.000000000Z", "NAME": "Alice", "ADDRESS": {"city": "Los Angeles", "state": "CA"}} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000Z", "_AIRBYTE_META": {"errors":[]}, "ID1": 1, "ID2": 201, "UPDATED_AT": "2000-01-01T00:02:00.000000000Z", "NAME": "Bob", "ADDRESS": {"city": "Boston", "state": "MA"}} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000Z", "_AIRBYTE_META": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "ID1": 2, "ID2": 200, "UPDATED_AT": "2000-01-01T00:03:00.000000000Z", "NAME": "Charlie"} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000Z", "_AIRBYTE_META": {"errors":[]}, "ID1": 3, "ID2": 200, "UPDATED_AT": "2000-01-01T00:04:00.000000000Z", "NAME": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final2.jsonl b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final2.jsonl index 5f9395498870..83294d657935 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final2.jsonl +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync1_expectedrecords_dedup_final2.jsonl @@ -1 +1 @@ -{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000-08:00", "_AIRBYTE_META": {"errors":[]}, "ID1": 1, "ID2": 200, "UPDATED_AT": "2001-01-01T00:00:00.000000000Z", "NAME": "Someone completely different"} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000Z", "_AIRBYTE_META": {"errors":[]}, "ID1": 1, "ID2": 200, "UPDATED_AT": "2001-01-01T00:00:00.000000000Z", "NAME": "Someone completely different"} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync1_expectedrecords_nondedup_final.jsonl b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync1_expectedrecords_nondedup_final.jsonl index 575aa338976c..ca3c0aafa537 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync1_expectedrecords_nondedup_final.jsonl +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync1_expectedrecords_nondedup_final.jsonl @@ -1,6 +1,6 @@ -{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000-08:00", "_AIRBYTE_META": {"errors":[]}, "ID1": 1, "ID2": 200, "UPDATED_AT": "2000-01-01T00:00:00.000000000Z", "NAME": "Alice", "ADDRESS": {"city": "San Francisco", "state": "CA"}} -{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000-08:00", "_AIRBYTE_META": {"errors":[]}, "ID1": 1, "ID2": 200, "UPDATED_AT": "2000-01-01T00:01:00.000000000Z", "NAME": "Alice", "ADDRESS": {"city": "Los Angeles", "state": "CA"}} -{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000-08:00", "_AIRBYTE_META": {"errors":[]}, "ID1": 1, "ID2": 201, "UPDATED_AT": "2000-01-01T00:02:00.000000000Z", "NAME": "Bob", "ADDRESS": {"city": "Boston", "state": "MA"}} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000Z", "_AIRBYTE_META": {"errors":[]}, "ID1": 1, "ID2": 200, "UPDATED_AT": "2000-01-01T00:00:00.000000000Z", "NAME": "Alice", "ADDRESS": {"city": "San Francisco", "state": "CA"}} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000Z", "_AIRBYTE_META": {"errors":[]}, "ID1": 1, "ID2": 200, "UPDATED_AT": "2000-01-01T00:01:00.000000000Z", "NAME": "Alice", "ADDRESS": {"city": "Los 
Angeles", "state": "CA"}} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000Z", "_AIRBYTE_META": {"errors":[]}, "ID1": 1, "ID2": 201, "UPDATED_AT": "2000-01-01T00:02:00.000000000Z", "NAME": "Bob", "ADDRESS": {"city": "Boston", "state": "MA"}} // Invalid columns are nulled out (i.e. SQL null, not JSON null) -{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000-08:00", "_AIRBYTE_META": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "ID1": 2, "ID2": 200, "UPDATED_AT": "2000-01-01T00:03:00.000000000Z", "NAME": "Charlie"} -{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000-08:00", "_AIRBYTE_META": {"errors":[]}, "ID1": 3, "ID2": 200, "UPDATED_AT": "2000-01-01T00:04:00.000000000Z", "NAME": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000Z", "_AIRBYTE_META": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "ID1": 2, "ID2": 200, "UPDATED_AT": "2000-01-01T00:03:00.000000000Z", "NAME": "Charlie"} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000Z", "_AIRBYTE_META": {"errors":[]}, "ID1": 3, "ID2": 200, "UPDATED_AT": "2000-01-01T00:04:00.000000000Z", "NAME": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync1_expectedrecords_raw.jsonl b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync1_expectedrecords_raw.jsonl index d1c3045997b3..8dbfcd6cbb9c 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync1_expectedrecords_raw.jsonl +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync1_expectedrecords_raw.jsonl @@ -1,7 +1,7 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}}} // Note the duplicate record. In this sync mode, we don't dedup anything. -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}}} // Invalid data is still allowed in the raw table. 
-{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00Z", "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00Z", "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000Z", "_airbyte_data": {"id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"}} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync1_expectedrecords_raw2.jsonl b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync1_expectedrecords_raw2.jsonl index b0f0f8823c90..6849e306164f 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync1_expectedrecords_raw2.jsonl +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync1_expectedrecords_raw2.jsonl @@ -1 +1 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2001-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Someone completely different"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2001-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Someone completely different"}} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_final.jsonl b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_final.jsonl index 93e29eb904e4..a22c21dfee41 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_final.jsonl +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_final.jsonl @@ -1,3 +1,3 @@ -{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:02.000000000-08:00", "_AIRBYTE_META":{"errors":[]}, "ID1": 1, "ID2": 200, "UPDATED_AT": "2000-01-02T00:00:00.000000000Z", "NAME": "Alice", "ADDRESS": {"city": "Seattle", "state": "WA"}} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:02.000000000Z", "_AIRBYTE_META":{"errors":[]}, "ID1": 1, "ID2": 200, "UPDATED_AT": "2000-01-02T00:00:00.000000000Z", "NAME": "Alice", "ADDRESS": {"city": "Seattle", "state": "WA"}} // Charlie wasn't reemitted with UPDATED_AT, so it still has a null cursor -{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000-08:00", "_AIRBYTE_META": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "ID1": 2, "ID2": 200, "NAME": "Charlie"} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000Z", "_AIRBYTE_META": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "ID1": 2, "ID2": 200, "NAME": "Charlie"} diff --git 
a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_raw.jsonl b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_raw.jsonl index 347a9248d265..871f03978f60 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_raw.jsonl +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_cursorchange_expectedrecords_incremental_dedup_raw.jsonl @@ -1,7 +1,7 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 0, "_ab_cdc_deleted_at": null, "name" :"Alice", "address": {"city": "San Francisco", "state": "CA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 1, "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 201, "old_cursor": 2, "name": "Bob", "address": {"city": "Boston", "state": "MA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 2, "id2": 200, "old_cursor": 3, "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 0, "_ab_cdc_deleted_at": null, "name" :"Alice", "address": {"city": "San Francisco", "state": "CA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "old_cursor": 1, "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "old_cursor": 2, "name": "Bob", "address": {"city": "Boston", "state": "MA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "old_cursor": 3, "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000000Z", 
"_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_append_final.jsonl b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_append_final.jsonl index 67171fa4c01b..8b2a3f160f44 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_append_final.jsonl +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_append_final.jsonl @@ -1,9 +1,9 @@ -{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000-08:00", "_AIRBYTE_META": {"errors":[]}, "ID1": 1, "ID2": 200, "UPDATED_AT": "2000-01-01T00:00:00.000000000Z", "NAME": "Alice", "ADDRESS": {"city": "San Francisco", "state": "CA"}} -{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000-08:00", "_AIRBYTE_META": {"errors":[]}, "ID1": 1, "ID2": 200, "UPDATED_AT": "2000-01-01T00:01:00.000000000Z", "NAME": "Alice", "ADDRESS": {"city": "Los Angeles", "state": "CA"}} -{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000-08:00", "_AIRBYTE_META": {"errors":[]}, "ID1": 1, "ID2": 201, "UPDATED_AT": "2000-01-01T00:02:00.000000000Z", "NAME": "Bob", "ADDRESS": {"city": "Boston", "state": "MA"}} -{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000-08:00", "_AIRBYTE_META": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "ID1": 2, "ID2": 200, "UPDATED_AT": "2000-01-01T00:03:00.000000000Z", "NAME": "Charlie"} -{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000-08:00", "_AIRBYTE_META": {"errors":[]}, "ID1": 3, "ID2": 200, "UPDATED_AT": "2000-01-01T00:04:00.000000000Z", "NAME": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000Z", "_AIRBYTE_META": {"errors":[]}, "ID1": 1, "ID2": 200, "UPDATED_AT": "2000-01-01T00:00:00.000000000Z", "NAME": "Alice", "ADDRESS": {"city": "San Francisco", "state": "CA"}} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000Z", "_AIRBYTE_META": {"errors":[]}, "ID1": 1, "ID2": 200, "UPDATED_AT": "2000-01-01T00:01:00.000000000Z", "NAME": "Alice", "ADDRESS": {"city": "Los Angeles", "state": "CA"}} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000Z", "_AIRBYTE_META": {"errors":[]}, "ID1": 1, "ID2": 201, "UPDATED_AT": "2000-01-01T00:02:00.000000000Z", "NAME": "Bob", "ADDRESS": {"city": "Boston", "state": "MA"}} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000Z", "_AIRBYTE_META": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "ID1": 2, "ID2": 200, "UPDATED_AT": "2000-01-01T00:03:00.000000000Z", "NAME": "Charlie"} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000Z", "_AIRBYTE_META": {"errors":[]}, "ID1": 3, "ID2": 200, "UPDATED_AT": "2000-01-01T00:04:00.000000000Z", "NAME": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} -{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:02.000000000-08:00", "_AIRBYTE_META":{"errors":[]}, "ID1": 1, "ID2": 200, "UPDATED_AT": "2000-01-02T00:00:00.000000000Z", "NAME": "Alice", "ADDRESS": {"city": "Seattle", "state": "WA"}} -{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:02.000000000-08:00", "_AIRBYTE_META":{"errors":[]}, "ID1": 1, "ID2": 201, "UPDATED_AT": "2000-01-02T00:00:00.000000000Z", "NAME": "Bob", "ADDRESS": {"city": "New York", "state": 
"NY"}} -{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:02.000000000-08:00", "_AIRBYTE_META":{"errors":[]}, "ID1": 1, "ID2": 201, "UPDATED_AT": "2000-01-02T00:01:00.000000000Z", "_AB_CDC_DELETED_AT": "1970-01-01T00:00:00.000000000Z"} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:02.000000000Z", "_AIRBYTE_META":{"errors":[]}, "ID1": 1, "ID2": 200, "UPDATED_AT": "2000-01-02T00:00:00.000000000Z", "NAME": "Alice", "ADDRESS": {"city": "Seattle", "state": "WA"}} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:02.000000000Z", "_AIRBYTE_META":{"errors":[]}, "ID1": 1, "ID2": 201, "UPDATED_AT": "2000-01-02T00:00:00.000000000Z", "NAME": "Bob", "ADDRESS": {"city": "New York", "state": "NY"}} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:02.000000000Z", "_AIRBYTE_META":{"errors":[]}, "ID1": 1, "ID2": 201, "UPDATED_AT": "2000-01-02T00:01:00.000000000Z", "_AB_CDC_DELETED_AT": "1970-01-01T00:00:00.000000000Z"} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_final.jsonl b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_final.jsonl index 61366dee9ab4..3f3fd3f1f3e7 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_final.jsonl +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_final.jsonl @@ -1,3 +1,3 @@ -{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:02.000000000-08:00", "_AIRBYTE_META":{"errors":[]}, "ID1": 1, "ID2": 200, "UPDATED_AT": "2000-01-02T00:00:00.000000000Z", "NAME": "Alice", "ADDRESS": {"city": "Seattle", "state": "WA"}} -{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:02.000000000-08:00", "_AIRBYTE_META":{"errors":[]}, "ID1": 1, "ID2": 201, "UPDATED_AT": "2000-01-02T00:00:00.000000000Z", "NAME": "Bob", "ADDRESS": {"city": "New York", "state": "NY"}} -{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:02.000000000-08:00", "_AIRBYTE_META":{"errors":[]}, "ID1": 1, "ID2": 201, "UPDATED_AT": "2000-01-02T00:01:00.000000000Z", "_AB_CDC_DELETED_AT": "1970-01-01T00:00:00.000000000Z"} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:02.000000000Z", "_AIRBYTE_META":{"errors":[]}, "ID1": 1, "ID2": 200, "UPDATED_AT": "2000-01-02T00:00:00.000000000Z", "NAME": "Alice", "ADDRESS": {"city": "Seattle", "state": "WA"}} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:02.000000000Z", "_AIRBYTE_META":{"errors":[]}, "ID1": 1, "ID2": 201, "UPDATED_AT": "2000-01-02T00:00:00.000000000Z", "NAME": "Bob", "ADDRESS": {"city": "New York", "state": "NY"}} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:02.000000000Z", "_AIRBYTE_META":{"errors":[]}, "ID1": 1, "ID2": 201, "UPDATED_AT": "2000-01-02T00:01:00.000000000Z", "_AB_CDC_DELETED_AT": "1970-01-01T00:00:00.000000000Z"} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_raw.jsonl b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_raw.jsonl index 2607c9f73a49..7ea21e905fe2 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_raw.jsonl +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_fullrefresh_overwrite_raw.jsonl 
@@ -1,3 +1,3 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final.jsonl b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final.jsonl index 2f7a58c51499..02e36c558939 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final.jsonl +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final.jsonl @@ -1,4 +1,4 @@ -{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:02.000000000-08:00", "_AIRBYTE_META":{"errors":[]}, "ID1": 1, "ID2": 200, "UPDATED_AT": "2000-01-02T00:00:00.000000000Z", "NAME": "Alice", "ADDRESS": {"city": "Seattle", "state": "WA"}} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:02.000000000Z", "_AIRBYTE_META":{"errors":[]}, "ID1": 1, "ID2": 200, "UPDATED_AT": "2000-01-02T00:00:00.000000000Z", "NAME": "Alice", "ADDRESS": {"city": "Seattle", "state": "WA"}} // Delete Bob, keep Charlie -{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000-08:00", "_AIRBYTE_META": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "ID1": 2, "ID2": 200, "UPDATED_AT": "2000-01-01T00:03:00.000000000Z", "NAME": "Charlie"} -{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000-08:00", "_AIRBYTE_META": {"errors":[]}, "ID1": 3, "ID2": 200, "UPDATED_AT": "2000-01-01T00:04:00.000000000Z", "NAME": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000Z", "_AIRBYTE_META": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "ID1": 2, "ID2": 200, "UPDATED_AT": "2000-01-01T00:03:00.000000000Z", "NAME": "Charlie"} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000Z", "_AIRBYTE_META": {"errors":[]}, "ID1": 3, "ID2": 200, "UPDATED_AT": "2000-01-01T00:04:00.000000000Z", "NAME": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final2.jsonl 
b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final2.jsonl index b86eb147ba89..1eefb353ce6b 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final2.jsonl +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final2.jsonl @@ -1 +1 @@ -{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:02.000000000-08:00", "_AIRBYTE_META":{"errors":[]}, "ID1": 1, "ID2": 200, "UPDATED_AT": "2001-01-02T00:00:00.000000000Z", "NAME": "Someone completely different v2"} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:02.000000000Z", "_AIRBYTE_META":{"errors":[]}, "ID1": 1, "ID2": 200, "UPDATED_AT": "2001-01-02T00:00:00.000000000Z", "NAME": "Someone completely different v2"} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final_mixed_tzs.jsonl b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final_mixed_tzs.jsonl new file mode 100644 index 000000000000..686793ed026b --- /dev/null +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_incremental_dedup_final_mixed_tzs.jsonl @@ -0,0 +1,4 @@ +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:02.000000000Z", "_AIRBYTE_META":{"errors":[]}, "ID1": 1, "ID2": 200, "UPDATED_AT": "2000-01-02T00:00:00.000000000Z", "NAME": "Alice", "ADDRESS": {"city": "Seattle", "state": "WA"}} +// Delete Bob, keep Charlie. We continue to keep old records in PST +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000-08:00", "_AIRBYTE_META": {"errors":["Problem with `age`", "Problem with `registration_date`"]}, "ID1": 2, "ID2": 200, "UPDATED_AT": "2000-01-01T00:03:00.000000000Z", "NAME": "Charlie"} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:01.000000000-08:00", "_AIRBYTE_META": {"errors":[]}, "ID1": 3, "ID2": 200, "UPDATED_AT": "2000-01-01T00:04:00.000000000Z", "NAME": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_raw.jsonl b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_raw.jsonl index d4bd6c49d4e7..2509cc47735e 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_raw.jsonl +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_raw.jsonl @@ -1,10 +1,10 @@ // We keep the records from the first sync -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}}} 
-{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00Z", "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000Z", "_airbyte_data": {"id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00Z", "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000Z", "_airbyte_data": {"id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"}} // And append the records from the second sync -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_raw2.jsonl b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_raw2.jsonl index 4d2e3167888c..0c8fd4eceab0 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_raw2.jsonl +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_raw2.jsonl @@ -1,2 +1,2 @@ -{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", 
"_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2001-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Someone completely different"}} -{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2001-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Someone completely different v2"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2001-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Someone completely different"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2001-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Someone completely different v2"}} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_raw_mixed_tzs.jsonl b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_raw_mixed_tzs.jsonl new file mode 100644 index 000000000000..8bd778660427 --- /dev/null +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_raw_mixed_tzs.jsonl @@ -0,0 +1,10 @@ +// We keep the records from the first sync which used to be in PST TZ +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00Z", "name": "Charlie", "age": "this is not an integer", "registration_date": "this is not a date"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"}} +// And append the records from the second sync +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}} diff --git a/docs/integrations/destinations/snowflake.md b/docs/integrations/destinations/snowflake.md index 39be90148e99..966224d41e13 100644 --- a/docs/integrations/destinations/snowflake.md +++ b/docs/integrations/destinations/snowflake.md @@ -246,6 +246,7 @@ Otherwise, make sure to grant the role the required permissions in the desired n | Version | Date | Pull 
Request | Subject | |:----------------|:-----------|:-----------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 3.6.0 | 2024-02-22 | [35308](https://github.com/airbytehq/airbyte/pull/35308) | Upgrade CDK; use UTC tz for extracted_at; migrate existing extracted_at to UTC | | 3.5.14 | 2024-02-22 | [35456](https://github.com/airbytehq/airbyte/pull/35456) | Adopt CDK 0.23.0; Gather initial state upfront, reduce information_schema calls | | 3.5.13 | 2024-02-22 | [35569](https://github.com/airbytehq/airbyte/pull/35569) | Fix logging bug. | | 3.5.12 | 2024-02-15 | [35240](https://github.com/airbytehq/airbyte/pull/35240) | Adopt CDK 0.20.9 | From aca8679a0e74a64e5b66cd205225bcc5d32487db Mon Sep 17 00:00:00 2001 From: Ofer Mendelevitch Date: Wed, 6 Mar 2024 11:04:00 -0800 Subject: [PATCH 109/172] =?UTF-8?q?=F0=9F=90=9B=20Destination=20Vectara:?= =?UTF-8?q?=20Improve=20title=20extraction=20logic=20(#35206)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../connectors/destination-vectara/Dockerfile | 2 +- .../destination-vectara/destination_vectara/writer.py | 4 +++- .../connectors/destination-vectara/metadata.yaml | 2 +- docs/integrations/destinations/vectara.md | 1 + 4 files changed, 6 insertions(+), 3 deletions(-) diff --git a/airbyte-integrations/connectors/destination-vectara/Dockerfile b/airbyte-integrations/connectors/destination-vectara/Dockerfile index 9afa4fa81a36..09e12723300f 100644 --- a/airbyte-integrations/connectors/destination-vectara/Dockerfile +++ b/airbyte-integrations/connectors/destination-vectara/Dockerfile @@ -34,5 +34,5 @@ COPY destination_vectara ./destination_vectara ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.2.0 +LABEL io.airbyte.version=0.2.1 LABEL io.airbyte.name=airbyte/destination-vectara diff --git a/airbyte-integrations/connectors/destination-vectara/destination_vectara/writer.py b/airbyte-integrations/connectors/destination-vectara/destination_vectara/writer.py index 0794b0dc9410..401d279294f0 100644 --- a/airbyte-integrations/connectors/destination-vectara/destination_vectara/writer.py +++ b/airbyte-integrations/connectors/destination-vectara/destination_vectara/writer.py @@ -105,7 +105,9 @@ def _get_document_metadata(self, record: AirbyteRecordMessage) -> Dict[str, Any] def _get_document_title(self, record: AirbyteRecordMessage) -> str: title = "Untitled" if self.title_field: - title = dpath.util.get(record.data, self.title_field) + found_title = dpath.util.values(record.data, self.title_field, separator=".") + if found_title: + title = found_title[0] return title def _get_stream_id(self, record: AirbyteRecordMessage) -> str: diff --git a/airbyte-integrations/connectors/destination-vectara/metadata.yaml b/airbyte-integrations/connectors/destination-vectara/metadata.yaml index eed0bec69693..63db8fb2f9db 100644 --- a/airbyte-integrations/connectors/destination-vectara/metadata.yaml +++ b/airbyte-integrations/connectors/destination-vectara/metadata.yaml @@ -11,7 +11,7 @@ data: connectorSubtype: database connectorType: destination definitionId: 102900e7-a236-4c94-83e4-a4189b99adc2 - dockerImageTag: 0.2.0 + dockerImageTag: 0.2.1 dockerRepository: airbyte/destination-vectara githubIssueLabel: destination-vectara icon: vectara.svg diff --git
a/docs/integrations/destinations/vectara.md b/docs/integrations/destinations/vectara.md index 1834004b8f24..30bce2b4d076 100644 --- a/docs/integrations/destinations/vectara.md +++ b/docs/integrations/destinations/vectara.md @@ -63,5 +63,6 @@ In addition, in the connector UI you define two sets of fields for this connector | Version | Date | Pull Request | Subject | | :------ | :--------- | :------------------------------------------------------- | :---------------------------------------------------------------- | +| 0.2.1 | 2024-03-05 | [35206](https://github.com/airbytehq/airbyte/pull/35206) | Fix: improved title parsing | | 0.2.0 | 2024-01-29 | [34579](https://github.com/airbytehq/airbyte/pull/34579) | Add document title file configuration | | 0.1.0 | 2023-11-10 | [31958](https://github.com/airbytehq/airbyte/pull/31958) | 🎉 New Destination: Vectara (Vector Database) | From 8ee36d690c84c84ab27e6d485aa0006a32145a12 Mon Sep 17 00:00:00 2001 From: Stephane Geneix <147216312+stephane-airbyte@users.noreply.github.com> Date: Wed, 6 Mar 2024 12:20:25 -0800 Subject: [PATCH 110/172] add background threads to MssqlTestDatabase (#35727) --- .../source/mssql/MsSQLTestDatabase.java | 13 +- ...sSqlTestDatabaseWithBackgroundThreads.java | 304 ++++++++++++++++++ 2 files changed, 315 insertions(+), 2 deletions(-) create mode 100644 airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSqlTestDatabaseWithBackgroundThreads.java diff --git a/airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSQLTestDatabase.java b/airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSQLTestDatabase.java index 698992f1ffaa..2d3cf26fe9a4 100644 --- a/airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSQLTestDatabase.java +++ b/airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSQLTestDatabase.java @@ -33,7 +33,11 @@ public class MsSQLTestDatabase extends TestDatabase, MsS static private final Logger LOGGER = LoggerFactory.getLogger(MsSQLTestDatabase.class); - // empirically, 240 is enough. If you fee like you need to increase it, you're probably missing a + // Turning this to true will create a bunch of background threads that will regularly check the + // state of the database and log every time it changes. A bit verbose, but useful for debugging + private static final boolean ENABLE_BACKGROUND_THREADS = false; + + // empirically, 240 is enough. If you feel like you need to increase it, you're probably missing a + check somewhere static public final int MAX_RETRIES = 240; @@ -73,7 +77,12 @@ public Consumer> modifier() { static public MsSQLTestDatabase in(final BaseImage imageName, final ContainerModifier...
modifiers) { final var container = new MsSQLContainerFactory().shared(imageName.reference, modifiers); - final MsSQLTestDatabase testdb = new MsSQLTestDatabase(container); + final MsSQLTestDatabase testdb; + if (ENABLE_BACKGROUND_THREADS) { + testdb = new MsSqlTestDatabaseWithBackgroundThreads(container); + } else { + testdb = new MsSQLTestDatabase(container); + } return testdb .withConnectionProperty("encrypt", "false") .withConnectionProperty("trustServerCertificate", "true") diff --git a/airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSqlTestDatabaseWithBackgroundThreads.java b/airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSqlTestDatabaseWithBackgroundThreads.java new file mode 100644 index 000000000000..504520c5e024 --- /dev/null +++ b/airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSqlTestDatabaseWithBackgroundThreads.java @@ -0,0 +1,304 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.source.mssql; + +import io.airbyte.commons.logging.LoggingHelper.Color; +import io.airbyte.commons.logging.MdcScope; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Base64; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.ConcurrentHashMap; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.exception.ExceptionUtils; +import org.jooq.Record; +import org.jooq.exception.DataAccessException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.testcontainers.containers.MSSQLServerContainer; + +public class MsSqlTestDatabaseWithBackgroundThreads extends MsSQLTestDatabase { + + private abstract class AbstractMssqlTestDatabaseBackgroundThread extends Thread { + + protected Logger LOGGER = LoggerFactory.getLogger(this.getClass()); + protected final boolean PRINT_EVERY_CALL = false; + + AbstractMssqlTestDatabaseBackgroundThread() { + this.start(); + } + + protected volatile boolean stop = false; + + protected String formatLogLine(String logLine) { + String retVal = this.getClass().getSimpleName() + " databaseId=" + databaseId + ", containerId=" + containerId + " - " + logLine; + return retVal; + } + + @SuppressWarnings("try") + public void run() { + try (MdcScope mdcScope = new MdcScope.Builder().setPrefixColor(Color.PURPLE_BACKGROUND).setLogPrefix(this.getClass().getSimpleName()) + .build()) { + while (!stop) { + try { + Thread.sleep(100); + innerRun(); + } catch (final Throwable t) { + LOGGER.info(formatLogLine( + "got exception of type " + t.getClass() + ":" + StringUtils.replace(t.getMessage() + "\n" + formatStackTrace(t), "\n", "\\n"))); + } + } + } + } + + private String formatStackTrace(Throwable t) { + boolean belowCurrentCall = false; + List stackToDisplay = new LinkedList(); + for (String stackString : ExceptionUtils.getStackFrames(t)) { + if (stackString.startsWith("\tat ")) { + if (!belowCurrentCall && stackString.contains(AbstractMssqlTestDatabaseBackgroundThread.class.getSimpleName())) { + belowCurrentCall = true; + } + } else { + belowCurrentCall = false; + } + if (!belowCurrentCall) { + stackToDisplay.add(stackString); + } + } + return StringUtils.join(stackToDisplay, "\n "); + } + + public abstract void innerRun() throws Exception; + + } + + private class MssqlTestDatabaseBackgroundThreadAgentState 
extends AbstractMssqlTestDatabaseBackgroundThread { + + private String previousValue = null; + + @Override + public void innerRun() throws Exception { + String agentStateSql = "EXEC master.dbo.xp_servicecontrol 'QueryState', N'SQLServerAGENT';"; + final var r = query(ctx -> ctx.fetch(agentStateSql).get(0)); + String agentState = r.getValue(0).toString(); + if (PRINT_EVERY_CALL || !Objects.equals(agentState, previousValue)) { + LOGGER.info(formatLogLine("agentState changed from {} to {}"), previousValue, agentState); + previousValue = agentState; + } + + } + + } + + private class MssqlTestDatabaseBackgroundThreadFnCdcGetMaxLsn extends AbstractMssqlTestDatabaseBackgroundThread { + + private String previousValue = null; + + @Override + public void innerRun() throws Exception { + String max_lsn; + try { + Object retVal = query(ctx -> ctx.fetch(MAX_LSN_QUERY)).get(0).getValue(0); + if (retVal instanceof byte[] bytes) { + max_lsn = new String(Base64.getEncoder().encode(bytes), StandardCharsets.UTF_8); + } else { + max_lsn = String.valueOf(retVal); + } + } catch (DataAccessException e) { + if (e.getMessage().contains("Invalid object name 'cdc.lsn_time_mapping'")) { + max_lsn = "DataAccessException " + e.getMessage(); + } else { + throw e; + } + } + if (PRINT_EVERY_CALL || !Objects.equals(max_lsn, previousValue)) { + LOGGER.info(formatLogLine("sys.fn_cdc_get_max_lsn changed from {} to {}"), previousValue, max_lsn); + previousValue = max_lsn; + } + } + + } + + private class MssqlTestDatabaseBackgroundThreadLsnTimeMapping extends AbstractMssqlTestDatabaseBackgroundThread { + + private String previousValue = null; + private static final String LSN_TIME_MAPPING_QUERY = "SELECT start_lsn, tran_begin_time, tran_end_time, tran_id FROM cdc.lsn_time_mapping;"; + + @Override + public void innerRun() throws Exception { + String results; + try { + results = query(ctx -> ctx.fetch(LSN_TIME_MAPPING_QUERY)).toString(); + } catch (DataAccessException e) { + if (e.getMessage().contains("Invalid object name 'cdc.lsn_time_mapping'")) { + results = "DataAccessException " + e.getMessage(); + } else { + throw e; + } + } + if (PRINT_EVERY_CALL || !Objects.equals(results, previousValue)) { + LOGGER.info(formatLogLine("cdc.lsn_time_mapping changed from {} to {}"), previousValue, results); + previousValue = results; + } + } + + } + + private class MssqlTestDatabaseBackgroundThreadQueryJobsTable extends AbstractMssqlTestDatabaseBackgroundThread { + + private String previousValue = null; + private int previousRowCount = -1; + private static final String JOBS_TABLE_QUERY = "SELECT * FROM msdb.dbo.cdc_jobs"; + + @Override + public void innerRun() throws Exception { + int resultSize = 0; + String resultsAsString; + try { + List results = query(ctx -> ctx.fetch(JOBS_TABLE_QUERY)); + resultsAsString = results.toString(); + resultSize = results.size(); + } catch (DataAccessException e) { + if (e.getMessage().contains("Invalid object name 'msdb.dbo.cdc_jobs'")) { + resultsAsString = "DataAccessException " + e.getMessage(); + } else { + throw e; + } + } + if (PRINT_EVERY_CALL || !Objects.equals(resultsAsString, previousValue)) { + LOGGER.info(formatLogLine("msdb.dbo.cdc_jobs changed from {} rows\n{} to {} rows\n{}"), previousRowCount, previousValue, resultSize, + resultsAsString); + previousValue = resultsAsString; + previousRowCount = resultSize; + } + } + + } + + private class MssqlTestDatabaseBackgroundThreadQueryChangeTables extends AbstractMssqlTestDatabaseBackgroundThread { + + private String previousValue = null; + private
int previousRowCount = -1; + private static final String CHANGE_TABLES_QUERY = """ + SELECT OBJECT_SCHEMA_NAME(source_object_id, DB_ID('%s')), + OBJECT_NAME(source_object_id, DB_ID('%s')), + capture_instance, + object_id, + start_lsn FROM cdc.change_tables"""; + + @Override + public void innerRun() throws Exception { + int resultSize = 0; + String resultsAsString; + try { + List results = query(ctx -> ctx.fetch(CHANGE_TABLES_QUERY.formatted(getDatabaseName(), getDatabaseName()))); + resultsAsString = results.toString(); + resultSize = results.size(); + } catch (DataAccessException e) { + if (e.getMessage().contains("Invalid object name 'cdc.change_tables'")) { + resultsAsString = "DataAccessException " + e.getMessage(); + } else { + throw e; + } + } + if (PRINT_EVERY_CALL || !Objects.equals(resultsAsString, previousValue)) { + LOGGER.info(formatLogLine("cdc.change_tables changed from {} rows\n{} to {} rows\n{}"), previousRowCount, previousValue, resultSize, + resultsAsString); + previousValue = resultsAsString; + previousRowCount = resultSize; + } + } + + } + + private class MssqlTestDatabaseBackgroundThreadQueryCdcTable extends AbstractMssqlTestDatabaseBackgroundThread { + + private final String schemaName; + private final String tableName; + private final String instanceName; + private String previousValue = null; + private int previousRowCount = -1; + + MssqlTestDatabaseBackgroundThreadQueryCdcTable(String schemaName, String tableName, String instanceName) { + this.schemaName = schemaName; + this.tableName = tableName; + this.instanceName = instanceName; + } + + private static final String CDC_TABLE_SELECT_QUERY_STRING = "SELECT * FROM cdc.%s_ct"; + + @Override + public void innerRun() throws Exception { + int resultSize = 0; + String resultsAsString; + try { + List results = query(ctx -> ctx.fetch(CDC_TABLE_SELECT_QUERY_STRING.formatted(instanceName))); + resultsAsString = results.toString(); + resultSize = results.size(); + } catch (DataAccessException e) { + if (e.getMessage().contains("Invalid object name 'cdc.%s_ct'".formatted(instanceName))) { + resultsAsString = "DataAccessException " + e.getMessage(); + } else { + throw e; + } + } + if (PRINT_EVERY_CALL || !Objects.equals(resultsAsString, previousValue)) { + LOGGER.info(formatLogLine("cdc table {} for {}.{} changed from {} rows\n{} to {} rows\n{}"), instanceName, schemaName, tableName, + previousRowCount, previousValue, resultSize, + resultsAsString); + previousValue = resultsAsString; + previousRowCount = resultSize; + } + } + + } + + private final List bgThreads = new ArrayList<>(); + + MsSqlTestDatabaseWithBackgroundThreads(MSSQLServerContainer container) { + super(container); + + } + + public MsSQLTestDatabase initialized() { + super.initialized(); + bgThreads.add(new MssqlTestDatabaseBackgroundThreadAgentState()); + bgThreads.add(new MssqlTestDatabaseBackgroundThreadFnCdcGetMaxLsn()); + bgThreads.add(new MssqlTestDatabaseBackgroundThreadLsnTimeMapping()); + bgThreads.add(new MssqlTestDatabaseBackgroundThreadQueryChangeTables()); + bgThreads.add(new MssqlTestDatabaseBackgroundThreadQueryJobsTable()); + return self(); + } + + public void close() { + for (var bgThread : bgThreads) { + bgThread.stop = true; + } + super.close(); + } + + private final Map bgThreadByInstance = new ConcurrentHashMap<>(); + + @Override + public MsSQLTestDatabase withCdcForTable(String schemaName, String tableName, String roleName, String instanceName) { + super.withCdcForTable(schemaName, tableName, roleName, instanceName); + 
MssqlTestDatabaseBackgroundThreadQueryCdcTable bgThread = new MssqlTestDatabaseBackgroundThreadQueryCdcTable(schemaName, tableName, instanceName); + bgThreadByInstance.put(instanceName, bgThread); + bgThreads.add(bgThread); + return this; + } + + @Override + public MsSQLTestDatabase withCdcDisabledForTable(String schemaName, String tableName, String instanceName) { + bgThreadByInstance.get(instanceName).stop = true; + super.withCdcDisabledForTable(schemaName, tableName, instanceName); + return this; + } + +} From d5e91ae83ab836abe07b252ec301a2fcf110430e Mon Sep 17 00:00:00 2001 From: Xiaohan Song Date: Wed, 6 Mar 2024 14:29:42 -0800 Subject: [PATCH 111/172] [source-mongodb] record count in state & initial iterator refactor (#35669) Signed-off-by: Artem Inzhyyants Signed-off-by: Gireesh Sreepathi Co-authored-by: Augustin Co-authored-by: Subodh Kant Chaturvedi Co-authored-by: Anatolii Yatsuk <35109939+tolik0@users.noreply.github.com> Co-authored-by: Baz Co-authored-by: Artem Inzhyyants <36314070+artem1205@users.noreply.github.com> Co-authored-by: Aaron ("AJ") Steers Co-authored-by: Tim Roes Co-authored-by: benmoriceau Co-authored-by: Gireesh Sreepathi Co-authored-by: Marius Posta Co-authored-by: Evan Tahler Co-authored-by: Edward Gao Co-authored-by: Anton Karpets Co-authored-by: Patrick Nilan Co-authored-by: Akash Kulkarni <113392464+akashkulk@users.noreply.github.com> Co-authored-by: Tyler B <104733644+tybernstein@users.noreply.github.com> Co-authored-by: bgroff Co-authored-by: mjgatz <86885812+mjgatz@users.noreply.github.com> Co-authored-by: mgreene Co-authored-by: Serhii Lazebnyi Co-authored-by: Serhii Lazebnyi <53845333+lazebnyi@users.noreply.github.com> Co-authored-by: Rodi Reich Zilberman <867491+rodireich@users.noreply.github.com> Co-authored-by: Daryna Ishchenko <80129833+darynaishchenko@users.noreply.github.com> Co-authored-by: Stephane Geneix <147216312+stephane-airbyte@users.noreply.github.com> Co-authored-by: Joe Reuter Co-authored-by: Marcos Marx Co-authored-by: Maxime Carbonneau-Leclerc <3360483+maxi297@users.noreply.github.com> Co-authored-by: Akash Kulkarni Co-authored-by: Roman Yermilov [GL] <86300758+roman-yermilov-gl@users.noreply.github.com> Co-authored-by: Alexandre Girard Co-authored-by: girarda Co-authored-by: Brian Lai <51336873+brianjlai@users.noreply.github.com> Co-authored-by: brianjlai Co-authored-by: Catherine Noll Co-authored-by: midavadim Co-authored-by: Julien COUTAND Co-authored-by: Christo Grabowski <108154848+ChristoGrab@users.noreply.github.com> Co-authored-by: maxi297 Co-authored-by: Bindi Pankhudi Co-authored-by: Bindi Pankhudi Co-authored-by: Ben Drucker Co-authored-by: TornadoContre <37258495+TornadoContre@users.noreply.github.com> Co-authored-by: Natik Gadzhi Co-authored-by: Thomas Dippel Co-authored-by: marcosmarxm Co-authored-by: Alex Birdsall Co-authored-by: ambirdsall Co-authored-by: Jose Gerardo Pineda Co-authored-by: alafanechere Co-authored-by: Pedro S. 
Lopez Co-authored-by: Ella Rohm-Ensing Co-authored-by: Siarhei Ivanou Co-authored-by: Anatolii Yatsuk Co-authored-by: Ryan Waskewich <156025126+rwask@users.noreply.github.com> Co-authored-by: Sajarin Co-authored-by: artem1205 Co-authored-by: perangel Co-authored-by: Joe Bell Co-authored-by: Obioma Anomnachi Co-authored-by: maxi297 Co-authored-by: SatishChGit Co-authored-by: Brian Leonard Co-authored-by: David Wallace Co-authored-by: pmossman Co-authored-by: Stephane Geneix Co-authored-by: Alexandre Cuoci Co-authored-by: Danny Tiesling --- .../connectors/source-mongodb-v2/build.gradle | 2 +- .../source-mongodb-v2/metadata.yaml | 2 +- .../mongodb/InitialSnapshotHandler.java | 11 +- .../source/mongodb/MongoDbSource.java | 3 +- .../source/mongodb/MongoDbStateIterator.java | 218 ------------------ .../mongodb/cdc/MongoDbCdcInitializer.java | 4 +- .../mongodb/state/MongoDbStateManager.java | 112 ++++++++- .../mongodb/MongoDbSourceAcceptanceTest.java | 3 +- .../mongodb/InitialSnapshotHandlerTest.java | 82 +++++-- ...Test.java => MongoDbStateManagerTest.java} | 71 +++--- .../cdc/MongoDbCdcInitializerTest.java | 29 ++- .../cdc/MongoDbCdcStateHandlerTest.java | 10 +- .../state/MongoDbStateManagerTest.java | 26 ++- docs/integrations/sources/mongodb-v2.md | 3 +- 14 files changed, 259 insertions(+), 317 deletions(-) delete mode 100644 airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/MongoDbStateIterator.java rename airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/{MongoDbStateIteratorTest.java => MongoDbStateManagerTest.java} (83%) diff --git a/airbyte-integrations/connectors/source-mongodb-v2/build.gradle b/airbyte-integrations/connectors/source-mongodb-v2/build.gradle index b160c88b04b3..a04fc19cbecd 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/build.gradle +++ b/airbyte-integrations/connectors/source-mongodb-v2/build.gradle @@ -4,7 +4,7 @@ plugins { } airbyteJavaConnector { - cdkVersionRequired = '0.20.6' + cdkVersionRequired = '0.23.8' features = ['db-sources', 'datastore-mongo'] useLocalCdk = false } diff --git a/airbyte-integrations/connectors/source-mongodb-v2/metadata.yaml b/airbyte-integrations/connectors/source-mongodb-v2/metadata.yaml index 70c2882ab0a0..18a4f31c2152 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/metadata.yaml +++ b/airbyte-integrations/connectors/source-mongodb-v2/metadata.yaml @@ -5,7 +5,7 @@ data: connectorSubtype: database connectorType: source definitionId: b2e713cd-cc36-4c0a-b5bd-b47cb8a0561e - dockerImageTag: 1.2.15 + dockerImageTag: 1.2.16 dockerRepository: airbyte/source-mongodb-v2 documentationUrl: https://docs.airbyte.com/integrations/sources/mongodb-v2 githubIssueLabel: source-mongodb-v2 diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/InitialSnapshotHandler.java b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/InitialSnapshotHandler.java index f9863945d8fd..08321ded6853 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/InitialSnapshotHandler.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/InitialSnapshotHandler.java @@ -11,10 +11,11 @@ import com.mongodb.client.model.Filters; import com.mongodb.client.model.Projections; import com.mongodb.client.model.Sorts; +import 
io.airbyte.cdk.integrations.source.relationaldb.state.SourceStateIterator; +import io.airbyte.cdk.integrations.source.relationaldb.state.StateEmitFrequency; import io.airbyte.commons.exceptions.ConfigErrorException; import io.airbyte.commons.util.AutoCloseableIterator; import io.airbyte.commons.util.AutoCloseableIterators; -import io.airbyte.integrations.source.mongodb.cdc.MongoDbCdcConnectorMetadataInjector; import io.airbyte.integrations.source.mongodb.state.IdType; import io.airbyte.integrations.source.mongodb.state.MongoDbStateManager; import io.airbyte.integrations.source.mongodb.state.MongoDbStreamState; @@ -22,7 +23,6 @@ import io.airbyte.protocol.models.v0.CatalogHelpers; import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; import io.airbyte.protocol.models.v0.SyncMode; -import java.time.Instant; import java.util.ArrayList; import java.util.List; import java.util.Optional; @@ -53,8 +53,6 @@ public List> getIterators( final List streams, final MongoDbStateManager stateManager, final MongoDatabase database, - final MongoDbCdcConnectorMetadataInjector cdcConnectorMetadataInjector, - final Instant emittedAt, final int checkpointInterval, final boolean isEnforceSchema) { return streams @@ -113,10 +111,9 @@ public List> getIterators( .sort(Sorts.ascending(MongoConstants.ID_FIELD)) .allowDiskUse(true) .cursor(); - final var stateIterator = - new MongoDbStateIterator(cursor, stateManager, Optional.ofNullable(cdcConnectorMetadataInjector), - airbyteStream, emittedAt, checkpointInterval, MongoConstants.CHECKPOINT_DURATION, isEnforceSchema); + new SourceStateIterator<>(cursor, airbyteStream, stateManager, new StateEmitFrequency(checkpointInterval, + MongoConstants.CHECKPOINT_DURATION)); return AutoCloseableIterators.fromIterator(stateIterator, cursor::close, null); }) .toList(); diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/MongoDbSource.java b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/MongoDbSource.java index d7103f6fe854..39974e73cec7 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/MongoDbSource.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/MongoDbSource.java @@ -122,8 +122,9 @@ public AutoCloseableIterator read(final JsonNode config, final JsonNode state) { final var emittedAt = Instant.now(); final var cdcMetadataInjector = MongoDbCdcConnectorMetadataInjector.getInstance(emittedAt); - final var stateManager = MongoDbStateManager.createStateManager(state); final MongoDbSourceConfig sourceConfig = new MongoDbSourceConfig(config); + final var stateManager = MongoDbStateManager.createStateManager(state, sourceConfig); + if (catalog != null) { MongoUtil.checkSchemaModeMismatch(sourceConfig.getEnforceSchema(), stateManager.getCdcState() != null ? 
stateManager.getCdcState().schema_enforced() : sourceConfig.getEnforceSchema(), catalog); diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/MongoDbStateIterator.java b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/MongoDbStateIterator.java deleted file mode 100644 index f659c2c06af6..000000000000 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/MongoDbStateIterator.java +++ /dev/null @@ -1,218 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.source.mongodb; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.mongodb.MongoException; -import com.mongodb.client.MongoCursor; -import io.airbyte.cdk.integrations.debezium.CdcMetadataInjector; -import io.airbyte.commons.exceptions.ConfigErrorException; -import io.airbyte.integrations.source.mongodb.cdc.MongoDbCdcEventUtils; -import io.airbyte.integrations.source.mongodb.state.IdType; -import io.airbyte.integrations.source.mongodb.state.InitialSnapshotStatus; -import io.airbyte.integrations.source.mongodb.state.MongoDbStateManager; -import io.airbyte.integrations.source.mongodb.state.MongoDbStreamState; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import io.airbyte.protocol.models.v0.AirbyteRecordMessage; -import io.airbyte.protocol.models.v0.CatalogHelpers; -import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; -import java.time.Duration; -import java.time.Instant; -import java.util.Iterator; -import java.util.Objects; -import java.util.Optional; -import java.util.Set; -import java.util.stream.Collectors; -import org.bson.Document; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * A state-emitting iterator that emits a state message every checkpointInterval messages when - * iterating over a MongoCursor. - *
- * Will also output a state message as the last message after the wrapper iterator has completed. - */ -public class MongoDbStateIterator implements Iterator { - - private static final Logger LOGGER = LoggerFactory.getLogger(MongoDbStateIterator.class); - - private final MongoCursor iter; - private final Optional> cdcMetadataInjector; - private final MongoDbStateManager stateManager; - private final ConfiguredAirbyteStream stream; - private final Set fields; - private final Instant emittedAt; - private Instant lastCheckpoint = Instant.now(); - private final Integer checkpointInterval; - private final Duration checkpointDuration; - private final boolean isEnforceSchema; - - /** - * Counts the number of records seen in this batch, resets when a state-message has been generated. - */ - private int count = 0; - - /** - * Pointer to the last document _id seen by this iterator, necessary to track for state messages. - */ - private Object lastId; - - /** - * This iterator outputs a final state when the wrapped `iter` has concluded. When this is true, the - * final message will be returned. - */ - private boolean finalStateNext = false; - - /** - * Tracks if the underlying iterator threw an exception, indicating that the snapshot for this - * stream failed. This helps to determine the final state status emitted from the final next call. - */ - private boolean initialSnapshotFailed = false; - - /** - * Tracks the exception thrown if there initial snapshot has failed. - */ - private Exception initialSnapshotException; - - /** - * Constructor. - * - * @param iter {@link MongoCursor} that iterates over Mongo documents - * @param stateManager {@link MongoDbStateManager} that manages global and per-stream state - * @param cdcMetadataInjector The {@link CdcMetadataInjector} used to add metadata to a published - * record. - * @param stream the stream that this iterator represents - * @param emittedAt when this iterator was started - * @param checkpointInterval how often a state message should be emitted based on number of - * messages. - * @param checkpointDuration how often a state message should be emitted based on time. - */ - public MongoDbStateIterator(final MongoCursor iter, - final MongoDbStateManager stateManager, - final Optional> cdcMetadataInjector, - final ConfiguredAirbyteStream stream, - final Instant emittedAt, - final int checkpointInterval, - final Duration checkpointDuration, - final boolean isEnforceSchema) { - this.iter = iter; - this.stateManager = stateManager; - this.stream = stream; - this.checkpointInterval = checkpointInterval; - this.checkpointDuration = checkpointDuration; - this.emittedAt = emittedAt; - this.fields = CatalogHelpers.getTopLevelFieldNames(stream).stream().collect(Collectors.toSet()); - this.lastId = - stateManager.getStreamState(stream.getStream().getName(), stream.getStream().getNamespace()).map(MongoDbStreamState::id).orElse(null); - this.cdcMetadataInjector = cdcMetadataInjector; - this.isEnforceSchema = isEnforceSchema; - } - - @Override - public boolean hasNext() { - LOGGER.debug("Checking hasNext() for stream {}...", getStream()); - if (initialSnapshotFailed) { - // If the initial snapshot is incomplete for this stream, throw an exception failing the sync. This - // will ensure the platform retry logic - // kicks in and keeps retrying the sync until the initial snapshot is complete. 
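The checkpoint cadence this deleted iterator implemented, and which the new StateEmitFrequency reproduces, comes down to two triggers. A minimal sketch, assuming the same count-and-clock bookkeeping as the next() method below (shouldCheckpoint is an illustrative name, not a method of the original class):

// Emit an intermediate state message when either trigger fires; both require at
// least one record since the last checkpoint (mirrors emitStateDueToMessageCount
// and emitStateDueToDuration in next() below).
static boolean shouldCheckpoint(final int recordsSinceLastState,
                                final java.time.Instant lastCheckpoint,
                                final int checkpointInterval,
                                final java.time.Duration checkpointDuration) {
  final boolean byCount = recordsSinceLastState > 0 && recordsSinceLastState % checkpointInterval == 0;
  final boolean byTime = recordsSinceLastState > 0
      && java.time.Duration.between(lastCheckpoint, java.time.Instant.now()).compareTo(checkpointDuration) > 0;
  return byCount || byTime;
}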
- throw new RuntimeException(initialSnapshotException); - } - try { - if (iter.hasNext()) { - return true; - } - } catch (final MongoException e) { - // If hasNext throws an exception, log it and set the flag to indicate that the initial snapshot - // failed. This indicates to the main iterator - // to emit state associated with what has been processed so far. - initialSnapshotFailed = true; - initialSnapshotException = e; - LOGGER.info("hasNext threw an exception for stream {}: {}", getStream(), e.getMessage(), e); - return true; - } - - // no more records in cursor + no record messages have been emitted => collection is empty - if (lastId == null) { - return false; - } - - // no more records in cursor + record messages have been emitted => we should emit a final state - // message. - if (!finalStateNext) { - finalStateNext = true; - LOGGER.debug("Final state is now true for stream {}...", getStream()); - return true; - } - - return false; - } - - @Override - public AirbyteMessage next() { - LOGGER.debug("Getting next message from stream {}...", getStream()); - // Should a state message be emitted based on the number of messages we've seen? - final var emitStateDueToMessageCount = count > 0 && count % checkpointInterval == 0; - // Should a state message be emitted based on then last time a state message was emitted? - final var emitStateDueToDuration = count > 0 && Duration.between(lastCheckpoint, Instant.now()).compareTo(checkpointDuration) > 0; - - if (finalStateNext || initialSnapshotFailed) { - LOGGER.debug("Emitting final state status for stream {}:{}...", stream.getStream().getNamespace(), stream.getStream().getName()); - final var finalStateStatus = initialSnapshotFailed ? InitialSnapshotStatus.IN_PROGRESS : InitialSnapshotStatus.COMPLETE; - final var idType = IdType.findByJavaType(lastId.getClass().getSimpleName()) - .orElseThrow(() -> new ConfigErrorException("Unsupported _id type " + lastId.getClass().getSimpleName())); - final var state = new MongoDbStreamState(lastId.toString(), finalStateStatus, idType); - - stateManager.updateStreamState(stream.getStream().getName(), stream.getStream().getNamespace(), state); - - return new AirbyteMessage() - .withType(Type.STATE) - .withState(stateManager.toState()); - } else if (emitStateDueToMessageCount || emitStateDueToDuration) { - count = 0; - lastCheckpoint = Instant.now(); - - if (lastId != null) { - final var idType = IdType.findByJavaType(lastId.getClass().getSimpleName()) - .orElseThrow(() -> new ConfigErrorException("Unsupported _id type " + lastId.getClass().getSimpleName())); - final var state = new MongoDbStreamState(lastId.toString(), InitialSnapshotStatus.IN_PROGRESS, idType); - stateManager.updateStreamState(stream.getStream().getName(), stream.getStream().getNamespace(), state); - } - - return new AirbyteMessage() - .withType(Type.STATE) - .withState(stateManager.toState()); - } - - count++; - final var document = iter.next(); - final var jsonNode = isEnforceSchema ? 
MongoDbCdcEventUtils.toJsonNode(document, fields) : MongoDbCdcEventUtils.toJsonNodeNoSchema(document); - - lastId = document.get(MongoConstants.ID_FIELD); - - return new AirbyteMessage() - .withType(Type.RECORD) - .withRecord(new AirbyteRecordMessage() - .withStream(stream.getStream().getName()) - .withNamespace(stream.getStream().getNamespace()) - .withEmittedAt(emittedAt.toEpochMilli()) - .withData(injectMetadata(jsonNode))); - } - - private JsonNode injectMetadata(final JsonNode jsonNode) { - if (Objects.nonNull(cdcMetadataInjector) && cdcMetadataInjector.isPresent() && jsonNode instanceof ObjectNode) { - cdcMetadataInjector.get().addMetaDataToRowsFetchedOutsideDebezium((ObjectNode) jsonNode, emittedAt.toString(), null); - } - - return jsonNode; - } - - private String getStream() { - return String.format("%s:%s", stream.getStream().getNamespace(), stream.getStream().getName()); - } - -} diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitializer.java b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitializer.java index 3230092cc0c2..9760b0d4cacc 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitializer.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitializer.java @@ -139,8 +139,8 @@ public List> createCdcIterators( MongoDbCdcInitialSnapshotUtils.getStreamsForInitialSnapshot(mongoClient, stateManager, catalog, savedOffsetIsValid); final InitialSnapshotHandler initialSnapshotHandler = new InitialSnapshotHandler(); final List> initialSnapshotIterators = - initialSnapshotHandler.getIterators(initialSnapshotStreams, stateManager, mongoClient.getDatabase(databaseName), cdcMetadataInjector, - emittedAt, config.getCheckpointInterval(), isEnforceSchema); + initialSnapshotHandler.getIterators(initialSnapshotStreams, stateManager, mongoClient.getDatabase(databaseName), + config.getCheckpointInterval(), isEnforceSchema); final AirbyteDebeziumHandler handler = new AirbyteDebeziumHandler<>(config.getDatabaseConfig(), new MongoDbCdcTargetPosition(initialResumeToken), false, firstRecordWaitTime, subsequentRecordWaitTime, queueSize, false); diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/state/MongoDbStateManager.java b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/state/MongoDbStateManager.java index 3c5c473eaeeb..47f7cd87b657 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/state/MongoDbStateManager.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/state/MongoDbStateManager.java @@ -6,25 +6,41 @@ import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import io.airbyte.cdk.integrations.debezium.CdcMetadataInjector; +import io.airbyte.cdk.integrations.source.relationaldb.state.SourceStateMessageProducer; +import io.airbyte.commons.exceptions.ConfigErrorException; import io.airbyte.commons.json.Jsons; +import io.airbyte.integrations.source.mongodb.MongoConstants; +import io.airbyte.integrations.source.mongodb.MongoDbSourceConfig; 
+import io.airbyte.integrations.source.mongodb.cdc.MongoDbCdcConnectorMetadataInjector; +import io.airbyte.integrations.source.mongodb.cdc.MongoDbCdcEventUtils; import io.airbyte.integrations.source.mongodb.cdc.MongoDbCdcState; import io.airbyte.protocol.models.v0.AirbyteGlobalState; +import io.airbyte.protocol.models.v0.AirbyteMessage; +import io.airbyte.protocol.models.v0.AirbyteMessage.Type; +import io.airbyte.protocol.models.v0.AirbyteRecordMessage; import io.airbyte.protocol.models.v0.AirbyteStateMessage; import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.v0.AirbyteStreamState; +import io.airbyte.protocol.models.v0.CatalogHelpers; +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; import io.airbyte.protocol.models.v0.StreamDescriptor; +import java.time.Instant; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Optional; import java.util.stream.Collectors; +import org.bson.Document; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A state manager for MongoDB CDC syncs. */ -public class MongoDbStateManager { +public class MongoDbStateManager implements SourceStateMessageProducer { private static final Logger LOGGER = LoggerFactory.getLogger(MongoDbStateManager.class); @@ -33,6 +49,12 @@ public class MongoDbStateManager { */ private MongoDbCdcState cdcState; + private Instant emittedAt; + private Optional> cdcMetadataInjector; + private boolean isEnforceSchema; + + private Map streamPairToLastIdMap; + /** * Map of streams (name/namespace tuple) to the current stream state information stored in the * state. @@ -45,8 +67,12 @@ public class MongoDbStateManager { * @param initialState The initial state to be stored in the state manager. * @return A new {@link MongoDbStateManager} */ - public static MongoDbStateManager createStateManager(final JsonNode initialState) { + public static MongoDbStateManager createStateManager(final JsonNode initialState, final MongoDbSourceConfig config) { final MongoDbStateManager stateManager = new MongoDbStateManager(); + stateManager.streamPairToLastIdMap = new HashMap<>(); + stateManager.isEnforceSchema = config.getEnforceSchema(); + stateManager.emittedAt = Instant.now(); + stateManager.cdcMetadataInjector = Optional.of(MongoDbCdcConnectorMetadataInjector.getInstance(stateManager.emittedAt)); if (initialState == null) { return stateManager; @@ -81,7 +107,7 @@ private static List deserializeState(final JsonNode initial /** * Creates a new {@link MongoDbStateManager} instance. This constructor should not be called - * directly. Instead, use {@link #createStateManager(JsonNode)}. + * directly. Instead, use {@link #createStateManager(JsonNode, MongoDbSourceConfig)}. */ private MongoDbStateManager() {} @@ -206,4 +232,84 @@ private boolean isValidStreamDescriptor(final StreamDescriptor streamDescriptor) } } + /** + * Generates an intermediate state message for checkpointing purpose. + */ + @Override + public AirbyteStateMessage generateStateMessageAtCheckpoint(final ConfiguredAirbyteStream stream) { + final AirbyteStreamNameNamespacePair pair = new AirbyteStreamNameNamespacePair(stream.getStream().getName(), stream.getStream().getNamespace()); + + // Assuming we will always process at least 1 record message before sending out the state message. + // shouldEmitStateMessage should guard this. 
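Taken together, the refactor makes this state manager the SourceStateMessageProducer consulted by the generic CDK iterator. A sketch of the wiring, condensed from InitialSnapshotHandler.getIterators above (wireSnapshotIterator is a hypothetical helper; cursor, stream, and interval are assumed to be supplied by the caller):

// Condensed wiring sketch: the manager produces record and state messages, and the
// CDK's SourceStateIterator decides when to ask for them based on StateEmitFrequency.
static AutoCloseableIterator<AirbyteMessage> wireSnapshotIterator(final MongoCursor<Document> cursor,
                                                                  final ConfiguredAirbyteStream airbyteStream,
                                                                  final MongoDbStateManager stateManager,
                                                                  final int checkpointInterval) {
  final var stateIterator = new SourceStateIterator<>(cursor, airbyteStream, stateManager,
      new StateEmitFrequency(checkpointInterval, MongoConstants.CHECKPOINT_DURATION));
  return AutoCloseableIterators.fromIterator(stateIterator, cursor::close, null);
}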
+ var lastId = streamPairToLastIdMap.get(pair); + if (lastId != null) { + final var idType = IdType.findByJavaType(lastId.getClass().getSimpleName()) + .orElseThrow(() -> new ConfigErrorException("Unsupported _id type " + lastId.getClass().getSimpleName())); + final var state = new MongoDbStreamState(lastId.toString(), InitialSnapshotStatus.IN_PROGRESS, idType); + updateStreamState(stream.getStream().getName(), stream.getStream().getNamespace(), state); + } + return toState(); + } + + /** + * Process the record message and save last Id to the map. + */ + @Override + public AirbyteMessage processRecordMessage(final ConfiguredAirbyteStream stream, final Document document) { + final var fields = CatalogHelpers.getTopLevelFieldNames(stream).stream().collect(Collectors.toSet()); + + final var jsonNode = isEnforceSchema ? MongoDbCdcEventUtils.toJsonNode(document, fields) : MongoDbCdcEventUtils.toJsonNodeNoSchema(document); + + var lastId = document.get(MongoConstants.ID_FIELD); + final AirbyteStreamNameNamespacePair pair = new AirbyteStreamNameNamespacePair(stream.getStream().getName(), stream.getStream().getNamespace()); + streamPairToLastIdMap.put(pair, lastId); + + return new AirbyteMessage() + .withType(Type.RECORD) + .withRecord(new AirbyteRecordMessage() + .withStream(stream.getStream().getName()) + .withNamespace(stream.getStream().getNamespace()) + .withEmittedAt(emittedAt.toEpochMilli()) + .withData(injectMetadata(jsonNode))); + } + + private JsonNode injectMetadata(final JsonNode jsonNode) { + if (Objects.nonNull(cdcMetadataInjector) && cdcMetadataInjector.isPresent() && jsonNode instanceof ObjectNode) { + cdcMetadataInjector.get().addMetaDataToRowsFetchedOutsideDebezium((ObjectNode) jsonNode, emittedAt.toString(), null); + } + + return jsonNode; + } + + /** + * @return final state message. + */ + @Override + public AirbyteStateMessage createFinalStateMessage(final ConfiguredAirbyteStream stream) { + final AirbyteStreamNameNamespacePair pair = new AirbyteStreamNameNamespacePair(stream.getStream().getName(), stream.getStream().getNamespace()); + if (!streamPairToLastIdMap.containsKey(pair)) { + var initialLastId = getStreamState(stream.getStream().getName(), stream.getStream().getNamespace()).map(MongoDbStreamState::id).orElse(null); + streamPairToLastIdMap.put(pair, initialLastId); + } + var lastId = streamPairToLastIdMap.get(pair); + if (lastId != null) { + LOGGER.debug("Emitting final state status for stream {}:{}...", stream.getStream().getNamespace(), stream.getStream().getName()); + final var finalStateStatus = InitialSnapshotStatus.COMPLETE; + final var idType = IdType.findByJavaType(lastId.getClass().getSimpleName()) + .orElseThrow(() -> new ConfigErrorException("Unsupported _id type " + lastId.getClass().getSimpleName())); + final var state = new MongoDbStreamState(lastId.toString(), finalStateStatus, idType); + + updateStreamState(stream.getStream().getName(), stream.getStream().getNamespace(), state); + } + return toState(); + } + + /** + * Make sure we have processed at least 1 record from the stream. 
+ */ + @Override + public boolean shouldEmitStateMessage(final ConfiguredAirbyteStream stream) { + return streamPairToLastIdMap.get(new AirbyteStreamNameNamespacePair(stream.getStream().getName(), stream.getStream().getNamespace())) != null; + } + } diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/java/io/airbyte/integrations/source/mongodb/MongoDbSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/java/io/airbyte/integrations/source/mongodb/MongoDbSourceAcceptanceTest.java index 5b7703b85c81..c66d8c97c6c6 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/java/io/airbyte/integrations/source/mongodb/MongoDbSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/java/io/airbyte/integrations/source/mongodb/MongoDbSourceAcceptanceTest.java @@ -239,7 +239,8 @@ void testSyncEmptyCollection() throws Exception { final List stateMessages = filterStateMessages(messages); assertEquals(0, recordMessages.size()); - assertEquals(1, stateMessages.size()); + // Expect 1 state message from initial load and 1 from incremental load. + assertEquals(2, stateMessages.size()); final AirbyteStateMessage lastStateMessage = Iterables.getLast(stateMessages); assertNotNull(lastStateMessage.getGlobal().getSharedState()); diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/InitialSnapshotHandlerTest.java b/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/InitialSnapshotHandlerTest.java index 9ece697ec8fa..3acc708372a0 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/InitialSnapshotHandlerTest.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/InitialSnapshotHandlerTest.java @@ -4,11 +4,16 @@ package io.airbyte.integrations.source.mongodb; +import static io.airbyte.cdk.integrations.debezium.internals.DebeziumEventConverter.CDC_DELETED_AT; +import static io.airbyte.cdk.integrations.debezium.internals.DebeziumEventConverter.CDC_UPDATED_AT; +import static io.airbyte.integrations.source.mongodb.MongoConstants.DATABASE_CONFIG_CONFIGURATION_KEY; +import static io.airbyte.integrations.source.mongodb.cdc.MongoDbCdcConnectorMetadataInjector.CDC_DEFAULT_CURSOR; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; import static org.mockito.Mockito.when; import com.fasterxml.jackson.databind.JsonNode; @@ -17,7 +22,9 @@ import com.mongodb.client.MongoClients; import com.mongodb.client.MongoCollection; import io.airbyte.commons.exceptions.ConfigErrorException; +import io.airbyte.commons.json.Jsons; import io.airbyte.commons.util.AutoCloseableIterator; +import io.airbyte.integrations.source.mongodb.cdc.MongoDbDebeziumConstants; import io.airbyte.integrations.source.mongodb.state.IdType; import io.airbyte.integrations.source.mongodb.state.MongoDbStateManager; import io.airbyte.integrations.source.mongodb.state.MongoDbStreamState; @@ -28,7 +35,6 @@ import io.airbyte.protocol.models.v0.CatalogHelpers; import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; import 
io.airbyte.protocol.models.v0.SyncMode; -import java.time.Instant; import java.util.List; import java.util.Map; import java.util.Optional; @@ -72,6 +78,14 @@ class InitialSnapshotHandlerTest { private static final String NAME5 = "name5"; private static final String NAME6 = "name6"; + private static final String DATABASE = "test-database"; + + final MongoDbSourceConfig CONFIG = new MongoDbSourceConfig(Jsons.jsonNode( + Map.of(DATABASE_CONFIG_CONFIGURATION_KEY, + Map.of( + MongoDbDebeziumConstants.Configuration.CONNECTION_STRING_CONFIGURATION_KEY, "mongodb://host:12345/", + MongoDbDebeziumConstants.Configuration.DATABASE_CONFIGURATION_KEY, DATABASE)))); + private static final List STREAMS = List.of( CatalogHelpers.createConfiguredAirbyteStream( COLLECTION1, @@ -143,9 +157,10 @@ void testGetIteratorsEmptyInitialState() { NAME_FIELD, NAME6)))); final InitialSnapshotHandler initialSnapshotHandler = new InitialSnapshotHandler(); - final MongoDbStateManager stateManager = mock(MongoDbStateManager.class); + final MongoDbStateManager ogStateManager = MongoDbStateManager.createStateManager(null, CONFIG); + final MongoDbStateManager stateManager = spy(ogStateManager); final List> iterators = - initialSnapshotHandler.getIterators(STREAMS, stateManager, mongoClient.getDatabase(DB_NAME), null, Instant.now(), + initialSnapshotHandler.getIterators(STREAMS, stateManager, mongoClient.getDatabase(DB_NAME), MongoConstants.CHECKPOINT_INTERVAL, true); assertEquals(iterators.size(), 2, "Only two streams are configured as incremental, full refresh streams should be ignored"); @@ -159,21 +174,24 @@ void testGetIteratorsEmptyInitialState() { assertEquals(COLLECTION1, collection1StreamMessage1.getRecord().getStream()); assertEquals(OBJECT_ID1.toString(), collection1StreamMessage1.getRecord().getData().get(CURSOR_FIELD).asText()); assertEquals(NAME1, collection1StreamMessage1.getRecord().getData().get(NAME_FIELD).asText()); - assertConfiguredFieldsEqualsRecordDataFields(Set.of(CURSOR_FIELD, NAME_FIELD), collection1StreamMessage1.getRecord().getData()); + assertConfiguredFieldsEqualsRecordDataFields(Set.of(CURSOR_FIELD, NAME_FIELD, CDC_UPDATED_AT, CDC_DELETED_AT, CDC_DEFAULT_CURSOR), + collection1StreamMessage1.getRecord().getData()); final AirbyteMessage collection1StreamMessage2 = collection1.next(); assertEquals(Type.RECORD, collection1StreamMessage2.getType()); assertEquals(COLLECTION1, collection1StreamMessage2.getRecord().getStream()); assertEquals(OBJECT_ID2.toString(), collection1StreamMessage2.getRecord().getData().get(CURSOR_FIELD).asText()); assertEquals(NAME2, collection1StreamMessage2.getRecord().getData().get(NAME_FIELD).asText()); - assertConfiguredFieldsEqualsRecordDataFields(Set.of(CURSOR_FIELD, NAME_FIELD), collection1StreamMessage2.getRecord().getData()); + assertConfiguredFieldsEqualsRecordDataFields(Set.of(CURSOR_FIELD, NAME_FIELD, CDC_UPDATED_AT, CDC_DELETED_AT, CDC_DEFAULT_CURSOR), + collection1StreamMessage2.getRecord().getData()); final AirbyteMessage collection1StreamMessage3 = collection1.next(); assertEquals(Type.RECORD, collection1StreamMessage3.getType()); assertEquals(COLLECTION1, collection1StreamMessage3.getRecord().getStream()); assertEquals(OBJECT_ID3.toString(), collection1StreamMessage3.getRecord().getData().get(CURSOR_FIELD).asText()); assertEquals(NAME3, collection1StreamMessage3.getRecord().getData().get(NAME_FIELD).asText()); - assertConfiguredFieldsEqualsRecordDataFields(Set.of(CURSOR_FIELD, NAME_FIELD), collection1StreamMessage3.getRecord().getData()); + 
assertConfiguredFieldsEqualsRecordDataFields(Set.of(CURSOR_FIELD, NAME_FIELD, CDC_UPDATED_AT, CDC_DELETED_AT, CDC_DEFAULT_CURSOR), + collection1StreamMessage3.getRecord().getData()); final AirbyteMessage collection1SateMessage = collection1.next(); assertEquals(Type.STATE, collection1SateMessage.getType(), "State message is expected after all records in a stream are emitted"); @@ -185,13 +203,15 @@ void testGetIteratorsEmptyInitialState() { assertEquals(Type.RECORD, collection2StreamMessage1.getType()); assertEquals(COLLECTION2, collection2StreamMessage1.getRecord().getStream()); assertEquals(OBJECT_ID4.toString(), collection2StreamMessage1.getRecord().getData().get(CURSOR_FIELD).asText()); - assertConfiguredFieldsEqualsRecordDataFields(Set.of(CURSOR_FIELD), collection2StreamMessage1.getRecord().getData()); + assertConfiguredFieldsEqualsRecordDataFields(Set.of(CURSOR_FIELD, CDC_UPDATED_AT, CDC_DELETED_AT, CDC_DEFAULT_CURSOR), + collection2StreamMessage1.getRecord().getData()); final AirbyteMessage collection2StreamMessage2 = collection2.next(); assertEquals(Type.RECORD, collection2StreamMessage2.getType()); assertEquals(COLLECTION2, collection2StreamMessage2.getRecord().getStream()); assertEquals(OBJECT_ID5.toString(), collection2StreamMessage2.getRecord().getData().get(CURSOR_FIELD).asText()); - assertConfiguredFieldsEqualsRecordDataFields(Set.of(CURSOR_FIELD), collection2StreamMessage1.getRecord().getData()); + assertConfiguredFieldsEqualsRecordDataFields(Set.of(CURSOR_FIELD, CDC_UPDATED_AT, CDC_DELETED_AT, CDC_DEFAULT_CURSOR), + collection2StreamMessage1.getRecord().getData()); final AirbyteMessage collection2SateMessage = collection2.next(); assertEquals(Type.STATE, collection2SateMessage.getType(), "State message is expected after all records in a stream are emitted"); @@ -215,11 +235,12 @@ void testGetIteratorsNonEmptyInitialState() { NAME_FIELD, NAME3)))); final InitialSnapshotHandler initialSnapshotHandler = new InitialSnapshotHandler(); - final MongoDbStateManager stateManager = mock(MongoDbStateManager.class); + final MongoDbStateManager ogStateManager = MongoDbStateManager.createStateManager(null, CONFIG); + final MongoDbStateManager stateManager = spy(ogStateManager); when(stateManager.getStreamState(COLLECTION1, NAMESPACE)) .thenReturn(Optional.of(new MongoDbStreamState(OBJECT_ID1_STRING, null, IdType.OBJECT_ID))); final List> iterators = - initialSnapshotHandler.getIterators(STREAMS, stateManager, mongoClient.getDatabase(DB_NAME), null, Instant.now(), + initialSnapshotHandler.getIterators(STREAMS, stateManager, mongoClient.getDatabase(DB_NAME), MongoConstants.CHECKPOINT_INTERVAL, true); assertEquals(iterators.size(), 2, "Only two streams are configured as incremental, full refresh streams should be ignored"); @@ -233,7 +254,8 @@ void testGetIteratorsNonEmptyInitialState() { assertEquals(COLLECTION1, collection1StreamMessage1.getRecord().getStream()); assertEquals(OBJECT_ID2.toString(), collection1StreamMessage1.getRecord().getData().get(CURSOR_FIELD).asText()); assertEquals(NAME2, collection1StreamMessage1.getRecord().getData().get(NAME_FIELD).asText()); - assertConfiguredFieldsEqualsRecordDataFields(Set.of(CURSOR_FIELD, NAME_FIELD), collection1StreamMessage1.getRecord().getData()); + assertConfiguredFieldsEqualsRecordDataFields(Set.of(CURSOR_FIELD, NAME_FIELD, CDC_UPDATED_AT, CDC_DELETED_AT, CDC_DEFAULT_CURSOR), + collection1StreamMessage1.getRecord().getData()); final AirbyteMessage collection1SateMessage = collection1.next(); assertEquals(Type.STATE, 
collection1SateMessage.getType(), "State message is expected after all records in a stream are emitted"); @@ -245,7 +267,8 @@ void testGetIteratorsNonEmptyInitialState() { assertEquals(Type.RECORD, collection2StreamMessage1.getType()); assertEquals(COLLECTION2, collection2StreamMessage1.getRecord().getStream()); assertEquals(OBJECT_ID3.toString(), collection2StreamMessage1.getRecord().getData().get(CURSOR_FIELD).asText()); - assertConfiguredFieldsEqualsRecordDataFields(Set.of(CURSOR_FIELD), collection2StreamMessage1.getRecord().getData()); + assertConfiguredFieldsEqualsRecordDataFields(Set.of(CURSOR_FIELD, CDC_UPDATED_AT, CDC_DELETED_AT, CDC_DEFAULT_CURSOR), + collection2StreamMessage1.getRecord().getData()); final AirbyteMessage collection2SateMessage = collection2.next(); assertEquals(Type.STATE, collection2SateMessage.getType(), "State message is expected after all records in a stream are emitted"); @@ -267,7 +290,7 @@ void testGetIteratorsThrowsExceptionWhenThereAreDifferentIdTypes() { final MongoDbStateManager stateManager = mock(MongoDbStateManager.class); final var thrown = assertThrows(ConfigErrorException.class, - () -> initialSnapshotHandler.getIterators(STREAMS, stateManager, mongoClient.getDatabase(DB_NAME), null, Instant.now(), + () -> initialSnapshotHandler.getIterators(STREAMS, stateManager, mongoClient.getDatabase(DB_NAME), MongoConstants.CHECKPOINT_INTERVAL, true)); assertTrue(thrown.getMessage().contains("must be consistently typed")); } @@ -280,10 +303,10 @@ void testGetIteratorsThrowsExceptionWhenThereAreUnsupportedIdTypes() { NAME_FIELD, NAME1)))); final InitialSnapshotHandler initialSnapshotHandler = new InitialSnapshotHandler(); - final MongoDbStateManager stateManager = mock(MongoDbStateManager.class); + final MongoDbStateManager stateManager = spy(MongoDbStateManager.class); final var thrown = assertThrows(ConfigErrorException.class, - () -> initialSnapshotHandler.getIterators(STREAMS, stateManager, mongoClient.getDatabase(DB_NAME), null, Instant.now(), + () -> initialSnapshotHandler.getIterators(STREAMS, stateManager, mongoClient.getDatabase(DB_NAME), MongoConstants.CHECKPOINT_INTERVAL, true)); assertTrue(thrown.getMessage().contains("_id fields with the following types are currently supported")); } @@ -307,9 +330,10 @@ void testGetIteratorsWithOneEmptyCollection() { NAME_FIELD, NAME1)))); final InitialSnapshotHandler initialSnapshotHandler = new InitialSnapshotHandler(); - final MongoDbStateManager stateManager = mock(MongoDbStateManager.class); + final MongoDbStateManager ogStateManager = MongoDbStateManager.createStateManager(null, CONFIG); + final MongoDbStateManager stateManager = spy(ogStateManager); final List> iterators = - initialSnapshotHandler.getIterators(STREAMS, stateManager, mongoClient.getDatabase(DB_NAME), null, Instant.now(), + initialSnapshotHandler.getIterators(STREAMS, stateManager, mongoClient.getDatabase(DB_NAME), MongoConstants.CHECKPOINT_INTERVAL, true); assertEquals(iterators.size(), 2, "Only two streams are configured as incremental, full refresh streams should be ignored"); @@ -323,14 +347,18 @@ void testGetIteratorsWithOneEmptyCollection() { assertEquals(COLLECTION1, collection1StreamMessage1.getRecord().getStream()); assertEquals(OBJECT_ID1.toString(), collection1StreamMessage1.getRecord().getData().get(CURSOR_FIELD).asText()); assertEquals(NAME1, collection1StreamMessage1.getRecord().getData().get(NAME_FIELD).asText()); - assertConfiguredFieldsEqualsRecordDataFields(Set.of(CURSOR_FIELD, NAME_FIELD), 
collection1StreamMessage1.getRecord().getData()); + assertConfiguredFieldsEqualsRecordDataFields(Set.of(CURSOR_FIELD, NAME_FIELD, CDC_UPDATED_AT, CDC_DELETED_AT, CDC_DEFAULT_CURSOR), + collection1StreamMessage1.getRecord().getData()); final AirbyteMessage collection1SateMessage = collection1.next(); assertEquals(Type.STATE, collection1SateMessage.getType(), "State message is expected after all records in a stream are emitted"); assertFalse(collection1.hasNext()); - // collection2 + // collection2 will generate a final state. + + final AirbyteMessage collection2StateMessage = collection2.next(); + assertEquals(Type.STATE, collection2StateMessage.getType(), "State message is expected after all records in a stream are emitted"); assertFalse(collection2.hasNext()); } @@ -350,11 +378,12 @@ void testGetIteratorsWithInitialStateNonDefaultIdType() { NAME_FIELD, NAME3)))); final InitialSnapshotHandler initialSnapshotHandler = new InitialSnapshotHandler(); - final MongoDbStateManager stateManager = mock(MongoDbStateManager.class); + final MongoDbStateManager ogStateManager = MongoDbStateManager.createStateManager(null, CONFIG); + final MongoDbStateManager stateManager = spy(ogStateManager); when(stateManager.getStreamState(COLLECTION1, NAMESPACE)) .thenReturn(Optional.of(new MongoDbStreamState(OBJECT_ID1_STRING, null, IdType.STRING))); final List> iterators = - initialSnapshotHandler.getIterators(STREAMS, stateManager, mongoClient.getDatabase(DB_NAME), null, Instant.now(), + initialSnapshotHandler.getIterators(STREAMS, stateManager, mongoClient.getDatabase(DB_NAME), MongoConstants.CHECKPOINT_INTERVAL, true); assertEquals(iterators.size(), 2, "Only two streams are configured as incremental, full refresh streams should be ignored"); @@ -364,11 +393,16 @@ void testGetIteratorsWithInitialStateNonDefaultIdType() { // collection1, first document should be skipped final AirbyteMessage collection1StreamMessage1 = collection1.next(); + System.out.println("message 1: " + collection1StreamMessage1); + final AirbyteMessage collection2StreamMessage1 = collection2.next(); + System.out.println("message 2: " + collection2StreamMessage1); + assertEquals(Type.RECORD, collection1StreamMessage1.getType()); assertEquals(COLLECTION1, collection1StreamMessage1.getRecord().getStream()); assertEquals(OBJECT_ID2.toString(), collection1StreamMessage1.getRecord().getData().get(CURSOR_FIELD).asText()); assertEquals(NAME2, collection1StreamMessage1.getRecord().getData().get(NAME_FIELD).asText()); - assertConfiguredFieldsEqualsRecordDataFields(Set.of(CURSOR_FIELD, NAME_FIELD), collection1StreamMessage1.getRecord().getData()); + assertConfiguredFieldsEqualsRecordDataFields(Set.of(CURSOR_FIELD, NAME_FIELD, CDC_UPDATED_AT, CDC_DELETED_AT, CDC_DEFAULT_CURSOR), + collection1StreamMessage1.getRecord().getData()); final AirbyteMessage collection1SateMessage = collection1.next(); assertEquals(Type.STATE, collection1SateMessage.getType(), "State message is expected after all records in a stream are emitted"); @@ -376,11 +410,11 @@ void testGetIteratorsWithInitialStateNonDefaultIdType() { assertFalse(collection1.hasNext()); // collection2, no documents should be skipped - final AirbyteMessage collection2StreamMessage1 = collection2.next(); assertEquals(Type.RECORD, collection2StreamMessage1.getType()); assertEquals(COLLECTION2, collection2StreamMessage1.getRecord().getStream()); assertEquals(OBJECT_ID3.toString(), collection2StreamMessage1.getRecord().getData().get(CURSOR_FIELD).asText()); - 
assertConfiguredFieldsEqualsRecordDataFields(Set.of(CURSOR_FIELD), collection2StreamMessage1.getRecord().getData()); + assertConfiguredFieldsEqualsRecordDataFields(Set.of(CURSOR_FIELD, CDC_UPDATED_AT, CDC_DELETED_AT, CDC_DEFAULT_CURSOR), + collection2StreamMessage1.getRecord().getData()); final AirbyteMessage collection2SateMessage = collection2.next(); assertEquals(Type.STATE, collection2SateMessage.getType(), "State message is expected after all records in a stream are emitted"); diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/MongoDbStateIteratorTest.java b/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/MongoDbStateManagerTest.java similarity index 83% rename from airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/MongoDbStateIteratorTest.java rename to airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/MongoDbStateManagerTest.java index 66bf277dddbc..d830fd53acab 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/MongoDbStateIteratorTest.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/MongoDbStateManagerTest.java @@ -4,15 +4,18 @@ package io.airbyte.integrations.source.mongodb; +import static io.airbyte.integrations.source.mongodb.MongoConstants.DATABASE_CONFIG_CONFIGURATION_KEY; import static java.time.temporal.ChronoUnit.SECONDS; import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import com.mongodb.MongoException; import com.mongodb.client.MongoCursor; +import io.airbyte.cdk.integrations.source.relationaldb.state.SourceStateIterator; +import io.airbyte.cdk.integrations.source.relationaldb.state.StateEmitFrequency; import io.airbyte.commons.exceptions.ConfigErrorException; -import io.airbyte.integrations.source.mongodb.cdc.MongoDbCdcConnectorMetadataInjector; +import io.airbyte.commons.json.Jsons; +import io.airbyte.integrations.source.mongodb.cdc.MongoDbDebeziumConstants; import io.airbyte.integrations.source.mongodb.state.IdType; import io.airbyte.integrations.source.mongodb.state.InitialSnapshotStatus; import io.airbyte.integrations.source.mongodb.state.MongoDbStateManager; @@ -27,9 +30,8 @@ import io.airbyte.protocol.models.v0.DestinationSyncMode; import io.airbyte.protocol.models.v0.SyncMode; import java.time.Duration; -import java.time.Instant; import java.util.List; -import java.util.Optional; +import java.util.Map; import org.bson.Document; import org.bson.types.ObjectId; import org.junit.jupiter.api.AfterEach; @@ -41,20 +43,25 @@ import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; -class MongoDbStateIteratorTest { +class MongoDbStateManagerTest { private static final int CHECKPOINT_INTERVAL = 2; @Mock private MongoCursor mongoCursor; private AutoCloseable closeable; private MongoDbStateManager stateManager; - private MongoDbCdcConnectorMetadataInjector cdcConnectorMetadataInjector; + private static final String DATABASE = "test-database"; + + final MongoDbSourceConfig CONFIG = new MongoDbSourceConfig(Jsons.jsonNode( + Map.of(DATABASE_CONFIG_CONFIGURATION_KEY, + Map.of( + MongoDbDebeziumConstants.Configuration.CONNECTION_STRING_CONFIGURATION_KEY, "mongodb://host:12345/", + 
MongoDbDebeziumConstants.Configuration.DATABASE_CONFIGURATION_KEY, DATABASE)))); @BeforeEach public void setup() { closeable = MockitoAnnotations.openMocks(this); - stateManager = MongoDbStateManager.createStateManager(null); - cdcConnectorMetadataInjector = mock(MongoDbCdcConnectorMetadataInjector.class); + stateManager = MongoDbStateManager.createStateManager(null, CONFIG); } @AfterEach @@ -94,8 +101,8 @@ public Document answer(final InvocationOnMock invocation) { final var stream = catalog().getStreams().stream().findFirst().orElseThrow(); - final var iter = new MongoDbStateIterator(mongoCursor, stateManager, Optional.of(cdcConnectorMetadataInjector), stream, Instant.now(), - CHECKPOINT_INTERVAL, MongoConstants.CHECKPOINT_DURATION, true); + final var iter = new SourceStateIterator(mongoCursor, stream, stateManager, new StateEmitFrequency(CHECKPOINT_INTERVAL, + MongoConstants.CHECKPOINT_DURATION)); // with a batch size of 2, the MongoDbStateIterator should return the following after each // `hasNext`/`next` call: @@ -161,8 +168,8 @@ void treatHasNextExceptionAsFalse() { final var stream = catalog().getStreams().stream().findFirst().orElseThrow(); - final var iter = new MongoDbStateIterator(mongoCursor, stateManager, Optional.of(cdcConnectorMetadataInjector), stream, Instant.now(), - CHECKPOINT_INTERVAL, MongoConstants.CHECKPOINT_DURATION, true); + final var iter = new SourceStateIterator(mongoCursor, stream, stateManager, new StateEmitFrequency(CHECKPOINT_INTERVAL, + MongoConstants.CHECKPOINT_DURATION)); // with a batch size of 2, the MongoDbStateIterator should return the following after each // `hasNext`/`next` call: @@ -175,18 +182,6 @@ void treatHasNextExceptionAsFalse() { assertEquals(Type.RECORD, message.getType()); assertEquals(docs.get(0).get("_id").toString(), message.getRecord().getData().get("_id").asText()); - assertTrue(iter.hasNext(), "state should be next"); - message = iter.next(); - assertEquals(Type.STATE, message.getType()); - assertEquals( - docs.get(0).get("_id").toString(), - message.getState().getGlobal().getStreamStates().get(0).getStreamState().get("id").asText(), - "state id should match last record id"); - assertEquals( - InitialSnapshotStatus.IN_PROGRESS.toString(), - message.getState().getGlobal().getStreamStates().get(0).getStreamState().get("status").asText(), - "state status should be in_progress"); - assertThrows(RuntimeException.class, iter::hasNext, "next iteration should throw exception to fail the sync"); } @@ -202,20 +197,19 @@ void anInvalidIdFieldThrowsAnException() { final var stream = catalog().getStreams().stream().findFirst().orElseThrow(); - final var iter = new MongoDbStateIterator(mongoCursor, stateManager, Optional.of(cdcConnectorMetadataInjector), stream, Instant.now(), - CHECKPOINT_INTERVAL, MongoConstants.CHECKPOINT_DURATION, true); + final var iter = new SourceStateIterator(mongoCursor, stream, stateManager, new StateEmitFrequency(CHECKPOINT_INTERVAL, + MongoConstants.CHECKPOINT_DURATION)); assertTrue(iter.hasNext(), "air force blue should be next"); // first next call should return the document iter.next(); - assertTrue(iter.hasNext(), "air force blue should be next"); - // second next call should throw an exception - assertThrows(ConfigErrorException.class, iter::next); + // Second hasNext/next call should throw exception. + assertThrows(ConfigErrorException.class, iter::hasNext); } @Test void initialStateIsReturnedIfUnderlyingIteratorIsEmpty() { - // on the second hasNext call, throw an exception + // underlying cursor is empty. 
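In outline, what this test pins down is the new final-state behavior: even with nothing to read, a stream that already has saved state still gets exactly one closing STATE message. A condensed sketch of the sequence asserted below, using the same mocks as the test body:

// Expected sequence for an empty cursor with pre-seeded stream state (condensed
// from the assertions in this test).
when(mongoCursor.hasNext()).thenReturn(false);   // nothing left to read
assertTrue(iter.hasNext());                      // one message still pending...
final AirbyteMessage message = iter.next();      // ...the final STATE message
assertEquals(Type.STATE, message.getType());     // its stream state carries status COMPLETE
assertFalse(iter.hasNext());                     // then the iterator is exhausted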
when(mongoCursor.hasNext()).thenReturn(false); final var stream = catalog().getStreams().stream().findFirst().orElseThrow(); @@ -224,8 +218,8 @@ void initialStateIsReturnedIfUnderlyingIteratorIsEmpty() { stateManager.updateStreamState(stream.getStream().getName(), stream.getStream().getNamespace(), new MongoDbStreamState(objectId, InitialSnapshotStatus.IN_PROGRESS, IdType.OBJECT_ID)); - final var iter = new MongoDbStateIterator(mongoCursor, stateManager, Optional.of(cdcConnectorMetadataInjector), stream, Instant.now(), - CHECKPOINT_INTERVAL, MongoConstants.CHECKPOINT_DURATION, true); + final var iter = new SourceStateIterator(mongoCursor, stream, stateManager, new StateEmitFrequency(CHECKPOINT_INTERVAL, + MongoConstants.CHECKPOINT_DURATION)); // the MongoDbStateIterator should return the following after each // `hasNext`/`next` call: @@ -242,7 +236,7 @@ void initialStateIsReturnedIfUnderlyingIteratorIsEmpty() { assertEquals( InitialSnapshotStatus.COMPLETE.toString(), message.getState().getGlobal().getStreamStates().get(0).getStreamState().get("status").asText(), - "state status should be in_progress"); + "state status should be complete: " + message); assertFalse(iter.hasNext(), "should have no more records"); } @@ -262,8 +256,8 @@ void stateEmittedAfterDuration() throws InterruptedException { stateManager.updateStreamState(stream.getStream().getName(), stream.getStream().getNamespace(), new MongoDbStreamState(objectId, InitialSnapshotStatus.IN_PROGRESS, IdType.OBJECT_ID)); - final var iter = new MongoDbStateIterator(mongoCursor, stateManager, Optional.of(cdcConnectorMetadataInjector), stream, Instant.now(), 1000000, - Duration.of(1, SECONDS), true); + final var iter = new SourceStateIterator(mongoCursor, stream, stateManager, new StateEmitFrequency(1000000, + Duration.of(1, SECONDS))); // with a batch size of 1,000,000 and a 1.5s sleep between hasNext calls, the expected results // should be @@ -332,9 +326,12 @@ void stateEmittedAfterDuration() throws InterruptedException { void hasNextNoInitialStateAndNoMoreRecordsInCursor() { when(mongoCursor.hasNext()).thenReturn(false); final var stream = catalog().getStreams().stream().findFirst().orElseThrow(); - final var iter = new MongoDbStateIterator(mongoCursor, stateManager, Optional.of(cdcConnectorMetadataInjector), stream, Instant.now(), 1000000, - Duration.of(1, SECONDS), true); + final var iter = new SourceStateIterator(mongoCursor, stream, stateManager, new StateEmitFrequency(1000000, Duration.of(1, SECONDS))); + + // MongoDbStateIterator should return a final state message + assertTrue(iter.hasNext()); + iter.next(); assertFalse(iter.hasNext()); } diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitializerTest.java b/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitializerTest.java index 7e0ea6eaa222..f7b48c8bcb95 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitializerTest.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitializerTest.java @@ -165,7 +165,7 @@ void setUp() { @Test void testCreateCdcIteratorsEmptyInitialState() { - final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(null); + final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(null, CONFIG); final List> 
iterators = cdcInitializer .createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG, stateManager, EMITTED_AT, CONFIG); assertNotNull(iterators); @@ -177,18 +177,17 @@ void testCreateCdcIteratorsEmptyInitialState() { @Test void testCreateCdcIteratorsEmptyInitialStateEmptyCollections() { when(findCursor.hasNext()).thenReturn(false); - final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(null); + final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(null, CONFIG); final List> iterators = cdcInitializer .createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG, stateManager, EMITTED_AT, CONFIG); assertNotNull(iterators); assertEquals(2, iterators.size(), "Should always have 2 iterators: 1 for the initial snapshot and 1 for the cdc stream"); - assertFalse(iterators.get(0).hasNext(), - "Initial snapshot iterator should have no messages if there's no initial snapshot state and collections are empty"); } @Test void testCreateCdcIteratorsFromInitialStateWithInProgressInitialSnapshot() { - final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(createInitialDebeziumState(InitialSnapshotStatus.IN_PROGRESS)); + final MongoDbStateManager stateManager = + MongoDbStateManager.createStateManager(createInitialDebeziumState(InitialSnapshotStatus.IN_PROGRESS), CONFIG); final List> iterators = cdcInitializer .createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG, stateManager, EMITTED_AT, CONFIG); assertNotNull(iterators); @@ -199,7 +198,8 @@ void testCreateCdcIteratorsFromInitialStateWithInProgressInitialSnapshot() { @Test void testCreateCdcIteratorsFromInitialStateWithCompletedInitialSnapshot() { - final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(createInitialDebeziumState(InitialSnapshotStatus.COMPLETE)); + final MongoDbStateManager stateManager = + MongoDbStateManager.createStateManager(createInitialDebeziumState(InitialSnapshotStatus.COMPLETE), CONFIG); final List> iterators = cdcInitializer .createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG, stateManager, EMITTED_AT, CONFIG); assertNotNull(iterators); @@ -213,7 +213,8 @@ void testCreateCdcIteratorsWithCompletedInitialSnapshotSavedOffsetInvalidDefault .thenReturn(mongoChangeStreamCursor) .thenThrow(new MongoCommandException(new BsonDocument(), new ServerAddress())) .thenReturn(mongoChangeStreamCursor); - final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(createInitialDebeziumState(InitialSnapshotStatus.COMPLETE)); + final MongoDbStateManager stateManager = + MongoDbStateManager.createStateManager(createInitialDebeziumState(InitialSnapshotStatus.COMPLETE), CONFIG); assertThrows(ConfigErrorException.class, () -> cdcInitializer.createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG, stateManager, EMITTED_AT, CONFIG)); } @@ -224,7 +225,8 @@ void testCreateCdcIteratorsWithCompletedInitialSnapshotSavedOffsetFailOption() { .thenReturn(mongoChangeStreamCursor) .thenThrow(new MongoCommandException(new BsonDocument(), new ServerAddress())) .thenReturn(mongoChangeStreamCursor); - final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(createInitialDebeziumState(InitialSnapshotStatus.COMPLETE)); + final MongoDbStateManager stateManager = + MongoDbStateManager.createStateManager(createInitialDebeziumState(InitialSnapshotStatus.COMPLETE), CONFIG); 
assertThrows(ConfigErrorException.class, () -> cdcInitializer.createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG, stateManager, EMITTED_AT, CONFIG)); } @@ -236,7 +238,8 @@ void testCreateCdcIteratorsWithCompletedInitialSnapshotSavedOffsetInvalidResyncO .thenReturn(mongoChangeStreamCursor) .thenThrow(new MongoCommandException(new BsonDocument(), new ServerAddress())) .thenReturn(mongoChangeStreamCursor); - final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(createInitialDebeziumState(InitialSnapshotStatus.COMPLETE)); + final MongoDbStateManager stateManager = + MongoDbStateManager.createStateManager(createInitialDebeziumState(InitialSnapshotStatus.COMPLETE), CONFIG); final List> iterators = cdcInitializer .createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG, stateManager, EMITTED_AT, resyncConfig); assertNotNull(iterators); @@ -257,7 +260,8 @@ JsonNode createConfig(String cdcCursorFailBehaviour) { @Test void testUnableToExtractOffsetFromStateException() { - final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(createInitialDebeziumState(InitialSnapshotStatus.COMPLETE)); + final MongoDbStateManager stateManager = + MongoDbStateManager.createStateManager(createInitialDebeziumState(InitialSnapshotStatus.COMPLETE), CONFIG); doReturn(Optional.empty()).when(mongoDbDebeziumStateUtil).savedOffset(any(), any(), any(), any(), any()); assertThrows(RuntimeException.class, () -> cdcInitializer.createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG, stateManager, EMITTED_AT, CONFIG)); @@ -272,7 +276,8 @@ void testMultipleIdTypesThrowsException() { when(aggregateCursor.next()).thenReturn(aggregate1, aggregate2); doCallRealMethod().when(aggregateIterable).forEach(any(Consumer.class)); - final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(createInitialDebeziumState(InitialSnapshotStatus.IN_PROGRESS)); + final MongoDbStateManager stateManager = + MongoDbStateManager.createStateManager(createInitialDebeziumState(InitialSnapshotStatus.IN_PROGRESS), CONFIG); final var thrown = assertThrows(ConfigErrorException.class, () -> cdcInitializer .createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG, stateManager, EMITTED_AT, CONFIG)); @@ -287,7 +292,7 @@ void testUnsupportedIdTypeThrowsException() { when(aggregateCursor.next()).thenReturn(aggregate); doCallRealMethod().when(aggregateIterable).forEach(any(Consumer.class)); - final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(null); + final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(null, CONFIG); final var thrown = assertThrows(ConfigErrorException.class, () -> cdcInitializer .createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG, stateManager, EMITTED_AT, CONFIG)); diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcStateHandlerTest.java b/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcStateHandlerTest.java index d3bfed395b0e..a40e69ddd7a5 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcStateHandlerTest.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcStateHandlerTest.java @@ -4,12 +4,14 
 package io.airbyte.integrations.source.mongodb.cdc;

+import static io.airbyte.integrations.source.mongodb.MongoConstants.DATABASE_CONFIG_CONFIGURATION_KEY;
 import static io.airbyte.protocol.models.v0.AirbyteStateMessage.AirbyteStateType.GLOBAL;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertNotNull;
 import static org.junit.jupiter.api.Assertions.assertThrows;

 import com.fasterxml.jackson.core.type.TypeReference;
+import io.airbyte.integrations.source.mongodb.MongoDbSourceConfig;
 import io.airbyte.integrations.source.mongodb.state.MongoDbStateManager;
 import io.airbyte.protocol.models.Jsons;
 import io.airbyte.protocol.models.v0.AirbyteMessage;
@@ -23,11 +25,17 @@ class MongoDbCdcStateHandlerTest {
   private static final String REPLICA_SET = "test-replica-set";
   private static final String RESUME_TOKEN = "8264BEB9F3000000012B0229296E04";

+  final MongoDbSourceConfig CONFIG = new MongoDbSourceConfig(io.airbyte.commons.json.Jsons.jsonNode(
+      Map.of(DATABASE_CONFIG_CONFIGURATION_KEY,
+          Map.of(
+              MongoDbDebeziumConstants.Configuration.CONNECTION_STRING_CONFIGURATION_KEY, "mongodb://host:12345/",
+              MongoDbDebeziumConstants.Configuration.DATABASE_CONFIGURATION_KEY, DATABASE))));
+
   private MongoDbCdcStateHandler mongoDbCdcStateHandler;

   @BeforeEach
   void setup() {
-    final MongoDbStateManager mongoDbStateManager = MongoDbStateManager.createStateManager(null);
+    final MongoDbStateManager mongoDbStateManager = MongoDbStateManager.createStateManager(null, CONFIG);
     mongoDbCdcStateHandler = new MongoDbCdcStateHandler(mongoDbStateManager);
   }

diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/state/MongoDbStateManagerTest.java b/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/state/MongoDbStateManagerTest.java
index 30003b914a5a..d2959ebdab9c 100644
--- a/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/state/MongoDbStateManagerTest.java
+++ b/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/state/MongoDbStateManagerTest.java
@@ -4,6 +4,7 @@
 package io.airbyte.integrations.source.mongodb.state;

+import static io.airbyte.integrations.source.mongodb.MongoConstants.DATABASE_CONFIG_CONFIGURATION_KEY;
 import static io.airbyte.integrations.source.mongodb.cdc.MongoDbDebeziumConstants.ChangeEvent.SOURCE_ORDER;
 import static io.airbyte.integrations.source.mongodb.cdc.MongoDbDebeziumConstants.ChangeEvent.SOURCE_RESUME_TOKEN;
 import static io.airbyte.integrations.source.mongodb.cdc.MongoDbDebeziumConstants.ChangeEvent.SOURCE_SECONDS;
@@ -15,7 +16,9 @@
 import com.fasterxml.jackson.databind.JsonNode;
 import io.airbyte.commons.json.Jsons;
+import io.airbyte.integrations.source.mongodb.MongoDbSourceConfig;
 import io.airbyte.integrations.source.mongodb.cdc.MongoDbCdcState;
+import io.airbyte.integrations.source.mongodb.cdc.MongoDbDebeziumConstants;
 import io.airbyte.protocol.models.v0.AirbyteGlobalState;
 import io.airbyte.protocol.models.v0.AirbyteStateMessage;
 import io.airbyte.protocol.models.v0.AirbyteStreamState;
@@ -31,6 +34,13 @@ class MongoDbStateManagerTest {
   private static final String RESUME_TOKEN = "8264BEB9F3000000012B0229296E04";
   private static final String STREAM_NAME = "test-collection";
   private static final String STREAM_NAMESPACE = "test-database";
+  private static final String DATABASE = "test-database";
+
+  final MongoDbSourceConfig CONFIG = new MongoDbSourceConfig(Jsons.jsonNode(
+      Map.of(DATABASE_CONFIG_CONFIGURATION_KEY,
+          Map.of(
+              MongoDbDebeziumConstants.Configuration.CONNECTION_STRING_CONFIGURATION_KEY, "mongodb://host:12345/",
+              MongoDbDebeziumConstants.Configuration.DATABASE_CONFIGURATION_KEY, DATABASE))));

   @Test
   void testCreationWithInitialState() {
@@ -49,7 +59,7 @@ void testCreationWithInitialState() {
     final AirbyteStateMessage airbyteStateMessage =
         new AirbyteStateMessage().withType(AirbyteStateMessage.AirbyteStateType.GLOBAL).withGlobal(airbyteGlobalState);

-    final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(Jsons.jsonNode(List.of(airbyteStateMessage)));
+    final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(Jsons.jsonNode(List.of(airbyteStateMessage)), CONFIG);
     assertNotNull(stateManager);
     assertNotNull(stateManager.getCdcState());
     Assertions.assertEquals(seconds, stateManager.getCdcState().state().get(SOURCE_SECONDS).asInt());
@@ -61,21 +71,21 @@ void testCreationWithInitialState() {

   @Test
   void testCreationWithInitialNullState() {
-    final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(null);
+    final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(null, CONFIG);
     assertNotNull(stateManager);
     assertNull(stateManager.getCdcState());
   }

   @Test
   void testCreationWithInitialEmptyState() {
-    final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(Jsons.emptyObject());
+    final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(Jsons.emptyObject(), CONFIG);
     assertNotNull(stateManager);
     assertNull(stateManager.getCdcState());
   }

   @Test
   void testCreationWithInitialEmptyListState() {
-    final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(Jsons.jsonNode(List.of()));
+    final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(Jsons.jsonNode(List.of()), CONFIG);
     assertNotNull(stateManager);
     assertNull(stateManager.getCdcState());
   }
@@ -83,12 +93,12 @@ void testCreationWithInitialEmptyListState() {
   @Test
   void testCreationWithInitialStateTooManyMessages() {
     final List<AirbyteStateMessage> stateMessages = List.of(new AirbyteStateMessage(), new AirbyteStateMessage());
-    assertThrows(IllegalStateException.class, () -> MongoDbStateManager.createStateManager(Jsons.jsonNode(stateMessages)));
+    assertThrows(IllegalStateException.class, () -> MongoDbStateManager.createStateManager(Jsons.jsonNode(stateMessages), CONFIG));
   }

   @Test
   void testUpdateCdcState() {
-    final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(null);
+    final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(null, CONFIG);
     assertNotNull(stateManager);
     assertNull(stateManager.getCdcState());

@@ -118,7 +128,7 @@ void testGeneratingAirbyteStateMessage() {
     final AirbyteStateMessage airbyteStateMessage =
         new AirbyteStateMessage().withType(AirbyteStateMessage.AirbyteStateType.GLOBAL).withGlobal(airbyteGlobalState);

-    final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(Jsons.jsonNode(List.of(airbyteStateMessage)));
+    final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(Jsons.jsonNode(List.of(airbyteStateMessage)), CONFIG);

     final AirbyteStateMessage generated = stateManager.toState();
     assertNotNull(generated);
@@ -161,7 +171,7 @@ void testReset() {
     final AirbyteStateMessage airbyteStateMessage =
         new AirbyteStateMessage().withType(AirbyteStateMessage.AirbyteStateType.GLOBAL).withGlobal(airbyteGlobalState);

-    final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(Jsons.jsonNode(List.of(airbyteStateMessage)));
+    final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(Jsons.jsonNode(List.of(airbyteStateMessage)), CONFIG);

     final MongoDbCdcState newCdcState = new MongoDbCdcState(Jsons.jsonNode(Map.of()));
     stateManager.resetState(newCdcState);
diff --git a/docs/integrations/sources/mongodb-v2.md b/docs/integrations/sources/mongodb-v2.md
index f13754901c73..7bde62a2ff67 100644
--- a/docs/integrations/sources/mongodb-v2.md
+++ b/docs/integrations/sources/mongodb-v2.md
@@ -214,7 +214,8 @@ For more information regarding configuration parameters, please see [MongoDb Doc

 | Version | Date       | Pull Request                                              | Subject                                                                                                     |
 |:--------|:-----------|:----------------------------------------------------------|:------------------------------------------------------------------------------------------------------------|
-| 1.2.15  | 2024-02-27 | [35673](https://github.com/airbytehq/airbyte/pull/35673)  | Consume user provided connection string.                                                                    |
+| 1.2.16  | 2024-03-06 | [35669](https://github.com/airbytehq/airbyte/pull/35669)  | State message will now include record count.                                                                |
+| 1.2.15  | 2024-02-27 | [35673](https://github.com/airbytehq/airbyte/pull/35673)  | Consume user provided connection string.                                                                    |
 | 1.2.14  | 2024-02-27 | [35675](https://github.com/airbytehq/airbyte/pull/35675)  | Fix invalid cdc error message.                                                                              |
 | 1.2.13  | 2024-02-22 | [35569](https://github.com/airbytehq/airbyte/pull/35569)  | Fix logging bug.                                                                                            |
 | 1.2.12  | 2024-02-21 | [35526](https://github.com/airbytehq/airbyte/pull/35526)  | Improve error handling.                                                                                     |

From 8ab77d401ef8faf94299706cbbfcfebeca17d077 Mon Sep 17 00:00:00 2001
From: Alex Birdsall
Date: Wed, 6 Mar 2024 14:59:11 -0800
Subject: [PATCH 112/172] Pass docker hub credentials to airbyte-ci's
 `bump_version` command (#35802)

---
 airbyte-ci/connectors/pipelines/README.md                    | 1 +
 .../pipelines/airbyte_ci/connectors/bump_version/commands.py | 2 ++
 airbyte-ci/connectors/pipelines/pyproject.toml               | 2 +-
 3 files changed, 4 insertions(+), 1 deletion(-)

diff --git a/airbyte-ci/connectors/pipelines/README.md b/airbyte-ci/connectors/pipelines/README.md
index eff96c3c6856..5efb54e231cb 100644
--- a/airbyte-ci/connectors/pipelines/README.md
+++ b/airbyte-ci/connectors/pipelines/README.md
@@ -644,6 +644,7 @@ E.G.: running Poe tasks on the modified internal packages of the current branch:

 | Version | PR                                                        | Description                                                                                                                  |
 | ------- | --------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- |
+| 4.5.2   | [#35802](https://github.com/airbytehq/airbyte/pull/35802) | Fix bug with connectors bump_version command                                                                                 |
 | 4.5.1   | [#35786](https://github.com/airbytehq/airbyte/pull/35786) | Declare `live_tests` as an internal poetry package.                                                                          |
 | 4.5.0   | [#35784](https://github.com/airbytehq/airbyte/pull/35784) | Format command supports kotlin                                                                                               |
 | 4.4.0   | [#35317](https://github.com/airbytehq/airbyte/pull/35317) | Augment java connector reports to include full logs and junit test results                                                  |

diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/commands.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/commands.py
index d6ddbe5360bb..17bf1241fc2e 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/commands.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/commands.py
@@ -42,6 +42,8 @@ async def bump_version(
             enable_report_auto_open=False,
             s3_build_cache_access_key_id=ctx.obj.get("s3_build_cache_access_key_id"),
             s3_build_cache_secret_key=ctx.obj.get("s3_build_cache_secret_key"),
+            docker_hub_username=ctx.obj.get("docker_hub_username"),
+            docker_hub_password=ctx.obj.get("docker_hub_password"),
         )
         for connector in ctx.obj["selected_connectors_with_modified_files"]
     ]
diff --git a/airbyte-ci/connectors/pipelines/pyproject.toml b/airbyte-ci/connectors/pipelines/pyproject.toml
index 2fbb1c46ce1d..d79ebc3ad180 100644
--- a/airbyte-ci/connectors/pipelines/pyproject.toml
+++ b/airbyte-ci/connectors/pipelines/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"

 [tool.poetry]
 name = "pipelines"
-version = "4.5.1"
+version = "4.5.2"
 description = "Packaged maintained by the connector operations team to perform CI for connectors' pipelines"
 authors = ["Airbyte <contact@airbyte.io>"]

From 4fcff4190184b493b264e3a68167cf80da42c69d Mon Sep 17 00:00:00 2001
From: Akash Kulkarni <113392464+akashkulk@users.noreply.github.com>
Date: Wed, 6 Mar 2024 17:09:44 -0800
Subject: [PATCH 113/172] [Source-postgres]: Add logging in case of multiple
 records with same LSN (#35842)

---
 airbyte-cdk/java/airbyte-cdk/README.md                | 1 +
 .../core/src/main/resources/version.properties       | 2 +-
 .../internals/DebeziumStateDecoratingIterator.java   | 9 ++++++---
 .../connectors/source-postgres/build.gradle          | 2 +-
 .../connectors/source-postgres/gradle.properties     | 4 +++-
 .../connectors/source-postgres/metadata.yaml         | 2 +-
 .../source/postgres/PostgresTestDatabase.java        | 1 +
 docs/integrations/sources/postgres.md                | 4 ++--
 8 files changed, 16 insertions(+), 9 deletions(-)

diff --git a/airbyte-cdk/java/airbyte-cdk/README.md b/airbyte-cdk/java/airbyte-cdk/README.md
index f1b80e5b4700..42432136091f 100644
--- a/airbyte-cdk/java/airbyte-cdk/README.md
+++ b/airbyte-cdk/java/airbyte-cdk/README.md
@@ -166,6 +166,7 @@ MavenLocal debugging steps:

 | Version | Date       | Pull Request                                               | Subject                                                                                          |
 |:--------|:-----------|:------------------------------------------------------------|:---------------------------------------------------------------------------------------------------|
+| 0.23.16 | 2024-03-06 | [\#35842](https://github.com/airbytehq/airbyte/pull/35842) | Improve logging in debezium processing.                                                          |
 | 0.23.15 | 2024-03-05 | [\#35827](https://github.com/airbytehq/airbyte/pull/35827) | improving the Junit interceptor.                                                                 |
 | 0.23.14 | 2024-03-05 | [\#35739](https://github.com/airbytehq/airbyte/pull/35739) | Add logging to the CDC queue size. Fix the ContainerFactory.                                     |
 | 0.23.13 | 2024-03-04 | [\#35774](https://github.com/airbytehq/airbyte/pull/35774) | minor changes to the CDK test fixtures.                                                          |
diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties
index e60bd207859a..ad61006b14c5 100644
--- a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties
+++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties
@@ -1 +1 @@
-version=0.23.15
+version=0.23.16
diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/debezium/internals/DebeziumStateDecoratingIterator.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/debezium/internals/DebeziumStateDecoratingIterator.java
index 71dfee610a3e..5d217185b43b 100644
--- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/debezium/internals/DebeziumStateDecoratingIterator.java
+++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/debezium/internals/DebeziumStateDecoratingIterator.java
@@ -156,9 +156,12 @@ protected AirbyteMessage computeNext() {
       if (checkpointOffsetToSend.size() == 1
           && changeEventIterator.hasNext()
-          && !event.isSnapshotEvent()
-          && targetPosition.isEventAheadOffset(checkpointOffsetToSend, event)) {
-        sendCheckpointMessage = true;
+          && !event.isSnapshotEvent()) {
+        if (targetPosition.isEventAheadOffset(checkpointOffsetToSend, event)) {
+          sendCheckpointMessage = true;
+        } else {
+          LOGGER.info("Encountered {} records with the same event offset", recordsLastSync);
+        }
       }
     }
     recordsLastSync++;
diff --git a/airbyte-integrations/connectors/source-postgres/build.gradle b/airbyte-integrations/connectors/source-postgres/build.gradle
index d68232e1e409..76c9a0122c8c 100644
--- a/airbyte-integrations/connectors/source-postgres/build.gradle
+++ b/airbyte-integrations/connectors/source-postgres/build.gradle
@@ -12,7 +12,7 @@ java {
 }

 airbyteJavaConnector {
-    cdkVersionRequired = '0.20.6'
+    cdkVersionRequired = '0.23.16'
     features = ['db-sources', 'datastore-postgres']
     useLocalCdk = false
 }
diff --git a/airbyte-integrations/connectors/source-postgres/gradle.properties b/airbyte-integrations/connectors/source-postgres/gradle.properties
index 8ef098d20b92..bc88ea85ebd8 100644
--- a/airbyte-integrations/connectors/source-postgres/gradle.properties
+++ b/airbyte-integrations/connectors/source-postgres/gradle.properties
@@ -1 +1,3 @@
-testExecutionConcurrency=-1
\ No newline at end of file
+testExecutionConcurrency=-1
+
+JunitMethodExecutionTimeout=5 m
\ No newline at end of file
diff --git a/airbyte-integrations/connectors/source-postgres/metadata.yaml b/airbyte-integrations/connectors/source-postgres/metadata.yaml
index 44cb753419f5..0d9f4677fa22 100644
--- a/airbyte-integrations/connectors/source-postgres/metadata.yaml
+++ b/airbyte-integrations/connectors/source-postgres/metadata.yaml
@@ -9,7 +9,7 @@ data:
   connectorSubtype: database
   connectorType: source
   definitionId: decd338e-5647-4c0b-adf4-da0e75f5a750
-  dockerImageTag: 3.3.13
+  dockerImageTag: 3.3.14
   dockerRepository: airbyte/source-postgres
   documentationUrl: https://docs.airbyte.com/integrations/sources/postgres
   githubIssueLabel: source-postgres
diff --git a/airbyte-integrations/connectors/source-postgres/src/testFixtures/java/io/airbyte/integrations/source/postgres/PostgresTestDatabase.java b/airbyte-integrations/connectors/source-postgres/src/testFixtures/java/io/airbyte/integrations/source/postgres/PostgresTestDatabase.java
index 07146c33264e..374ed7dceede 100644
--- a/airbyte-integrations/connectors/source-postgres/src/testFixtures/java/io/airbyte/integrations/source/postgres/PostgresTestDatabase.java
+++ b/airbyte-integrations/connectors/source-postgres/src/testFixtures/java/io/airbyte/integrations/source/postgres/PostgresTestDatabase.java
@@ -57,6 +57,7 @@ private ContainerModifier(String methodName) {

   }

+  @SuppressWarnings("deprecation")
   static public PostgresTestDatabase in(BaseImage baseImage, ContainerModifier... modifiers) {
     String[] methodNames = Stream.of(modifiers).map(im -> im.methodName).toList().toArray(new String[0]);
     final var container = new PostgresContainerFactory().shared(baseImage.reference, methodNames);
diff --git a/docs/integrations/sources/postgres.md b/docs/integrations/sources/postgres.md
index b302ad829718..5fd6de921fcc 100644
--- a/docs/integrations/sources/postgres.md
+++ b/docs/integrations/sources/postgres.md
@@ -292,8 +292,8 @@ According to Postgres [documentation](https://www.postgresql.org/docs/14/datatyp

 | Version | Date       | Pull Request                                             | Subject                                                                            |
 |---------|------------|-----------------------------------------------------------|--------------------------------------------------------------------------------------|
-| 3.3.13  | 2024-02-27 | [35675](https://github.com/airbytehq/airbyte/pull/35675) | Fix invalid cdc error message.                                                     |
-| 3.3.12  | 2024-02-22 | [35569](https://github.com/airbytehq/airbyte/pull/35569) | Fix logging bug.                                                                   |
+| 3.3.14  | 2024-03-06 | [35842](https://github.com/airbytehq/airbyte/pull/35842) | Add logging to understand cases with a large number of records with the same LSN. |
+| 3.3.12  | 2024-02-22 | [35569](https://github.com/airbytehq/airbyte/pull/35569) | Fix logging bug.                                                                   |
 | 3.3.11  | 2024-02-20 | [35304](https://github.com/airbytehq/airbyte/pull/35304) | Add config to throw an error on invalid CDC position and enable it by default.     |
 | 3.3.10  | 2024-02-13 | [35036](https://github.com/airbytehq/airbyte/pull/34751) | Emit analytics message for invalid CDC cursor.                                     |
 | 3.3.9   | 2024-02-13 | [35224](https://github.com/airbytehq/airbyte/pull/35224) | Adopt CDK 0.20.4                                                                   |

From e4ccffbf6e44e9cb75911aec757e0b547bd49572 Mon Sep 17 00:00:00 2001
From: Natik Gadzhi
Date: Wed, 6 Mar 2024 20:15:27 -0800
Subject: [PATCH 114/172] Docs: Tutorials formatting + from scratch connector
 tutorial cleanup (#33839)

Co-authored-by: Marcos Marx
---
 .../tutorials/building-a-java-destination.md     | 183 ++++++---
 .../tutorials/building-a-python-source.md        | 248 ++++++++----
 .../tutorials/cdk-speedrun.md                    |  81 ++--
 .../connection-checking.md                       |  36 +-
 .../creating-the-source.md                       |  14 +-
 .../declare-schema.md                            |  57 ++-
 .../cdk-tutorial-python-http/define-inputs.md    |  32 +-
 .../getting-started.md                           |  37 +-
 .../install-dependencies.md                      |  47 ++-
 .../cdk-tutorial-python-http/read-data.md        | 147 ++++---
 .../test-your-connector.md                       |  20 +-
 .../use-connector-in-airbyte.md                  |  36 +-
 .../profile-java-connector-memory.md             | 156 ++++----
 .../adding-incremental-sync.md                   |  93 +++--
 .../build-a-connector-the-hard-way.md            | 376 +++++++++++-------
 15 files changed, 1042 insertions(+), 521 deletions(-)
 rename docs/connector-development/tutorials/{ => the-hard-way}/adding-incremental-sync.md (78%)
 rename docs/connector-development/tutorials/{ => the-hard-way}/build-a-connector-the-hard-way.md (76%)

diff --git a/docs/connector-development/tutorials/building-a-java-destination.md b/docs/connector-development/tutorials/building-a-java-destination.md
index fb91b4f52c1f..d2cb3f1c0bcc 100644
--- a/docs/connector-development/tutorials/building-a-java-destination.md
+++ b/docs/connector-development/tutorials/building-a-java-destination.md
@@ -2,24 +2,26 @@

 ## Summary

-This article provides a checklist for how to create a Java destination. Each step in the checklist has a link to a more detailed explanation below.
+This article provides a checklist for how to create a Java destination. Each step in the checklist
+has a link to a more detailed explanation below.

 ## Requirements

-Docker and Java with the versions listed in the [tech stack section](../../understanding-airbyte/tech-stack.md).
+Docker and Java with the versions listed in the
+[tech stack section](../../understanding-airbyte/tech-stack.md).

 ## Checklist

 ### Creating a destination

-* Step 1: Create the destination using the template generator
-* Step 2: Build the newly generated destination
-* Step 3: Implement `spec` to define the configuration required to run the connector
-* Step 4: Implement `check` to provide a way to validate configurations provided to the connector
-* Step 5: Implement `write` to write data to the destination
-* Step 6: Set up Acceptance Tests
-* Step 7: Write unit tests or integration tests
-* Step 8: Update the docs \(in `docs/integrations/destinations/<destination-name>.md`\)
+- Step 1: Create the destination using the template generator
+- Step 2: Build the newly generated destination
+- Step 3: Implement `spec` to define the configuration required to run the connector
+- Step 4: Implement `check` to provide a way to validate configurations provided to the connector
+- Step 5: Implement `write` to write data to the destination
+- Step 6: Set up Acceptance Tests
+- Step 7: Write unit tests or integration tests
+- Step 8: Update the docs \(in `docs/integrations/destinations/<destination-name>.md`\)

 :::info

 All `./gradlew` commands must be run from the root of the airbyte project.

 :::

 :::info

-If you need help with any step of the process, feel free to submit a PR with your progress and any questions you have, or ask us on [slack](https://slack.airbyte.io).
+If you need help with any step of the process, feel free to submit a PR with your progress and any
+questions you have, or ask us on [slack](https://slack.airbyte.io).

 :::

 ### Step 1: Create the destination using the template

 Airbyte provides a code generator which bootstraps the scaffolding for our connector.

 ```bash
 $ cd airbyte-integrations/connector-templates/generator # assumes you are starting from the root of the Airbyte project.
 $ ./generate.sh
 ```

-Select the `Java Destination` template and then input the name of your connector. We'll refer to the destination as `<name>-destination` in this tutorial, but you should replace `<name>` with the actual name you used for your connector e.g: `BigQueryDestination` or `bigquery-destination`.
+Select the `Java Destination` template and then input the name of your connector. We'll refer to the
+destination as `<name>-destination` in this tutorial, but you should replace `<name>` with the
+actual name you used for your connector e.g: `BigQueryDestination` or `bigquery-destination`.

 ### Step 2: Build the newly generated destination

 You can build the destination by running:

 ```text
 ./gradlew :airbyte-integrations:connectors:destination-<name>:build
 ```

-This compiles the Java code for your destination and builds a Docker image with the connector. At this point, we haven't implemented anything of value yet, but once we do, you'll use this command to compile your code and Docker image.
+This compiles the Java code for your destination and builds a Docker image with the connector. At
+this point, we haven't implemented anything of value yet, but once we do, you'll use this command to
+compile your code and Docker image.

 :::info

-Airbyte uses Gradle to manage Java dependencies. To add dependencies for your connector, manage them in the `build.gradle` file inside your connector's directory.
+Airbyte uses Gradle to manage Java dependencies. To add dependencies for your connector, manage them
+in the `build.gradle` file inside your connector's directory.

 :::

 #### Iterating on your implementation

 We recommend the following ways of iterating on your connector as you're making changes:

-* Test-driven development \(TDD\) in Java
-* Test-driven development \(TDD\) using Airbyte's Acceptance Tests
-* Directly running the docker image
+- Test-driven development \(TDD\) in Java
+- Test-driven development \(TDD\) using Airbyte's Acceptance Tests
+- Directly running the docker image

 #### Test-driven development in Java

-This should feel like a standard flow for a Java developer: you make some code changes then run java tests against them. You can do this directly in your IDE, but you can also run all unit tests via Gradle by running the command to build the connector:
+This should feel like a standard flow for a Java developer: you make some code changes then run java
+tests against them. You can do this directly in your IDE, but you can also run all unit tests via
+Gradle by running the command to build the connector:

 ```text
 ./gradlew :airbyte-integrations:connectors:destination-<name>:build
 ```

-This will build the code and run any unit tests. This approach is great when you are testing local behaviors and writing unit tests.
+This will build the code and run any unit tests. This approach is great when you are testing local
+behaviors and writing unit tests.

 #### TDD using acceptance tests & integration tests

-Airbyte provides a standard test suite \(dubbed "Acceptance Tests"\) that runs against every destination connector. They are "free" baseline tests to ensure the basic functionality of the destination. When developing a connector, you can simply run the tests between each change and use the feedback to guide your development.
+Airbyte provides a standard test suite \(dubbed "Acceptance Tests"\) that runs against every
+destination connector. They are "free" baseline tests to ensure the basic functionality of the
+destination. When developing a connector, you can simply run the tests between each change and use
+the feedback to guide your development.

-If you want to try out this approach, check out Step 6 which describes what you need to do to set up the acceptance Tests for your destination.
+If you want to try out this approach, check out Step 6 which describes what you need to do to set up
+the acceptance Tests for your destination.

-The nice thing about this approach is that you are running your destination exactly as Airbyte will run it in the CI. The downside is that the tests do not run very quickly. As such, we recommend this iteration approach only once you've implemented most of your connector and are in the finishing stages of implementation. Note that Acceptance Tests are required for every connector supported by Airbyte, so you should make sure to run them a couple of times while iterating to make sure your connector is compatible with Airbyte.
+The nice thing about this approach is that you are running your destination exactly as Airbyte will
+run it in the CI. The downside is that the tests do not run very quickly. As such, we recommend this
+iteration approach only once you've implemented most of your connector and are in the finishing
+stages of implementation. Note that Acceptance Tests are required for every connector supported by
+Airbyte, so you should make sure to run them a couple of times while iterating to make sure your
+connector is compatible with Airbyte.

 #### Directly running the destination using Docker

-If you want to run your destination exactly as it will be run by Airbyte \(i.e. within a docker container\), you can use the following commands from the connector module directory \(`airbyte-integrations/connectors/destination-<name>`\):
+If you want to run your destination exactly as it will be run by Airbyte \(i.e. within a docker
+container\), you can use the following commands from the connector module directory
+\(`airbyte-integrations/connectors/destination-<name>`\):

 ```text
 # First build the container
 ./gradlew :airbyte-integrations:connectors:destination-<name>:build

 # Then use the following commands to run it
-# Runs the "spec" command, used to find out what configurations are needed to run a connector 
+# Runs the "spec" command, used to find out what configurations are needed to run a connector
 docker run --rm airbyte/destination-<name>:dev spec

 # Runs the "check" command, used to validate if the input configurations are valid
 docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-<name>:dev check --config /secrets/config.json

 # Runs the "write" command which reads records from stdin and writes them to the underlying destination
 docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/sample_files:/sample_files airbyte/destination-<name>:dev write --config /secrets/config.json --catalog /sample_files/configured_catalog.json
 ```

-Note: Each time you make a change to your implementation you need to re-build the connector image via `./gradlew :airbyte-integrations:connectors:destination-<name>:build`.
+Note: Each time you make a change to your implementation you need to re-build the connector image
+via `./gradlew :airbyte-integrations:connectors:destination-<name>:build`.

-The nice thing about this approach is that you are running your destination exactly as it will be run by Airbyte. The tradeoff is that iteration is slightly slower, because you need to re-build the connector between each change.
+The nice thing about this approach is that you are running your destination exactly as it will be
+run by Airbyte. The tradeoff is that iteration is slightly slower, because you need to re-build the
+connector between each change.

 #### Handling Exceptions

-In order to best propagate user-friendly error messages and log error information to the platform, the [Airbyte Protocol](../../understanding-airbyte/airbyte-protocol.md#The Airbyte Protocol) implements AirbyteTraceMessage.
+In order to best propagate user-friendly error messages and log error information to the platform,
+the [Airbyte Protocol](../../understanding-airbyte/airbyte-protocol.md#The Airbyte Protocol)
+implements AirbyteTraceMessage.

-We recommend using AirbyteTraceMessages for known errors, as in these cases you can likely offer the user a helpful message as to what went wrong and suggest how they can resolve it.
+We recommend using AirbyteTraceMessages for known errors, as in these cases you can likely offer the
+user a helpful message as to what went wrong and suggest how they can resolve it.

-Airbyte provides a static utility class, `io.airbyte.integrations.base.AirbyteTraceMessageUtility`, to give you a clear and straight-forward way to emit these AirbyteTraceMessages. Example usage:
+Airbyte provides a static utility class, `io.airbyte.integrations.base.AirbyteTraceMessageUtility`,
+to give you a clear and straight-forward way to emit these AirbyteTraceMessages. Example usage:

 ```java
 try {
   // some connector code responsible for doing X
-}
+}
 catch (ExceptionIndicatingIncorrectCredentials credErr) {
   AirbyteTraceMessageUtility.emitConfigErrorTrace(
     credErr, "Connector failed due to incorrect credentials while doing X. Please check your connection is using valid credentials.")
   throw credErr
-}
+}
 catch (ExceptionIndicatingKnownErrorY knownErr) {
   AirbyteTraceMessageUtility.emitSystemErrorTrace(
     knownErr, "Connector failed because of reason Y while doing X. Please check/do/make ... to resolve this.")
   throw knownErr
-}
+}
 catch (Exception e) {
   AirbyteTraceMessageUtility.emitSystemErrorTrace(
     e, "Connector failed while doing X. Possible reasons for this could be ...")
-  throw e
+  throw e
 }
 ```

 Note the two different error trace methods.

-- Where possible `emitConfigErrorTrace` should be used when we are certain the issue arises from a problem with the user's input configuration, e.g. invalid credentials.
+
+- Where possible `emitConfigErrorTrace` should be used when we are certain the issue arises from a
+  problem with the user's input configuration, e.g. invalid credentials.
 - For everything else or if unsure, use `emitSystemErrorTrace`.

 ### Step 3: Implement `spec`

-Each destination contains a specification written in JsonSchema that describes its inputs. Defining the specification is a good place to start when developing your destination. Check out the documentation [here](https://json-schema.org/) to learn the syntax. Here's [an example](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/destination-postgres/src/main/resources/spec.json) of what the `spec.json` looks like for the postgres destination.
+Each destination contains a specification written in JsonSchema that describes its inputs. Defining
+the specification is a good place to start when developing your destination. Check out the
+documentation [here](https://json-schema.org/) to learn the syntax. Here's
+[an example](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/destination-postgres/src/main/resources/spec.json)
+of what the `spec.json` looks like for the postgres destination.

-Your generated template should have the spec file in `airbyte-integrations/connectors/destination-<name>/src/main/resources/spec.json`. The generated connector will take care of reading this file and converting it to the correct output. Edit it and you should be done with this step.
+Your generated template should have the spec file in
+`airbyte-integrations/connectors/destination-<name>/src/main/resources/spec.json`. The generated
+connector will take care of reading this file and converting it to the correct output. Edit it and
+you should be done with this step.

-For more details on what the spec is, you can read about the Airbyte Protocol [here](../../understanding-airbyte/airbyte-protocol.md).
+For more details on what the spec is, you can read about the Airbyte Protocol
+[here](../../understanding-airbyte/airbyte-protocol.md).

 See the `spec` operation in action:

 ```bash
-# First build the connector 
+# First build the connector
 ./gradlew :airbyte-integrations:connectors:destination-<name>:build

 # Run the spec operation
 docker run --rm airbyte/destination-<name>:dev spec
 ```

 ### Step 4: Implement `check`

-The check operation accepts a JSON object conforming to the `spec.json`. In other words if the `spec.json` said that the destination requires a `username` and `password` the config object might be `{ "username": "airbyte", "password": "password123" }`. It returns a json object that reports, given the credentials in the config, whether we were able to connect to the destination.
+The check operation accepts a JSON object conforming to the `spec.json`. In other words if the
+`spec.json` said that the destination requires a `username` and `password` the config object might
+be `{ "username": "airbyte", "password": "password123" }`. It returns a json object that reports,
+given the credentials in the config, whether we were able to connect to the destination.

-While developing, we recommend storing any credentials in `secrets/config.json`. Any `secrets` directory in the Airbyte repo is gitignored by default.
+While developing, we recommend storing any credentials in `secrets/config.json`. Any `secrets`
+directory in the Airbyte repo is gitignored by default.

-Implement the `check` method in the generated file `Destination.java`. Here's an [example implementation](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryDestination.java#L94) from the BigQuery destination.
+Implement the `check` method in the generated file `Destination.java`. Here's an
+[example implementation](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryDestination.java#L94)
+from the BigQuery destination.

 Verify that the method is working by placing your config in `secrets/config.json` then running:

 ```bash
 # First build the connector
 ./gradlew :airbyte-integrations:connectors:destination-<name>:build

 # Run the check method
 docker run -v $(pwd)/secrets:/secrets --rm airbyte/destination-<name>:dev check --config /secrets/config.json
 ```

 ### Step 5: Implement `write`

-The `write` operation is the main workhorse of a destination connector: it reads input data from the source and writes it to the underlying destination. It takes as input the config file used to run the connector as well as the configured catalog: the file used to describe the schema of the incoming data and how it should be written to the destination. Its "output" is two things:
+The `write` operation is the main workhorse of a destination connector: it reads input data from the
+source and writes it to the underlying destination. It takes as input the config file used to run
+the connector as well as the configured catalog: the file used to describe the schema of the
+incoming data and how it should be written to the destination. Its "output" is two things:

 1. Data written to the underlying destination
-2. `AirbyteMessage`s of type `AirbyteStateMessage`, written to stdout to indicate which records have been written so far during a sync. It's important to output these messages when possible in order to avoid re-extracting messages from the source. See the [write operation protocol reference](https://docs.airbyte.com/understanding-airbyte/airbyte-protocol#write) for more information.
+2. `AirbyteMessage`s of type `AirbyteStateMessage`, written to stdout to indicate which records have
+   been written so far during a sync. It's important to output these messages when possible in order
+   to avoid re-extracting messages from the source. See the
+   [write operation protocol reference](https://docs.airbyte.com/understanding-airbyte/airbyte-protocol#write)
+   for more information.

-To implement the `write` Airbyte operation, implement the `getConsumer` method in your generated `Destination.java` file. Here are some example implementations from different destination conectors:
+To implement the `write` Airbyte operation, implement the `getConsumer` method in your generated
+`Destination.java` file. Here are some example implementations from different destination
+conectors:

-* [BigQuery](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryDestination.java#L188)
-* [Google Pubsub](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/destination-pubsub/src/main/java/io/airbyte/integrations/destination/pubsub/PubsubDestination.java#L98)
-* [Local CSV](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/destination-csv/src/main/java/io/airbyte/integrations/destination/csv/CsvDestination.java#L90)
-* [Postgres](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresDestination.java)
+- [BigQuery](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryDestination.java#L188)
+- [Google Pubsub](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/destination-pubsub/src/main/java/io/airbyte/integrations/destination/pubsub/PubsubDestination.java#L98)
+- [Local CSV](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/destination-csv/src/main/java/io/airbyte/integrations/destination/csv/CsvDestination.java#L90)
+- [Postgres](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresDestination.java)

 :::info

-The Postgres destination leverages the `AbstractJdbcDestination` superclass which makes it extremely easy to create a destination for a database or data warehouse if it has a compatible JDBC driver. If the destination you are implementing has a JDBC driver, be sure to check out `AbstractJdbcDestination`.
+The Postgres destination leverages the `AbstractJdbcDestination` superclass which makes it extremely
+easy to create a destination for a database or data warehouse if it has a compatible JDBC driver. If
+the destination you are implementing has a JDBC driver, be sure to check out
+`AbstractJdbcDestination`.

 :::

-For a brief overview on the Airbyte catalog check out [the Beginner's Guide to the Airbyte Catalog](../../understanding-airbyte/beginners-guide-to-catalog.md).
+For a brief overview on the Airbyte catalog check out
+[the Beginner's Guide to the Airbyte Catalog](../../understanding-airbyte/beginners-guide-to-catalog.md).

 ### Step 6: Set up Acceptance Tests

-The Acceptance Tests are a set of tests that run against all destinations. These tests are run in the Airbyte CI to prevent regressions and verify a baseline of functionality. The test cases are contained and documented in the [following file](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/bases/standard-destination-test/src/main/java/io/airbyte/integrations/standardtest/destination/DestinationAcceptanceTest.java).
+The Acceptance Tests are a set of tests that run against all destinations. These tests are run in
+the Airbyte CI to prevent regressions and verify a baseline of functionality. The test cases are
+contained and documented in the
+[following file](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/bases/standard-destination-test/src/main/java/io/airbyte/integrations/standardtest/destination/DestinationAcceptanceTest.java).

-To setup acceptance Tests for your connector, follow the `TODO`s in the generated file `DestinationAcceptanceTest.java`. Once setup, you can run the tests using `./gradlew :airbyte-integrations:connectors:destination-<name>:integrationTest`. Make sure to run this command from the Airbyte repository root.
+To setup acceptance Tests for your connector, follow the `TODO`s in the generated file
+`DestinationAcceptanceTest.java`. Once setup, you can run the tests using
+`./gradlew :airbyte-integrations:connectors:destination-<name>:integrationTest`. Make sure to run
+this command from the Airbyte repository root.

 ### Step 7: Write unit tests and/or integration tests

-The Acceptance Tests are meant to cover the basic functionality of a destination. Think of it as the bare minimum required for us to add a destination to Airbyte. You should probably add some unit testing or custom integration testing in case you need to test additional functionality of your destination.
+The Acceptance Tests are meant to cover the basic functionality of a destination. Think of it as the
+bare minimum required for us to add a destination to Airbyte. You should probably add some unit
+testing or custom integration testing in case you need to test additional functionality of your
+destination.

 #### Step 8: Update the docs

-Each connector has its own documentation page. By convention, that page should have the following path: in `docs/integrations/destinations/<destination-name>.md`. For the documentation to get packaged with the docs, make sure to add a link to it in `docs/SUMMARY.md`. You can pattern match doing that from existing connectors.
+Each connector has its own documentation page. By convention, that page should have the following
+path: in `docs/integrations/destinations/<destination-name>.md`. For the documentation to get
+packaged with the docs, make sure to add a link to it in `docs/SUMMARY.md`. You can pattern match
+doing that from existing connectors.
 ## Wrapping up

-Well done on making it this far! If you'd like your connector to ship with Airbyte by default, create a PR against the Airbyte repo and we'll work with you to get it across the finish line.
-
+Well done on making it this far! If you'd like your connector to ship with Airbyte by default,
+create a PR against the Airbyte repo and we'll work with you to get it across the finish line.
diff --git a/docs/connector-development/tutorials/building-a-python-source.md b/docs/connector-development/tutorials/building-a-python-source.md
index 49a12872363b..e83aeec9d0ac 100644
--- a/docs/connector-development/tutorials/building-a-python-source.md
+++ b/docs/connector-development/tutorials/building-a-python-source.md
@@ -2,15 +2,20 @@

 ## Summary

-This article provides a checklist for how to create a python source. Each step in the checklist has a link to a more detailed explanation below.
+This article provides a checklist for how to create a python source. Each step in the checklist has
+a link to a more detailed explanation below.

 ## Requirements

-Docker, Python, and Java with the versions listed in the [tech stack section](../../understanding-airbyte/tech-stack.md).
+Docker, Python, and Java with the versions listed in the
+[tech stack section](../../understanding-airbyte/tech-stack.md).

 :::info

-All the commands below assume that `python` points to a version of python >3.7. On some systems, `python` points to a Python2 installation and `python3` points to Python3. If this is the case on your machine, substitute all `python` commands in this guide with `python3` . Otherwise, make sure to install Python 3 before beginning.
+All the commands below assume that `python` points to a version of python >3.7. On some systems,
+`python` points to a Python2 installation and `python3` points to Python3. If this is the case on
+your machine, substitute all `python` commands in this guide with `python3` . Otherwise, make sure
+to install Python 3 before beginning.

 :::

@@ -18,18 +23,21 @@ All the commands below assume that `python` points to a version of python >3.

 ### Creating a Source

-* Step 1: Create the source using template
-* Step 2: Build the newly generated source
-* Step 3: Set up your Airbyte development environment
-* Step 4: Implement `spec` \(and define the specification for the source `airbyte-integrations/connectors/source-<source-name>/spec.yaml`\)
-* Step 5: Implement `check`
-* Step 6: Implement `discover`
-* Step 7: Implement `read`
-* Step 8: Set up Connector Acceptance Tests
-* Step 9: Write unit tests or integration tests
-* Step 10: Update the `README.md` \(If API credentials are required to run the integration, please document how they can be obtained or link to a how-to guide.\)
-* Step 11: Update the `metadata.yaml` file with accurate information about your connector. These metadata will be used to add the connector to Airbyte's connector registry.
-* Step 12: Add docs \(in `docs/integrations/sources/<source-name>.md`\)
+- Step 1: Create the source using template
+- Step 2: Build the newly generated source
+- Step 3: Set up your Airbyte development environment
+- Step 4: Implement `spec` \(and define the specification for the source
+  `airbyte-integrations/connectors/source-<source-name>/spec.yaml`\)
+- Step 5: Implement `check`
+- Step 6: Implement `discover`
+- Step 7: Implement `read`
+- Step 8: Set up Connector Acceptance Tests
+- Step 9: Write unit tests or integration tests
+- Step 10: Update the `README.md` \(If API credentials are required to run the integration, please
+  document how they can be obtained or link to a how-to guide.\)
+- Step 11: Update the `metadata.yaml` file with accurate information about your connector. These
+  metadata will be used to add the connector to Airbyte's connector registry.
+- Step 12: Add docs \(in `docs/integrations/sources/<source-name>.md`\)

 :::info
 Each step of the Creating a Source checklist is explained in more detail below.
 :::

 :::info
 All `./gradlew` commands must be run from the root of the airbyte project.
 :::

 ### Submitting a Source to Airbyte

-* If you need help with any step of the process, feel free to submit a PR with your progress and any questions you have.
-* Submit a PR.
-* To run integration tests, Airbyte needs access to a test account/environment. Coordinate with an Airbyte engineer \(via the PR\) to add test credentials so that we can run tests for the integration in the CI. \(We will create our own test account once you let us know what source we need to create it for.\)
-* Once the config is stored in Github Secrets, edit `.github/workflows/test-command.yml` and `.github/workflows/publish-command.yml` to inject the config into the build environment.
-* Edit the `airbyte/tools/bin/ci_credentials.sh` script to pull the script from the build environment and write it to `secrets/config.json` during the build.
+- If you need help with any step of the process, feel free to submit a PR with your progress and any
+  questions you have.
+- Submit a PR.
+- To run integration tests, Airbyte needs access to a test account/environment. Coordinate with an
+  Airbyte engineer \(via the PR\) to add test credentials so that we can run tests for the
+  integration in the CI. \(We will create our own test account once you let us know what source we
+  need to create it for.\)
+- Once the config is stored in Github Secrets, edit `.github/workflows/test-command.yml` and
+  `.github/workflows/publish-command.yml` to inject the config into the build environment.
+- Edit the `airbyte/tools/bin/ci_credentials.sh` script to pull the script from the build
+  environment and write it to `secrets/config.json` during the build.

 :::info
-If you have a question about a step the Submitting a Source to Airbyte checklist include it in your PR or ask it on [#help-connector-development channel on Slack](https://airbytehq.slack.com/archives/C027KKE4BCZ).
+
+If you have a question about a step the Submitting a Source to Airbyte checklist include it
+in your PR or ask it on
+[#help-connector-development channel on Slack](https://airbytehq.slack.com/archives/C027KKE4BCZ).
+
 :::

 ## Explaining Each Step

 ### Step 1: Create the template Source

 Airbyte provides a code generator which bootstraps the scaffolding for our connector.

 ```bash
 $ cd airbyte-integrations/connector-templates/generator # assumes you are starting from the root of the Airbyte project.
 $ ./generate.sh
 ```

-Select the `python` template and then input the name of your connector. For this walk through we will refer to our source as `example-python`
+Select the `python` template and then input the name of your connector. For this walk through we
+will refer to our source as `example-python`
For this walk through we +will refer to our source as `example-python` ### Step 2: Install the newly generated source @@ -73,40 +92,58 @@ cd airbyte-integrations/connectors/source- poetry install ``` -This step sets up the initial python environment. - ### Step 3: Set up your Airbyte development environment -The generator creates a file `source_/source.py`. This will be where you implement the logic for your source. The templated `source.py` contains extensive comments explaining each method that needs to be implemented. Briefly here is an overview of each of these methods. +The generator creates a file `source_/source.py`. This will be where you implement the +logic for your source. The templated `source.py` contains extensive comments explaining each method +that needs to be implemented. Briefly here is an overview of each of these methods. 1. `spec`: declares the user-provided credentials or configuration needed to run the connector -2. `check`: tests if with the user-provided configuration the connector can connect with the underlying data source. +2. `check`: tests if with the user-provided configuration the connector can connect with the + underlying data source. 3. `discover`: declares the different streams of data that this connector can output 4. `read`: reads data from the underlying data source \(The stock ticker API\) #### Dependencies -Python dependencies for your source should be declared in `airbyte-integrations/connectors/source-/setup.py` in the `install_requires` field. You will notice that a couple of Airbyte dependencies are already declared there. Do not remove these; they give your source access to the helper interface that is provided by the generator. +Python dependencies for your source should be declared in +`airbyte-integrations/connectors/source-/setup.py` in the `install_requires` field. You +will notice that a couple of Airbyte dependencies are already declared there. Do not remove these; +they give your source access to the helper interface that is provided by the generator. -You may notice that there is a `requirements.txt` in your source's directory as well. Do not touch this. It is autogenerated and used to provide Airbyte dependencies. All your dependencies should be declared in `setup.py`. +You may notice that there is a `requirements.txt` in your source's directory as well. Do not touch +this. It is autogenerated and used to provide Airbyte dependencies. All your dependencies should be +declared in `setup.py`. #### Development Environment -The commands we ran above created a virtual environment for your source. If you want your IDE to auto complete and resolve dependencies properly, point it at the virtual env `airbyte-integrations/connectors/source-/.venv`. Also anytime you change the dependencies in the `setup.py` make sure to re-run the build command. The build system will handle installing all dependencies in the `setup.py` into the virtual environment. +The commands we ran above created a virtual environment for your source. If you want your IDE to +auto complete and resolve dependencies properly, point it at the virtual env +`airbyte-integrations/connectors/source-/.venv`. Also anytime you change the +dependencies in the `setup.py` make sure to re-run the build command. The build system will handle +installing all dependencies in the `setup.py` into the virtual environment. -Pretty much all it takes to create a source is to implement the `Source` interface. 
 #### Dependencies

-Python dependencies for your source should be declared in `airbyte-integrations/connectors/source-<source-name>/setup.py` in the `install_requires` field. You will notice that a couple of Airbyte dependencies are already declared there. Do not remove these; they give your source access to the helper interface that is provided by the generator.
+Python dependencies for your source should be declared in
+`airbyte-integrations/connectors/source-<source-name>/setup.py` in the `install_requires` field. You
+will notice that a couple of Airbyte dependencies are already declared there. Do not remove these;
+they give your source access to the helper interface that is provided by the generator.

-You may notice that there is a `requirements.txt` in your source's directory as well. Do not touch this. It is autogenerated and used to provide Airbyte dependencies. All your dependencies should be declared in `setup.py`.
+You may notice that there is a `requirements.txt` in your source's directory as well. Do not touch
+this. It is autogenerated and used to provide Airbyte dependencies. All your dependencies should be
+declared in `setup.py`.

 #### Development Environment

-The commands we ran above created a virtual environment for your source. If you want your IDE to auto complete and resolve dependencies properly, point it at the virtual env `airbyte-integrations/connectors/source-<source-name>/.venv`. Also anytime you change the dependencies in the `setup.py` make sure to re-run the build command. The build system will handle installing all dependencies in the `setup.py` into the virtual environment.
+The commands we ran above created a virtual environment for your source. If you want your IDE to
+auto complete and resolve dependencies properly, point it at the virtual env
+`airbyte-integrations/connectors/source-<source-name>/.venv`. Also anytime you change the
+dependencies in the `setup.py` make sure to re-run the build command. The build system will handle
+installing all dependencies in the `setup.py` into the virtual environment.

-Pretty much all it takes to create a source is to implement the `Source` interface. The template fills in a lot of information for you and has extensive docstrings describing what you need to do to implement each method. The next 4 steps are just implementing that interface.
+Pretty much all it takes to create a source is to implement the `Source` interface. The template
+fills in a lot of information for you and has extensive docstrings describing what you need to do to
+implement each method. The next 4 steps are just implementing that interface.

 :::info

-All logging should be done through the `logger` object passed into each method. Otherwise, logs will not be shown in the Airbyte UI.
+All logging should be done through the `logger` object passed into each method. Otherwise,
+logs will not be shown in the Airbyte UI.

 :::

 #### Iterating on your implementation

-Everyone develops differently but here are 3 ways that we recommend iterating on a source. Consider using whichever one matches your style.
+Everyone develops differently but here are 3 ways that we recommend iterating on a source. Consider
+using whichever one matches your style.

 **Run the source using python**

-You'll notice in your source's directory that there is a python file called `main.py`. This file exists as convenience for development. You can call it from within the virtual environment mentioned above `. ./.venv/bin/activate` to test out that your source works.
+You'll notice in your source's directory that there is a python file called `main.py`. This file
+exists as convenience for development. You can call it from within the virtual environment mentioned
+above `. ./.venv/bin/activate` to test out that your source works.

 ```bash
 # from airbyte-integrations/connectors/source-<source-name>
 poetry run source-<source-name> spec
 poetry run source-<source-name> check --config secrets/config.json
 poetry run source-<source-name> discover --config secrets/config.json
 poetry run source-<source-name> read --config secrets/config.json --catalog sample_files/configured_catalog.json
 ```

-The nice thing about this approach is that you can iterate completely within in python. The downside is that you are not quite running your source as it will actually be run by Airbyte. Specifically you're not running it from within the docker container that will house it.
+The nice thing about this approach is that you can iterate completely within in python. The downside
+is that you are not quite running your source as it will actually be run by Airbyte. Specifically
+you're not running it from within the docker container that will house it.
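For reference, the `main.py` entrypoint mentioned above is typically just a thin wrapper around the CDK's `launch` helper. A sketch of what it contains, using the `SourceExamplePython` name from this walkthrough (your generated file may differ slightly):

```python
# main.py (sketch): hands spec/check/discover/read and their CLI arguments
# straight to the Airbyte CDK entrypoint.
import sys

from airbyte_cdk.entrypoint import launch
from source_example_python import SourceExamplePython

if __name__ == "__main__":
    source = SourceExamplePython()
    launch(source, sys.argv[1:])
```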
It will not use a Dockerfile but will build the connector image from +our +[base image](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/base_images/README.md) +and use our internal build logic to build an image from your Python connector code. Running `airbyte-ci connectors --name source- build` will build your connector image. -Once the command is done, you will find your connector image in your local docker host: `airbyte/source-:dev`. - - +Once the command is done, you will find your connector image in your local docker host: +`airbyte/source-:dev`. **Option B: Building the docker image with a Dockerfile** -If you don't want to rely on `airbyte-ci` to build your connector, you can build the docker image using your own Dockerfile. This method is not preferred, and is not supported for certified connectors. +If you don't want to rely on `airbyte-ci` to build your connector, you can build the docker image +using your own Dockerfile. This method is not preferred, and is not supported for certified +connectors. -Create a `Dockerfile` in the root of your connector directory. The `Dockerfile` should look something like this: +Create a `Dockerfile` in the root of your connector directory. The `Dockerfile` should look +something like this: ```Dockerfile @@ -156,6 +201,7 @@ RUN pip install ./airbyte/integration_code Please use this as an example. This is not optimized. Build your image: + ```bash docker build . -t airbyte/source-example-python:dev ``` @@ -170,20 +216,28 @@ docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/sample_files:/sample_files ``` :::info -Each time you make a change to your implementation you need to re-build the connector image. This ensures the new python code is added into the docker container. + +Each time you make a change to your implementation you need to re-build the connector image. +This ensures the new python code is added into the docker container. + ::: -The nice thing about this approach is that you are running your source exactly as it will be run by Airbyte. The tradeoff is that iteration is slightly slower, because you need to re-build the connector between each change. +The nice thing about this approach is that you are running your source exactly as it will be run by +Airbyte. The tradeoff is that iteration is slightly slower, because you need to re-build the +connector between each change. **Detailed Debug Messages** -During development of your connector, you can enable the printing of detailed debug information during a sync by specifying the `--debug` flag. This will allow you to get a better picture of what is happening during each step of your sync. +During development of your connector, you can enable the printing of detailed debug information +during a sync by specifying the `--debug` flag. This will allow you to get a better picture of what +is happening during each step of your sync. ```bash poetry run source- read --config secrets/config.json --catalog sample_files/configured_catalog.json --debug ``` -In addition to the preset CDK debug statements, you can also emit custom debug information from your connector by introducing your own debug statements: +In addition to the preset CDK debug statements, you can also emit custom debug information from your +connector by introducing your own debug statements: ```python self.logger.debug( @@ -197,50 +251,87 @@ self.logger.debug( **TDD using acceptance tests & integration tests** -Airbyte provides an acceptance test suite that is run against every source. 
The objective of these tests is to provide some "free" tests that can sanity check that the basic functionality of the source works. One approach to developing your connector is to simply run the tests between each change and use the feedback from them to guide your development. +Airbyte provides an acceptance test suite that is run against every source. The objective of these +tests is to provide some "free" tests that can sanity check that the basic functionality of the +source works. One approach to developing your connector is to simply run the tests between each +change and use the feedback from them to guide your development. -If you want to try out this approach, check out Step 8 which describes what you need to do to set up the standard tests for your source. +If you want to try out this approach, check out Step 8, which describes what you need to do to set up +the standard tests for your source. -The nice thing about this approach is that you are running your source exactly as Airbyte will run it in the CI. The downside is that the tests do not run very quickly. +The nice thing about this approach is that you are running your source exactly as Airbyte will run +it in the CI. The downside is that the tests do not run very quickly. ### Step 4: Implement `spec` -Each source contains a specification that describes what inputs it needs in order for it to pull data. This file can be found in `airbyte-integrations/connectors/source-/spec.yaml`. This is a good place to start when developing your source. Using JsonSchema define what the inputs are \(e.g. username and password\). Here's [an example](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-stripe/source_stripe/spec.yaml) of what the `spec.yaml` looks like for the stripe source. +Each source contains a specification that describes what inputs it needs in order for it to pull +data. This file can be found in `airbyte-integrations/connectors/source-/spec.yaml`. +This is a good place to start when developing your source. Using JsonSchema, define what the inputs +are \(e.g. username and password\). Here's +[an example](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-stripe/source_stripe/spec.yaml) +of what the `spec.yaml` looks like for the Stripe source. -For more details on what the spec is, you can read about the Airbyte Protocol [here](../../understanding-airbyte/airbyte-protocol.md). +For more details on what the spec is, you can read about the Airbyte Protocol +[here](../../understanding-airbyte/airbyte-protocol.md). -The generated code that Airbyte provides, handles implementing the `spec` method for you. It assumes that there will be a file called `spec.yaml` in the same directory as `source.py`. If you have declared the necessary JsonSchema in `spec.yaml` you should be done with this step. +The generated code that Airbyte provides handles implementing the `spec` method for you. It assumes +that there will be a file called `spec.yaml` in the same directory as `source.py`. If you have +declared the necessary JsonSchema in `spec.yaml`, you should be done with this step. ### Step 5: Implement `check` -As described in the template code, this method takes in a json object called config that has the values described in the `spec.yaml` filled in. In other words if the `spec.yaml` said that the source requires a `username` and `password` the config object might be `{ "username": "airbyte", "password": "password123" }`.
It returns a json object that reports, given the credentials in the config, whether we were able to connect to the source. For example, with the given credentials could the source connect to the database server. +As described in the template code, this method takes in a json object called config that has the +values described in the `spec.yaml` filled in. In other words, if the `spec.yaml` said that the +source requires a `username` and `password`, the config object might be +`{ "username": "airbyte", "password": "password123" }`. It returns a json object that reports, given +the credentials in the config, whether we were able to connect to the source. For example: with the +given credentials, could the source connect to the database server? -While developing, we recommend storing this object in `secrets/config.json`. The `secrets` directory is gitignored by default. +While developing, we recommend storing this object in `secrets/config.json`. The `secrets` directory +is gitignored by default. ### Step 6: Implement `discover` -As described in the template code, this method takes in the same config object as `check`. It then returns a json object called a `catalog` that describes what data is available and metadata on what options are available for how to replicate it. +As described in the template code, this method takes in the same config object as `check`. It then +returns a json object called a `catalog` that describes what data is available and metadata on what +options are available for how to replicate it. -For a brief overview on the catalog check out [Beginner's Guide to the Airbyte Catalog](../../understanding-airbyte/beginners-guide-to-catalog.md). +For a brief overview of the catalog, check out +[Beginner's Guide to the Airbyte Catalog](../../understanding-airbyte/beginners-guide-to-catalog.md). ### Step 7: Implement `read` -As described in the template code, this method takes in the same config object as the previous methods. It also takes in a "configured catalog". This object wraps the catalog emitted by the `discover` step and includes configuration on how the data should be replicated. For a brief overview on the configured catalog check out [Beginner's Guide to the Airbyte Catalog](../../understanding-airbyte/beginners-guide-to-catalog.md). It then returns a generator which returns each record in the stream. +As described in the template code, this method takes in the same config object as the previous +methods. It also takes in a "configured catalog". This object wraps the catalog emitted by the +`discover` step and includes configuration on how the data should be replicated. For a brief +overview of the configured catalog, check out +[Beginner's Guide to the Airbyte Catalog](../../understanding-airbyte/beginners-guide-to-catalog.md). +It then returns a generator which yields each record in the stream. ### Step 8: Set up Connector Acceptance Tests (CATs) -The Connector Acceptance Tests are a set of tests that run against all sources. These tests are run in the Airbyte CI to prevent regressions. They also can help you sanity check that your source works as expected. +The Connector Acceptance Tests are a set of tests that run against all sources. These tests are run +in the Airbyte CI to prevent regressions. They also can help you sanity check that your source works +as expected.
The following [article](../testing-connectors/connector-acceptance-tests-reference.md) +explains Connector Acceptance Tests and how to run them. You can run the tests using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): `airbyte-ci connectors --name source- test --only-step=acceptance` :::info -In some rare cases we make exceptions and allow a source to not need to pass all the standard tests. If for some reason you think your source cannot reasonably pass one of the tests cases, reach out to us on github or slack, and we can determine whether there's a change we can make so that the test will pass or if we should skip that test for your source. + +In some rare cases, we make exceptions and allow a source to not need to pass all the +standard tests. If for some reason you think your source cannot reasonably pass one of the test +cases, reach out to us on GitHub or Slack, and we can determine whether there's a change we can make +so that the test will pass or if we should skip that test for your source. + ::: ### Step 9: Write unit tests and/or integration tests -The connector acceptance tests are meant to cover the basic functionality of a source. Think of it as the bare minimum required for us to add a source to Airbyte. In case you need to test additional functionality of your source, write unit or integration tests. +The connector acceptance tests are meant to cover the basic functionality of a source. Think of it +as the bare minimum required for us to add a source to Airbyte. In case you need to test additional +functionality of your source, write unit or integration tests. #### Unit Tests @@ -250,32 +341,49 @@ You can run the tests using `poetry run pytest tests/unit_tests` #### Integration Tests -Place any integration tests in the `integration_tests` directory such that they can be [discovered by pytest](https://docs.pytest.org/en/6.2.x/goodpractices.html#conventions-for-python-test-discovery). +Place any integration tests in the `integration_tests` directory such that they can be +[discovered by pytest](https://docs.pytest.org/en/6.2.x/goodpractices.html#conventions-for-python-test-discovery). You can run the tests using `poetry run pytest tests/integration_tests` ### Step 10: Update the `README.md` -The template fills in most of the information for the readme for you. Unless there is a special case, the only piece of information you need to add is how one can get the credentials required to run the source. e.g. Where one can find the relevant API key, etc. +The template fills in most of the information for the readme for you. Unless there is a special +case, the only piece of information you need to add is how one can get the credentials required to +run the source, e.g., where one can find the relevant API key. ### Step 11: Add the connector to the API/UI + There are multiple ways to use the connector you have built. -If you are self hosting Airbyte (OSS) you are able to use the Custom Connector feature. This feature allows you to run any Docker container that implements the Airbye protocol. You can read more about it [here](https://docs.airbyte.com/integrations/custom-connectors/). +If you are self-hosting Airbyte (OSS), you can use the Custom Connector feature. This feature +allows you to run any Docker container that implements the Airbyte protocol. You can read more about +it [here](https://docs.airbyte.com/integrations/custom-connectors/).
-If you are using Airbyte Cloud (or OSS), you can submit a PR to add your connector to the Airbyte repository. Once the PR is merged, the connector will be available to all Airbyte Cloud users. You can read more about it [here](https://docs.airbyte.com/contributing-to-airbyte/submit-new-connector). +If you are using Airbyte Cloud (or OSS), you can submit a PR to add your connector to the Airbyte +repository. Once the PR is merged, the connector will be available to all Airbyte Cloud users. You +can read more about it +[here](https://docs.airbyte.com/contributing-to-airbyte/submit-new-connector). Note that when submitting an Airbyte connector, you will need to ensure that -1. The connector passes the CAT suite. See [Set up Connector Acceptance Tests](#step-8-set-up-connector-acceptance-tests-\(cats\)). -2. The metadata.yaml file (created by our generator) is filed out and valid. See [Connector Metadata File](https://docs.airbyte.com/connector-development/connector-metadata-file). -3. You have created appropriate documentation for the connector. See [Add docs](#step-12-add-docs). +1. The connector passes the CAT suite. See + [Set up Connector Acceptance Tests](<#step-8-set-up-connector-acceptance-tests-(cats)>). +2. The metadata.yaml file (created by our generator) is filled out and valid. See + [Connector Metadata File](https://docs.airbyte.com/connector-development/connector-metadata-file). +3. You have created appropriate documentation for the connector. See [Add docs](#step-12-add-docs). ### Step 12: Add docs -Each connector has its own documentation page. By convention, that page should have the following path: in `docs/integrations/sources/.md`. For the documentation to get packaged with the docs, make sure to add a link to it in `docs/SUMMARY.md`. You can pattern match doing that from existing connectors. +Each connector has its own documentation page. By convention, that page should have the following +path: `docs/integrations/sources/.md`. For the documentation to get packaged with +the docs, make sure to add a link to it in `docs/SUMMARY.md`. You can pattern-match from +existing connectors. ## Related tutorials -For additional examples of how to use the Python CDK to build an Airbyte source connector, see the following tutorials: + +For additional examples of how to use the Python CDK to build an Airbyte source connector, see the +following tutorials: + - [Python CDK Speedrun: Creating a Source](https://docs.airbyte.com/connector-development/tutorials/cdk-speedrun) - [Build a connector to extract data from the Webflow API](https://airbyte.com/tutorials/extract-data-from-the-webflow-api) diff --git a/docs/connector-development/tutorials/cdk-speedrun.md b/docs/connector-development/tutorials/cdk-speedrun.md index d9fc6bc82ffd..35a9543d2e53 100644 --- a/docs/connector-development/tutorials/cdk-speedrun.md +++ b/docs/connector-development/tutorials/cdk-speedrun.md @@ -2,9 +2,11 @@ ## CDK Speedrun \(HTTP API Source Creation Any Route\) -This is a blazing fast guide to building an HTTP source connector. Think of it as the TL;DR version of [this tutorial.](cdk-tutorial-python-http/getting-started.md) +This is a blazing-fast guide to building an HTTP source connector. Think of it as the TL;DR version +of [this tutorial](cdk-tutorial-python-http/getting-started.md). -If you are a visual learner and want to see a video version of this guide going over each part in detail, check it out below.
+If you are a visual learner and want to see a video version of this guide going over each part in +detail, check it out below. [A speedy CDK overview.](https://www.youtube.com/watch?v=kJ3hLoNfz_E) @@ -19,9 +21,9 @@ If you are a visual learner and want to see a video version of this guide going ```bash # # clone the repo if you haven't already -# git clone --depth 1 https://github.com/airbytehq/airbyte/ +# git clone --depth 1 https://github.com/airbytehq/airbyte/ # cd airbyte # start from repo root -cd airbyte-integrations/connector-templates/generator +cd airbyte-integrations/connector-templates/generator ./generate.sh ``` @@ -40,7 +42,8 @@ poetry install cd source_python_http_example ``` -We're working with the PokeAPI, so we need to define our input schema to reflect that. Open the `spec.yaml` file here and replace it with: +We're working with the PokeAPI, so we need to define our input schema to reflect that. Open the +`spec.yaml` file here and replace it with: ```yaml documentationUrl: https://docs.airbyte.com/integrations/sources/pokeapi @@ -61,9 +64,14 @@ connectionSpecification: - snorlax ``` -As you can see, we have one input to our input schema, which is `pokemon_name`, which is required. Normally, input schemas will contain information such as API keys and client secrets that need to get passed down to all endpoints or streams. +As you can see, we have one required input in our input schema: `pokemon_name`. +Normally, input schemas will contain information such as API keys and client secrets that need to +get passed down to all endpoints or streams. -Ok, let's write a function that checks the inputs we just defined. Nuke the `source.py` file. Now add this code to it. For a crucial time skip, we're going to define all the imports we need in the future here. Also note that your `AbstractSource` class name must be a camel-cased version of the name you gave in the generation phase. In our case, this is `SourcePythonHttpExample`. +Ok, let's write a function that checks the inputs we just defined. Nuke the `source.py` file. Now +add this code to it. For a crucial time skip, we're going to define all the imports we need in the +future here. Also note that your `AbstractSource` class name must be a camel-cased version of the +name you gave in the generation phase. In our case, this is `SourcePythonHttpExample`. ```python from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple @@ -94,7 +102,9 @@ class SourcePythonHttpExample(AbstractSource): return [Pokemon(pokemon_name=config["pokemon_name"])] ``` -Create a new file called `pokemon_list.py` at the same level. This will handle input validation for us so that we don't input invalid Pokemon. Let's start with a very limited list - any Pokemon not included in this list will get rejected. +Create a new file called `pokemon_list.py` at the same level. This will handle input validation for +us so that we don't input invalid Pokemon. Let's start with a very limited list - any Pokemon not +included in this list will get rejected. ```python """ @@ -133,7 +143,8 @@ Expected output: ### Define your Stream -In your `source.py` file, add this `Pokemon` class. This stream represents an endpoint you want to hit, which in our case, is the single [Pokemon endpoint](https://pokeapi.co/docs/v2#pokemon). +In your `source.py` file, add this `Pokemon` class. This stream represents an endpoint you want to +hit, which in our case is the single [Pokemon endpoint](https://pokeapi.co/docs/v2#pokemon).
```python class Pokemon(HttpStream): @@ -151,7 +162,7 @@ class Pokemon(HttpStream): return None def path( - self, + self, ) -> str: return "" # TODO @@ -161,9 +172,16 @@ class Pokemon(HttpStream): return None # TODO ``` -Now download [this file](./cdk-speedrun-assets/pokemon.json). Name it `pokemon.json` and place it in `/source_python_http_example/schemas`. +Now download [this file](./cdk-speedrun-assets/pokemon.json). Name it `pokemon.json` and place it in +`/source_python_http_example/schemas`. -This file defines your output schema for every endpoint that you want to implement. Normally, this will likely be the most time-consuming section of the connector development process, as it requires defining the output of the endpoint exactly. This is really important, as Airbyte needs to have clear expectations for what the stream will output. Note that the name of this stream will be consistent in the naming of the JSON schema and the `HttpStream` class, as `pokemon.json` and `Pokemon` respectively in this case. Learn more about schema creation [here](https://docs.airbyte.com/connector-development/cdk-python/full-refresh-stream#defining-the-streams-schema). +This file defines your output schema for every endpoint that you want to implement. Normally, this +will likely be the most time-consuming section of the connector development process, as it requires +defining the output of the endpoint exactly. This is really important, as Airbyte needs to have +clear expectations for what the stream will output. Note that the stream's name is kept consistent +between the JSON schema file and the `HttpStream` class: `pokemon.json` and +`Pokemon`, respectively, in this case. Learn more about schema creation +[here](https://docs.airbyte.com/connector-development/cdk-python/full-refresh-stream#defining-the-streams-schema). Test your discover function. You should receive a fairly large JSON object in return. @@ -171,7 +189,8 @@ Test your discover function. You should receive a fairly large JSON object in re poetry run source-python-http-example discover --config sample_files/config.json ``` -Note that our discover function is using the `pokemon_name` config variable passed in from the `Pokemon` stream when we set it in the `__init__` function. +Note that our discover function is using the `pokemon_name` config variable passed in from the +`Pokemon` stream when we set it in the `__init__` function. ### Reading Data from the Source @@ -220,7 +239,13 @@ class Pokemon(HttpStream): return None ``` -We now need a catalog that defines all of our streams. We only have one stream: `Pokemon`. Download that file [here](./cdk-speedrun-assets/configured_catalog_pokeapi.json). Place it in `/sample_files` named as `configured_catalog.json`. More clearly, this is where we tell Airbyte all the streams/endpoints we support for the connector and in which sync modes Airbyte can run the connector on. +We now need a catalog that defines all of our streams. We only have one stream: `Pokemon`. Download +that file [here](./cdk-speedrun-assets/configured_catalog_pokeapi.json). Place it in `/sample_files` +named `configured_catalog.json`. More precisely, this is where we tell Airbyte all the +streams/endpoints we support for the connector and the sync modes in which Airbyte can run the +connector.
Learn more about the AirbyteCatalog +[here](https://docs.airbyte.com/understanding-airbyte/beginners-guide-to-catalog) and learn more +about sync modes [here](https://docs.airbyte.com/understanding-airbyte/connections#sync-modes). Let's read some data. @@ -230,24 +255,30 @@ poetry run source-python-http-example read --config sample_files/config.json --c If all goes well, containerize it so you can use it in the UI: - **Option A: Building the docker image with `airbyte-ci`** This is the preferred method for building and testing connectors. -If you want to open source your connector we encourage you to use our [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) tool to build your connector. -It will not use a Dockerfile but will build the connector image from our [base image](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/base_images/README.md) and use our internal build logic to build an image from your Python connector code. +If you want to open source your connector, we encourage you to use our +[`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) +tool to build your connector. It will not use a Dockerfile but will build the connector image from +our +[base image](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/base_images/README.md) +and use our internal build logic to build an image from your Python connector code. Running `airbyte-ci connectors --name source- build` will build your connector image. -Once the command is done, you will find your connector image in your local docker host: `airbyte/source-:dev`. - - +Once the command is done, you will find your connector image in your local docker host: +`airbyte/source-:dev`. **Option B: Building the docker image with a Dockerfile** -If you don't want to rely on `airbyte-ci` to build your connector, you can build the docker image using your own Dockerfile. This method is not preferred, and is not supported for certified connectors. +If you don't want to rely on `airbyte-ci` to build your connector, you can build the docker image +using your own Dockerfile. This method is not preferred, and is not supported for certified +connectors. + +Create a `Dockerfile` in the root of your connector directory. The `Dockerfile` should look +something like this: -Create a `Dockerfile` in the root of your connector directory. The `Dockerfile` should look something like this: ```Dockerfile FROM airbyte/python-connector-base:1.1.0 @@ -263,13 +294,15 @@ RUN pip install ./airbyte/integration_code Please use this as an example. This is not optimized. Build your image: + ```bash docker build . -t airbyte/source-example-python:dev ``` - You're done.
Stop the clock :\) ## Further reading -If you have enjoyed the above example, and would like to explore the Python CDK in even more detail, you may be interested looking at [how to build a connector to extract data from the Webflow API](https://airbyte.com/tutorials/extract-data-from-the-webflow-api) +If you have enjoyed the above example and would like to explore the Python CDK in even more detail, +you may be interested in looking at +[how to build a connector to extract data from the Webflow API](https://airbyte.com/tutorials/extract-data-from-the-webflow-api) diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/connection-checking.md b/docs/connector-development/tutorials/cdk-tutorial-python-http/connection-checking.md index 2e34eb1adf30..984082e7a60b 100644 --- a/docs/connector-development/tutorials/cdk-tutorial-python-http/connection-checking.md +++ b/docs/connector-development/tutorials/cdk-tutorial-python-http/connection-checking.md @@ -2,10 +2,18 @@ The second operation in the Airbyte Protocol that we'll implement is the `check` operation. -This operation verifies that the input configuration supplied by the user can be used to connect to the underlying data source. Note that this user-supplied configuration has the values described in the `spec.yaml` filled in. In other words if the `spec.yaml` said that the source requires a `username` and `password` the config object might be `{ "username": "airbyte", "password": "password123" }`. You should then implement something that returns a json object reporting, given the credentials in the config, whether we were able to connect to the source. - -In order to make requests to the API, we need to specify the access. -In our case, this is a fairly trivial check since the API requires no credentials. Instead, let's verify that the user-input `base` currency is a legitimate currency. In `source.py` we'll find the following autogenerated source: +This operation verifies that the input configuration supplied by the user can be used to connect to +the underlying data source. Note that this user-supplied configuration has the values described in +the `spec.yaml` filled in. In other words, if the `spec.yaml` said that the source requires a +`username` and `password`, the config object might be +`{ "username": "airbyte", "password": "password123" }`. You should then implement something that +returns a json object reporting, given the credentials in the config, whether we were able to +connect to the source. + +In order to make requests to the API, we normally need to specify access credentials. In our case, +this is a fairly trivial check since the API requires no credentials. Instead, let's verify that the user-input +`base` currency is a legitimate currency. In `source.py` we'll find the following autogenerated +source: ```python class SourcePythonHttpTutorial(AbstractSource): @@ -26,7 +34,8 @@ class SourcePythonHttpTutorial(AbstractSource): ...
``` -Following the docstring instructions, we'll change the implementation to verify that the input currency is a real currency: +Following the docstring instructions, we'll change the implementation to verify that the input +currency is a real currency: ```python def check_connection(self, logger, config) -> Tuple[bool, any]: @@ -38,9 +47,19 @@ Following the docstring instructions, we'll change the implementation to verify return True, None ``` -Note: in a real implementation you should write code to connect to the API to validate connectivity and not just validate inputs - for an example see `check_connection` in the [OneSignal source connector implementation](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-onesignal/source_onesignal/source.py) +:::info + +In a real implementation, you should write code that connects to the API to validate connectivity, +not just validate inputs. For an example, see `check_connection` in the +[OneSignal source connector implementation](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-onesignal/source_onesignal/source.py). + +::: -Let's test out this implementation by creating two objects: a valid and an invalid config and attempt to give them as input to the connector. For this section, you will need to take the API access key generated earlier and add it to both configs. Because these configs contain secrets, we recommend storing configs which contain secrets in `secrets/config.json` because the `secrets` directory is gitignored by default. +Let's test out this implementation by creating two config objects, one valid and one invalid, and +attempting to give them as input to the connector. For this section, you will need to take the API +access key generated earlier and add it to both configs. Because these configs contain secrets, we +recommend storing them in `secrets/config.json`; the `secrets` +directory is gitignored by default. ```bash mkdir sample_files @@ -60,4 +79,5 @@ You should see output like the following: {"type": "CONNECTION_STATUS", "connectionStatus": {"status": "FAILED", "message": "Input currency BTC is invalid. Please input one of the following currencies: {'DKK', 'USD', 'CZK', 'BGN', 'JPY'}"}} ``` -While developing, we recommend storing configs which contain secrets in `secrets/config.json` because the `secrets` directory is gitignored by default. +While developing, we recommend storing configs which contain secrets in `secrets/config.json` +because the `secrets` directory is gitignored by default. diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/creating-the-source.md b/docs/connector-development/tutorials/cdk-tutorial-python-http/creating-the-source.md index bead7be49423..ed4ff875bc38 100644 --- a/docs/connector-development/tutorials/cdk-tutorial-python-http/creating-the-source.md +++ b/docs/connector-development/tutorials/cdk-tutorial-python-http/creating-the-source.md @@ -8,9 +8,15 @@ $ cd airbyte-integrations/connector-templates/generator # assumes you are starti $ ./generate.sh ``` -This will bring up an interactive helper application.
Use the arrow keys to pick a template from the +list. Select the `Python HTTP API Source` template and then input the name of your connector. The +application will create a new directory in airbyte/airbyte-integrations/connectors/ with the name of +your new connector. -For this walk-through we will refer to our source as `python-http-example`. The finalized source code for this tutorial can be found [here](https://github.com/airbytehq/airbyte/tree/master/airbyte-integrations/connectors/source-python-http-tutorial). - -The source we will build in this tutorial will pull data from the [Rates API](https://exchangeratesapi.io/), a free and open API which documents historical exchange rates for fiat currencies. +For this walk-through we will refer to our source as `python-http-example`. The finalized source +code for this tutorial can be found +[here](https://github.com/airbytehq/airbyte/tree/master/airbyte-integrations/connectors/source-python-http-tutorial). +The source we will build in this tutorial will pull data from the +[Rates API](https://exchangeratesapi.io/), a free and open API which documents historical exchange +rates for fiat currencies. diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/declare-schema.md b/docs/connector-development/tutorials/cdk-tutorial-python-http/declare-schema.md index b97aeb1b587b..54f15a72e5c3 100644 --- a/docs/connector-development/tutorials/cdk-tutorial-python-http/declare-schema.md +++ b/docs/connector-development/tutorials/cdk-tutorial-python-http/declare-schema.md @@ -1,15 +1,26 @@ # Step 5: Declare the Schema -The `discover` method of the Airbyte Protocol returns an `AirbyteCatalog`: an object which declares all the streams output by a connector and their schemas. It also declares the sync modes supported by the stream \(full refresh or incremental\). See the [catalog tutorial](https://docs.airbyte.com/understanding-airbyte/beginners-guide-to-catalog) for more information. +The `discover` method of the Airbyte Protocol returns an `AirbyteCatalog`: an object which declares +all the streams output by a connector and their schemas. It also declares the sync modes supported +by the stream \(full refresh or incremental\). See the +[catalog tutorial](https://docs.airbyte.com/understanding-airbyte/beginners-guide-to-catalog) for +more information. -This is a simple task with the Airbyte CDK. For each stream in our connector we'll need to: +This is a simple task with the Airbyte CDK. For each stream in our connector we'll need to: -1. Create a python `class` in `source.py` which extends `HttpStream`. -2. Place a `.json` file in the `source_/schemas/` directory. The name of the file should be the snake\_case name of the stream whose schema it describes, and its contents should be the JsonSchema describing the output from that stream. +1. Create a python `class` in `source.py` which extends `HttpStream`. +2. Place a `.json` file in the `source_/schemas/` directory. The name of the file + should be the snake_case name of the stream whose schema it describes, and its contents should be + the JsonSchema describing the output from that stream. -Let's create a class in `source.py` which extends `HttpStream`. You'll notice there are classes with extensive comments describing what needs to be done to implement various connector features. Feel free to read these classes as needed. 
But for the purposes of this tutorial, let's assume that we are adding classes from scratch either by deleting those generated classes or editing them to match the implementation below. +Let's create a class in `source.py` which extends `HttpStream`. You'll notice there are classes with +extensive comments describing what needs to be done to implement various connector features. Feel +free to read these classes as needed. But for the purposes of this tutorial, let's assume that we +are adding classes from scratch either by deleting those generated classes or editing them to match +the implementation below. -We'll begin by creating a stream to represent the data that we're pulling from the Exchange Rates API: +We'll begin by creating a stream to represent the data that we're pulling from the Exchange Rates +API: ```python class ExchangeRates(HttpStream): @@ -23,9 +34,9 @@ class ExchangeRates(HttpStream): return None def path( - self, - stream_state: Mapping[str, Any] = None, - stream_slice: Mapping[str, Any] = None, + self, + stream_state: Mapping[str, Any] = None, + stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None ) -> str: return "" # TODO @@ -40,7 +51,9 @@ class ExchangeRates(HttpStream): return None # TODO ``` -Note that this implementation is entirely empty -- we haven't actually done anything. We'll come back to this in the next step. But for now we just want to declare the schema of this stream. We'll declare this as a stream that the connector outputs by returning it from the `streams` method: +Note that this implementation is entirely empty -- we haven't actually done anything. We'll come +back to this in the next step. But for now we just want to declare the schema of this stream. We'll +declare this as a stream that the connector outputs by returning it from the `streams` method: ```python from airbyte_cdk.sources.streams.http.auth import NoAuth @@ -53,26 +66,32 @@ class SourcePythonHttpTutorial(AbstractSource): def streams(self, config: Mapping[str, Any]) -> List[Stream]: # NoAuth just means there is no authentication required for this API and is included for completeness. # Skip passing an authenticator if no authentication is required. - # Other authenticators are available for API token-based auth and Oauth2. - auth = NoAuth() + # Other authenticators are available for API token-based auth and Oauth2. + auth = NoAuth() return [ExchangeRates(authenticator=auth)] ``` -Having created this stream in code, we'll put a file `exchange_rates.json` in the `schemas/` folder. You can download the JSON file describing the output schema [here](./exchange_rates_schema.json) for convenience and place it in `schemas/`. +Having created this stream in code, we'll put a file `exchange_rates.json` in the `schemas/` folder. +You can download the JSON file describing the output schema [here](./exchange_rates_schema.json) for +convenience and place it in `schemas/`. 
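+
+For reference, here is an abridged sketch of what that schema file contains; the full downloadable
+schema declares many more currencies under `rates` \(see the `discover` output below\):
+
+```json
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+  "type": "object",
+  "properties": {
+    "base": { "type": "string" },
+    "date": { "type": "string" },
+    "rates": {
+      "type": "object",
+      "properties": {
+        "USD": { "type": "number" },
+        "EUR": { "type": "number" }
+      }
+    }
+  }
+}
+```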
-With `.json` schema file in place, let's see if the connector can now find this schema and produce a valid catalog: +With the `.json` schema file in place, let's see if the connector can now find this schema and produce +a valid catalog: -```text +```bash poetry run source-python-http-example discover --config secrets/config.json # this is not a mistake, the schema file is found by the snake_case naming convention as specified above ``` You should see some output like: -```text +```json {"type": "CATALOG", "catalog": {"streams": [{"name": "exchange_rates", "json_schema": {"$schema": "http://json-schema.org/draft-04/schema#", "type": "object", "properties": {"base": {"type": "string"}, "rates": {"type": "object", "properties": {"GBP": {"type": "number"}, "HKD": {"type": "number"}, "IDR": {"type": "number"}, "PHP": {"type": "number"}, "LVL": {"type": "number"}, "INR": {"type": "number"}, "CHF": {"type": "number"}, "MXN": {"type": "number"}, "SGD": {"type": "number"}, "CZK": {"type": "number"}, "THB": {"type": "number"}, "BGN": {"type": "number"}, "EUR": {"type": "number"}, "MYR": {"type": "number"}, "NOK": {"type": "number"}, "CNY": {"type": "number"}, "HRK": {"type": "number"}, "PLN": {"type": "number"}, "LTL": {"type": "number"}, "TRY": {"type": "number"}, "ZAR": {"type": "number"}, "CAD": {"type": "number"}, "BRL": {"type": "number"}, "RON": {"type": "number"}, "DKK": {"type": "number"}, "NZD": {"type": "number"}, "EEK": {"type": "number"}, "JPY": {"type": "number"}, "RUB": {"type": "number"}, "KRW": {"type": "number"}, "USD": {"type": "number"}, "AUD": {"type": "number"}, "HUF": {"type": "number"}, "SEK": {"type": "number"}}}, "date": {"type": "string"}}}, "supported_sync_modes": ["full_refresh"]}]}} ``` -It's that simple! Now the connector knows how to declare your connector's stream's schema. We declare only one stream since our source is simple, but the principle is exactly the same if you had many streams. - -You can also dynamically define schemas, but that's beyond the scope of this tutorial. See the [schema docs](../../cdk-python/full-refresh-stream.md#defining-the-streams-schema) for more information. +It's that simple! Now the connector knows how to declare your stream's schema. We +declare only one stream since our source is simple, but the principle is exactly the same if you had +many streams. +You can also dynamically define schemas, but that's beyond the scope of this tutorial. See the +[schema docs](../../cdk-python/full-refresh-stream.md#defining-the-streams-schema) for more +information. diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/define-inputs.md b/docs/connector-development/tutorials/cdk-tutorial-python-http/define-inputs.md index 0cbe0bce93c9..956a45219430 100644 --- a/docs/connector-development/tutorials/cdk-tutorial-python-http/define-inputs.md +++ b/docs/connector-development/tutorials/cdk-tutorial-python-http/define-inputs.md @@ -1,14 +1,25 @@ # Step 3: Define Inputs -Each connector declares the inputs it needs to read data from the underlying data source. This is the Airbyte Protocol's `spec` operation. +Each connector declares the inputs it needs to read data from the underlying data source. This is +the Airbyte Protocol's `spec` operation. -The simplest way to implement this is by creating a `spec.yaml` file in `source_/spec.yaml` which describes your connector's inputs according to the [ConnectorSpecification](https://github.com/airbytehq/airbyte/blob/master/docs/understanding-airbyte/airbyte-protocol.md#spec) schema.
This is a good place to start when developing your source. Using JsonSchema, define what the inputs are \(e.g. username and password\). Here's [an example](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-stripe/source_stripe/spec.yaml) of what the `spec.yaml` looks like for the Stripe API source. +The simplest way to implement this is by creating a `spec.yaml` file in `source_/spec.yaml` +which describes your connector's inputs according to the +[ConnectorSpecification](https://github.com/airbytehq/airbyte/blob/master/docs/understanding-airbyte/airbyte-protocol.md#spec) +schema. This is a good place to start when developing your source. Using JsonSchema, define what the +inputs are \(e.g. username and password\). Here's +[an example](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-stripe/source_stripe/spec.yaml) +of what the `spec.yaml` looks like for the Stripe API source. -For more details on what the spec is, you can read about the Airbyte Protocol [here](https://docs.airbyte.com/understanding-airbyte/airbyte-protocol). +For more details on what the spec is, you can read about the Airbyte Protocol +[here](https://docs.airbyte.com/understanding-airbyte/airbyte-protocol). -The generated code that Airbyte provides, handles implementing the `spec` method for you. It assumes that there will be a file called `spec.yaml` in the same directory as `source.py`. If you have declared the necessary JsonSchema in `spec.yaml` you should be done with this step. +The generated code that Airbyte provides handles implementing the `spec` method for you. It assumes +that there will be a file called `spec.yaml` in the same directory as `source.py`. If you have +declared the necessary JsonSchema in `spec.yaml`, you should be done with this step. -Given that we'll pulling currency data for our example source, we'll define the following `spec.yaml`: +Given that we'll be pulling currency data for our example source, we'll define the following +`spec.yaml`: ```yaml documentationUrl: https://docs.airbyte.com/integrations/sources/exchangeratesapi @@ -36,12 +47,13 @@ connectionSpecification: examples: - USD - EUR - description: "ISO reference currency. See here." + description: + 'ISO reference currency. See here.' ``` In addition to metadata, we define three inputs: -* `apikey`: The API access key used to authenticate requests to the API -* `start_date`: The beginning date to start tracking currency exchange rates from -* `base`: The currency whose rates we're interested in tracking - +- `apikey`: The API access key used to authenticate requests to the API +- `start_date`: The beginning date to start tracking currency exchange rates from +- `base`: The currency whose rates we're interested in tracking diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/getting-started.md b/docs/connector-development/tutorials/cdk-tutorial-python-http/getting-started.md index 57b2fb4624f9..f97c65bd6352 100644 --- a/docs/connector-development/tutorials/cdk-tutorial-python-http/getting-started.md +++ b/docs/connector-development/tutorials/cdk-tutorial-python-http/getting-started.md @@ -2,30 +2,37 @@ ## Summary -This is a step-by-step guide for how to create an Airbyte source in Python to read data from an HTTP API. We'll be using the Exchange Rates API as an example since it is simple and demonstrates a lot of the capabilities of the CDK.
+This is a step-by-step guide for how to create an Airbyte source in Python to read data from an HTTP +API. We'll be using the Exchange Rates API as an example since it is simple and demonstrates a lot +of the capabilities of the CDK. ## Requirements - * Python >= 3.9 * [Poetry](https://python-poetry.org/) * Docker -All the commands below assume that `python` points to a version of python >=3.9.0. On some systems, `python` points to a Python2 installation and `python3` points to Python3. If this is the case on your machine, substitute all `python` commands in this guide with `python3`. +All the commands below assume that `python` points to a version of python >=3.9.0. On some +systems, `python` points to a Python2 installation and `python3` points to Python3. If this is the +case on your machine, substitute all `python` commands in this guide with `python3`. ## Exchange Rates API Setup -For this guide we will be making API calls to the Exchange Rates API. In order to generate the API access key that will be used by the new connector, you will have to follow steps on the [Exchange Rates Data API](https://apilayer.com/marketplace/exchangerates_data-api/) by signing up for the Free tier plan. Once you have an API access key, you can continue with the guide. +For this guide we will be making API calls to the Exchange Rates API. In order to generate the API +access key that will be used by the new connector, you will have to follow steps on the +[Exchange Rates Data API](https://apilayer.com/marketplace/exchangerates_data-api/) by signing up +for the Free tier plan. Once you have an API access key, you can continue with the guide. ## Checklist -* Step 1: Create the source using the template -* Step 2: Install dependencies for the new source -* Step 3: Define the inputs needed by your connector -* Step 4: Implement connection checking -* Step 5: Declare the schema of your streams -* Step 6: Implement functionality for reading your streams -* Step 7: Use the connector in Airbyte -* Step 8: Write unit tests or integration tests - -Each step of the Creating a Source checklist is explained in more detail in the following steps. We also mention how you can submit the connector to be included with the general Airbyte release at the end of the tutorial. - +- Step 1: Create the source using the template +- Step 2: Install dependencies for the new source +- Step 3: Define the inputs needed by your connector +- Step 4: Implement connection checking +- Step 5: Declare the schema of your streams +- Step 6: Implement functionality for reading your streams +- Step 7: Use the connector in Airbyte +- Step 8: Write unit tests or integration tests + +Each step of the Creating a Source checklist is explained in more detail in the following steps. We +also mention how you can submit the connector to be included with the general Airbyte release at the +end of the tutorial. diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/install-dependencies.md b/docs/connector-development/tutorials/cdk-tutorial-python-http/install-dependencies.md index 3d7e50e22377..04a835a3c783 100644 --- a/docs/connector-development/tutorials/cdk-tutorial-python-http/install-dependencies.md +++ b/docs/connector-development/tutorials/cdk-tutorial-python-http/install-dependencies.md @@ -7,7 +7,6 @@ cd ../../connectors/source- poetry install ``` - Let's verify everything is working as intended. 
Run: ```bash @@ -16,32 +15,43 @@ poetry run source- spec You should see some output: -```text +```json {"type": "SPEC", "spec": {"documentationUrl": "https://docsurl.com", "connectionSpecification": {"$schema": "http://json-schema.org/draft-07/schema#", "title": "Python Http Tutorial Spec", "type": "object", "required": ["TODO"], "properties": {"TODO: This schema defines the configuration required for the source. This usually involves metadata such as database and/or authentication information.": {"type": "string", "description": "describe me"}}}}} ``` -We just ran Airbyte Protocol's `spec` command! We'll talk more about this later, but this is a simple sanity check to make sure everything is wired up correctly. - +We just ran Airbyte Protocol's `spec` command! We'll talk more about this later, but this is a +simple sanity check to make sure everything is wired up correctly. ## Notes on iteration cycle ### Dependencies -Python dependencies for your source should be declared in `airbyte-integrations/connectors/source-/setup.py` in the `install_requires` field. You will notice that a couple of Airbyte dependencies are already declared there. Do not remove these; they give your source access to the helper interfaces provided by the generator. +Python dependencies for your source should be declared in +`airbyte-integrations/connectors/source-/setup.py` in the `install_requires` field. You +will notice that a couple of Airbyte dependencies are already declared there. Do not remove these; +they give your source access to the helper interfaces provided by the generator. -You may notice that there is a `requirements.txt` in your source's directory as well. Don't edit this. It is autogenerated and used to provide Airbyte dependencies. All your dependencies should be declared in `setup.py`. +You may notice that there is a `requirements.txt` in your source's directory as well. Don't edit +this. It is autogenerated and used to provide Airbyte dependencies. All your dependencies should be +declared in `setup.py`. ### Development Environment -The commands we ran above created a [Python virtual environment](https://docs.python.org/3/tutorial/venv.html) for your source. If you want your IDE to auto complete and resolve dependencies properly, point it at the virtual env `airbyte-integrations/connectors/source-/.venv`. Also anytime you change the dependencies in the `setup.py` make sure to re-run `pip install -r requirements.txt`. +The commands we ran above created a +[Python virtual environment](https://docs.python.org/3/tutorial/venv.html) for your source. If you +want your IDE to auto-complete and resolve dependencies properly, point it at the virtual env +`airbyte-integrations/connectors/source-/.venv`. Also, anytime you change the +dependencies in `setup.py`, make sure to re-run `pip install -r requirements.txt`. ### Iterating on your implementation -There are two ways we recommend iterating on a source. Consider using whichever one matches your style. +There are two ways we recommend iterating on a source. Consider using whichever one matches your +style. **Run the source using python** -You'll notice in your source's directory that there is a python file called `main.py`. This file exists as convenience for development.
You run it to test that your source works: +You'll notice in your source's directory that there is a python file called `main.py`. This file +exists as a convenience for development. You run it to test that your source works: ```bash # from airbyte-integrations/connectors/source- @@ -51,11 +61,15 @@ poetry run source- discover --config secrets/config.json poetry run source- read --config secrets/config.json --catalog sample_files/configured_catalog.json ``` -The nice thing about this approach is that you can iterate completely within python. The downside is that you are not quite running your source as it will actually be run by Airbyte. Specifically, you're not running it from within the docker container that will house it. +The nice thing about this approach is that you can iterate completely within python. The downside is +that you are not quite running your source as it will actually be run by Airbyte. Specifically, +you're not running it from within the docker container that will house it. **Run the source using docker** -If you want to run your source exactly as it will be run by Airbyte \(i.e. within a docker container\), you can use the following commands from the connector module directory \(`airbyte-integrations/connectors/source-python-http-example`\): +If you want to run your source exactly as it will be run by Airbyte \(i.e. within a docker +container\), you can use the following commands from the connector module directory +\(`airbyte-integrations/connectors/source-python-http-example`\): ```bash # First build the container @@ -68,7 +82,14 @@ docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-:dev discover -- docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/sample_files:/sample_files airbyte/source-:dev read --config /secrets/config.json --catalog /sample_files/configured_catalog.json ``` -Note: Each time you make a change to your implementation you need to re-build the connector image via `docker build . -t airbyte/source-:dev`. This ensures the new python code is added into the docker container. +:::info + +Each time you make a change to your implementation you need to re-build the connector image +via `docker build . -t airbyte/source-:dev`. This ensures the new python code is added into +the docker container. -The nice thing about this approach is that you are running your source exactly as it will be run by Airbyte. The tradeoff is iteration is slightly slower, as the connector is re-built between each change. +::: +The nice thing about this approach is that you are running your source exactly as it will be run by +Airbyte. The tradeoff is that iteration is slightly slower, as the connector is re-built between each +change. diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/read-data.md index 0417bcdbde25..a2bcfee77562 100644 --- a/docs/connector-development/tutorials/cdk-tutorial-python-http/read-data.md +++ b/docs/connector-development/tutorials/cdk-tutorial-python-http/read-data.md @@ -1,36 +1,45 @@ # Step 6: Read Data -Describing schemas is good and all, but at some point we have to start reading data! So let's get to work. But before, let's describe what we're about to do: +Describing schemas is good and all, but at some point we have to start reading data! So let's get to +work. But first, let's describe what we're about to do: -The `HttpStream` superclass, like described in the [concepts documentation](../../cdk-python/http-streams.md), is facilitating reading data from HTTP endpoints.
It contains built-in functions or helpers for: +The `HttpStream` superclass, as described in the +[concepts documentation](../../cdk-python/http-streams.md), facilitates reading data from HTTP +endpoints. It contains built-in functions or helpers for: -* authentication -* pagination -* handling rate limiting or transient errors -* and other useful functionality +- authentication +- pagination +- handling rate limiting or transient errors +- and other useful functionality In order for it to be able to do this, we have to provide it with a few inputs: -* the URL base and path of the endpoint we'd like to hit -* how to parse the response from the API -* how to perform pagination +- the URL base and path of the endpoint we'd like to hit +- how to parse the response from the API +- how to perform pagination Optionally, we can provide additional inputs to customize requests: -* request parameters and headers -* how to recognize rate limit errors, and how long to wait \(by default it retries 429 and 5XX errors using exponential backoff\) -* HTTP method and request body if applicable -* configure exponential backoff policy +- request parameters and headers +- how to recognize rate limit errors, and how long to wait \(by default it retries 429 and 5XX + errors using exponential backoff\) +- HTTP method and request body if applicable +- configure exponential backoff policy Backoff policy options: -* `retry_factor` Specifies factor for exponential backoff policy \(by default is 5\) -* `max_retries` Specifies maximum amount of retries for backoff policy \(by default is 5\) -* `raise_on_http_errors` If set to False, allows opting-out of raising HTTP code exception \(by default is True\) +- `retry_factor` Specifies the factor for the exponential backoff policy \(5 by default\) +- `max_retries` Specifies the maximum number of retries for the backoff policy \(5 by default\) +- `raise_on_http_errors` If set to False, allows opting out of raising HTTP code exceptions \(True + by default\) -There are many other customizable options - you can find them in the [`airbyte_cdk.sources.streams.http.HttpStream`](https://github.com/airbytehq/airbyte/blob/master/airbyte-cdk/python/airbyte_cdk/sources/streams/http/http.py) class. +There are many other customizable options - you can find them in the +[`airbyte_cdk.sources.streams.http.HttpStream`](https://github.com/airbytehq/airbyte/blob/master/airbyte-cdk/python/airbyte_cdk/sources/streams/http/http.py) +class. A short sketch of overriding the backoff options appears below. -So in order to read data from the exchange rates API, we'll fill out the necessary information for the stream to do its work. First, we'll implement a basic read that just reads the last day's exchange rates, then we'll implement incremental sync using stream slicing. +So in order to read data from the exchange rates API, we'll fill out the necessary information for +the stream to do its work. First, we'll implement a basic read that just reads the last day's +exchange rates, then we'll implement incremental sync using stream slicing.
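+The backoff options above can be overridden directly on the stream class. Here is a minimal
+sketch, assuming the `airbyte_cdk` package is installed; the stream name and endpoint are
+hypothetical, used only to make the example self-contained:
+
+```python
+from typing import Any, Iterable, Mapping, Optional
+
+import requests
+from airbyte_cdk.sources.streams.http import HttpStream
+
+
+class PatientStream(HttpStream):
+    # Hypothetical API, purely for illustration.
+    url_base = "https://api.example.com/"
+    primary_key = None
+
+    # Override the backoff policy options described above.
+    max_retries = 7  # retry up to 7 times instead of the default 5
+    retry_factor = 10  # wait longer between retries than the default factor of 5
+    raise_on_http_errors = True  # keep raising exceptions on non-retryable HTTP errors
+
+    def path(self, **kwargs) -> str:
+        return "items"
+
+    def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]:
+        return [response.json()]
+
+    def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]:
+        return None  # this hypothetical API has no pagination
+```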
Let's begin by pulling data for the last day's rates by using the `/latest` endpoint: @@ -47,13 +56,13 @@ class ExchangeRates(HttpStream): def path( - self, - stream_state: Mapping[str, Any] = None, - stream_slice: Mapping[str, Any] = None, + self, + stream_state: Mapping[str, Any] = None, + stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None ) -> str: # The "/latest" path gives us the latest currency exchange rates - return "latest" + return "latest" def request_headers( self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None @@ -77,23 +86,30 @@ class ExchangeRates(HttpStream): stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None, ) -> Iterable[Mapping]: - # The response is a simple JSON whose schema matches our stream's schema exactly, + # The response is a simple JSON whose schema matches our stream's schema exactly, # so we just return a list containing the response return [response.json()] def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - # The API does not offer pagination, + # The API does not offer pagination, # so we return None to indicate there are no more pages in the response return None ``` -This may look big, but that's just because there are lots of \(unused, for now\) parameters in these methods \(those can be hidden with Python's `**kwargs`, but don't worry about it for now\). Really we just added a few lines of "significant" code: - -1. Added a constructor `__init__` which stores the `base` currency to query for and the `apikey` used for authentication. -2. `return {'base': self.base}` to add the `?base=` query parameter to the request based on the `base` input by the user. -3. `return {'apikey': self.apikey}` to add the header `apikey=` to the request based on the `apikey` input by the user. -4. `return [response.json()]` to parse the response from the API to match the schema of our schema `.json` file. -5. `return "latest"` to indicate that we want to hit the `/latest` endpoint of the API to get the latest exchange rate data. +This may look big, but that's just because there are lots of \(unused, for now\) parameters in these +methods \(those can be hidden with Python's `**kwargs`, but don't worry about it for now\). Really +we just added a few lines of "significant" code: + +1. Added a constructor `__init__` which stores the `base` currency to query for and the `apikey` + used for authentication. +2. `return {'base': self.base}` to add the `?base=` query parameter to the request based + on the `base` input by the user. +3. `return {'apikey': self.apikey}` to add the header `apikey=` to the request based + on the `apikey` input by the user. +4. `return [response.json()]` to parse the response from the API to match the schema of our schema + `.json` file. +5. `return "latest"` to indicate that we want to hit the `/latest` endpoint of the API to get the + latest exchange rate data. Let's also pass the config specified by the user to the stream class: @@ -105,7 +121,11 @@ Let's also pass the config specified by the user to the stream class: We're now ready to query the API! -To do this, we'll need a [ConfiguredCatalog](../../../understanding-airbyte/beginners-guide-to-catalog.md). We've prepared one [here](https://github.com/airbytehq/airbyte/blob/master/docs/connector-development/tutorials/cdk-tutorial-python-http/configured_catalog.json) -- download this and place it in `sample_files/configured_catalog.json`. 
Then run: ```bash poetry run source- read --config secrets/config.json --catalog sample_files/configured_catalog.json @@ -119,20 +139,25 @@ you should see some output lines, one of which is a record from the API: There we have it - a stream which reads data in just a few lines of code! -We theoretically _could_ stop here and call it a connector. But let's give adding incremental sync a shot. +We theoretically _could_ stop here and call it a connector. But let's give adding incremental sync a +shot. ## Adding incremental sync -To add incremental sync, we'll do a few things: -1. Pass the `start_date` param input by the user into the stream. -2. Declare the stream's `cursor_field`. +To add incremental sync, we'll do a few things: + +1. Pass the `start_date` param input by the user into the stream. +2. Declare the stream's `cursor_field`. 3. Declare the stream's property `_cursor_value` to hold the state value -4. Add `IncrementalMixin` to the list of the ancestors of the stream and implement setter and getter of the `state`. -5. Implement the `stream_slices` method. -6. Update the `path` method to specify the date to pull exchange rates for. +4. Add `IncrementalMixin` to the list of the ancestors of the stream and implement the setter and + getter of the `state`. +5. Implement the `stream_slices` method. +6. Update the `path` method to specify the date to pull exchange rates for. 7. Update the configured catalog to use `incremental` sync when we're testing the stream. -We'll describe what each of these methods do below. Before we begin, it may help to familiarize yourself with how incremental sync works in Airbyte by reading the [docs on incremental](/using-airbyte/core-concepts/sync-modes/incremental-append.md). +We'll describe what each of these methods does below. Before we begin, it may help to familiarize +yourself with how incremental sync works in Airbyte by reading the +[docs on incremental](/using-airbyte/core-concepts/sync-modes/incremental-append.md). To keep things concise, we'll only show functions as we edit them one by one. @@ -166,11 +191,18 @@ class ExchangeRates(HttpStream, IncrementalMixin): self._cursor_value = None ``` -Declaring the `cursor_field` informs the framework that this stream now supports incremental sync. The next time you run `python main_dev.py discover --config secrets/config.json` you'll find that the `supported_sync_modes` field now also contains `incremental`. +Declaring the `cursor_field` informs the framework that this stream now supports incremental sync. +The next time you run `python main.py discover --config secrets/config.json` you'll find that +the `supported_sync_modes` field now also contains `incremental`. -But we're not quite done with supporting incremental, we have to actually emit state! We'll structure our state object very simply: it will be a `dict` whose single key is `'date'` and value is the date of the last day we synced data from. For example, `{'date': '2021-04-26'}` indicates the connector previously read data up until April 26th and therefore shouldn't re-read anything before April 26th. +But we're not quite done with supporting incremental: we have to actually emit state!
We'll +structure our state object very simply: it will be a `dict` whose single key is `'date'` and value +is the date of the last day we synced data from. For example, `{'date': '2021-04-26'}` indicates the +connector previously read data up until April 26th and therefore shouldn't re-read anything before +April 26th. -Let's do this by implementing the getter and setter for the `state` inside the `ExchangeRates` class. +Let's do this by implementing the getter and setter for the `state` inside the `ExchangeRates` +class. ```python @property @@ -179,7 +211,7 @@ Let's do this by implementing the getter and setter for the `state` inside the ` return {self.cursor_field: self._cursor_value.strftime('%Y-%m-%d')} else: return {self.cursor_field: self.start_date.strftime('%Y-%m-%d')} - + @state.setter def state(self, value: Mapping[str, Any]): self._cursor_value = datetime.strptime(value[self.cursor_field], '%Y-%m-%d') @@ -197,9 +229,11 @@ Update internal state `cursor_value` inside `read_records` method ``` -This implementation compares the date from the latest record with the date in the current state and takes the maximum as the "new" state object. +This implementation compares the date from the latest record with the date in the current state and +takes the maximum as the "new" state object. -We'll implement the `stream_slices` method to return a list of the dates for which we should pull data based on the stream state if it exists: +We'll implement the `stream_slices` method to return a list of the dates for which we should pull +data based on the stream state if it exists: ```python def _chunk_date_range(self, start_date: datetime) -> List[Mapping[str, Any]]: @@ -218,18 +252,24 @@ We'll implement the `stream_slices` method to return a list of the dates for whi return self._chunk_date_range(start_date) ``` -Each slice will cause an HTTP request to be made to the API. We can then use the information present in the `stream_slice` parameter \(a single element from the list we constructed in `stream_slices` above\) to set other configurations for the outgoing request like `path` or `request_params`. For more info about stream slicing, see [the slicing docs](../../cdk-python/stream-slices.md). +Each slice will cause an HTTP request to be made to the API. We can then use the information present +in the `stream_slice` parameter \(a single element from the list we constructed in `stream_slices` +above\) to set other configurations for the outgoing request like `path` or `request_params`. For +more info about stream slicing, see [the slicing docs](../../cdk-python/stream-slices.md). -In order to pull data for a specific date, the Exchange Rates API requires that we pass the date as the path component of the URL. Let's override the `path` method to achieve this: +In order to pull data for a specific date, the Exchange Rates API requires that we pass the date as +the path component of the URL. Let's override the `path` method to achieve this: ```python def path(self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None) -> str: return stream_slice['date'] ``` -With these changes, your implementation should look like the file [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-python-http-tutorial/source_python_http_tutorial/source.py). 
+With these changes, your implementation should look like the file +[here](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-python-http-tutorial/source_python_http_tutorial/source.py). -The last thing we need to do is change the `sync_mode` field in the `sample_files/configured_catalog.json` to `incremental`: +The last thing we need to do is change the `sync_mode` field in the +`sample_files/configured_catalog.json` to `incremental`: ```text "sync_mode": "incremental", @@ -243,7 +283,8 @@ Let's try it out: poetry run source- read --config secrets/config.json --catalog sample_files/configured_catalog.json ``` -You should see a bunch of `RECORD` messages and `STATE` messages. To verify that incremental sync is working, pass the input state back to the connector and run it again: +You should see a bunch of `RECORD` messages and `STATE` messages. To verify that incremental sync is +working, pass the input state back to the connector and run it again: ```bash # Save the latest state to sample_files/state.json @@ -253,7 +294,7 @@ poetry run source- --config secrets/config.json --catalog sample_files/con poetry run source- read --config secrets/config.json --catalog sample_files/configured_catalog.json --state sample_files/state.json ``` -You should see that only the record from the last date is being synced! This is acceptable behavior, since Airbyte requires at-least-once delivery of records, so repeating the last record twice is OK. +You should see that only the record from the last date is being synced! This is acceptable behavior, +since Airbyte requires at-least-once delivery of records, so repeating the last record twice is OK. With that, we've implemented incremental sync for our connector! - diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/test-your-connector.md b/docs/connector-development/tutorials/cdk-tutorial-python-http/test-your-connector.md index 521d8b05821f..c6fe41cc6265 100644 --- a/docs/connector-development/tutorials/cdk-tutorial-python-http/test-your-connector.md +++ b/docs/connector-development/tutorials/cdk-tutorial-python-http/test-your-connector.md @@ -1,4 +1,4 @@ -# Step 8: Test Connector +# Step 8: Test the Connector ## Unit Tests @@ -8,15 +8,21 @@ You can run the tests using `poetry run pytest tests/unit_tests`. ## Integration Tests -Place any integration tests in the `integration_tests` directory such that they can be [discovered by pytest](https://docs.pytest.org/en/6.2.x/goodpractices.html#conventions-for-python-test-discovery). +Place any integration tests in the `integration_tests` directory such that they can be +[discovered by pytest](https://docs.pytest.org/en/6.2.x/goodpractices.html#conventions-for-python-test-discovery). You can run the tests using `poetry run pytest tests/integration_tests`. -More information on integration testing can be found on [the Testing Connectors doc](https://docs.airbyte.com/connector-development/testing-connectors/#running-integration-tests). +More information on integration testing can be found on +[the Testing Connectors doc](https://docs.airbyte.com/connector-development/testing-connectors/#running-integration-tests). -## Standard Tests +## Connector Acceptance Tests -Standard tests are a fixed set of tests Airbyte provides that every Airbyte source connector must pass. While they're only required if you intend to submit your connector to Airbyte, you might find them helpful in any case.
See [Testing your connectors](../../testing-connectors/) - -If you want to submit this connector to become a default connector within Airbyte, follow steps 8 onwards from the [Python source checklist](../building-a-python-source.md#step-8-set-up-standard-tests) +Connector Acceptance Tests (CATs) are a fixed set of tests Airbyte provides that every Airbyte +source connector must pass. While they're only required if you intend to submit your connector +to Airbyte, you might find them helpful in any case. See +[Testing your connectors](../../testing-connectors/). + +If you want to submit this connector to become a default connector within Airbyte, follow steps 8 +onwards from the +[Python source checklist](../building-a-python-source.md#step-8-set-up-standard-tests) diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/use-connector-in-airbyte.md b/docs/connector-development/tutorials/cdk-tutorial-python-http/use-connector-in-airbyte.md index db190ea87d3e..7772bcbebc1d 100644 --- a/docs/connector-development/tutorials/cdk-tutorial-python-http/use-connector-in-airbyte.md +++ b/docs/connector-development/tutorials/cdk-tutorial-python-http/use-connector-in-airbyte.md @@ -1,26 +1,32 @@ # Step 7: Use the Connector in Airbyte -To use your connector in your own installation of Airbyte you have to build the docker image for your connector. - - +To use your connector in your own installation of Airbyte, you have to build the docker image for +your connector. **Option A: Building the docker image with `airbyte-ci`** This is the preferred method for building and testing connectors. -If you want to open source your connector we encourage you to use our [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) tool to build your connector. -It will not use a Dockerfile but will build the connector image from our [base image](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/base_images/README.md) and use our internal build logic to build an image from your Python connector code. +If you want to open source your connector, we encourage you to use our +[`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) +tool to build your connector. It will not use a Dockerfile but will build the connector image from +our +[base image](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/base_images/README.md) +and use our internal build logic to build an image from your Python connector code. Running `airbyte-ci connectors --name source- build` will build your connector image. -Once the command is done, you will find your connector image in your local docker host: `airbyte/source-:dev`. - - +Once the command is done, you will find your connector image in your local docker host: +`airbyte/source-:dev`. **Option B: Building the docker image with a Dockerfile** -If you don't want to rely on `airbyte-ci` to build your connector, you can build the docker image using your own Dockerfile. This method is not preferred, and is not supported for certified connectors. +If you don't want to rely on `airbyte-ci` to build your connector, you can build the docker image +using your own Dockerfile. This method is not preferred, and is not supported for certified +connectors. + +Create a `Dockerfile` in the root of your connector directory. The `Dockerfile` should look +something like this: -Create a `Dockerfile` in the root of your connector directory.
The `Dockerfile` should look something like this: ```Dockerfile FROM airbyte/python-connector-base:1.1.0 @@ -36,11 +42,15 @@ RUN pip install ./airbyte/integration_code Please use this as an example. This is not optimized. Build your image: + ```bash docker build . -t airbyte/source-example-python:dev ``` -Then, follow the instructions from the [building a Python source tutorial](../building-a-python-source.md#step-11-add-the-connector-to-the-api-ui) for using the connector in the Airbyte UI, replacing the name as appropriate. - -Note: your built docker image must be accessible to the `docker` daemon running on the Airbyte node. If you're doing this tutorial locally, these instructions are sufficient. Otherwise you may need to push your Docker image to Dockerhub. +Then, follow the instructions from the +[building a Python source tutorial](../building-a-python-source.md#step-11-add-the-connector-to-the-api-ui) +for using the connector in the Airbyte UI, replacing the name as appropriate. +Note: your built docker image must be accessible to the `docker` daemon running on the Airbyte node. +If you're doing this tutorial locally, these instructions are sufficient. Otherwise you may need to +push your Docker image to Dockerhub. diff --git a/docs/connector-development/tutorials/profile-java-connector-memory.md b/docs/connector-development/tutorials/profile-java-connector-memory.md index e18eb9f21bd1..608e234f6b68 100644 --- a/docs/connector-development/tutorials/profile-java-connector-memory.md +++ b/docs/connector-development/tutorials/profile-java-connector-memory.md @@ -1,97 +1,119 @@ # Profile Java Connector Memory Usage -This tutorial demos how to profile the memory usage of a Java connector with Visual VM. Such profiling can be useful when we want to debug memory leaks, or optimize the connector's memory footprint. +This tutorial demos how to profile the memory usage of a Java connector with Visual VM. Such +profiling can be useful when we want to debug memory leaks, or optimize the connector's memory +footprint. -The example focuses on docker deployment, because it is more straightforward. It is also possible to apply the same procedure to Kubernetes deployments. +The example focuses on docker deployment, because it is more straightforward. It is also possible to +apply the same procedure to Kubernetes deployments. ## Prerequisite + - [Docker](https://www.docker.com/products/personal) running locally. - [VisualVM](https://visualvm.github.io/) preinstalled. ## Step-by-Step -1. Enable JMX in `airbyte-integrations/connectors//build.gradle`, and expose it on port 6000. The port is chosen arbitrary, and can be port number that's available. - - `` examples: `source-mysql`, `source-github`, `destination-snowflake`. - - ```groovy - application { - mainClass = 'io.airbyte.integrations.' - applicationDefaultJvmArgs = [ - '-XX:+ExitOnOutOfMemoryError', - '-XX:MaxRAMPercentage=75.0', - - // add the following JVM arguments to enable JMX: - '-XX:NativeMemoryTracking=detail', - '-XX:+UsePerfData', - '-Djava.rmi.server.hostname=localhost', - '-Dcom.sun.management.jmxremote=true', - '-Dcom.sun.management.jmxremote.port=6000', - "-Dcom.sun.management.jmxremote.rmi.port=6000", - '-Dcom.sun.management.jmxremote.local.only=false', - '-Dcom.sun.management.jmxremote.authenticate=false', - '-Dcom.sun.management.jmxremote.ssl=false', - - // optionally, add a max heap size to limit the memory usage - '-Xmx2000m', - ] + +1. Enable JMX in `airbyte-integrations/connectors//build.gradle`, and expose it on + port 6000. 
The port is chosen arbitrarily, and can be any port number that's available. + + - `` examples: `source-mysql`, `source-github`, `destination-snowflake`. + + ```groovy + application { + mainClass = 'io.airbyte.integrations.' + applicationDefaultJvmArgs = [ + '-XX:+ExitOnOutOfMemoryError', + '-XX:MaxRAMPercentage=75.0', + + // add the following JVM arguments to enable JMX: + '-XX:NativeMemoryTracking=detail', + '-XX:+UsePerfData', + '-Djava.rmi.server.hostname=localhost', + '-Dcom.sun.management.jmxremote=true', + '-Dcom.sun.management.jmxremote.port=6000', + "-Dcom.sun.management.jmxremote.rmi.port=6000", + '-Dcom.sun.management.jmxremote.local.only=false', + '-Dcom.sun.management.jmxremote.authenticate=false', + '-Dcom.sun.management.jmxremote.ssl=false', + + // optionally, add a max heap size to limit the memory usage + '-Xmx2000m', + ] } ``` 2. Modify `airbyte-integrations/connectors//Dockerfile` to expose the JMX port. - ```dockerfile - // optionally install procps to enable the ps command in the connector container - RUN apt-get update && apt-get install -y procps && rm -rf /var/lib/apt/lists/* + ```dockerfile + // optionally install procps to enable the ps command in the connector container + RUN apt-get update && apt-get install -y procps && rm -rf /var/lib/apt/lists/* - // expose the same JMX port specified in the previous step - EXPOSE 6000 - ``` + // expose the same JMX port specified in the previous step + EXPOSE 6000 + ``` -3. Expose the same port in `airbyte-workers/src/main/java/io/airbyte/workers/process/DockerProcessFactory.java`. +3. Expose the same port in + `airbyte-workers/src/main/java/io/airbyte/workers/process/DockerProcessFactory.java`. - ```java - // map local 6000 to the JMX port from the container - if (imageName.startsWith("airbyte/")) { - LOGGER.info("Exposing image {} port 6000", imageName); - cmd.add("-p"); - cmd.add("6000:6000"); - } - ``` + ```java + // map local 6000 to the JMX port from the container + if (imageName.startsWith("airbyte/")) { + LOGGER.info("Exposing image {} port 6000", imageName); + cmd.add("-p"); + cmd.add("6000:6000"); + } + ``` - Disable the [`host` network mode](https://docs.docker.com/network/host/) by _removing_ the following code block in the same file. This is necessary because under the `host` network mode, published ports are discarded. + Disable the [`host` network mode](https://docs.docker.com/network/host/) by _removing_ the + following code block in the same file. This is necessary because under the `host` network mode, + published ports are discarded. - ```java - if (networkName != null) { - cmd.add("--network"); - cmd.add(networkName); - } - ``` + ```java + if (networkName != null) { + cmd.add("--network"); + cmd.add(networkName); + } + ``` - (This [commit](https://github.com/airbytehq/airbyte/pull/10394/commits/097ec57869a64027f5b7858aa8bb9575844e8b76) can be used as a reference. It reverts them. So just do the opposite.) + (This + [commit](https://github.com/airbytehq/airbyte/pull/10394/commits/097ec57869a64027f5b7858aa8bb9575844e8b76) + can be used as a reference. It reverts these changes, so just do the opposite.) -4. Build and launch Airbyte locally. It is necessary to build it because we have modified the `DockerProcessFactory.java`. +4. Build and launch Airbyte locally. It is necessary to build it because we have modified the + `DockerProcessFactory.java`.
- ```sh - SUB_BUILD=PLATFORM ./gradlew build -x test - VERSION=dev docker compose up - ``` + ```sh + SUB_BUILD=PLATFORM ./gradlew build -x test + VERSION=dev docker compose up + ``` -5. Build the connector to be profiled locally. It will create a `dev` version local image: `airbyte/:dev`. +5. Build the connector to be profiled locally. It will create a `dev` version local image: + `airbyte/:dev`. - ```sh - ./gradlew :airbyte-integrations:connectors::airbyteDocker - ``` + ```sh + ./gradlew :airbyte-integrations:connectors::airbyteDocker + ``` -6. Connect to the launched local Airbyte server at `localhost:8000`, go to the `Settings` page, and change the version of the connector to be profiled to `dev` which was just built in the previous step. +6. Connect to the launched local Airbyte server at `localhost:8000`, go to the `Settings` page, and + change the version of the connector to be profiled to `dev`, which was just built in the previous + step. 7. Create a connection using the connector to be profiled. - - The `Replication frequency` of this connector should be `manual` so that we can control when it starts. - - We can use the e2e test connectors as either the source or destination for convenience. - - The e2e test connectors are usually very reliable, and requires little configuration. - - For example, if we are profiling a source connector, create an e2e test destination at the other end of the connection. + + - The `Replication frequency` of this connector should be `manual` so that we can control when it + starts. + - We can use the e2e test connectors as either the source or destination for convenience. + - The e2e test connectors are usually very reliable, and require little configuration. + - For example, if we are profiling a source connector, create an e2e test destination at the + other end of the connection. 8. Profile the connector in question. - - Launch a data sync run. - - After the run starts, open Visual VM, and click `File` / `Add JMX Connection...`. A modal will show up. Type in `localhost:6000`, and click `OK`. - - Now we can see a new connection shows up under the `Local` category on the left, and the information about the connector's JVM gets retrieved. - ![visual vm screenshot](https://visualvm.github.io/images/visualvm_screenshot_20.png) + - Launch a data sync run. + - After the run starts, open Visual VM, and click `File` / `Add JMX Connection...`. A modal will + show up. Type in `localhost:6000`, and click `OK`. + - Now we can see a new connection show up under the `Local` category on the left, and the + information about the connector's JVM gets retrieved. + + ![visual vm screenshot](https://visualvm.github.io/images/visualvm_screenshot_20.png) diff --git a/docs/connector-development/tutorials/adding-incremental-sync.md b/docs/connector-development/tutorials/the-hard-way/adding-incremental-sync.md similarity index 78% rename from docs/connector-development/tutorials/adding-incremental-sync.md rename to docs/connector-development/tutorials/the-hard-way/adding-incremental-sync.md index 8a454049a7dd..f3d3be401ed6 100644 --- a/docs/connector-development/tutorials/adding-incremental-sync.md +++ b/docs/connector-development/tutorials/the-hard-way/adding-incremental-sync.md @@ -2,13 +2,26 @@ ## Overview -This tutorial will assume that you already have a working source. If you do not, feel free to refer to the [Building a Toy Connector](build-a-connector-the-hard-way.md) tutorial. This tutorial will build directly off the example from that article.
We will also assume that you have a basic understanding of how Airbyte's Incremental-Append replication strategy works. We have a brief explanation of it [here](/using-airbyte/core-concepts/sync-modes/incremental-append.md). +This tutorial will assume that you already have a working source. If you do not, feel free to refer +to the [Building a Toy Connector](build-a-connector-the-hard-way.md) tutorial. This tutorial will +build directly off the example from that article. We will also assume that you have a basic +understanding of how Airbyte's Incremental-Append replication strategy works. We have a brief +explanation of it [here](../../../using-airbyte/core-concepts/sync-modes/incremental-append.md). ## Update Catalog in `discover` -First we need to identify a given stream in the Source as supporting incremental. This information is declared in the catalog that the `discover` method returns. You will notice in the stream object contains a field called `supported_sync_modes`. If we are adding incremental to an existing stream, we just need to add `"incremental"` to that array. This tells Airbyte that this stream can either be synced in an incremental fashion. In practice, this will mean that in the UI, a user will have the ability to configure this type of sync. +First we need to identify a given stream in the Source as supporting incremental. This information +is declared in the catalog that the `discover` method returns. You will notice that the stream object +contains a field called `supported_sync_modes`. If we are adding incremental to an existing stream, +we just need to add `"incremental"` to that array. This tells Airbyte that this stream can be +synced in an incremental fashion. In practice, this will mean that in the UI, a user will have the +ability to configure this type of sync. -In the example we used in the Toy Connector tutorial, the `discover` method would not look like this. Note: that "incremental" has been added to the `supported_sync_modes` array. We also set `source_defined_cursor` to `True` and `default_cursor_field` to `["date"]` to declare that the Source knows what field to use for the cursor, in this case the date field, and does not require user input. Nothing else has changed. +In the example we used in the Toy Connector tutorial, the `discover` method would now look like +this. Note that "incremental" has been added to the `supported_sync_modes` array. We also set +`source_defined_cursor` to `True` and `default_cursor_field` to `["date"]` to declare that the +Source knows what field to use for the cursor, in this case the date field, and does not require +user input. Nothing else has changed. ```python def discover(): @@ -38,6 +51,7 @@ def discover(): ``` Also, create a file called `incremental_configured_catalog.json` with the following content: + ```javascript { "streams": [ @@ -73,7 +87,11 @@ Also, create a file called `incremental_configured_catalog.json` with the follow Next we will adapt the `read` method that we wrote previously. We need to change three things. -First, we need to pass it information about what data was replicated in the previous sync. In Airbyte this is called a `state` object. The structure of the state object is determined by the Source. This means that each Source can construct a state object that makes sense to it and does not need to worry about adhering to any other convention. That being said, a pretty typical structure for a state object is a map of stream name to the last value in the cursor field for that stream. +First, we need to pass it information about what data was replicated in the previous sync. In +Airbyte this is called a `state` object. The structure of the state object is determined by the +Source. This means that each Source can construct a state object that makes sense to it and does not +need to worry about adhering to any other convention. That being said, a pretty typical structure +for a state object is a map of stream name to the last value in the cursor field for that stream.
+First, we need to pass it information about what data was replicated in the previous sync. In +Airbyte this is called a `state` object. The structure of the state object is determined by the +Source. This means that each Source can construct a state object that makes sense to it and does not +need to worry about adhering to any other convention. That being said, a pretty typical structure +for a state object is a map of stream name to the last value in the cursor field for that stream. In this case we might choose something like this: @@ -85,9 +103,11 @@ In this case we might choose something like this: } ``` -The second change we need to make to the `read` method is to use the state object so that we only emit new records. +The second change we need to make to the `read` method is to use the state object so that we only +emit new records. -Lastly, we need to emit an updated state object, so that the next time this Source runs we do not resend messages that we have already sent. +Lastly, we need to emit an updated state object, so that the next time this Source runs we do not +resend messages that we have already sent. Here's what our updated `read` method would look like. @@ -150,12 +170,14 @@ def read(config, catalog, state): ``` That code requires to add a new library import in the `source.py` file: + ```python from datetime import timezone ``` -We will also need to parse `state` argument in the `run` method. In order to do that, we will modify the code that -calls `read` method from `run` method: +We will also need to parse `state` argument in the `run` method. In order to do that, we will modify +the code that calls `read` method from `run` method: + ```python elif command == "read": config = read_json(get_input_file_path(parsed_args.config)) @@ -166,19 +188,25 @@ calls `read` method from `run` method: read(config, configured_catalog, state) ``` -Finally, we need to pass more arguments to our `_call_api` method in order to fetch only new prices for incremental sync: + +Finally, we need to pass more arguments to our `_call_api` method in order to fetch only new prices +for incremental sync: + ```python def _call_api(ticker, token, from_day, to_day): return requests.get(f"https://api.polygon.io/v2/aggs/ticker/{ticker}/range/1/day/{from_day}/{to_day}?sort=asc&limit=120&apiKey={token}") ``` -You will notice that in order to test these changes you need a `state` object. If you run an incremental sync -without passing a state object, the new code will output a state object that you can use with the next sync. If you run this: +You will notice that in order to test these changes you need a `state` object. If you run an +incremental sync without passing a state object, the new code will output a state object that you +can use with the next sync. If you run this: + ```bash python source.py read --config secrets/valid_config.json --catalog incremental_configured_catalog.json ``` The output will look like following: + ```bash {"type": "RECORD", "record": {"stream": "stock_prices", "data": {"date": "2022-03-07", "stock_ticker": "TSLA", "price": 804.58}, "emitted_at": 1647294277000}} {"type": "RECORD", "record": {"stream": "stock_prices", "data": {"date": "2022-03-08", "stock_ticker": "TSLA", "price": 824.4}, "emitted_at": 1647294277000}} @@ -189,25 +217,30 @@ The output will look like following: ``` Notice that the last line of output is the state object. 
Copy the state object: + ```json -{"stock_prices": {"date": "2022-03-11"}} +{ "stock_prices": { "date": "2022-03-11" } } ``` + and paste it into a new file (i.e. `state.json`). Now you can run an incremental sync: + ```bash -python source.py read --config secrets/valid_config.json --catalog incremental_configured_catalog.json --state state.json +python source.py read --config secrets/valid_config.json --catalog incremental_configured_catalog.json --state state.json ``` ## Run the incremental tests -The [Source Acceptance Test (SAT) suite](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) also includes test cases to ensure that incremental mode is working correctly. +The +[Connector Acceptance Test (CAT) suite](../../testing-connectors/connector-acceptance-tests-reference) +also includes test cases to ensure that incremental mode is working correctly. To enable these tests, modify the existing `acceptance-test-config.yml` by adding the following: ```yaml - incremental: - - config_path: "secrets/valid_config.json" - configured_catalog_path: "incremental_configured_catalog.json" - future_state_path: "abnormal_state.json" +incremental: + - config_path: "secrets/valid_config.json" + configured_catalog_path: "incremental_configured_catalog.json" + future_state_path: "abnormal_state.json" ``` Your full `acceptance-test-config.yml` should look something like this: @@ -240,13 +273,16 @@ tests: future_state_path: "abnormal_state.json" ``` -You will also need to create an `abnormal_state.json` file with a date in the future, which should not produce any records: +You will also need to create an `abnormal_state.json` file with a date in the future, which should +not produce any records: -``` +```javascript {"stock_prices": {"date": "2121-01-01"}} ``` -And lastly you need to modify the `check` function call to include the new parameters `from_day` and `to_day` in `source.py`: +And lastly you need to modify the `check` function call to include the new parameters `from_day` and +`to_day` in `source.py`: + ```python def check(config): # Validate input configuration by attempting to get the daily closing prices of the input stock ticker @@ -272,8 +308,8 @@ Run the tests once again: And finally, you should see a successful test summary: ``` -collecting ... - test_core.py ✓✓✓✓✓✓✓✓✓✓✓✓✓✓✓✓✓✓✓ 86% ████████▋ +collecting ... + test_core.py ✓✓✓✓✓✓✓✓✓✓✓✓✓✓✓✓✓✓✓ 86% ████████▋ test_full_refresh.py ✓ 91% █████████▏ test_incremental.py ✓✓ 100% ██████████ @@ -285,14 +321,15 @@ Results (8.90s): That's all you need to do to add incremental functionality to the stock ticker Source. You can deploy the new version of your connector simply by running: + ```bash ./gradlew clean :airbyte-integrations:connectors:source-stock-ticker-api:build ``` Bonus points: go to Airbyte UI and reconfigure the connection to use incremental sync. -Incremental definitely requires more configurability than full refresh, so your implementation may deviate slightly depending on whether your cursor -field is source defined or user-defined. If you think you are running into one of those cases, check out -our [incremental](/using-airbyte/core-concepts/sync-modes/incremental-append.md) documentation for more information on different types of -configuration. - +Incremental definitely requires more configurability than full refresh, so your implementation may +deviate slightly depending on whether your cursor field is source defined or user-defined. 
If you +think you are running into one of those cases, check out our +[incremental](/using-airbyte/core-concepts/sync-modes/incremental-append.md) documentation for more +information on different types of configuration. diff --git a/docs/connector-development/tutorials/build-a-connector-the-hard-way.md b/docs/connector-development/tutorials/the-hard-way/build-a-connector-the-hard-way.md similarity index 76% rename from docs/connector-development/tutorials/build-a-connector-the-hard-way.md rename to docs/connector-development/tutorials/the-hard-way/build-a-connector-the-hard-way.md index 5f9edd2d0d58..e1713854eb4f 100644 --- a/docs/connector-development/tutorials/build-a-connector-the-hard-way.md +++ b/docs/connector-development/tutorials/the-hard-way/build-a-connector-the-hard-way.md @@ -1,38 +1,48 @@ --- -description: Building a source connector without using any helpers to learn the Airbyte Specification for sources +description: + Building a source connector without using any helpers to learn the Airbyte Specification for + sources --- # Building a Source Connector: The Hard Way -This tutorial walks you through building a simple Airbyte source without using any helpers to demonstrate the following concepts in action: +This tutorial walks you through building a simple Airbyte source without using any helpers to +demonstrate the following concepts in action: -- [The Airbyte Specification](../../understanding-airbyte/airbyte-protocol.md) and the interface implemented by a source connector -- [The AirbyteCatalog](../../understanding-airbyte/beginners-guide-to-catalog.md) +- [The Airbyte Specification](../../../understanding-airbyte/airbyte-protocol.md) and the interface + implemented by a source connector +- [The AirbyteCatalog](../../../understanding-airbyte/beginners-guide-to-catalog.md) - [Packaging your connector](https://docs.airbyte.com/connector-development#1.-implement-and-package-the-connector) -- [Testing your connector](../testing-connectors/connector-acceptance-tests-reference.md) +- [Testing your connector](../../testing-connectors/connector-acceptance-tests-reference.md) :::warning -**This tutorial is meant for those interested in learning how the Airbyte Specification works in detail, -not for creating production connectors**. -If you're building a real source, you should start with using the [Connector Builder](../connector-builder-ui/overview), or -the [Connector Development Kit](https://github.com/airbytehq/airbyte/tree/master/airbyte-cdk/python/docs/tutorials). + +**This tutorial is meant for those interested in learning how the Airbyte Specification +works in detail, not for creating production connectors**. If you're building a real source, you +should start by using the [Connector Builder](../../connector-builder-ui/overview), or the +[Connector Development Kit](https://github.com/airbytehq/airbyte/tree/master/airbyte-cdk/python/docs/tutorials). + ::: ## Requirements To run this tutorial, you'll need: -- Docker, Python, and Java with the versions listed in the [tech stack section](../../understanding-airbyte/tech-stack.md). -- The `requests` Python package installed via `pip install requests` \(or `pip3` if `pip` is linked to a Python2 installation on your system\) ## Our connector: a stock ticker API -The connector will output the daily price of a stock since a given date. -We'll leverage [Polygon.io API](https://polygon.io/) for this. +- Docker, Python, and Java with the versions listed in the + [tech stack section](../../../understanding-airbyte/tech-stack.md).
- The `requests` Python package installed via `pip install requests` \(or `pip3` if `pip` is linked + to a Python2 installation on your system\) ## Our connector: a stock ticker API -The connector will output the daily price of a stock since a given date. -We'll leverage [Polygon.io API](https://polygon.io/) for this. +The connector will output the daily price of a stock since a given date. We'll leverage +[Polygon.io API](https://polygon.io/) for this. :::info -We'll use Python to implement the connector, but you could build an Airbyte -connector in any language. + +We'll use Python to implement the connector, but you could build an Airbyte connector in any +language. + ::: Here's the outline of what we'll do to build the connector: @@ -40,7 +50,8 @@ Here's the outline of what we'll do to build the connector: 1. Use the Airbyte connector template to bootstrap the connector package 2. Implement the methods required by the Airbyte Specification for our connector: 1. `spec`: declares the user-provided credentials or configuration needed to run the connector - 2. `check`: tests if the connector can connect with the underlying data source with the user-provided configuration + 2. `check`: tests if the connector can connect with the underlying data source with the + user-provided configuration 3. `discover`: declares the different streams of data that this connector can output 4. `read`: reads data from the underlying data source \(The stock ticker API\) 3. Package the connector in a Docker image @@ -49,10 +60,10 @@ Here's the outline of what we'll do to build the connector: [Part 2 of this article](adding-incremental-sync.md) covers: -- Support [incremental sync](../../using-airbyte/core-concepts/sync-modes/incremental-append.md) +- Support [incremental sync](../../../using-airbyte/core-concepts/sync-modes/incremental-append.md) - Add custom integration tests -Let's get started! +Let's get started! --- @@ -65,7 +76,8 @@ $ pwd /Users/sherifnada/code/airbyte ``` -Airbyte provides a code generator which bootstraps the scaffolding for our connector. Let's use it by running: +Airbyte provides a code generator which bootstraps the scaffolding for our connector. Let's use it +by running: ```bash $ cd airbyte-integrations/connector-templates/generator $ ./generate.sh ``` Select the `Generic Source` template and call the connector `stock-ticker-api`: -![](../../.gitbook/assets/newsourcetutorial_plop.gif) +![](../../../.gitbook/assets/newsourcetutorial_plop.gif) :::info -This tutorial uses the bare-bones `Generic Source` template to illustrate how all the pieces of a connector -work together. For real connectors, the generator provides `Python` and `Python HTTP API` source templates, they use -[Airbyte CDK](../cdk-python/README.md). -::: +This tutorial uses the bare-bones `Generic Source` template to illustrate how all the pieces +of a connector work together. For real connectors, the generator provides `Python` and +`Python HTTP API` source templates, which use the [Airbyte CDK](../../cdk-python/README.md). + +::: ```bash $ cd ../../connectors/source-stock-ticker-api $ ls -1 Dockerfile README.md acceptance-test-config.yml ### 2.
Implement the connector in line with the Airbyte Specification -In the connector package directory, create a single Python file `source.py` that will hold our implementation: +In the connector package directory, create a single Python file `source.py` that will hold our +implementation: ```bash touch source.py ``` #### Implement the spec operation -The `spec` operation is described in the [Airbyte Protocol](https://docs.airbyte.com/understanding-airbyte/airbyte-protocol/#spec). -It's a way for the connector to tell Airbyte what user inputs it needs in order to connecto to the source (the stock -ticker API in our case). Airbyte expects the command to output a connector specification in `AirbyteMessage` format. +The `spec` operation is described in the +[Airbyte Protocol](https://docs.airbyte.com/understanding-airbyte/airbyte-protocol/#spec). It's a +way for the connector to tell Airbyte what user inputs it needs in order to connect to the source +(the stock ticker API in our case). Airbyte expects the command to output a connector specification +in `AirbyteMessage` format. To contact the stock ticker API, we need two things: 1. Which stock ticker we're interested in -2. The API key to use when contacting the API \(you can obtain a free API token from [Polygon.io](https://polygon.io/dashboard/signup) free plan\) +2. The API key to use when contacting the API \(you can obtain a free API token from + the [Polygon.io](https://polygon.io/dashboard/signup) free plan\) + +:::info + +For reference, the API docs we'll be using +[can be found here](https://polygon.io/docs/stocks/get_v2_aggs_ticker__stocksticker__range__multiplier___timespan___from___to). -:::info -For reference, the API docs we'll be using [can be found here](https://polygon.io/docs/stocks/get_v2_aggs_ticker__stocksticker__range__multiplier___timespan___from___to). ::: -Let's create a [JSONSchema](http://json-schema.org/) file `spec.json` encoding these two requirements: +Let's create a [JSONSchema](http://json-schema.org/) file `spec.json` encoding these two +requirements: ```javascript { @@ -139,11 +160,15 @@ Let's create a [JSONSchema](http://json-schema.org/) file `spec.json` encoding t } ``` -- `documentationUrl` is the URL that will appear in the UI for the user to gain more info about this connector. Typically this points to `docs.airbyte.com/integrations/sources/source-` but to keep things simple we won't show adding documentation -- `title` is the "human readable" title displayed in the UI. Without this field, The Stock Ticker field will have the title `stock_ticker` in the UI -- `description` will be shown in the Airbyte UI under each field to help the user understand it -- `airbyte_secret` used by Airbyte to determine if the field should be displayed as a password \(e.g: `********`\) in the UI and not readable from the API - +- `documentationUrl` is the URL that will appear in the UI for the user to gain more info about this + connector. Typically this points to + `docs.airbyte.com/integrations/sources/source-` but to keep things simple we won't + show adding documentation +- `title` is the "human readable" title displayed in the UI.
Without this field, the Stock Ticker + field will have the title `stock_ticker` in the UI - `description` will be shown in the Airbyte UI under each field to help the user understand it - `airbyte_secret` is used by Airbyte to determine if the field should be displayed as a password + \(e.g: `********`\) in the UI and not readable from the API ```bash $ ls -1 @@ -155,7 +180,8 @@ metadata.yaml spec.json ``` -Now, let's edit `source.py` to detect if the program was invoked with the `spec` argument and if so, output the connector specification: +Now, let's edit `source.py` to detect if the program was invoked with the `spec` argument and if so, +output the connector specification: ```python # source.py @@ -228,10 +254,13 @@ if __name__ == "__main__": Some notes on the above code: -1. As described in the [specification](https://docs.airbyte.com/understanding-airbyte/airbyte-protocol/#key-takeaways), - Airbyte connectors are CLIs which communicate via stdout, so the output of the command is simply a JSON string - formatted according to the Airbyte Specification. So to "return" a value we use `print` to output the return value to stdout. -2. All Airbyte commands can output log messages that take the form `{"type":"LOG", "log":"message"}`, so we create a helper method `log(message)` to allow logging. +1. As described in the + [specification](https://docs.airbyte.com/understanding-airbyte/airbyte-protocol/#key-takeaways), + Airbyte connectors are CLIs which communicate via stdout, so the output of the command is simply + a JSON string formatted according to the Airbyte Specification. So to "return" a value we use + `print` to output the return value to stdout. +2. All Airbyte commands can output log messages that take the form + `{"type":"LOG", "log":"message"}`, so we create a helper method `log(message)` to allow logging. 3. All Airbyte commands can output error messages that take the form `{"type":"TRACE", "trace": {"type": "ERROR", "emitted_at": current_time_in_ms, "error": {"message": error_message}}}`, so we create a helper method `log_error(message)` to allow error messages. @@ -245,17 +274,21 @@ python source.py spec #### Implementing check connection -The second command to implement is the [check operation](https://docs.airbyte.com/understanding-airbyte/airbyte-protocol/#check) `check --config `, -which tells the user whether a config file they gave us is correct. In our case, "correct" means they input a valid -stock ticker and a correct API key like we declare via the `spec` operation. +The second command to implement is the +[check operation](https://docs.airbyte.com/understanding-airbyte/airbyte-protocol/#check) +`check --config `, which tells the user whether a config file they gave us is correct. +In our case, "correct" means they input a valid stock ticker and a correct API key, as declared +via the `spec` operation. To achieve this, we'll: -1. Create valid and invalid configuration files to test the success and failure cases with our connector. - We'll place config files in the `secrets/` directory which is gitignored everywhere in the Airbyte monorepo by - default to avoid accidentally checking in API keys. -2. Add a `check` method which calls the Polygon.io API to verify if the provided token & stock ticker are correct and output the correct airbyte message. -3.
Extend the argument parser to recognize the `check --config ` command and call the `check` method when the `check` command is invoked. +1. Create valid and invalid configuration files to test the success and failure cases with our + connector. We'll place config files in the `secrets/` directory which is gitignored everywhere in + the Airbyte monorepo by default to avoid accidentally checking in API keys. +2. Add a `check` method which calls the Polygon.io API to verify if the provided token & stock + ticker are correct and output the correct airbyte message. +3. Extend the argument parser to recognize the `check --config ` command and call the + `check` method when the `check` command is invoked. Let's first add the configuration files: @@ -265,7 +298,8 @@ $ echo '{"api_key": "put_your_key_here", "stock_ticker": "TSLA"}' > secrets/vali $ echo '{"api_key": "not_a_real_key", "stock_ticker": "TSLA"}' > secrets/invalid_config.json ``` -Make sure to add your actual API key instead of the placeholder value `` when following the tutorial. +Make sure to add your actual API key instead of the placeholder value `` when +following the tutorial. Then we'll add the `check` method: @@ -297,8 +331,8 @@ def check(config): print(json.dumps(output_message)) ``` -In Airbyte, the contract for input files is that they will be available in the current working directory if they are not provided as an absolute path. -This method helps us achieve that: +In Airbyte, the contract for input files is that they will be available in the current working +directory if they are not provided as an absolute path. This method helps us achieve that: ```python def get_input_file_path(path): @@ -352,19 +386,30 @@ $ python source.py check --config secrets/invalid_config.json {'type': 'CONNECTION_STATUS', 'connectionStatus': {'status': 'FAILED', 'message': 'API Key is incorrect.'}} ``` -Our connector is able to detect valid and invalid configs correctly. Two methods down, two more to go! +Our connector is able to detect valid and invalid configs correctly. Two methods down, two more to +go! #### Implementing Discover -The `discover` command outputs a Catalog, a struct that declares the Streams and Fields \(Airbyte's equivalents of tables and columns\) output by the connector. It also includes metadata around which features a connector supports \(e.g. which sync modes\). In other words it describes what data is available in the source. If you'd like to read a bit more about this concept check out our [Beginner's Guide to the Airbyte Catalog](../../understanding-airbyte/beginners-guide-to-catalog.md) or for a more detailed treatment read the [Airbyte Specification](../../understanding-airbyte/airbyte-protocol.md). +The `discover` command outputs a Catalog, a struct that declares the Streams and Fields \(Airbyte's +equivalents of tables and columns\) output by the connector. It also includes metadata around which +features a connector supports \(e.g. which sync modes\). In other words it describes what data is +available in the source. If you'd like to read a bit more about this concept check out our +[Beginner's Guide to the Airbyte Catalog](../../../understanding-airbyte/beginners-guide-to-catalog.md) +or for a more detailed treatment read the +[Airbyte Specification](../../../understanding-airbyte/airbyte-protocol.md). -The stock ticker connector outputs records belonging to exactly one Stream \(table\). 

 #### Implementing Discover

-The `discover` command outputs a Catalog, a struct that declares the Streams and Fields \(Airbyte's equivalents of tables and columns\) output by the connector. It also includes metadata around which features a connector supports \(e.g. which sync modes\). In other words it describes what data is available in the source. If you'd like to read a bit more about this concept check out our [Beginner's Guide to the Airbyte Catalog](../../understanding-airbyte/beginners-guide-to-catalog.md) or for a more detailed treatment read the [Airbyte Specification](../../understanding-airbyte/airbyte-protocol.md).
+The `discover` command outputs a Catalog, a struct that declares the Streams and Fields \(Airbyte's
+equivalents of tables and columns\) output by the connector. It also includes metadata around which
+features a connector supports \(e.g. which sync modes\). In other words it describes what data is
+available in the source. If you'd like to read a bit more about this concept check out our
+[Beginner's Guide to the Airbyte Catalog](../../../understanding-airbyte/beginners-guide-to-catalog.md)
+or for a more detailed treatment read the
+[Airbyte Specification](../../../understanding-airbyte/airbyte-protocol.md).

-The stock ticker connector outputs records belonging to exactly one Stream \(table\).
-Each record contains three Fields \(columns\): `date`, `price`, and `stock_ticker`, corresponding to the price of a stock on a given day.
+The stock ticker connector outputs records belonging to exactly one Stream \(table\). Each record
+contains three Fields \(columns\): `date`, `price`, and `stock_ticker`, corresponding to the price
+of a stock on a given day.

 To implement `discover`, we'll:

-1. Add a method `discover` in `source.py` which outputs the Catalog. To better understand what a catalog is, check out our [Beginner's Guide to the AirbyteCatalog](../../understanding-airbyte/beginners-guide-to-catalog.md)
-2. Extend the arguments parser to use detect the `discover --config ` command and call the `discover` method
+1. Add a method `discover` in `source.py` which outputs the Catalog. To better understand what a
+   catalog is, check out our
+   [Beginner's Guide to the AirbyteCatalog](../../../understanding-airbyte/beginners-guide-to-catalog.md)
+2. Extend the arguments parser to detect the `discover --config ` command and call
+   the `discover` method

 Let's implement `discover` by adding the following in `source.py`:

@@ -416,8 +461,15 @@ We need to update our list of available commands:

 ```python
 log("Invalid command. Allowable commands: [spec, check, discover]")
 ```
+
 :::info
-You may be wondering why `config` is a required input to `discover` if it's not used. This is done for consistency: the Airbyte Specification requires `--config` as an input to `discover` because many sources require it \(e.g: to discover the tables available in a Postgres database, you must supply a password\). So instead of guessing whether the flag is required depending on the connector, we always assume it is required, and the connector can choose whether to use it.
+
+You may be wondering why `config` is a required input to `discover` if it's not used. This
+is done for consistency: the Airbyte Specification requires `--config` as an input to `discover`
+because many sources require it \(e.g: to discover the tables available in a Postgres database, you
+must supply a password\). So instead of guessing whether the flag is required depending on the
+connector, we always assume it is required, and the connector can choose whether to use it.
+
 :::

 The full run method is now below:

@@ -473,27 +525,43 @@ With that, we're done implementing the `discover` command.
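+
+To recap, everything `discover` prints is one CATALOG message. As a condensed, illustrative
+sketch — the schema details below are a plausible shape for this tutorial's stream, not
+necessarily the exact code referenced above:
+
+```python
+# Illustrative sketch only: the catalog message shape for the single stock_prices stream.
+import json
+
+catalog = {
+    "streams": [
+        {
+            "name": "stock_prices",
+            "supported_sync_modes": ["full_refresh"],
+            "json_schema": {
+                "properties": {
+                    "date": {"type": "string"},
+                    "price": {"type": "number"},
+                    "stock_ticker": {"type": "string"},
+                },
+            },
+        }
+    ]
+}
+print(json.dumps({"type": "CATALOG", "catalog": catalog}))
+```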

 #### Implementing the read operation

-We've done a lot so far, but a connector ultimately exists to read data! This is where the [`read` command](https://docs.airbyte.com/understanding-airbyte/airbyte-protocol/#read) comes in. The format of the command is:
+We've done a lot so far, but a connector ultimately exists to read data! This is where the
+[`read` command](https://docs.airbyte.com/understanding-airbyte/airbyte-protocol/#read) comes in.
+The format of the command is:

 ```bash
 python source.py read --config --catalog [--state ]
 ```

-Each of these are described in the Airbyte Specification in detail, but we'll give a quick description of the two options we haven't seen so far:
-
-- `--catalog` points to a Configured Catalog. The Configured Catalog contains the contents for the Catalog \(remember the Catalog we output from discover?\). It also contains some configuration information that describes how the data will by replicated. For example, we had `supported_sync_modes` in the Catalog. In the Configured Catalog, we select which of the `supported_sync_modes` we want to use by specifying the `sync_mode` field. \(This is the most complicated concept when working Airbyte, so if it is still not making sense that's okay for now. If you're just dying to understand how the Configured Catalog works checkout the [Beginner's Guide to the Airbyte Catalog](../../understanding-airbyte/beginners-guide-to-catalog.md)\).
-- `--state` points to a state file. The state file is only relevant when some Streams are synced with the sync mode `incremental`, so we'll cover the state file in more detail in the incremental section below.
-
-Our connector only supports one Stream, `stock_prices`, so we'd expect the input catalog to contain that stream configured to sync in full refresh.
-Since our connector doesn't support incremental sync yet, we'll ignore the state option for now.
+Each of these is described in the Airbyte Specification in detail, but we'll give a quick
+description of the two options we haven't seen so far:
+
+- `--catalog` points to a Configured Catalog. The Configured Catalog contains the contents for the
+  Catalog \(remember the Catalog we output from discover?\). It also contains some configuration
+  information that describes how the data will be replicated. For example, we had
+  `supported_sync_modes` in the Catalog. In the Configured Catalog, we select which of the
+  `supported_sync_modes` we want to use by specifying the `sync_mode` field. \(This is the most
+  complicated concept when working with Airbyte, so if it is still not making sense that's okay for
+  now. If you're just dying to understand how the Configured Catalog works check out the
+  [Beginner's Guide to the Airbyte Catalog](../../../understanding-airbyte/beginners-guide-to-catalog.md)\).
+- `--state` points to a state file. The state file is only relevant when some Streams are synced
+  with the sync mode `incremental`, so we'll cover the state file in more detail in the incremental
+  section below.
+
+Our connector only supports one Stream, `stock_prices`, so we'd expect the input catalog to contain
+that stream configured to sync in full refresh. Since our connector doesn't support incremental sync
+yet, we'll ignore the state option for now.

 To read data in our connector, we'll:

-1. Create a configured catalog which tells our connector that we want to sync the `stock_prices` stream
-2. Implement a method `read` in `source.py`. For now we'll always read the last 7 days of a stock price's data
+1. Create a configured catalog which tells our connector that we want to sync the `stock_prices`
+   stream
+2. Implement a method `read` in `source.py`. For now we'll always read the last 7 days of a stock
+   price's data
 3. Extend the arguments parser to recognize the `read` command and its arguments

-First, let's create a configured catalog `fullrefresh_configured_catalog.json` to use as test input for the read operation:
+First, let's create a configured catalog `fullrefresh_configured_catalog.json` to use as test input
+for the read operation:

 ```javascript
 {

@@ -573,7 +641,9 @@ def read(config, catalog):
     print(json.dumps(output_message))
 ```

-After doing some input validation, the code above calls the API to obtain daily prices for the input stock ticker, then outputs the prices. As always, our output is formatted according to the Airbyte Specification. Let's update our args parser with the following blocks:
+After doing some input validation, the code above calls the API to obtain daily prices for the input
+stock ticker, then outputs the prices. As always, our output is formatted according to the Airbyte
+Specification.
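+
+Concretely, each line `read` prints is one RECORD message. An illustrative sketch of a single
+message follows (the wrapping shape matches the sample output shown below; the helper wiring here
+is illustrative):
+
+```python
+# Illustrative sketch only: the shape of one RECORD message emitted by read().
+import json
+import time
+
+message = {
+    "type": "RECORD",
+    "record": {
+        "stream": "stock_prices",
+        "data": {"date": "2020-12-21", "stock_ticker": "TSLA", "price": 649.86},
+        "emitted_at": int(time.time() * 1000),  # epoch milliseconds
+    },
+}
+print(json.dumps(message))
+```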
+Let's update our args parser with the following blocks:

 ```python
 # Accept the read command

@@ -667,7 +737,8 @@ $ python source.py read --config secrets/valid_config.json --catalog fullrefresh
 {'type': 'RECORD', 'record': {'stream': 'stock_prices', 'data': {'date': '2020-12-21', 'stock_ticker': 'TSLA', 'price': 649.86}, 'emitted_at': 1608626365000}}
 ```

-With this method, we now have a fully functioning connector! Let's pat ourselves on the back for getting there.
+With this method, we now have a fully functioning connector! Let's pat ourselves on the back for
+getting there.

 For reference, the full `source.py` file now looks like this:

@@ -868,13 +939,15 @@ if __name__ == "__main__":
     main()
 ```

-A full connector in about 200 lines of code. Not bad! We're now ready to package & test our connector then use it in the Airbyte UI.
+A full connector in about 200 lines of code. Not bad! We're now ready to package & test our
+connector then use it in the Airbyte UI.

 ---

 ### 3. Package the connector in a Docker image

-Our connector is very lightweight, so the Dockerfile needed to run it is very light as well. Edit the `Dockerfile` as follows:
+Our connector is very lightweight, so the Dockerfile needed to run it is very light as well. Edit
+the `Dockerfile` as follows:

 ```Dockerfile
 FROM python:3.9-slim

@@ -905,8 +978,10 @@ Once we save the `Dockerfile`, we can build the image by running:

 docker build . -t airbyte/source-stock-ticker-api:dev
 ```

-To run any of our commands, we'll need to mount all the inputs into the Docker container first, then refer to their _mounted_ paths when invoking the connector.
-This allows the connector to access your secrets without having to build them into the container. For example, we'd run `check` or `read` as follows:
+To run any of our commands, we'll need to mount all the inputs into the Docker container first, then
+refer to their _mounted_ paths when invoking the connector. This allows the connector to access your
+secrets without having to build them into the container. For example, we'd run `check` or `read` as
+follows:

 ```bash
 $ docker run airbyte/source-stock-ticker-api:dev spec

@@ -930,11 +1005,17 @@ $ docker run -v $(pwd)/secrets/valid_config.json:/data/config.json -v $(pwd)/ful

 ### 4. Test the connector

-The minimum requirement for testing your connector is to pass the [Connector Acceptance Test](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) suite. The connector acceptence test is a blackbox test suite containing a number of tests that validate your connector behaves as intended by the Airbyte Specification. You're encouraged to add custom test cases for your connector where it makes sense to do so e.g: to test edge cases that are not covered by the standard suite. But at the very least, your connector must pass Airbyte's acceptance test suite.
+The minimum requirement for testing your connector is to pass the
+[Connector Acceptance Test](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference)
+suite. The connector acceptance test is a blackbox test suite containing a number of tests that
+validate your connector behaves as intended by the Airbyte Specification. You're encouraged to add
+custom test cases for your connector where it makes sense to do so e.g: to test edge cases that are
+not covered by the standard suite. But at the very least, your connector must pass Airbyte's
+acceptance test suite.
-The code generator makes a minimal acceptance test configuration. Let's modify it as follows to setup -tests for each operation with valid and invalid credentials. Edit `acceptance-test-config.yaml` to look -as follows: +The code generator makes a minimal acceptance test configuration. Let's modify it as follows to +setup tests for each operation with valid and invalid credentials. Edit +`acceptance-test-config.yaml` to look as follows: ```yaml # See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) @@ -969,8 +1050,11 @@ acceptance_tests: # configured_catalog_path: "integration_tests/configured_catalog.json" # future_state_path: "integration_tests/abnormal_state.json" ``` -To run the test suite, we'll use [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md). -You can build and install `airbyte-ci` locally from Airbyte repository root by running `make`. Assuming you have it already: + +To run the test suite, we'll use +[`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md). +You can build and install `airbyte-ci` locally from Airbyte repository root by running `make`. +Assuming you have it already: ```shell airbyte-ci connectors --name= --use-remote-secrets=false test @@ -978,7 +1062,8 @@ airbyte-ci connectors --name= - `airbyte-ci` will build and then test your connector, and provide a report on the test results. -That's it! We've created a fully functioning connector. Now let's get to the exciting part: using it from the Airbyte UI. +That's it! We've created a fully functioning connector. Now let's get to the exciting part: using it +from the Airbyte UI. --- @@ -992,22 +1077,28 @@ Let's recap what we've achieved so far: To use it from the Airbyte UI, we need to: -1. Publish our connector's Docker image somewhere accessible by Airbyte Core \(Airbyte's server, scheduler, workers, and webapp infrastructure\) -2. Add the connector via the Airbyte UI and setup a connection from our new connector to a local CSV file for illustration purposes +1. Publish our connector's Docker image somewhere accessible by Airbyte Core \(Airbyte's server, + scheduler, workers, and webapp infrastructure\) +2. Add the connector via the Airbyte UI and setup a connection from our new connector to a local CSV + file for illustration purposes 3. Run a sync and inspect the output #### 1. Publish the Docker image -Since we're running this tutorial locally, Airbyte will have access to any Docker images available to your local `docker` daemon. So all we need to do is build & tag our connector. -For real production connectors to be available on Airbyte Cloud, you'd need to publish them on DockerHub. +Since we're running this tutorial locally, Airbyte will have access to any Docker images available +to your local `docker` daemon. So all we need to do is build & tag our connector. For real +production connectors to be available on Airbyte Cloud, you'd need to publish them on DockerHub. -Airbyte's build system builds and tags your connector's image correctly by default as part of the connector's standard `build` process. **From the Airbyte repo root**, run: +Airbyte's build system builds and tags your connector's image correctly by default as part of the +connector's standard `build` process. 
**From the Airbyte repo root**, run: ```bash ./gradlew clean :airbyte-integrations:connectors:source-stock-ticker-api:build ``` -This is the equivalent of running `docker build . -t airbyte/source-stock-ticker-api:dev` from the connector root, where the tag `airbyte/source-stock-ticker-api` is extracted from the label `LABEL io.airbyte.name` inside your `Dockerfile`. +This is the equivalent of running `docker build . -t airbyte/source-stock-ticker-api:dev` from the +connector root, where the tag `airbyte/source-stock-ticker-api` is extracted from the label +`LABEL io.airbyte.name` inside your `Dockerfile`. Verify the image was built by running: @@ -1020,17 +1111,20 @@ $ docker images | head 1caf57c72afd 3 hours ago 121MB ``` -`airbyte/source-stock-ticker-api` was built and tagged with the `dev` tag. Now let's head to the last step. +`airbyte/source-stock-ticker-api` was built and tagged with the `dev` tag. Now let's head to the +last step. #### 2. Add the connector via the Airbyte UI -If the Airbyte server isn't already running, start it by running **from the Airbyte repository root**: +If the Airbyte server isn't already running, start it by running **from the Airbyte repository +root**: ```bash docker compose up ``` -When Airbyte server is done starting up, it prints the following banner in the log output \(it can take 10-20 seconds for the server to start\): +When Airbyte server is done starting up, it prints the following banner in the log output \(it can +take 10-20 seconds for the server to start\): ```bash airbyte-server | 2022-03-11 18:38:33 INFO i.a.s.ServerApp(start):121 - @@ -1047,79 +1141,90 @@ airbyte-server | Version: dev airbyte-server | ``` -After you see the above banner printed out in the terminal window where you are running `docker compose up`, visit [http://localhost:8000](http://localhost:8000) in your browser and log in with the default credentials: username `airbyte` and password `password`. +After you see the above banner printed out in the terminal window where you are running +`docker compose up`, visit [http://localhost:8000](http://localhost:8000) in your browser and log in +with the default credentials: username `airbyte` and password `password`. -If this is the first time using the Airbyte UI, then you will be prompted to go through a first-time wizard. To skip it, click the "Skip Onboarding" button. +If this is the first time using the Airbyte UI, then you will be prompted to go through a first-time +wizard. To skip it, click the "Skip Onboarding" button. In the UI, click the "Settings" button in the left side bar: -![](../../.gitbook/assets/newsourcetutorial_sidebar_settings.png) +![](../../../.gitbook/assets/newsourcetutorial_sidebar_settings.png) Then on the Settings page, select Sources -![](../../.gitbook/assets/newsourcetutorial_settings_page.png) +![](../../../.gitbook/assets/newsourcetutorial_settings_page.png) Then on the Settings/Sources page, click "+ New Connector" button at the top right: -![](../../.gitbook/assets/newsourcetutorial_settings_sources_newconnector.png) +![](../../../.gitbook/assets/newsourcetutorial_settings_sources_newconnector.png) On the modal that pops up, enter the following information then click "Add" -![](../../.gitbook/assets/newsourcetutorial_new_connector_modal.png) +![](../../../.gitbook/assets/newsourcetutorial_new_connector_modal.png) -After you click "Add", the modal will close and you will be back at the Settings page. 
-Now click "Sources" in the navigation bar on the left: +After you click "Add", the modal will close and you will be back at the Settings page. Now click +"Sources" in the navigation bar on the left: -![](../../.gitbook/assets/newsourcetutorial_sources_navbar.png) +![](../../../.gitbook/assets/newsourcetutorial_sources_navbar.png) -You will be redirected to Sources page, which, if you have not set up any connections, will be empty. -On the Sources page click "+ new source" in the top right corner: +You will be redirected to Sources page, which, if you have not set up any connections, will be +empty. On the Sources page click "+ new source" in the top right corner: -![](../../.gitbook/assets/newsourcetutorial_sources_page.png) +![](../../../.gitbook/assets/newsourcetutorial_sources_page.png) A new modal will prompt you for details of the new source. Type "Stock Ticker" in the Name field. -Then, find your connector in the Source type dropdown. We have lots of connectors already, so it might be easier -to find your connector by typing part of its name: +Then, find your connector in the Source type dropdown. We have lots of connectors already, so it +might be easier to find your connector by typing part of its name: -![](../../.gitbook/assets/newsourcetutorial_find_your_connector.png) +![](../../../.gitbook/assets/newsourcetutorial_find_your_connector.png) -After you select your connector in the Source type dropdown, the modal will show two more fields: API Key and Stock Ticker. -Remember that `spec.json` file you created at the very beginning of this tutorial? These fields should correspond to the `properties` -section of that file. Copy-paste your Polygon.io API key and a stock ticker into these fields and then click "Set up source" -button at the bottom right of the modal. +After you select your connector in the Source type dropdown, the modal will show two more fields: +API Key and Stock Ticker. Remember that `spec.json` file you created at the very beginning of this +tutorial? These fields should correspond to the `properties` section of that file. Copy-paste your +Polygon.io API key and a stock ticker into these fields and then click "Set up source" button at the +bottom right of the modal. -![](../../.gitbook/assets/newsourcetutorial_source_config.png) +![](../../../.gitbook/assets/newsourcetutorial_source_config.png) -Once you click "Set up source", Airbyte will spin up your connector and run "check" method to verify the configuration. -You will see a progress bar briefly and if the configuration is valid, you will see a success message, -the modal will close and you will see your connector on the updated Sources page. +Once you click "Set up source", Airbyte will spin up your connector and run "check" method to verify +the configuration. You will see a progress bar briefly and if the configuration is valid, you will +see a success message, the modal will close and you will see your connector on the updated Sources +page. -![](../../.gitbook/assets/newsourcetutorial_sources_stock_ticker.png) +![](../../../.gitbook/assets/newsourcetutorial_sources_stock_ticker.png) -Next step is to add a destination. On the same page, click "add destination" and then click "+ add a new destination": +Next step is to add a destination. 
On the same page, click "add destination" and then click "+ add a +new destination": -![](../../.gitbook/assets/newsourcetutorial_add_destination_new_destination.png) +![](../../../.gitbook/assets/newsourcetutorial_add_destination_new_destination.png) -"New destination" wizard will show up. Type a name (e.g. "Local JSON") into the Name field and select "Local JSON" in Destination type drop-down. -After you select the destination type, type `/local/tutorial_json` into Destination path field. -When we run syncs, we'll find the output on our local filesystem in `/tmp/airbyte_local/tutorial_json`. +"New destination" wizard will show up. Type a name (e.g. "Local JSON") into the Name field and +select "Local JSON" in Destination type drop-down. After you select the destination type, type +`/local/tutorial_json` into Destination path field. When we run syncs, we'll find the output on our +local filesystem in `/tmp/airbyte_local/tutorial_json`. Click "Set up destination" at the lower right of the form. -![](../../.gitbook/assets/newsourcetutorial_add_destination.png) +![](../../../.gitbook/assets/newsourcetutorial_add_destination.png) -After that Airbyte will test the destination and prompt you to configure the connection between Stock Ticker source and Local JSON destination. -Select "Mirror source structure" in the Destination Namespace, check the checkbox next to the stock_prices stream, and click "Set up connection" button at the bottom of the form: +After that Airbyte will test the destination and prompt you to configure the connection between +Stock Ticker source and Local JSON destination. Select "Mirror source structure" in the Destination +Namespace, check the checkbox next to the stock_prices stream, and click "Set up connection" button +at the bottom of the form: -![](../../.gitbook/assets/newsourcetutorial_configure_connection.png) +![](../../../.gitbook/assets/newsourcetutorial_configure_connection.png) -Ta-da! Your connection is now configured to sync once a day. You will see your new connection on the next screen: +Ta-da! Your connection is now configured to sync once a day. You will see your new connection on the +next screen: -![](../../.gitbook/assets/newsourcetutorial_connection_done.png) +![](../../../.gitbook/assets/newsourcetutorial_connection_done.png) -Airbyte will run the first sync job as soon as your connection is saved. Navigate to "Connections" in the side bar and wait for the first sync to succeed: +Airbyte will run the first sync job as soon as your connection is saved. Navigate to "Connections" +in the side bar and wait for the first sync to succeed: -![](../../.gitbook/assets/newsourcetutorial_first_sync.png) +![](../../../.gitbook/assets/newsourcetutorial_first_sync.png) Let's verify the output. From your shell, run: @@ -1132,14 +1237,17 @@ $ cat /tmp/airbyte_local/tutorial_json/_airbyte_raw_stock_prices.jsonl {"_airbyte_ab_id":"0b7a8d33-4500-4a6d-9d74-11716bd22f01","_airbyte_emitted_at":1647026803000,"_airbyte_data":{"date":"2022-03-10","stock_ticker":"TSLA","price":838.3}} ``` -Congratulations! We've successfully written a fully functioning Airbyte connector. You're an Airbyte contributor now ;\) +Congratulations! We've successfully written a fully functioning Airbyte connector. You're an Airbyte +contributor now ;\) 1. Follow the [next tutorial](adding-incremental-sync.md) to implement incremental sync. -2. 
Implement another connector using the Low-code CDK, [Connector Builder](../connector-builder-ui/overview), or [Connector Development Kit](https://github.com/airbytehq/airbyte/tree/master/airbyte-cdk/python/docs/tutorials) -3. We welcome low-code configuration based connector contributions! If you make a connector in the connector builder - and want to share it with everyone using Airbyte, pull requests are welcome! +2. Implement another connector using the Low-code CDK, + [Connector Builder](../../connector-builder-ui/overview.md), or + [Connector Development Kit](https://github.com/airbytehq/airbyte/tree/master/airbyte-cdk/python/docs/tutorials) +3. We welcome low-code configuration based connector contributions! If you make a connector in the + connector builder and want to share it with everyone using Airbyte, pull requests are welcome! ## Additional guides -- [Building a Python Source](https://docs.airbyte.com/connector-development/tutorials/building-a-python-source) +- [Building a Python Source](https://docs.airbyte.com/connector-development/tutorials/building-a-python-source.md) - [Building a Java Destination](https://docs.airbyte.com/connector-development/tutorials/building-a-java-destination) From df1cff15706d63aa4c1cbe9f2922c6f6e560f082 Mon Sep 17 00:00:00 2001 From: Catherine Noll Date: Thu, 7 Mar 2024 01:22:07 -0500 Subject: [PATCH 115/172] Source Instagram: remove "total_interactions" from MediaInsights queries (#35875) --- .../connectors/source-instagram/metadata.yaml | 2 +- .../connectors/source-instagram/pyproject.toml | 2 +- .../connectors/source-instagram/source_instagram/streams.py | 5 ++--- docs/integrations/sources/instagram.md | 5 +++-- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/airbyte-integrations/connectors/source-instagram/metadata.yaml b/airbyte-integrations/connectors/source-instagram/metadata.yaml index a946127344a9..85c4536619e6 100644 --- a/airbyte-integrations/connectors/source-instagram/metadata.yaml +++ b/airbyte-integrations/connectors/source-instagram/metadata.yaml @@ -7,7 +7,7 @@ data: connectorSubtype: api connectorType: source definitionId: 6acf6b55-4f1e-4fca-944e-1a3caef8aba8 - dockerImageTag: 3.0.3 + dockerImageTag: 3.0.4 dockerRepository: airbyte/source-instagram githubIssueLabel: source-instagram icon: instagram.svg diff --git a/airbyte-integrations/connectors/source-instagram/pyproject.toml b/airbyte-integrations/connectors/source-instagram/pyproject.toml index 481ce4c340e1..65f3991a4b50 100644 --- a/airbyte-integrations/connectors/source-instagram/pyproject.toml +++ b/airbyte-integrations/connectors/source-instagram/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "3.0.3" +version = "3.0.4" name = "source-instagram" description = "Source implementation for Instagram." 
authors = [ "Airbyte ",] diff --git a/airbyte-integrations/connectors/source-instagram/source_instagram/streams.py b/airbyte-integrations/connectors/source-instagram/source_instagram/streams.py index 4e6d27c4fb2c..72193ad62ead 100644 --- a/airbyte-integrations/connectors/source-instagram/source_instagram/streams.py +++ b/airbyte-integrations/connectors/source-instagram/source_instagram/streams.py @@ -373,8 +373,8 @@ def _get_children(self, ids: List): class MediaInsights(Media): """Docs: https://developers.facebook.com/docs/instagram-api/reference/ig-media/insights""" - MEDIA_METRICS = ["total_interactions", "impressions", "reach", "saved", "video_views", "likes", "comments", "shares"] - CAROUSEL_ALBUM_METRICS = ["total_interactions", "impressions", "reach", "saved", "video_views"] + MEDIA_METRICS = ["impressions", "reach", "saved", "video_views", "likes", "comments", "shares"] + CAROUSEL_ALBUM_METRICS = ["impressions", "reach", "saved", "video_views"] REELS_METRICS = [ "comments", @@ -385,7 +385,6 @@ class MediaInsights(Media): "reach", "saved", "shares", - "total_interactions", ] def read_records( diff --git a/docs/integrations/sources/instagram.md b/docs/integrations/sources/instagram.md index 2ba71bfbbd3c..96a516bc8cc0 100644 --- a/docs/integrations/sources/instagram.md +++ b/docs/integrations/sources/instagram.md @@ -113,8 +113,9 @@ Instagram limits the number of requests that can be made at a time. See Facebook | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------| -| 3.0.3 | 2024-02-12 | [35177](https://github.com/airbytehq/airbyte/pull/35177) | Manage dependencies with Poetry. | -| 3.0.2 | 2024-01-15 | [34254](https://github.com/airbytehq/airbyte/pull/34254) | prepare for airbyte-lib | +| 3.0.4 | 2024-03-07 | [35875](https://github.com/airbytehq/airbyte/pull/35875) | Remove `total_interactions` from the `MediaInsights` queries. | +| 3.0.3 | 2024-02-12 | [35177](https://github.com/airbytehq/airbyte/pull/35177) | Manage dependencies with Poetry. | +| 3.0.2 | 2024-01-15 | [34254](https://github.com/airbytehq/airbyte/pull/34254) | prepare for airbyte-lib | | 3.0.1 | 2024-01-08 | [33989](https://github.com/airbytehq/airbyte/pull/33989) | Remove metrics from video feed | | 3.0.0 | 2024-01-05 | [33930](https://github.com/airbytehq/airbyte/pull/33930) | Upgrade to API v18.0 | | 2.0.1 | 2024-01-03 | [33889](https://github.com/airbytehq/airbyte/pull/33889) | Change requested metrics for stream `media_insights` | From fc030718b01ef69480497160d625dfce0fafe55f Mon Sep 17 00:00:00 2001 From: Anatolii Yatsuk <35109939+tolik0@users.noreply.github.com> Date: Thu, 7 Mar 2024 11:23:49 +0200 Subject: [PATCH 116/172] :bug: Source S3: Return iam docs (#35850) --- docs/integrations/sources/s3.md | 75 ++++++++++++++++++++++++++++++--- 1 file changed, 68 insertions(+), 7 deletions(-) diff --git a/docs/integrations/sources/s3.md b/docs/integrations/sources/s3.md index 7ff0f2899b57..298f921fdfa0 100644 --- a/docs/integrations/sources/s3.md +++ b/docs/integrations/sources/s3.md @@ -15,7 +15,7 @@ Please note that using cloud storage may incur egress costs. Egress refers to da ### Step 1: Set up Amazon S3 -**If you are syncing from a private bucket**, you will need to provide both an `AWS Access Key ID` and `AWS Secret Access Key` to authenticate the connection. 
The IAM user associated with the credentials must be granted `read` and `list` permissions for the bucket and its objects. If you are unfamiliar with configuring AWS permissions, you can follow these steps to obtain the necessary permissions and credentials:
+**If you are syncing from a private bucket**, you need to authenticate the connection. This can be done either by using an `IAM User` (with `AWS Access Key ID` and `Secret Access Key`) or an `IAM Role` (with `Role ARN`). Begin by creating a policy with the necessary permissions:

 #### Create a Policy

@@ -47,11 +47,70 @@ At this time, object-level permissions alone are not sufficient to successfully
 :::

 4. Give your policy a descriptive name, then click **Create policy**.
-5. In the IAM dashboard, click **Users**. Select an existing IAM user or create a new one by clicking **Add users**.
-6. If you are using an _existing_ IAM user, click the **Add permissions** dropdown menu and select **Add permissions**. If you are creating a _new_ user, you will be taken to the Permissions screen after selecting a name.
-7. Select **Attach policies directly**, then find and check the box for your new policy. Click **Next**, then **Add permissions**.
-8. After successfully creating your user, select the **Security credentials** tab and click **Create access key**. You will be prompted to select a use case and add optional tags to your access key. Click **Create access key** to generate the keys.
-
+
+#### Option 1: Using an IAM Role (Most secure)
+
+
+:::note
+Currently this feature is available only for users in a Sales Assist workflow. Please contact your Solutions Engineer if you are interested in using this.
+:::
+
+
+1. In the IAM dashboard, click **Roles**, then **Create role**.
+2. Choose the appropriate trust entity and attach the policy you created.
+3. Set up a trust relationship for the role. For example, for the **AWS account** trusted entity, use the default AWS account on your instance (it will be used to assume the role). To use an **External ID**, set it as an environment variable: `export AWS_ASSUME_ROLE_EXTERNAL_ID="{your-external-id}"`. Edit the trust relationship policy to reflect this:
+```
+{
+  "Version": "2012-10-17",
+  "Statement": [
+    {
+      "Effect": "Allow",
+      "Principal": {
+        "AWS": "arn:aws:iam::{your-aws-account-id}:user/{your-username}"
+      },
+      "Action": "sts:AssumeRole",
+      "Condition": {
+        "StringEquals": {
+          "sts:ExternalId": "{your-external-id}"
+        }
+      }
+    }
+  ]
+}
+```
+
+
+2. Choose the **AWS account** trusted entity type.
+3. Set up a trust relationship for the role. This allows the Airbyte instance's AWS account to assume this role. You will also need to specify an external ID, which is a secret key that the trusting service (Airbyte) and the trusted role (the role you're creating) both know. This ID is used to prevent the "confused deputy" problem. The External ID should be your Airbyte workspace ID, which can be found in the URL of your workspace page. Edit the trust relationship policy to include the external ID:
+```
+{
+  "Version": "2012-10-17",
+  "Statement": [
+    {
+      "Effect": "Allow",
+      "Principal": {
+        "AWS": "arn:aws:iam::094410056844:user/delegated_access_user"
+      },
+      "Action": "sts:AssumeRole",
+      "Condition": {
+        "StringEquals": {
+          "sts:ExternalId": "{your-airbyte-workspace-id}"
+        }
+      }
+    }
+  ]
+}
+```
+
+4. Complete the role creation and note the Role ARN.
+
+#### Option 2: Using an IAM User
+
+1. In the IAM dashboard, click **Users**. Select an existing IAM user or create a new one by clicking **Add users**.
+2. 
If you are using an _existing_ IAM user, click the **Add permissions** dropdown menu and select **Add permissions**. If you are creating a _new_ user, you will be taken to the Permissions screen after selecting a name. +3. Select **Attach policies directly**, then find and check the box for your new policy. Click **Next**, then **Add permissions**. +4. After successfully creating your user, select the **Security credentials** tab and click **Create access key**. You will be prompted to select a use case and add optional tags to your access key. Click **Create access key** to generate the keys. + :::caution Your `Secret Access Key` will only be visible once upon creation. Be sure to copy and store it securely for future use. ::: @@ -71,7 +130,9 @@ For more information on managing your access keys, please refer to the 3. Give a **Name** to the stream 4. (Optional) - If you want to enforce a specific schema, you can enter a **Input schema**. By default, this value is set to `{}` and will automatically infer the schema from the file\(s\) you are replicating. For details on providing a custom schema, refer to the [User Schema section](#user-schema). 5. Optionally, enter the **Globs** which dictates which files to be synced. This is a regular expression that allows Airbyte to pattern match the specific files to replicate. If you are replicating all the files within your bucket, use `**` as the pattern. For more precise pattern matching options, refer to the [Path Patterns section](#path-patterns) below. -6. **If you are syncing from a private bucket**, you must fill the **AWS Access Key ID** and **AWS Secret Access Key** fields with the appropriate credentials to authenticate the connection. All other fields are optional and can be left empty. Refer to the [S3 Provider Settings section](#s3-provider-settings) below for more information on each field. +6. **To authenticate your private bucket**: + - If using an IAM role, enter the **AWS Role ARN**. + - If using IAM user credentials, fill the **AWS Access Key ID** and **AWS Secret Access Key** fields with the appropriate credentials. All other fields are optional and can be left empty. Refer to the [S3 Provider Settings section](#s3-provider-settings) below for more information on each field. 
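+
+For illustration, here is a minimal sketch of how a client can assume such a role using the
+external ID described above. The role ARN and session name below are placeholders, not values from
+this guide:
+
+```python
+# Illustrative sketch: obtaining temporary credentials by assuming the IAM role.
+import boto3
+
+sts = boto3.client("sts")
+response = sts.assume_role(
+    RoleArn="arn:aws:iam::123456789012:role/your-s3-reader-role",  # placeholder
+    RoleSessionName="airbyte-source-s3",  # placeholder
+    ExternalId="{your-external-id}",  # must match the trust policy condition
+)
+credentials = response["Credentials"]  # temporary AccessKeyId, SecretAccessKey, SessionToken
+```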
From 8c862a8013150cfee238c379e66ff9e84550da6b Mon Sep 17 00:00:00 2001 From: Augustin Date: Thu, 7 Mar 2024 11:32:29 +0100 Subject: [PATCH 117/172] Fix tags in metadata.yaml files: declare cdk tags (#35670) --- .../connectors/destination-amazon-sqs/metadata.yaml | 1 + .../connectors/destination-astra/metadata.yaml | 1 + .../connectors/destination-aws-datalake/metadata.yaml | 1 + .../connectors/destination-chroma/metadata.yaml | 1 + .../connectors/destination-convex/metadata.yaml | 1 + .../connectors/destination-cumulio/metadata.yaml | 1 + .../connectors/destination-databend/metadata.yaml | 1 + .../connectors/destination-duckdb/metadata.yaml | 1 + .../connectors/destination-firebolt/metadata.yaml | 1 + .../connectors/destination-firestore/metadata.yaml | 1 + .../connectors/destination-google-sheets/metadata.yaml | 1 + .../connectors/destination-kvdb/metadata.yaml | 1 + .../connectors/destination-langchain/metadata.yaml | 1 + .../connectors/destination-meilisearch/metadata.yaml | 1 + .../connectors/destination-milvus/metadata.yaml | 1 + .../connectors/destination-pinecone/metadata.yaml | 1 + .../connectors/destination-qdrant/metadata.yaml | 1 + .../connectors/destination-rabbitmq/metadata.yaml | 1 + .../destination-scaffold-destination-python/metadata.yaml | 1 + .../connectors/destination-sftp-json/metadata.yaml | 1 + .../connectors/destination-sqlite/metadata.yaml | 1 + .../connectors/destination-timeplus/metadata.yaml | 1 + .../connectors/destination-typesense/metadata.yaml | 1 + .../connectors/destination-vectara/metadata.yaml | 1 + .../connectors/destination-weaviate/metadata.yaml | 1 + .../connectors/destination-xata/metadata.yaml | 1 + .../connectors/source-activecampaign/metadata.yaml | 2 +- airbyte-integrations/connectors/source-adjust/metadata.yaml | 1 + airbyte-integrations/connectors/source-aha/metadata.yaml | 2 +- airbyte-integrations/connectors/source-aircall/metadata.yaml | 2 +- .../connectors/source-airtable/metadata.yaml | 1 + .../connectors/source-alpha-vantage/metadata.yaml | 2 +- .../connectors/source-amazon-ads/metadata.yaml | 1 + .../connectors/source-amazon-seller-partner/metadata.yaml | 1 + .../connectors/source-amazon-sqs/metadata.yaml | 1 + .../connectors/source-amplitude/metadata.yaml | 2 +- .../connectors/source-apify-dataset/metadata.yaml | 3 ++- .../connectors/source-appfollow/metadata.yaml | 3 ++- .../connectors/source-apple-search-ads/metadata.yaml | 2 +- .../connectors/source-appsflyer/metadata.yaml | 1 + .../connectors/source-appstore-singer/metadata.yaml | 1 + airbyte-integrations/connectors/source-asana/metadata.yaml | 1 + airbyte-integrations/connectors/source-ashby/metadata.yaml | 2 +- airbyte-integrations/connectors/source-auth0/metadata.yaml | 3 ++- .../connectors/source-aws-cloudtrail/metadata.yaml | 1 + .../connectors/source-azure-blob-storage/metadata.yaml | 1 + .../connectors/source-azure-table/metadata.yaml | 1 + .../connectors/source-babelforce/metadata.yaml | 3 ++- .../connectors/source-bamboo-hr/metadata.yaml | 1 + .../connectors/source-bigcommerce/metadata.yaml | 3 ++- .../connectors/source-bigquery/metadata.yaml | 1 - .../connectors/source-bing-ads/metadata.yaml | 1 + .../connectors/source-braintree/metadata.yaml | 3 ++- airbyte-integrations/connectors/source-braze/metadata.yaml | 2 +- .../connectors/source-breezometer/metadata.yaml | 2 +- .../connectors/source-callrail/metadata.yaml | 2 +- .../connectors/source-captain-data/metadata.yaml | 2 +- airbyte-integrations/connectors/source-cart/metadata.yaml | 1 + 
.../connectors/source-chargebee/metadata.yaml | 2 +- .../connectors/source-chargify/metadata.yaml | 3 ++- .../connectors/source-chartmogul/metadata.yaml | 3 ++- .../connectors/source-clickhouse/metadata.yaml | 1 - .../connectors/source-clickup-api/metadata.yaml | 2 +- .../connectors/source-clockify/metadata.yaml | 3 ++- .../connectors/source-close-com/metadata.yaml | 2 +- .../connectors/source-cockroachdb/metadata.yaml | 1 - airbyte-integrations/connectors/source-coda/metadata.yaml | 3 ++- .../connectors/source-coin-api/metadata.yaml | 2 +- .../connectors/source-coingecko-coins/metadata.yaml | 2 +- .../connectors/source-coinmarketcap/metadata.yaml | 2 +- .../connectors/source-commcare/metadata.yaml | 1 + .../connectors/source-commercetools/metadata.yaml | 3 ++- .../connectors/source-configcat/metadata.yaml | 2 +- .../connectors/source-confluence/metadata.yaml | 3 ++- .../connectors/source-convertkit/metadata.yaml | 2 +- airbyte-integrations/connectors/source-convex/metadata.yaml | 1 + airbyte-integrations/connectors/source-copper/metadata.yaml | 3 ++- airbyte-integrations/connectors/source-courier/metadata.yaml | 2 +- .../connectors/source-customer-io/metadata.yaml | 3 ++- airbyte-integrations/connectors/source-datadog/metadata.yaml | 3 ++- .../connectors/source-datascope/metadata.yaml | 2 +- airbyte-integrations/connectors/source-db2/metadata.yaml | 1 - .../connectors/source-delighted/metadata.yaml | 2 +- airbyte-integrations/connectors/source-dixa/metadata.yaml | 3 ++- .../connectors/source-dockerhub/metadata.yaml | 3 ++- airbyte-integrations/connectors/source-dremio/metadata.yaml | 2 +- airbyte-integrations/connectors/source-drift/metadata.yaml | 3 ++- airbyte-integrations/connectors/source-dv-360/metadata.yaml | 1 + .../connectors/source-elasticsearch/metadata.yaml | 1 - .../connectors/source-emailoctopus/metadata.yaml | 2 +- .../connectors/source-everhour/metadata.yaml | 3 ++- .../connectors/source-exchange-rates/metadata.yaml | 3 ++- .../connectors/source-facebook-marketing/metadata.yaml | 1 + .../connectors/source-facebook-pages/metadata.yaml | 2 +- airbyte-integrations/connectors/source-faker/metadata.yaml | 1 + .../connectors/source-fastbill/metadata.yaml | 3 ++- airbyte-integrations/connectors/source-fauna/metadata.yaml | 1 + airbyte-integrations/connectors/source-file/metadata.yaml | 1 + .../source-firebase-realtime-database/metadata.yaml | 1 + .../connectors/source-firebolt/metadata.yaml | 1 + .../connectors/source-flexport/metadata.yaml | 3 ++- .../connectors/source-freshcaller/metadata.yaml | 3 ++- .../connectors/source-freshdesk/metadata.yaml | 1 + .../connectors/source-freshsales/metadata.yaml | 3 ++- .../connectors/source-freshservice/metadata.yaml | 3 ++- .../connectors/source-fullstory/metadata.yaml | 2 +- .../connectors/source-gainsight-px/metadata.yaml | 2 +- airbyte-integrations/connectors/source-gcs/metadata.yaml | 1 + airbyte-integrations/connectors/source-genesys/metadata.yaml | 1 + airbyte-integrations/connectors/source-getlago/metadata.yaml | 2 +- airbyte-integrations/connectors/source-github/metadata.yaml | 1 + airbyte-integrations/connectors/source-gitlab/metadata.yaml | 1 + .../connectors/source-glassfrog/metadata.yaml | 3 ++- airbyte-integrations/connectors/source-gnews/metadata.yaml | 2 +- .../connectors/source-gocardless/metadata.yaml | 2 +- airbyte-integrations/connectors/source-gong/metadata.yaml | 2 +- .../connectors/source-google-ads/metadata.yaml | 1 + .../source-google-analytics-data-api/metadata.yaml | 1 + .../metadata.yaml | 1 + 
.../connectors/source-google-analytics-v4/metadata.yaml | 1 + .../connectors/source-google-directory/metadata.yaml | 1 + .../connectors/source-google-drive/metadata.yaml | 1 + .../source-google-pagespeed-insights/metadata.yaml | 2 +- .../connectors/source-google-search-console/metadata.yaml | 1 + .../connectors/source-google-sheets/metadata.yaml | 1 + .../connectors/source-google-webfonts/metadata.yaml | 2 +- .../source-google-workspace-admin-reports/metadata.yaml | 1 + .../connectors/source-greenhouse/metadata.yaml | 2 +- airbyte-integrations/connectors/source-gridly/metadata.yaml | 1 + .../connectors/source-gutendex/metadata.yaml | 2 +- airbyte-integrations/connectors/source-harness/metadata.yaml | 3 ++- airbyte-integrations/connectors/source-harvest/metadata.yaml | 1 + .../connectors/source-hellobaton/metadata.yaml | 3 ++- .../connectors/source-hubplanner/metadata.yaml | 3 ++- airbyte-integrations/connectors/source-hubspot/metadata.yaml | 1 + .../connectors/source-insightly/metadata.yaml | 3 ++- .../connectors/source-instagram/metadata.yaml | 1 + .../connectors/source-instatus/metadata.yaml | 2 +- .../connectors/source-intercom/metadata.yaml | 3 ++- .../connectors/source-intruder/metadata.yaml | 2 +- .../connectors/source-ip2whois/metadata.yaml | 2 +- .../connectors/source-iterable/metadata.yaml | 1 + airbyte-integrations/connectors/source-jira/metadata.yaml | 1 + .../connectors/source-k6-cloud/metadata.yaml | 2 +- airbyte-integrations/connectors/source-kafka/metadata.yaml | 1 - airbyte-integrations/connectors/source-klarna/metadata.yaml | 3 ++- .../connectors/source-klaus-api/metadata.yaml | 3 ++- airbyte-integrations/connectors/source-klaviyo/metadata.yaml | 1 + .../connectors/source-kustomer-singer/metadata.yaml | 1 + airbyte-integrations/connectors/source-kyriba/metadata.yaml | 1 + airbyte-integrations/connectors/source-kyve/metadata.yaml | 1 + .../connectors/source-launchdarkly/metadata.yaml | 2 +- airbyte-integrations/connectors/source-lemlist/metadata.yaml | 3 ++- .../connectors/source-lever-hiring/metadata.yaml | 1 + .../connectors/source-linkedin-ads/metadata.yaml | 1 + .../connectors/source-linkedin-pages/metadata.yaml | 1 + .../connectors/source-linnworks/metadata.yaml | 1 + .../connectors/source-lokalise/metadata.yaml | 2 +- airbyte-integrations/connectors/source-looker/metadata.yaml | 1 + .../connectors/source-mailchimp/metadata.yaml | 1 + .../connectors/source-mailerlite/metadata.yaml | 2 +- .../connectors/source-mailersend/metadata.yaml | 2 +- airbyte-integrations/connectors/source-mailgun/metadata.yaml | 3 ++- .../connectors/source-mailjet-mail/metadata.yaml | 2 +- .../connectors/source-mailjet-sms/metadata.yaml | 2 +- airbyte-integrations/connectors/source-marketo/metadata.yaml | 1 + airbyte-integrations/connectors/source-merge/metadata.yaml | 2 +- .../connectors/source-metabase/metadata.yaml | 2 +- .../connectors/source-microsoft-dataverse/metadata.yaml | 1 + .../connectors/source-microsoft-onedrive/metadata.yaml | 3 ++- .../connectors/source-microsoft-sharepoint/metadata.yaml | 3 ++- .../connectors/source-microsoft-teams/metadata.yaml | 1 + .../connectors/source-mixpanel/metadata.yaml | 1 + airbyte-integrations/connectors/source-monday/metadata.yaml | 3 ++- airbyte-integrations/connectors/source-mssql/metadata.yaml | 1 - .../connectors/source-my-hours/metadata.yaml | 1 + airbyte-integrations/connectors/source-n8n/metadata.yaml | 2 +- airbyte-integrations/connectors/source-nasa/metadata.yaml | 3 ++- .../connectors/source-netsuite/metadata.yaml | 1 + 
.../connectors/source-news-api/metadata.yaml | 2 +- .../connectors/source-newsdata/metadata.yaml | 2 +- airbyte-integrations/connectors/source-notion/metadata.yaml | 1 + airbyte-integrations/connectors/source-nytimes/metadata.yaml | 2 +- airbyte-integrations/connectors/source-okta/metadata.yaml | 1 + .../connectors/source-omnisend/metadata.yaml | 2 +- .../connectors/source-onesignal/metadata.yaml | 5 +++-- .../connectors/source-open-exchange-rates/metadata.yaml | 3 ++- .../connectors/source-openweather/metadata.yaml | 3 ++- .../connectors/source-opsgenie/metadata.yaml | 3 ++- airbyte-integrations/connectors/source-oracle/metadata.yaml | 1 - airbyte-integrations/connectors/source-orb/metadata.yaml | 1 + airbyte-integrations/connectors/source-orbit/metadata.yaml | 3 ++- airbyte-integrations/connectors/source-oura/metadata.yaml | 2 +- .../connectors/source-outbrain-amplify/metadata.yaml | 3 ++- .../connectors/source-outreach/metadata.yaml | 1 + .../connectors/source-pagerduty/metadata.yaml | 3 ++- airbyte-integrations/connectors/source-pardot/metadata.yaml | 1 + .../connectors/source-partnerstack/metadata.yaml | 2 +- .../connectors/source-paypal-transaction/metadata.yaml | 3 ++- .../connectors/source-paystack/metadata.yaml | 1 + airbyte-integrations/connectors/source-pendo/metadata.yaml | 2 +- .../connectors/source-persistiq/metadata.yaml | 3 ++- .../connectors/source-pexels-api/metadata.yaml | 2 +- .../connectors/source-pinterest/metadata.yaml | 1 + .../connectors/source-pipedrive/metadata.yaml | 3 ++- .../connectors/source-pivotal-tracker/metadata.yaml | 1 + airbyte-integrations/connectors/source-plaid/metadata.yaml | 3 ++- .../connectors/source-plausible/metadata.yaml | 2 +- airbyte-integrations/connectors/source-pocket/metadata.yaml | 2 +- airbyte-integrations/connectors/source-pokeapi/metadata.yaml | 3 ++- .../connectors/source-polygon-stock-api/metadata.yaml | 2 +- airbyte-integrations/connectors/source-posthog/metadata.yaml | 2 +- .../connectors/source-postmarkapp/metadata.yaml | 2 +- .../connectors/source-prestashop/metadata.yaml | 2 +- .../connectors/source-primetric/metadata.yaml | 1 + .../connectors/source-public-apis/metadata.yaml | 3 ++- .../connectors/source-punk-api/metadata.yaml | 2 +- airbyte-integrations/connectors/source-pypi/metadata.yaml | 2 +- airbyte-integrations/connectors/source-qonto/metadata.yaml | 3 ++- .../connectors/source-qualaroo/metadata.yaml | 3 ++- .../connectors/source-quickbooks/metadata.yaml | 2 +- airbyte-integrations/connectors/source-railz/metadata.yaml | 2 +- .../connectors/source-rd-station-marketing/metadata.yaml | 1 + .../connectors/source-recharge/metadata.yaml | 1 + .../connectors/source-recreation/metadata.yaml | 2 +- .../connectors/source-recruitee/metadata.yaml | 2 +- airbyte-integrations/connectors/source-recurly/metadata.yaml | 1 + .../connectors/source-redshift/metadata.yaml | 1 - .../connectors/source-reply-io/metadata.yaml | 2 +- .../connectors/source-retently/metadata.yaml | 3 ++- .../connectors/source-ringcentral/metadata.yaml | 2 +- .../connectors/source-rki-covid/metadata.yaml | 1 + .../connectors/source-rocket-chat/metadata.yaml | 2 +- airbyte-integrations/connectors/source-rss/metadata.yaml | 1 + airbyte-integrations/connectors/source-s3/metadata.yaml | 1 + .../connectors/source-salesforce/metadata.yaml | 1 + .../connectors/source-salesloft/metadata.yaml | 1 + .../connectors/source-sap-fieldglass/metadata.yaml | 2 +- .../connectors/source-scaffold-source-http/metadata.yaml | 1 + 
.../connectors/source-scaffold-source-python/metadata.yaml | 1 + .../connectors/source-search-metrics/metadata.yaml | 1 + airbyte-integrations/connectors/source-secoda/metadata.yaml | 2 +- .../connectors/source-sendgrid/metadata.yaml | 2 +- .../connectors/source-sendinblue/metadata.yaml | 2 +- .../connectors/source-senseforce/metadata.yaml | 2 +- airbyte-integrations/connectors/source-sentry/metadata.yaml | 1 + .../connectors/source-serpstat/metadata.yaml | 3 ++- .../connectors/source-sftp-bulk/metadata.yaml | 1 + airbyte-integrations/connectors/source-sftp/metadata.yaml | 1 - airbyte-integrations/connectors/source-shopify/metadata.yaml | 1 + airbyte-integrations/connectors/source-shortio/metadata.yaml | 2 +- airbyte-integrations/connectors/source-slack/metadata.yaml | 1 + airbyte-integrations/connectors/source-smaily/metadata.yaml | 2 +- .../connectors/source-smartengage/metadata.yaml | 2 +- .../connectors/source-smartsheets/metadata.yaml | 1 + .../connectors/source-snapchat-marketing/metadata.yaml | 1 + .../connectors/source-snowflake/metadata.yaml | 1 - .../connectors/source-sonar-cloud/metadata.yaml | 2 +- .../connectors/source-spacex-api/metadata.yaml | 2 +- airbyte-integrations/connectors/source-square/metadata.yaml | 2 +- .../connectors/source-statuspage/metadata.yaml | 2 +- airbyte-integrations/connectors/source-strava/metadata.yaml | 3 ++- airbyte-integrations/connectors/source-stripe/metadata.yaml | 1 + .../connectors/source-survey-sparrow/metadata.yaml | 2 +- .../connectors/source-surveycto/metadata.yaml | 1 + .../connectors/source-surveymonkey/metadata.yaml | 1 + .../connectors/source-talkdesk-explore/metadata.yaml | 1 + airbyte-integrations/connectors/source-tempo/metadata.yaml | 2 +- .../connectors/source-teradata/metadata.yaml | 1 - .../connectors/source-the-guardian-api/metadata.yaml | 2 +- airbyte-integrations/connectors/source-tidb/metadata.yaml | 1 - .../connectors/source-tiktok-marketing/metadata.yaml | 1 + airbyte-integrations/connectors/source-timely/metadata.yaml | 3 ++- airbyte-integrations/connectors/source-tmdb/metadata.yaml | 2 +- airbyte-integrations/connectors/source-todoist/metadata.yaml | 3 ++- airbyte-integrations/connectors/source-toggl/metadata.yaml | 2 +- .../connectors/source-tplcentral/metadata.yaml | 1 + airbyte-integrations/connectors/source-trello/metadata.yaml | 3 ++- .../connectors/source-trustpilot/metadata.yaml | 1 + .../connectors/source-tvmaze-schedule/metadata.yaml | 2 +- .../connectors/source-twilio-taskrouter/metadata.yaml | 2 +- airbyte-integrations/connectors/source-twilio/metadata.yaml | 1 + airbyte-integrations/connectors/source-twitter/metadata.yaml | 2 +- .../connectors/source-tyntec-sms/metadata.yaml | 2 +- .../connectors/source-typeform/metadata.yaml | 3 ++- airbyte-integrations/connectors/source-unleash/metadata.yaml | 3 ++- .../connectors/source-us-census/metadata.yaml | 1 + airbyte-integrations/connectors/source-vantage/metadata.yaml | 2 +- .../connectors/source-visma-economic/metadata.yaml | 5 +++-- airbyte-integrations/connectors/source-vitally/metadata.yaml | 2 +- .../connectors/source-waiteraid/metadata.yaml | 2 +- .../connectors/source-weatherstack/metadata.yaml | 1 + airbyte-integrations/connectors/source-webflow/metadata.yaml | 1 + .../connectors/source-whisky-hunter/metadata.yaml | 2 +- .../connectors/source-wikipedia-pageviews/metadata.yaml | 2 +- .../connectors/source-woocommerce/metadata.yaml | 2 +- .../connectors/source-workable/metadata.yaml | 2 +- .../connectors/source-workramp/metadata.yaml | 2 +- 
 airbyte-integrations/connectors/source-wrike/metadata.yaml | 3 ++-
 airbyte-integrations/connectors/source-xero/metadata.yaml | 1 +
 airbyte-integrations/connectors/source-xkcd/metadata.yaml | 1 +
 .../connectors/source-yahoo-finance-price/metadata.yaml | 3 ++-
 .../connectors/source-yandex-metrica/metadata.yaml | 1 +
 airbyte-integrations/connectors/source-yotpo/metadata.yaml | 2 +-
 airbyte-integrations/connectors/source-younium/metadata.yaml | 3 ++-
 .../connectors/source-youtube-analytics/metadata.yaml | 1 +
 .../connectors/source-zapier-supported-storage/metadata.yaml | 2 +-
 .../connectors/source-zendesk-chat/metadata.yaml | 1 +
 .../connectors/source-zendesk-sell/metadata.yaml | 3 ++-
 .../connectors/source-zendesk-sunshine/metadata.yaml | 3 ++-
 .../connectors/source-zendesk-support/metadata.yaml | 1 +
 .../connectors/source-zendesk-talk/metadata.yaml | 1 +
 .../connectors/source-zenefits/metadata.yaml | 3 ++-
 airbyte-integrations/connectors/source-zenloop/metadata.yaml | 2 +-
 .../connectors/source-zoho-crm/metadata.yaml | 1 +
 airbyte-integrations/connectors/source-zoom/metadata.yaml | 2 +-
 airbyte-integrations/connectors/source-zuora/metadata.yaml | 1 +
 317 files changed, 375 insertions(+), 188 deletions(-)

diff --git a/airbyte-integrations/connectors/destination-amazon-sqs/metadata.yaml b/airbyte-integrations/connectors/destination-amazon-sqs/metadata.yaml
index 1dcf91f8995d..c8e964c5bb32 100644
--- a/airbyte-integrations/connectors/destination-amazon-sqs/metadata.yaml
+++ b/airbyte-integrations/connectors/destination-amazon-sqs/metadata.yaml
@@ -17,6 +17,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/destinations/amazon-sqs
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 200
diff --git a/airbyte-integrations/connectors/destination-astra/metadata.yaml b/airbyte-integrations/connectors/destination-astra/metadata.yaml
index c675ed875fdd..ffd7abf1f658 100644
--- a/airbyte-integrations/connectors/destination-astra/metadata.yaml
+++ b/airbyte-integrations/connectors/destination-astra/metadata.yaml
@@ -27,4 +27,5 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/destinations/astra
   tags:
     - language:python
+    - cdk:python
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/destination-aws-datalake/metadata.yaml b/airbyte-integrations/connectors/destination-aws-datalake/metadata.yaml
index 032954e7f2b9..86e208bc2562 100644
--- a/airbyte-integrations/connectors/destination-aws-datalake/metadata.yaml
+++ b/airbyte-integrations/connectors/destination-aws-datalake/metadata.yaml
@@ -17,6 +17,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/destinations/aws-datalake
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/destination-chroma/metadata.yaml b/airbyte-integrations/connectors/destination-chroma/metadata.yaml
index 8283e5453d0b..3fbf9ea9d109 100644
--- a/airbyte-integrations/connectors/destination-chroma/metadata.yaml
+++ b/airbyte-integrations/connectors/destination-chroma/metadata.yaml
@@ -19,4 +19,5 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/destinations/chroma
   tags:
     - language:python
+    - cdk:python
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/destination-convex/metadata.yaml b/airbyte-integrations/connectors/destination-convex/metadata.yaml
index 88cbb5c9616f..6749209e152d 100644
--- a/airbyte-integrations/connectors/destination-convex/metadata.yaml
+++ b/airbyte-integrations/connectors/destination-convex/metadata.yaml
@@ -17,6 +17,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/destinations/convex
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/destination-cumulio/metadata.yaml b/airbyte-integrations/connectors/destination-cumulio/metadata.yaml
index 0661440e0312..86ff67c2444d 100644
--- a/airbyte-integrations/connectors/destination-cumulio/metadata.yaml
+++ b/airbyte-integrations/connectors/destination-cumulio/metadata.yaml
@@ -17,6 +17,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/destinations/cumulio
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/destination-databend/metadata.yaml b/airbyte-integrations/connectors/destination-databend/metadata.yaml
index 76d0ed20fd07..b9099d0a950b 100644
--- a/airbyte-integrations/connectors/destination-databend/metadata.yaml
+++ b/airbyte-integrations/connectors/destination-databend/metadata.yaml
@@ -17,6 +17,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/destinations/databend
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/destination-duckdb/metadata.yaml b/airbyte-integrations/connectors/destination-duckdb/metadata.yaml
index 1064593eb19f..fd914fefbb78 100644
--- a/airbyte-integrations/connectors/destination-duckdb/metadata.yaml
+++ b/airbyte-integrations/connectors/destination-duckdb/metadata.yaml
@@ -24,6 +24,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/destinations/duckdb
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/destination-firebolt/metadata.yaml b/airbyte-integrations/connectors/destination-firebolt/metadata.yaml
index 0c5fd007f413..11ea765d65c8 100644
--- a/airbyte-integrations/connectors/destination-firebolt/metadata.yaml
+++ b/airbyte-integrations/connectors/destination-firebolt/metadata.yaml
@@ -18,6 +18,7 @@ data:
   supportsDbt: true
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/destination-firestore/metadata.yaml b/airbyte-integrations/connectors/destination-firestore/metadata.yaml
index e2f730979597..56bc36394188 100644
--- a/airbyte-integrations/connectors/destination-firestore/metadata.yaml
+++ b/airbyte-integrations/connectors/destination-firestore/metadata.yaml
@@ -17,6 +17,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/destinations/firestore
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/destination-google-sheets/metadata.yaml b/airbyte-integrations/connectors/destination-google-sheets/metadata.yaml
index f8a2d30d507b..595461968be1 100644
--- a/airbyte-integrations/connectors/destination-google-sheets/metadata.yaml
+++ b/airbyte-integrations/connectors/destination-google-sheets/metadata.yaml
@@ -17,6 +17,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/destinations/google-sheets
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 200
diff --git a/airbyte-integrations/connectors/destination-kvdb/metadata.yaml b/airbyte-integrations/connectors/destination-kvdb/metadata.yaml
index f74cb81b3905..fdca56a76f15 100644
--- a/airbyte-integrations/connectors/destination-kvdb/metadata.yaml
+++ b/airbyte-integrations/connectors/destination-kvdb/metadata.yaml
@@ -20,6 +20,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/destinations/kvdb
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/destination-langchain/metadata.yaml b/airbyte-integrations/connectors/destination-langchain/metadata.yaml
index f8db27c1afe0..71758877bb17 100644
--- a/airbyte-integrations/connectors/destination-langchain/metadata.yaml
+++ b/airbyte-integrations/connectors/destination-langchain/metadata.yaml
@@ -18,6 +18,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/destinations/langchain
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/destination-meilisearch/metadata.yaml b/airbyte-integrations/connectors/destination-meilisearch/metadata.yaml
index 401a4c3756d7..2460f1eb737e 100644
--- a/airbyte-integrations/connectors/destination-meilisearch/metadata.yaml
+++ b/airbyte-integrations/connectors/destination-meilisearch/metadata.yaml
@@ -17,6 +17,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/destinations/meilisearch
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/destination-milvus/metadata.yaml b/airbyte-integrations/connectors/destination-milvus/metadata.yaml
index 09cacd466702..db084c260dc2 100644
--- a/airbyte-integrations/connectors/destination-milvus/metadata.yaml
+++ b/airbyte-integrations/connectors/destination-milvus/metadata.yaml
@@ -33,6 +33,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/destinations/milvus
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 200
     ql: 300
diff --git a/airbyte-integrations/connectors/destination-pinecone/metadata.yaml b/airbyte-integrations/connectors/destination-pinecone/metadata.yaml
index a743a3609f40..4d26762da8ea 100644
--- a/airbyte-integrations/connectors/destination-pinecone/metadata.yaml
+++ b/airbyte-integrations/connectors/destination-pinecone/metadata.yaml
@@ -36,4 +36,5 @@ data:
   supportLevel: certified
   tags:
     - language:python
+    - cdk:python
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/destination-qdrant/metadata.yaml b/airbyte-integrations/connectors/destination-qdrant/metadata.yaml
index 73c87125aef2..bf8195a39bf8 100644
--- a/airbyte-integrations/connectors/destination-qdrant/metadata.yaml
+++ b/airbyte-integrations/connectors/destination-qdrant/metadata.yaml
@@ -31,6 +31,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/destinations/qdrant
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/destination-rabbitmq/metadata.yaml b/airbyte-integrations/connectors/destination-rabbitmq/metadata.yaml
index 0b320468490d..d19aa31ab4c3 100644
--- a/airbyte-integrations/connectors/destination-rabbitmq/metadata.yaml
+++ b/airbyte-integrations/connectors/destination-rabbitmq/metadata.yaml
@@ -17,6 +17,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/destinations/rabbitmq
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/destination-scaffold-destination-python/metadata.yaml b/airbyte-integrations/connectors/destination-scaffold-destination-python/metadata.yaml
index 066301f0cd77..601dbf0978a7 100644
--- a/airbyte-integrations/connectors/destination-scaffold-destination-python/metadata.yaml
+++ b/airbyte-integrations/connectors/destination-scaffold-destination-python/metadata.yaml
@@ -27,4 +27,5 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/destinations/scaffold-destination-python
   tags:
     - language:python
+    - cdk:python
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/destination-sftp-json/metadata.yaml b/airbyte-integrations/connectors/destination-sftp-json/metadata.yaml
index eec56d8268be..3afa4b70f920 100644
--- a/airbyte-integrations/connectors/destination-sftp-json/metadata.yaml
+++ b/airbyte-integrations/connectors/destination-sftp-json/metadata.yaml
@@ -17,6 +17,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/destinations/sftp-json
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/destination-sqlite/metadata.yaml b/airbyte-integrations/connectors/destination-sqlite/metadata.yaml
index 679b7722c374..f5f250b9e668 100644
--- a/airbyte-integrations/connectors/destination-sqlite/metadata.yaml
+++ b/airbyte-integrations/connectors/destination-sqlite/metadata.yaml
@@ -17,6 +17,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/destinations/sqlite
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/destination-timeplus/metadata.yaml b/airbyte-integrations/connectors/destination-timeplus/metadata.yaml
index 917a78f8494d..b1ae6d578de5 100644
--- a/airbyte-integrations/connectors/destination-timeplus/metadata.yaml
+++ b/airbyte-integrations/connectors/destination-timeplus/metadata.yaml
@@ -17,6 +17,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/destinations/timeplus
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/destination-typesense/metadata.yaml b/airbyte-integrations/connectors/destination-typesense/metadata.yaml
index 4b9a9942aa1b..0b0f9562a2ff 100644
--- a/airbyte-integrations/connectors/destination-typesense/metadata.yaml
+++ b/airbyte-integrations/connectors/destination-typesense/metadata.yaml
@@ -17,6 +17,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/destinations/typesense
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/destination-vectara/metadata.yaml b/airbyte-integrations/connectors/destination-vectara/metadata.yaml
index 63db8fb2f9db..a144309a0988 100644
--- a/airbyte-integrations/connectors/destination-vectara/metadata.yaml
+++ b/airbyte-integrations/connectors/destination-vectara/metadata.yaml
@@ -23,4 +23,5 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/destinations/vectara
   tags:
     - language:python
+    - cdk:python
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/destination-weaviate/metadata.yaml b/airbyte-integrations/connectors/destination-weaviate/metadata.yaml
index ebd5ba581c6e..8fb3245d933f 100644
--- a/airbyte-integrations/connectors/destination-weaviate/metadata.yaml
+++ b/airbyte-integrations/connectors/destination-weaviate/metadata.yaml
@@ -44,4 +44,5 @@ data:
   supportLevel: certified
   tags:
     - language:python
+    - cdk:python
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/destination-xata/metadata.yaml b/airbyte-integrations/connectors/destination-xata/metadata.yaml
index 9ff802b51f58..b2635d992887 100644
--- a/airbyte-integrations/connectors/destination-xata/metadata.yaml
+++ b/airbyte-integrations/connectors/destination-xata/metadata.yaml
@@ -17,6 +17,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/destinations/xata
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-activecampaign/metadata.yaml b/airbyte-integrations/connectors/source-activecampaign/metadata.yaml
index e7b926edc0a0..a03be461aaed 100644
--- a/airbyte-integrations/connectors/source-activecampaign/metadata.yaml
+++ b/airbyte-integrations/connectors/source-activecampaign/metadata.yaml
@@ -24,6 +24,6 @@ data:
   releaseStage: alpha
   supportLevel: community
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-adjust/metadata.yaml b/airbyte-integrations/connectors/source-adjust/metadata.yaml
index f45ead0f39c6..195b512b5c21 100644
--- a/airbyte-integrations/connectors/source-adjust/metadata.yaml
+++ b/airbyte-integrations/connectors/source-adjust/metadata.yaml
@@ -25,4 +25,5 @@ data:
   supportLevel: community
   tags:
     - language:python
+    - cdk:python
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-aha/metadata.yaml b/airbyte-integrations/connectors/source-aha/metadata.yaml
index 88d029de3b5d..a1f74a9bb381 100644
--- a/airbyte-integrations/connectors/source-aha/metadata.yaml
+++ b/airbyte-integrations/connectors/source-aha/metadata.yaml
@@ -24,6 +24,6 @@ data:
   releaseStage: alpha
   supportLevel: community
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-aircall/metadata.yaml b/airbyte-integrations/connectors/source-aircall/metadata.yaml
index c76243302b10..33c86aeff909 100644
--- a/airbyte-integrations/connectors/source-aircall/metadata.yaml
+++ b/airbyte-integrations/connectors/source-aircall/metadata.yaml
@@ -20,8 +20,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/aircall
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-airtable/metadata.yaml b/airbyte-integrations/connectors/source-airtable/metadata.yaml
index fc3d79c11d0e..e6c3777e43c4 100644
--- a/airbyte-integrations/connectors/source-airtable/metadata.yaml
+++ b/airbyte-integrations/connectors/source-airtable/metadata.yaml
@@ -40,4 +40,5 @@ data:
   supportLevel: certified
   tags:
     - language:python
+    - cdk:python
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-alpha-vantage/metadata.yaml b/airbyte-integrations/connectors/source-alpha-vantage/metadata.yaml
index 2eb0807b3957..63afe531eea2 100644
--- a/airbyte-integrations/connectors/source-alpha-vantage/metadata.yaml
+++ b/airbyte-integrations/connectors/source-alpha-vantage/metadata.yaml
@@ -20,8 +20,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/alpha-vantage
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-amazon-ads/metadata.yaml b/airbyte-integrations/connectors/source-amazon-ads/metadata.yaml
index dc62a61bca6b..1325ac9d18da 100644
--- a/airbyte-integrations/connectors/source-amazon-ads/metadata.yaml
+++ b/airbyte-integrations/connectors/source-amazon-ads/metadata.yaml
@@ -52,4 +52,5 @@ data:
   supportLevel: certified
   tags:
     - language:python
+    - cdk:python
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/metadata.yaml b/airbyte-integrations/connectors/source-amazon-seller-partner/metadata.yaml
index f0d564039fd4..59fa239a7adc 100644
--- a/airbyte-integrations/connectors/source-amazon-seller-partner/metadata.yaml
+++ b/airbyte-integrations/connectors/source-amazon-seller-partner/metadata.yaml
@@ -58,4 +58,5 @@ data:
   supportLevel: certified
   tags:
     - language:python
+    - cdk:python
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-amazon-sqs/metadata.yaml b/airbyte-integrations/connectors/source-amazon-sqs/metadata.yaml
index 6b6bd34f1cfb..d8c89e954504 100644
--- a/airbyte-integrations/connectors/source-amazon-sqs/metadata.yaml
+++ b/airbyte-integrations/connectors/source-amazon-sqs/metadata.yaml
@@ -25,4 +25,5 @@ data:
   supportLevel: community
   tags:
     - language:python
+    - cdk:python
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-amplitude/metadata.yaml b/airbyte-integrations/connectors/source-amplitude/metadata.yaml
index f277c1ad7774..59a49c0f5fc0 100644
--- a/airbyte-integrations/connectors/source-amplitude/metadata.yaml
+++ b/airbyte-integrations/connectors/source-amplitude/metadata.yaml
@@ -36,6 +36,6 @@ data:
     - cohorts
   supportLevel: certified
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-apify-dataset/metadata.yaml b/airbyte-integrations/connectors/source-apify-dataset/metadata.yaml
index f87bf805db81..6e19e8010199 100644
--- a/airbyte-integrations/connectors/source-apify-dataset/metadata.yaml
+++ b/airbyte-integrations/connectors/source-apify-dataset/metadata.yaml
@@ -33,5 +33,6 @@ data:
   supportLevel: community
   documentationUrl: https://docs.airbyte.com/integrations/sources/apify-dataset
   tags:
-    - language:low-code
+    - language:python
+    - cdk:low-code
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-appfollow/metadata.yaml b/airbyte-integrations/connectors/source-appfollow/metadata.yaml
index ad5e49e07ca4..391655d1528e 100644
--- a/airbyte-integrations/connectors/source-appfollow/metadata.yaml
+++ b/airbyte-integrations/connectors/source-appfollow/metadata.yaml
@@ -24,7 +24,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/appfollow
   tags:
-    - language:low-code
+    - language:python
+    - cdk:low-code
   releases:
     breakingChanges:
       1.0.0:
diff --git a/airbyte-integrations/connectors/source-apple-search-ads/metadata.yaml b/airbyte-integrations/connectors/source-apple-search-ads/metadata.yaml
index bbbaecfeef94..3ec09d1c008a 100644
--- a/airbyte-integrations/connectors/source-apple-search-ads/metadata.yaml
+++ b/airbyte-integrations/connectors/source-apple-search-ads/metadata.yaml
@@ -20,8 +20,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/apple-search-ads
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-appsflyer/metadata.yaml b/airbyte-integrations/connectors/source-appsflyer/metadata.yaml
index 21d47e8efcbd..f94671e73734 100644
--- a/airbyte-integrations/connectors/source-appsflyer/metadata.yaml
+++ b/airbyte-integrations/connectors/source-appsflyer/metadata.yaml
@@ -21,6 +21,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/sources/appsflyer
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-appstore-singer/metadata.yaml b/airbyte-integrations/connectors/source-appstore-singer/metadata.yaml
index ef46dd04f093..7618f083c79f 100644
--- a/airbyte-integrations/connectors/source-appstore-singer/metadata.yaml
+++ b/airbyte-integrations/connectors/source-appstore-singer/metadata.yaml
@@ -22,6 +22,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/sources/appstore
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-asana/metadata.yaml b/airbyte-integrations/connectors/source-asana/metadata.yaml
index ae99033996f8..14028a3e2b3c 100644
--- a/airbyte-integrations/connectors/source-asana/metadata.yaml
+++ b/airbyte-integrations/connectors/source-asana/metadata.yaml
@@ -28,4 +28,5 @@ data:
   supportLevel: community
   tags:
     - language:python
+    - cdk:python
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-ashby/metadata.yaml b/airbyte-integrations/connectors/source-ashby/metadata.yaml
index 94a4355863a3..37c308265849 100644
--- a/airbyte-integrations/connectors/source-ashby/metadata.yaml
+++ b/airbyte-integrations/connectors/source-ashby/metadata.yaml
@@ -20,8 +20,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/ashby
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-auth0/metadata.yaml b/airbyte-integrations/connectors/source-auth0/metadata.yaml
index 21016c9467fd..450fec50cadd 100644
--- a/airbyte-integrations/connectors/source-auth0/metadata.yaml
+++ b/airbyte-integrations/connectors/source-auth0/metadata.yaml
@@ -30,5 +30,6 @@ data:
   releaseStage: alpha
   supportLevel: community
   tags:
-    - language:low-code
+    - language:python
+    - cdk:low-code
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-aws-cloudtrail/metadata.yaml b/airbyte-integrations/connectors/source-aws-cloudtrail/metadata.yaml
index f4d483e04c68..835117e36131 100644
--- a/airbyte-integrations/connectors/source-aws-cloudtrail/metadata.yaml
+++ b/airbyte-integrations/connectors/source-aws-cloudtrail/metadata.yaml
@@ -25,4 +25,5 @@ data:
   supportLevel: community
   tags:
     - language:python
+    - cdk:python
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-azure-blob-storage/metadata.yaml b/airbyte-integrations/connectors/source-azure-blob-storage/metadata.yaml
index 5652593c7331..32b5318b628b 100644
--- a/airbyte-integrations/connectors/source-azure-blob-storage/metadata.yaml
+++ b/airbyte-integrations/connectors/source-azure-blob-storage/metadata.yaml
@@ -27,4 +27,5 @@ data:
   supportLevel: community
   tags:
     - language:python
+    - cdk:python-file-based
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-azure-table/metadata.yaml b/airbyte-integrations/connectors/source-azure-table/metadata.yaml
index f709d284dcee..7cc8370b79fe 100644
--- a/airbyte-integrations/connectors/source-azure-table/metadata.yaml
+++ b/airbyte-integrations/connectors/source-azure-table/metadata.yaml
@@ -21,6 +21,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/sources/azure-table
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-babelforce/metadata.yaml b/airbyte-integrations/connectors/source-babelforce/metadata.yaml
index 5e2159e65e2b..4e435a52f04b 100644
--- a/airbyte-integrations/connectors/source-babelforce/metadata.yaml
+++ b/airbyte-integrations/connectors/source-babelforce/metadata.yaml
@@ -24,7 +24,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/babelforce
   tags:
-    - language:low-code
+    - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-bamboo-hr/metadata.yaml b/airbyte-integrations/connectors/source-bamboo-hr/metadata.yaml
index f5ac3969826f..c0650919ee08 100644
--- a/airbyte-integrations/connectors/source-bamboo-hr/metadata.yaml
+++ b/airbyte-integrations/connectors/source-bamboo-hr/metadata.yaml
@@ -25,4 +25,5 @@ data:
   supportLevel: community
   tags:
     - language:python
+    - cdk:python
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-bigcommerce/metadata.yaml b/airbyte-integrations/connectors/source-bigcommerce/metadata.yaml
index 9a2a63d4a3e0..3068c9965fcf 100644
--- a/airbyte-integrations/connectors/source-bigcommerce/metadata.yaml
+++ b/airbyte-integrations/connectors/source-bigcommerce/metadata.yaml
@@ -25,7 +25,8 @@ data:
   releaseStage: alpha
   supportLevel: community
   tags:
-    - language:low-code
+    - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 200
diff --git a/airbyte-integrations/connectors/source-bigquery/metadata.yaml b/airbyte-integrations/connectors/source-bigquery/metadata.yaml
index e203e3fae69f..586b6b938b88 100644
--- a/airbyte-integrations/connectors/source-bigquery/metadata.yaml
+++ b/airbyte-integrations/connectors/source-bigquery/metadata.yaml
@@ -21,5 +21,4 @@ data:
   supportLevel: community
   tags:
     - language:java
-    - language:python
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-bing-ads/metadata.yaml b/airbyte-integrations/connectors/source-bing-ads/metadata.yaml
index 8d4a1a9bb186..77384fe6ea3a 100644
--- a/airbyte-integrations/connectors/source-bing-ads/metadata.yaml
+++ b/airbyte-integrations/connectors/source-bing-ads/metadata.yaml
@@ -60,4 +60,5 @@ data:
   supportLevel: certified
   tags:
     - language:python
+    - cdk:python
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-braintree/metadata.yaml b/airbyte-integrations/connectors/source-braintree/metadata.yaml
index bf601f6f13a6..23345f244274 100644
--- a/airbyte-integrations/connectors/source-braintree/metadata.yaml
+++ b/airbyte-integrations/connectors/source-braintree/metadata.yaml
@@ -24,5 +24,6 @@ data:
   releaseStage: alpha
   supportLevel: community
   tags:
-    - language:low-code
+    - language:python
+    - cdk:low-code
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-braze/metadata.yaml b/airbyte-integrations/connectors/source-braze/metadata.yaml
index e1fdcf539fd3..34ff2c87d2c7 100644
--- a/airbyte-integrations/connectors/source-braze/metadata.yaml
+++ b/airbyte-integrations/connectors/source-braze/metadata.yaml
@@ -24,6 +24,6 @@ data:
   releaseStage: alpha
   supportLevel: community
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-breezometer/metadata.yaml b/airbyte-integrations/connectors/source-breezometer/metadata.yaml
index 16796af3c94f..b88e473f1aae 100644
--- a/airbyte-integrations/connectors/source-breezometer/metadata.yaml
+++ b/airbyte-integrations/connectors/source-breezometer/metadata.yaml
@@ -20,8 +20,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/breezometer
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-callrail/metadata.yaml b/airbyte-integrations/connectors/source-callrail/metadata.yaml
index f0e46fadc35a..cf9e52f28c2e 100644
--- a/airbyte-integrations/connectors/source-callrail/metadata.yaml
+++ b/airbyte-integrations/connectors/source-callrail/metadata.yaml
@@ -20,8 +20,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/callrail
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-captain-data/metadata.yaml b/airbyte-integrations/connectors/source-captain-data/metadata.yaml
index e5f9108699ad..4b899a83c5af 100644
--- a/airbyte-integrations/connectors/source-captain-data/metadata.yaml
+++ b/airbyte-integrations/connectors/source-captain-data/metadata.yaml
@@ -20,8 +20,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/captain-data
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-cart/metadata.yaml b/airbyte-integrations/connectors/source-cart/metadata.yaml
index 113417c673b0..5ea967179b46 100644
--- a/airbyte-integrations/connectors/source-cart/metadata.yaml
+++ b/airbyte-integrations/connectors/source-cart/metadata.yaml
@@ -21,6 +21,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/sources/cart
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-chargebee/metadata.yaml b/airbyte-integrations/connectors/source-chargebee/metadata.yaml
index 85a965bb5898..82b51e27cfae 100644
--- a/airbyte-integrations/connectors/source-chargebee/metadata.yaml
+++ b/airbyte-integrations/connectors/source-chargebee/metadata.yaml
@@ -42,6 +42,6 @@ data:
     - addon
   supportLevel: certified
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-chargify/metadata.yaml b/airbyte-integrations/connectors/source-chargify/metadata.yaml
index ea6cf3005986..66b0a0b48ff3 100644
--- a/airbyte-integrations/connectors/source-chargify/metadata.yaml
+++ b/airbyte-integrations/connectors/source-chargify/metadata.yaml
@@ -25,7 +25,8 @@ data:
   supportLevel: community
   documentationUrl: https://docs.airbyte.com/integrations/sources/chargify
   tags:
-    - language:low-code
+    - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-chartmogul/metadata.yaml b/airbyte-integrations/connectors/source-chartmogul/metadata.yaml
index 3a68eecb886c..c6d811aacdcc 100644
--- a/airbyte-integrations/connectors/source-chartmogul/metadata.yaml
+++ b/airbyte-integrations/connectors/source-chartmogul/metadata.yaml
@@ -32,5 +32,6 @@ data:
   releaseStage: beta
   supportLevel: community
   tags:
-    - language:low-code
+    - language:python
+    - cdk:low-code
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-clickhouse/metadata.yaml b/airbyte-integrations/connectors/source-clickhouse/metadata.yaml
index 6a6416ef1660..ab0271ab07fc 100644
--- a/airbyte-integrations/connectors/source-clickhouse/metadata.yaml
+++ b/airbyte-integrations/connectors/source-clickhouse/metadata.yaml
@@ -27,5 +27,4 @@ data:
   supportLevel: community
   tags:
     - language:java
-    - language:python
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-clickup-api/metadata.yaml b/airbyte-integrations/connectors/source-clickup-api/metadata.yaml
index dc76068faa84..a3fcebf402d9 100644
--- a/airbyte-integrations/connectors/source-clickup-api/metadata.yaml
+++ b/airbyte-integrations/connectors/source-clickup-api/metadata.yaml
@@ -20,8 +20,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/clickup-api
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-clockify/metadata.yaml b/airbyte-integrations/connectors/source-clockify/metadata.yaml
index be8435177779..6e15b545e0f3 100644
--- a/airbyte-integrations/connectors/source-clockify/metadata.yaml
+++ b/airbyte-integrations/connectors/source-clockify/metadata.yaml
@@ -25,7 +25,8 @@ data:
   supportLevel: community
   documentationUrl: https://docs.airbyte.com/integrations/sources/clockify
   tags:
-    - language:low-code
+    - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-close-com/metadata.yaml b/airbyte-integrations/connectors/source-close-com/metadata.yaml
index ad645a347058..ec847f585a6e 100644
--- a/airbyte-integrations/connectors/source-close-com/metadata.yaml
+++ b/airbyte-integrations/connectors/source-close-com/metadata.yaml
@@ -27,6 +27,6 @@ data:
   releaseStage: beta
   supportLevel: community
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-cockroachdb/metadata.yaml b/airbyte-integrations/connectors/source-cockroachdb/metadata.yaml
index a64ed788d537..0d64aac56ad9 100644
--- a/airbyte-integrations/connectors/source-cockroachdb/metadata.yaml
+++ b/airbyte-integrations/connectors/source-cockroachdb/metadata.yaml
@@ -20,7 +20,6 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/sources/cockroachdb
   tags:
     - language:java
-    - language:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-coda/metadata.yaml b/airbyte-integrations/connectors/source-coda/metadata.yaml
index 4fcb66aba68c..d32bea1373c3 100644
--- a/airbyte-integrations/connectors/source-coda/metadata.yaml
+++ b/airbyte-integrations/connectors/source-coda/metadata.yaml
@@ -25,7 +25,8 @@ data:
   supportLevel: community
   documentationUrl: https://docs.airbyte.com/integrations/sources/coda
   tags:
-    - language:low-code
+    - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-coin-api/metadata.yaml b/airbyte-integrations/connectors/source-coin-api/metadata.yaml
index 9da3a7feac04..18cb13d4d2f9 100644
--- a/airbyte-integrations/connectors/source-coin-api/metadata.yaml
+++ b/airbyte-integrations/connectors/source-coin-api/metadata.yaml
@@ -20,8 +20,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/coin-api
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-coingecko-coins/metadata.yaml b/airbyte-integrations/connectors/source-coingecko-coins/metadata.yaml
index ac0035a6f5ff..648159f33aa7 100644
--- a/airbyte-integrations/connectors/source-coingecko-coins/metadata.yaml
+++ b/airbyte-integrations/connectors/source-coingecko-coins/metadata.yaml
@@ -20,8 +20,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/coingecko-coins
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-coinmarketcap/metadata.yaml b/airbyte-integrations/connectors/source-coinmarketcap/metadata.yaml
index d12360ad7599..5bf7fb4454f4 100644
--- a/airbyte-integrations/connectors/source-coinmarketcap/metadata.yaml
+++ b/airbyte-integrations/connectors/source-coinmarketcap/metadata.yaml
@@ -20,8 +20,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/coinmarketcap
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-commcare/metadata.yaml b/airbyte-integrations/connectors/source-commcare/metadata.yaml
index 3e78f837b0e1..d41af3b3f58b 100644
--- a/airbyte-integrations/connectors/source-commcare/metadata.yaml
+++ b/airbyte-integrations/connectors/source-commcare/metadata.yaml
@@ -20,6 +20,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/sources/commcare
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-commercetools/metadata.yaml b/airbyte-integrations/connectors/source-commercetools/metadata.yaml
index f28b88a916c9..60d550f09519 100644
--- a/airbyte-integrations/connectors/source-commercetools/metadata.yaml
+++ b/airbyte-integrations/connectors/source-commercetools/metadata.yaml
@@ -24,7 +24,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/commercetools
   tags:
-    - language:low-code
+    - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-configcat/metadata.yaml b/airbyte-integrations/connectors/source-configcat/metadata.yaml
index 3c1f707095be..a2737ed14a7c 100644
--- a/airbyte-integrations/connectors/source-configcat/metadata.yaml
+++ b/airbyte-integrations/connectors/source-configcat/metadata.yaml
@@ -20,8 +20,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/configcat
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-confluence/metadata.yaml b/airbyte-integrations/connectors/source-confluence/metadata.yaml
index be0b48fb334a..cef482a1cbfc 100644
--- a/airbyte-integrations/connectors/source-confluence/metadata.yaml
+++ b/airbyte-integrations/connectors/source-confluence/metadata.yaml
@@ -28,5 +28,6 @@ data:
   releaseStage: beta
   supportLevel: community
   tags:
-    - language:low-code
+    - language:python
+    - cdk:low-code
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-convertkit/metadata.yaml b/airbyte-integrations/connectors/source-convertkit/metadata.yaml
index 8a0327f0580c..1d92f9fd2f64 100644
--- a/airbyte-integrations/connectors/source-convertkit/metadata.yaml
+++ b/airbyte-integrations/connectors/source-convertkit/metadata.yaml
@@ -20,8 +20,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/convertkit
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-convex/metadata.yaml b/airbyte-integrations/connectors/source-convex/metadata.yaml
index 14d34525a641..da0d4f55283f 100644
--- a/airbyte-integrations/connectors/source-convex/metadata.yaml
+++ b/airbyte-integrations/connectors/source-convex/metadata.yaml
@@ -21,6 +21,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/sources/convex
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-copper/metadata.yaml b/airbyte-integrations/connectors/source-copper/metadata.yaml
index 6298afd8c8b5..41218f40d84a 100644
--- a/airbyte-integrations/connectors/source-copper/metadata.yaml
+++ b/airbyte-integrations/connectors/source-copper/metadata.yaml
@@ -24,5 +24,6 @@ data:
   supportLevel: community
   documentationUrl: https://docs.airbyte.com/integrations/sources/copper
   tags:
-    - language:low-code
+    - language:python
+    - cdk:low-code
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-courier/metadata.yaml b/airbyte-integrations/connectors/source-courier/metadata.yaml
index be6f7586c5ad..05fa8a29dbc7 100644
--- a/airbyte-integrations/connectors/source-courier/metadata.yaml
+++ b/airbyte-integrations/connectors/source-courier/metadata.yaml
@@ -20,8 +20,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/courier
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-customer-io/metadata.yaml b/airbyte-integrations/connectors/source-customer-io/metadata.yaml
index 6bb0d1e26176..115ac54b0b63 100644
--- a/airbyte-integrations/connectors/source-customer-io/metadata.yaml
+++ b/airbyte-integrations/connectors/source-customer-io/metadata.yaml
@@ -25,5 +25,6 @@ data:
   supportLevel: community
   documentationUrl: https://docs.airbyte.com/integrations/sources/customer-io
   tags:
-    - language:low-code
+    - language:python
+    - cdk:low-code
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-datadog/metadata.yaml b/airbyte-integrations/connectors/source-datadog/metadata.yaml
index da27ba470328..f4fa46e1bd1a 100644
--- a/airbyte-integrations/connectors/source-datadog/metadata.yaml
+++ b/airbyte-integrations/connectors/source-datadog/metadata.yaml
@@ -29,7 +29,8 @@ data:
   supportLevel: community
   documentationUrl: https://docs.airbyte.com/integrations/sources/datadog
   tags:
-    - language:low-code
+    - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-datascope/metadata.yaml b/airbyte-integrations/connectors/source-datascope/metadata.yaml
index 2649a980d2dd..874695508a3f 100644
--- a/airbyte-integrations/connectors/source-datascope/metadata.yaml
+++ b/airbyte-integrations/connectors/source-datascope/metadata.yaml
@@ -20,8 +20,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/datascope
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-db2/metadata.yaml b/airbyte-integrations/connectors/source-db2/metadata.yaml
index fee9127e66de..695b622a4a8c 100644
--- a/airbyte-integrations/connectors/source-db2/metadata.yaml
+++ b/airbyte-integrations/connectors/source-db2/metadata.yaml
@@ -20,7 +20,6 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/sources/db2
   tags:
     - language:java
-    - language:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-delighted/metadata.yaml b/airbyte-integrations/connectors/source-delighted/metadata.yaml
index c9d006b900ee..8b76c87de4d9 100644
--- a/airbyte-integrations/connectors/source-delighted/metadata.yaml
+++ b/airbyte-integrations/connectors/source-delighted/metadata.yaml
@@ -27,6 +27,6 @@ data:
   releaseStage: beta
   supportLevel: community
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-dixa/metadata.yaml b/airbyte-integrations/connectors/source-dixa/metadata.yaml
index bb4821ee8c81..f2e92b3b1eb5 100644
--- a/airbyte-integrations/connectors/source-dixa/metadata.yaml
+++ b/airbyte-integrations/connectors/source-dixa/metadata.yaml
@@ -25,7 +25,8 @@ data:
   supportLevel: community
   documentationUrl: https://docs.airbyte.com/integrations/sources/dixa
   tags:
-    - language:low-code
+    - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-dockerhub/metadata.yaml b/airbyte-integrations/connectors/source-dockerhub/metadata.yaml
index 7800c7d8f8d9..e80cc2548adf 100644
--- a/airbyte-integrations/connectors/source-dockerhub/metadata.yaml
+++ b/airbyte-integrations/connectors/source-dockerhub/metadata.yaml
@@ -25,7 +25,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/dockerhub
   tags:
-    - language:low-code
+    - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-dremio/metadata.yaml b/airbyte-integrations/connectors/source-dremio/metadata.yaml
index 406e3707fdda..f090bbeca056 100644
--- a/airbyte-integrations/connectors/source-dremio/metadata.yaml
+++ b/airbyte-integrations/connectors/source-dremio/metadata.yaml
@@ -20,8 +20,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/dremio
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-drift/metadata.yaml b/airbyte-integrations/connectors/source-drift/metadata.yaml
index e22ab893eae5..1a4abf9fe70f 100644
--- a/airbyte-integrations/connectors/source-drift/metadata.yaml
+++ b/airbyte-integrations/connectors/source-drift/metadata.yaml
@@ -25,7 +25,8 @@ data:
   supportLevel: community
   documentationUrl: https://docs.airbyte.com/integrations/sources/drift
   tags:
-    - language:low-code
+    - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-dv-360/metadata.yaml b/airbyte-integrations/connectors/source-dv-360/metadata.yaml
index 51026a80ab88..9373f3888797 100644
--- a/airbyte-integrations/connectors/source-dv-360/metadata.yaml
+++ b/airbyte-integrations/connectors/source-dv-360/metadata.yaml
@@ -21,6 +21,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/sources/dv-360
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-elasticsearch/metadata.yaml b/airbyte-integrations/connectors/source-elasticsearch/metadata.yaml
index 887dc9c9fa35..6dea96b014da 100644
--- a/airbyte-integrations/connectors/source-elasticsearch/metadata.yaml
+++ b/airbyte-integrations/connectors/source-elasticsearch/metadata.yaml
@@ -17,7 +17,6 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/sources/elasticsearch
   tags:
     - language:java
-    - language:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-emailoctopus/metadata.yaml b/airbyte-integrations/connectors/source-emailoctopus/metadata.yaml
index 55a5037b7c05..506718d5abb9 100644
--- a/airbyte-integrations/connectors/source-emailoctopus/metadata.yaml
+++ b/airbyte-integrations/connectors/source-emailoctopus/metadata.yaml
@@ -20,8 +20,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/emailoctopus
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-everhour/metadata.yaml b/airbyte-integrations/connectors/source-everhour/metadata.yaml
index c4615c4ce9ce..29ad6a0a3da3 100644
--- a/airbyte-integrations/connectors/source-everhour/metadata.yaml
+++ b/airbyte-integrations/connectors/source-everhour/metadata.yaml
@@ -23,7 +23,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/everhour
   tags:
-    - language:low-code
+    - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-exchange-rates/metadata.yaml b/airbyte-integrations/connectors/source-exchange-rates/metadata.yaml
index 32efbf5e53ff..d7225b57b021 100644
--- a/airbyte-integrations/connectors/source-exchange-rates/metadata.yaml
+++ b/airbyte-integrations/connectors/source-exchange-rates/metadata.yaml
@@ -26,5 +26,6 @@ data:
   supportLevel: community
   documentationUrl: https://docs.airbyte.com/integrations/sources/exchange-rates
   tags:
-    - language:low-code
+    - language:python
+    - cdk:low-code
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-facebook-marketing/metadata.yaml b/airbyte-integrations/connectors/source-facebook-marketing/metadata.yaml
index 4db3a3242c63..40741317abe4 100644
--- a/airbyte-integrations/connectors/source-facebook-marketing/metadata.yaml
+++ b/airbyte-integrations/connectors/source-facebook-marketing/metadata.yaml
@@ -62,4 +62,5 @@ data:
   supportLevel: certified
   tags:
     - language:python
+    - cdk:python
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-facebook-pages/metadata.yaml b/airbyte-integrations/connectors/source-facebook-pages/metadata.yaml
index 1101c253d8ce..2a7317751845 100644
--- a/airbyte-integrations/connectors/source-facebook-pages/metadata.yaml
+++ b/airbyte-integrations/connectors/source-facebook-pages/metadata.yaml
@@ -27,6 +27,6 @@ data:
   releaseStage: beta
   supportLevel: community
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-faker/metadata.yaml b/airbyte-integrations/connectors/source-faker/metadata.yaml
index 71bec9727379..63d6337577ec 100644
--- a/airbyte-integrations/connectors/source-faker/metadata.yaml
+++ b/airbyte-integrations/connectors/source-faker/metadata.yaml
@@ -53,4 +53,5 @@ data:
   supportLevel: community
   tags:
     - language:python
+    - cdk:python
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-fastbill/metadata.yaml b/airbyte-integrations/connectors/source-fastbill/metadata.yaml
index faf0afa33809..5bcbafe5abd0 100644
--- a/airbyte-integrations/connectors/source-fastbill/metadata.yaml
+++ b/airbyte-integrations/connectors/source-fastbill/metadata.yaml
@@ -25,5 +25,6 @@ data:
   supportLevel: community
   documentationUrl: https://docs.airbyte.com/integrations/sources/fastbill
   tags:
-    - language:low-code
+    - language:python
+    - cdk:low-code
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-fauna/metadata.yaml b/airbyte-integrations/connectors/source-fauna/metadata.yaml
index cf36ff0c13ec..5c0421bbb4f4 100644
--- a/airbyte-integrations/connectors/source-fauna/metadata.yaml
+++ b/airbyte-integrations/connectors/source-fauna/metadata.yaml
@@ -21,6 +21,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/sources/fauna
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-file/metadata.yaml b/airbyte-integrations/connectors/source-file/metadata.yaml
index d5b483270262..2f1f4c97e157 100644
--- a/airbyte-integrations/connectors/source-file/metadata.yaml
+++ b/airbyte-integrations/connectors/source-file/metadata.yaml
@@ -30,4 +30,5 @@ data:
   supportLevel: certified
   tags:
     - language:python
+    - cdk:python
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-firebase-realtime-database/metadata.yaml b/airbyte-integrations/connectors/source-firebase-realtime-database/metadata.yaml
index 3d694aa9f2f3..2bd75d94ba1f 100644
--- a/airbyte-integrations/connectors/source-firebase-realtime-database/metadata.yaml
+++ b/airbyte-integrations/connectors/source-firebase-realtime-database/metadata.yaml
@@ -23,6 +23,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/sources/firebase-realtime-database
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-firebolt/metadata.yaml b/airbyte-integrations/connectors/source-firebolt/metadata.yaml
index 824d857b5d7e..44cf2508f969 100644
--- a/airbyte-integrations/connectors/source-firebolt/metadata.yaml
+++ b/airbyte-integrations/connectors/source-firebolt/metadata.yaml
@@ -21,6 +21,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/sources/firebolt
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-flexport/metadata.yaml b/airbyte-integrations/connectors/source-flexport/metadata.yaml
index 88b125bacb23..527b16930e1a 100644
--- a/airbyte-integrations/connectors/source-flexport/metadata.yaml
+++ b/airbyte-integrations/connectors/source-flexport/metadata.yaml
@@ -25,7 +25,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/flexport
   tags:
-    - language:low-code
+    - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-freshcaller/metadata.yaml b/airbyte-integrations/connectors/source-freshcaller/metadata.yaml
index 15eaf3b06ea1..ac253c74aded 100644
--- a/airbyte-integrations/connectors/source-freshcaller/metadata.yaml
+++ b/airbyte-integrations/connectors/source-freshcaller/metadata.yaml
@@ -21,7 +21,8 @@ data:
   supportLevel: community
   documentationUrl: https://docs.airbyte.com/integrations/sources/freshcaller
   tags:
-    - language:low-code
+    - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-freshdesk/metadata.yaml b/airbyte-integrations/connectors/source-freshdesk/metadata.yaml
index 781a09a40dae..03e6358bd499 100644
--- a/airbyte-integrations/connectors/source-freshdesk/metadata.yaml
+++ b/airbyte-integrations/connectors/source-freshdesk/metadata.yaml
@@ -30,4 +30,5 @@ data:
   supportLevel: certified
   tags:
     - language:python
+    - cdk:python
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-freshsales/metadata.yaml b/airbyte-integrations/connectors/source-freshsales/metadata.yaml
index 5abeea00cab7..53e98f7ce16f 100644
--- a/airbyte-integrations/connectors/source-freshsales/metadata.yaml
+++ b/airbyte-integrations/connectors/source-freshsales/metadata.yaml
@@ -32,5 +32,6 @@ data:
   releaseStage: beta
   supportLevel: community
   tags:
-    - language:low-code
+    - language:python
+    - cdk:low-code
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-freshservice/metadata.yaml b/airbyte-integrations/connectors/source-freshservice/metadata.yaml
index 4262773507e8..8e6c4853672b 100644
--- a/airbyte-integrations/connectors/source-freshservice/metadata.yaml
+++ b/airbyte-integrations/connectors/source-freshservice/metadata.yaml
@@ -25,5 +25,6 @@ data:
   supportLevel: community
   documentationUrl: https://docs.airbyte.com/integrations/sources/freshservice
   tags:
-    - language:low-code
+    - language:python
+    - cdk:low-code
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-fullstory/metadata.yaml b/airbyte-integrations/connectors/source-fullstory/metadata.yaml
index c2083f3631c8..65149972d685 100644
--- a/airbyte-integrations/connectors/source-fullstory/metadata.yaml
+++ b/airbyte-integrations/connectors/source-fullstory/metadata.yaml
@@ -20,8 +20,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/fullstory
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-gainsight-px/metadata.yaml b/airbyte-integrations/connectors/source-gainsight-px/metadata.yaml
index 0a65633f7dcc..37cdbf711106 100644
--- a/airbyte-integrations/connectors/source-gainsight-px/metadata.yaml
+++ b/airbyte-integrations/connectors/source-gainsight-px/metadata.yaml
@@ -23,8 +23,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/gainsight-px
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-gcs/metadata.yaml b/airbyte-integrations/connectors/source-gcs/metadata.yaml
index abda6f594cca..08c09f6bfa2e 100644
--- a/airbyte-integrations/connectors/source-gcs/metadata.yaml
+++ b/airbyte-integrations/connectors/source-gcs/metadata.yaml
@@ -27,4 +27,5 @@ data:
   supportLevel: community
   tags:
     - language:python
+    - cdk:python-file-based
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-genesys/metadata.yaml b/airbyte-integrations/connectors/source-genesys/metadata.yaml
index 0bad42c982d3..e29a429ee184 100644
--- a/airbyte-integrations/connectors/source-genesys/metadata.yaml
+++ b/airbyte-integrations/connectors/source-genesys/metadata.yaml
@@ -21,6 +21,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/sources/genesys
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-getlago/metadata.yaml b/airbyte-integrations/connectors/source-getlago/metadata.yaml
index de14460248c0..056eba92d5aa 100644
--- a/airbyte-integrations/connectors/source-getlago/metadata.yaml
+++ b/airbyte-integrations/connectors/source-getlago/metadata.yaml
@@ -20,8 +20,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/getlago
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-github/metadata.yaml b/airbyte-integrations/connectors/source-github/metadata.yaml
index f9819d6c3283..c15d331f0930 100644
--- a/airbyte-integrations/connectors/source-github/metadata.yaml
+++ b/airbyte-integrations/connectors/source-github/metadata.yaml
@@ -43,4 +43,5 @@ data:
   supportLevel: certified
   tags:
     - language:python
+    - cdk:python
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-gitlab/metadata.yaml b/airbyte-integrations/connectors/source-gitlab/metadata.yaml
index 4485a58c3a17..e656b3361f6e 100644
--- a/airbyte-integrations/connectors/source-gitlab/metadata.yaml
+++ b/airbyte-integrations/connectors/source-gitlab/metadata.yaml
@@ -53,4 +53,5 @@ data:
   supportLevel: certified
   tags:
     - language:python
+    - cdk:python
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-glassfrog/metadata.yaml b/airbyte-integrations/connectors/source-glassfrog/metadata.yaml
index edf2c6a6fba6..07fafcfc548c 100644
--- a/airbyte-integrations/connectors/source-glassfrog/metadata.yaml
+++ b/airbyte-integrations/connectors/source-glassfrog/metadata.yaml
@@ -24,7 +24,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/glassfrog
   tags:
-    - language:low-code
+    - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-gnews/metadata.yaml b/airbyte-integrations/connectors/source-gnews/metadata.yaml
index 3c2eb68602d9..f9d1e7131c08 100644
--- a/airbyte-integrations/connectors/source-gnews/metadata.yaml
+++ b/airbyte-integrations/connectors/source-gnews/metadata.yaml
@@ -21,8 +21,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/gnews
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-gocardless/metadata.yaml b/airbyte-integrations/connectors/source-gocardless/metadata.yaml
index 3d8b577d174a..f883f7bd93a8 100644
--- a/airbyte-integrations/connectors/source-gocardless/metadata.yaml
+++ b/airbyte-integrations/connectors/source-gocardless/metadata.yaml
@@ -20,8 +20,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/gocardless
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-gong/metadata.yaml b/airbyte-integrations/connectors/source-gong/metadata.yaml
index dd82f25883b3..2cb363e8f384 100644
--- a/airbyte-integrations/connectors/source-gong/metadata.yaml
+++ b/airbyte-integrations/connectors/source-gong/metadata.yaml
@@ -20,8 +20,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/gong
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-google-ads/metadata.yaml b/airbyte-integrations/connectors/source-google-ads/metadata.yaml
index 087f70587419..a7b4b77066db 100644
--- a/airbyte-integrations/connectors/source-google-ads/metadata.yaml
+++ b/airbyte-integrations/connectors/source-google-ads/metadata.yaml
@@ -69,4 +69,5 @@ data:
   supportLevel: certified
   tags:
     - language:python
+    - cdk:python
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-google-analytics-data-api/metadata.yaml b/airbyte-integrations/connectors/source-google-analytics-data-api/metadata.yaml
index e2eeabd0d679..8cfca1eec110 100644
--- a/airbyte-integrations/connectors/source-google-analytics-data-api/metadata.yaml
+++ b/airbyte-integrations/connectors/source-google-analytics-data-api/metadata.yaml
@@ -52,4 +52,5 @@ data:
   supportLevel: certified
   tags:
     - language:python
+    - cdk:python
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-google-analytics-v4-service-account-only/metadata.yaml b/airbyte-integrations/connectors/source-google-analytics-v4-service-account-only/metadata.yaml
index 79cbba174479..70f555975eb6 100644
--- a/airbyte-integrations/connectors/source-google-analytics-v4-service-account-only/metadata.yaml
+++ b/airbyte-integrations/connectors/source-google-analytics-v4-service-account-only/metadata.yaml
@@ -29,4 +29,5 @@ data:
   supportLevel: community
   tags:
     - language:python
+    - cdk:python
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-google-analytics-v4/metadata.yaml b/airbyte-integrations/connectors/source-google-analytics-v4/metadata.yaml
index 5f36a33ce227..ea9c7148f538 100644
--- a/airbyte-integrations/connectors/source-google-analytics-v4/metadata.yaml
+++ b/airbyte-integrations/connectors/source-google-analytics-v4/metadata.yaml
@@ -33,4 +33,5 @@ data:
   supportLevel: certified
   tags:
     - language:python
+    - cdk:python
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-google-directory/metadata.yaml b/airbyte-integrations/connectors/source-google-directory/metadata.yaml
index eedf71a553af..27d1209660d4 100644
--- a/airbyte-integrations/connectors/source-google-directory/metadata.yaml
+++ b/airbyte-integrations/connectors/source-google-directory/metadata.yaml
@@ -22,6 +22,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/sources/google-directory
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-google-drive/metadata.yaml b/airbyte-integrations/connectors/source-google-drive/metadata.yaml
index c1352111e894..4eaddbf2414d 100644
--- a/airbyte-integrations/connectors/source-google-drive/metadata.yaml
+++ b/airbyte-integrations/connectors/source-google-drive/metadata.yaml
@@ -26,5 +26,6 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/sources/google-drive
   tags:
     - language:python
+    - cdk:python-file-based
   supportLevel: community
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-google-pagespeed-insights/metadata.yaml b/airbyte-integrations/connectors/source-google-pagespeed-insights/metadata.yaml
index 2bfe25c5eeae..0672a0fb1641 100644
--- a/airbyte-integrations/connectors/source-google-pagespeed-insights/metadata.yaml
+++ b/airbyte-integrations/connectors/source-google-pagespeed-insights/metadata.yaml
@@ -20,8 +20,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/google-pagespeed-insights
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-google-search-console/metadata.yaml b/airbyte-integrations/connectors/source-google-search-console/metadata.yaml
index 4bec12112e05..d5393b329163 100644
--- a/airbyte-integrations/connectors/source-google-search-console/metadata.yaml
+++ b/airbyte-integrations/connectors/source-google-search-console/metadata.yaml
@@ -39,4 +39,5 @@ data:
   supportLevel: certified
   tags:
     - language:python
+    - cdk:python
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-google-sheets/metadata.yaml b/airbyte-integrations/connectors/source-google-sheets/metadata.yaml
index 6124d5ca3efd..8fe33f1026ce 100644
--- a/airbyte-integrations/connectors/source-google-sheets/metadata.yaml
+++ b/airbyte-integrations/connectors/source-google-sheets/metadata.yaml
@@ -30,4 +30,5 @@ data:
   supportLevel: certified
   tags:
     - language:python
+    - cdk:python
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-google-webfonts/metadata.yaml b/airbyte-integrations/connectors/source-google-webfonts/metadata.yaml
index b60374d751b9..f3b5fbfcaa04 100644
--- a/airbyte-integrations/connectors/source-google-webfonts/metadata.yaml
+++ b/airbyte-integrations/connectors/source-google-webfonts/metadata.yaml
@@ -20,8 +20,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/google-webfonts
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-google-workspace-admin-reports/metadata.yaml b/airbyte-integrations/connectors/source-google-workspace-admin-reports/metadata.yaml
index bfc8f0f6421e..e8d6d40b7ad6 100644
--- a/airbyte-integrations/connectors/source-google-workspace-admin-reports/metadata.yaml
+++ b/airbyte-integrations/connectors/source-google-workspace-admin-reports/metadata.yaml
@@ -23,6 +23,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/sources/google-workspace-admin-reports
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-greenhouse/metadata.yaml b/airbyte-integrations/connectors/source-greenhouse/metadata.yaml
index 341baccb65ea..60f85374bd96 100644
--- a/airbyte-integrations/connectors/source-greenhouse/metadata.yaml
+++ b/airbyte-integrations/connectors/source-greenhouse/metadata.yaml
@@ -29,6 +29,6 @@ data:
   releaseStage: generally_available
   supportLevel: certified
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-gridly/metadata.yaml b/airbyte-integrations/connectors/source-gridly/metadata.yaml
index 11a5a90d789f..d4de0781827f 100644
--- a/airbyte-integrations/connectors/source-gridly/metadata.yaml
+++ b/airbyte-integrations/connectors/source-gridly/metadata.yaml
@@ -21,6 +21,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/sources/gridly
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-gutendex/metadata.yaml b/airbyte-integrations/connectors/source-gutendex/metadata.yaml
index 6624a7e221bd..1de849fd0252 100644
--- a/airbyte-integrations/connectors/source-gutendex/metadata.yaml
+++ b/airbyte-integrations/connectors/source-gutendex/metadata.yaml
@@ -19,8 +19,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/gutendex
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-harness/metadata.yaml b/airbyte-integrations/connectors/source-harness/metadata.yaml
index 59b15a6a1b61..9db220e1e110 100644
--- a/airbyte-integrations/connectors/source-harness/metadata.yaml
+++ b/airbyte-integrations/connectors/source-harness/metadata.yaml
@@ -25,5 +25,6 @@ data:
   supportLevel: community
   documentationUrl: https://docs.airbyte.com/integrations/sources/harness
   tags:
-    - language:low-code
+    - language:python
+    - cdk:low-code
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-harvest/metadata.yaml b/airbyte-integrations/connectors/source-harvest/metadata.yaml
index f551698c320c..c358af730a2d 100644
--- a/airbyte-integrations/connectors/source-harvest/metadata.yaml
+++ b/airbyte-integrations/connectors/source-harvest/metadata.yaml
@@ -30,4 +30,5 @@ data:
   supportLevel: certified
   tags:
     - language:python
+    - cdk:python
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-hellobaton/metadata.yaml b/airbyte-integrations/connectors/source-hellobaton/metadata.yaml
index 183bf69ea8ad..4e8c9d890162 100644
--- a/airbyte-integrations/connectors/source-hellobaton/metadata.yaml
+++ b/airbyte-integrations/connectors/source-hellobaton/metadata.yaml
@@ -25,7 +25,8 @@ data:
   supportLevel: community
   documentationUrl: https://docs.airbyte.com/integrations/sources/hellobaton
   tags:
-    - language:low-code
+    - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-hubplanner/metadata.yaml b/airbyte-integrations/connectors/source-hubplanner/metadata.yaml
index b14c0ba4e0b1..af2a4cef1d9c 100644
--- a/airbyte-integrations/connectors/source-hubplanner/metadata.yaml
+++ b/airbyte-integrations/connectors/source-hubplanner/metadata.yaml
@@ -25,7 +25,8 @@ data:
   supportLevel: community
   documentationUrl: https://docs.airbyte.com/integrations/sources/hubplanner
   tags:
-    - language:low-code
+    - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-hubspot/metadata.yaml b/airbyte-integrations/connectors/source-hubspot/metadata.yaml
index daa438f5ee77..e04330e89311 100644
--- a/airbyte-integrations/connectors/source-hubspot/metadata.yaml
+++ b/airbyte-integrations/connectors/source-hubspot/metadata.yaml
@@ -50,4 +50,5 @@ data:
   supportLevel: certified
   tags:
     - language:python
+    - cdk:python
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-insightly/metadata.yaml b/airbyte-integrations/connectors/source-insightly/metadata.yaml
index 7fb79460cecd..47a4f238f383 100644
--- a/airbyte-integrations/connectors/source-insightly/metadata.yaml
+++ b/airbyte-integrations/connectors/source-insightly/metadata.yaml
@@ -24,7 +24,8 @@ data:
   supportLevel: community
   documentationUrl: https://docs.airbyte.com/integrations/sources/insightly
   tags:
-    - language:low-code
+    - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-instagram/metadata.yaml b/airbyte-integrations/connectors/source-instagram/metadata.yaml
index 85c4536619e6..15025dd3066e 100644
--- a/airbyte-integrations/connectors/source-instagram/metadata.yaml
+++ b/airbyte-integrations/connectors/source-instagram/metadata.yaml
@@ -49,6 +49,7 @@ data:
   documentationUrl: https://docs.airbyte.com/integrations/sources/instagram
   tags:
     - language:python
+    - cdk:python
   ab_internal:
     sl: 200
     ql: 400
diff --git a/airbyte-integrations/connectors/source-instatus/metadata.yaml b/airbyte-integrations/connectors/source-instatus/metadata.yaml
index 225e47a7158b..5741f7f5e0e1 100644
--- a/airbyte-integrations/connectors/source-instatus/metadata.yaml
+++ b/airbyte-integrations/connectors/source-instatus/metadata.yaml
@@ -20,8 +20,8 @@ data:
   releaseStage: alpha
   documentationUrl: https://docs.airbyte.com/integrations/sources/instatus
   tags:
-    - language:low-code
     - language:python
+    - cdk:low-code
   ab_internal:
     sl: 100
     ql: 100
diff --git a/airbyte-integrations/connectors/source-intercom/metadata.yaml b/airbyte-integrations/connectors/source-intercom/metadata.yaml
index c2af6045c305..6c5bc2935741 100644
--- a/airbyte-integrations/connectors/source-intercom/metadata.yaml
+++ b/airbyte-integrations/connectors/source-intercom/metadata.yaml
@@ -36,5 +36,6 @@ data:
     - companies
   supportLevel: certified
   tags:
-    - language:low-code
+    - language:python
+    - cdk:low-code
 metadataSpecVersion: "1.0"
diff --git a/airbyte-integrations/connectors/source-intruder/metadata.yaml
b/airbyte-integrations/connectors/source-intruder/metadata.yaml index 49d692755d42..efce4cb5155f 100644 --- a/airbyte-integrations/connectors/source-intruder/metadata.yaml +++ b/airbyte-integrations/connectors/source-intruder/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/intruder tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-ip2whois/metadata.yaml b/airbyte-integrations/connectors/source-ip2whois/metadata.yaml index 03866ecbd8ca..aaede5464acd 100644 --- a/airbyte-integrations/connectors/source-ip2whois/metadata.yaml +++ b/airbyte-integrations/connectors/source-ip2whois/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/ip2whois tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-iterable/metadata.yaml b/airbyte-integrations/connectors/source-iterable/metadata.yaml index ce432b88e86e..0c2549cf3e44 100644 --- a/airbyte-integrations/connectors/source-iterable/metadata.yaml +++ b/airbyte-integrations/connectors/source-iterable/metadata.yaml @@ -30,4 +30,5 @@ data: supportLevel: certified tags: - language:python + - cdk:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-jira/metadata.yaml b/airbyte-integrations/connectors/source-jira/metadata.yaml index 2c97a3658351..efe78ce01bf0 100644 --- a/airbyte-integrations/connectors/source-jira/metadata.yaml +++ b/airbyte-integrations/connectors/source-jira/metadata.yaml @@ -45,4 +45,5 @@ data: supportLevel: certified tags: - language:python + - cdk:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-k6-cloud/metadata.yaml b/airbyte-integrations/connectors/source-k6-cloud/metadata.yaml index f0c7664e3897..fe352ddec6c9 100644 --- a/airbyte-integrations/connectors/source-k6-cloud/metadata.yaml +++ b/airbyte-integrations/connectors/source-k6-cloud/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/k6-cloud tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-kafka/metadata.yaml b/airbyte-integrations/connectors/source-kafka/metadata.yaml index aedf1844ccaf..b6a1d0494c7b 100644 --- a/airbyte-integrations/connectors/source-kafka/metadata.yaml +++ b/airbyte-integrations/connectors/source-kafka/metadata.yaml @@ -17,7 +17,6 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/kafka tags: - language:java - - language:python ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-klarna/metadata.yaml b/airbyte-integrations/connectors/source-klarna/metadata.yaml index caf42f707603..2b15eab960bf 100644 --- a/airbyte-integrations/connectors/source-klarna/metadata.yaml +++ b/airbyte-integrations/connectors/source-klarna/metadata.yaml @@ -28,7 +28,8 @@ data: supportLevel: community documentationUrl: https://docs.airbyte.com/integrations/sources/klarna tags: - - language:low-code + - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-klaus-api/metadata.yaml b/airbyte-integrations/connectors/source-klaus-api/metadata.yaml index 7903adf4b734..e8f7c690acd5 100644 --- 
a/airbyte-integrations/connectors/source-klaus-api/metadata.yaml +++ b/airbyte-integrations/connectors/source-klaus-api/metadata.yaml @@ -27,5 +27,6 @@ data: ql: 300 sl: 100 tags: - - language:low-code + - language:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-klaviyo/metadata.yaml b/airbyte-integrations/connectors/source-klaviyo/metadata.yaml index 7c080fcb6fcf..1c1d865c735f 100644 --- a/airbyte-integrations/connectors/source-klaviyo/metadata.yaml +++ b/airbyte-integrations/connectors/source-klaviyo/metadata.yaml @@ -42,6 +42,7 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/klaviyo tags: - language:python + - cdk:python ab_internal: sl: 200 ql: 400 diff --git a/airbyte-integrations/connectors/source-kustomer-singer/metadata.yaml b/airbyte-integrations/connectors/source-kustomer-singer/metadata.yaml index 948cb8adc902..f5c36b849556 100644 --- a/airbyte-integrations/connectors/source-kustomer-singer/metadata.yaml +++ b/airbyte-integrations/connectors/source-kustomer-singer/metadata.yaml @@ -22,6 +22,7 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/kustomer-singer tags: - language:python + - cdk:python ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-kyriba/metadata.yaml b/airbyte-integrations/connectors/source-kyriba/metadata.yaml index 49bfb45ac7e3..d011cdff2ac5 100644 --- a/airbyte-integrations/connectors/source-kyriba/metadata.yaml +++ b/airbyte-integrations/connectors/source-kyriba/metadata.yaml @@ -27,4 +27,5 @@ data: supportLevel: community tags: - language:python + - cdk:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-kyve/metadata.yaml b/airbyte-integrations/connectors/source-kyve/metadata.yaml index c95fa5e33341..dfb9a57b5fb2 100644 --- a/airbyte-integrations/connectors/source-kyve/metadata.yaml +++ b/airbyte-integrations/connectors/source-kyve/metadata.yaml @@ -22,6 +22,7 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/kyve tags: - language:python + - cdk:python ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-launchdarkly/metadata.yaml b/airbyte-integrations/connectors/source-launchdarkly/metadata.yaml index c93a438bd591..46249efe91a8 100644 --- a/airbyte-integrations/connectors/source-launchdarkly/metadata.yaml +++ b/airbyte-integrations/connectors/source-launchdarkly/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/launchdarkly tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-lemlist/metadata.yaml b/airbyte-integrations/connectors/source-lemlist/metadata.yaml index 6fbfb68fb760..107cca3e4580 100644 --- a/airbyte-integrations/connectors/source-lemlist/metadata.yaml +++ b/airbyte-integrations/connectors/source-lemlist/metadata.yaml @@ -25,5 +25,6 @@ data: supportLevel: community documentationUrl: https://docs.airbyte.com/integrations/sources/lemlist tags: - - language:low-code + - language:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-lever-hiring/metadata.yaml b/airbyte-integrations/connectors/source-lever-hiring/metadata.yaml index 93c0e331a63b..db7530abf1a3 100644 --- a/airbyte-integrations/connectors/source-lever-hiring/metadata.yaml +++ b/airbyte-integrations/connectors/source-lever-hiring/metadata.yaml @@ -21,6 +21,7 @@ 
data: documentationUrl: https://docs.airbyte.com/integrations/sources/lever-hiring tags: - language:python + - cdk:python ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-linkedin-ads/metadata.yaml b/airbyte-integrations/connectors/source-linkedin-ads/metadata.yaml index e7a9a9212eb0..0c00c634873c 100644 --- a/airbyte-integrations/connectors/source-linkedin-ads/metadata.yaml +++ b/airbyte-integrations/connectors/source-linkedin-ads/metadata.yaml @@ -41,4 +41,5 @@ data: supportLevel: certified tags: - language:python + - cdk:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-linkedin-pages/metadata.yaml b/airbyte-integrations/connectors/source-linkedin-pages/metadata.yaml index f5d1196e7b6c..3d2772011d14 100644 --- a/airbyte-integrations/connectors/source-linkedin-pages/metadata.yaml +++ b/airbyte-integrations/connectors/source-linkedin-pages/metadata.yaml @@ -25,4 +25,5 @@ data: supportLevel: community tags: - language:python + - cdk:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-linnworks/metadata.yaml b/airbyte-integrations/connectors/source-linnworks/metadata.yaml index 22f9617db685..38e081974cfa 100644 --- a/airbyte-integrations/connectors/source-linnworks/metadata.yaml +++ b/airbyte-integrations/connectors/source-linnworks/metadata.yaml @@ -27,4 +27,5 @@ data: supportLevel: community tags: - language:python + - cdk:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-lokalise/metadata.yaml b/airbyte-integrations/connectors/source-lokalise/metadata.yaml index 2f222da3c397..11c9c7b9d702 100644 --- a/airbyte-integrations/connectors/source-lokalise/metadata.yaml +++ b/airbyte-integrations/connectors/source-lokalise/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/lokalise tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-looker/metadata.yaml b/airbyte-integrations/connectors/source-looker/metadata.yaml index 3d2415c0cd29..334fe0d6ccda 100644 --- a/airbyte-integrations/connectors/source-looker/metadata.yaml +++ b/airbyte-integrations/connectors/source-looker/metadata.yaml @@ -21,6 +21,7 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/looker tags: - language:python + - cdk:python ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-mailchimp/metadata.yaml b/airbyte-integrations/connectors/source-mailchimp/metadata.yaml index 17b3023af310..670f7387202e 100644 --- a/airbyte-integrations/connectors/source-mailchimp/metadata.yaml +++ b/airbyte-integrations/connectors/source-mailchimp/metadata.yaml @@ -44,4 +44,5 @@ data: supportLevel: certified tags: - language:python + - cdk:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-mailerlite/metadata.yaml b/airbyte-integrations/connectors/source-mailerlite/metadata.yaml index b89c174e8d0f..8cf2ffb5f854 100644 --- a/airbyte-integrations/connectors/source-mailerlite/metadata.yaml +++ b/airbyte-integrations/connectors/source-mailerlite/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/mailerlite tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-mailersend/metadata.yaml 
b/airbyte-integrations/connectors/source-mailersend/metadata.yaml index 97799066552c..4baf275e2c7c 100644 --- a/airbyte-integrations/connectors/source-mailersend/metadata.yaml +++ b/airbyte-integrations/connectors/source-mailersend/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/mailersend tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-mailgun/metadata.yaml b/airbyte-integrations/connectors/source-mailgun/metadata.yaml index 669d11d1ef41..86f10bca36ba 100644 --- a/airbyte-integrations/connectors/source-mailgun/metadata.yaml +++ b/airbyte-integrations/connectors/source-mailgun/metadata.yaml @@ -24,7 +24,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/mailgun tags: - - language:low-code + - language:python + - cdk:low-code ab_internal: sl: 100 ql: 200 diff --git a/airbyte-integrations/connectors/source-mailjet-mail/metadata.yaml b/airbyte-integrations/connectors/source-mailjet-mail/metadata.yaml index ce0bd1f6e3b2..028e7faf1b0d 100644 --- a/airbyte-integrations/connectors/source-mailjet-mail/metadata.yaml +++ b/airbyte-integrations/connectors/source-mailjet-mail/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/mailjet-mail tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-mailjet-sms/metadata.yaml b/airbyte-integrations/connectors/source-mailjet-sms/metadata.yaml index 307381bb7f6c..88deb96b6612 100644 --- a/airbyte-integrations/connectors/source-mailjet-sms/metadata.yaml +++ b/airbyte-integrations/connectors/source-mailjet-sms/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/mailjet-sms tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-marketo/metadata.yaml b/airbyte-integrations/connectors/source-marketo/metadata.yaml index 4b3115eb7f2c..534b87980999 100644 --- a/airbyte-integrations/connectors/source-marketo/metadata.yaml +++ b/airbyte-integrations/connectors/source-marketo/metadata.yaml @@ -30,4 +30,5 @@ data: supportLevel: certified tags: - language:python + - cdk:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-merge/metadata.yaml b/airbyte-integrations/connectors/source-merge/metadata.yaml index 8a311f0a516d..941467550308 100644 --- a/airbyte-integrations/connectors/source-merge/metadata.yaml +++ b/airbyte-integrations/connectors/source-merge/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/merge tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-metabase/metadata.yaml b/airbyte-integrations/connectors/source-metabase/metadata.yaml index 2ee8fe2744b6..18f35409e27e 100644 --- a/airbyte-integrations/connectors/source-metabase/metadata.yaml +++ b/airbyte-integrations/connectors/source-metabase/metadata.yaml @@ -27,6 +27,6 @@ data: releaseStage: beta supportLevel: community tags: - - language:low-code - language:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-microsoft-dataverse/metadata.yaml 
b/airbyte-integrations/connectors/source-microsoft-dataverse/metadata.yaml index 30fbf7e9da33..8b688a9c0be0 100644 --- a/airbyte-integrations/connectors/source-microsoft-dataverse/metadata.yaml +++ b/airbyte-integrations/connectors/source-microsoft-dataverse/metadata.yaml @@ -21,6 +21,7 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/microsoft-dataverse tags: - language:python + - cdk:python ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-microsoft-onedrive/metadata.yaml b/airbyte-integrations/connectors/source-microsoft-onedrive/metadata.yaml index 9f72cb715140..dae4db06bebd 100644 --- a/airbyte-integrations/connectors/source-microsoft-onedrive/metadata.yaml +++ b/airbyte-integrations/connectors/source-microsoft-onedrive/metadata.yaml @@ -17,7 +17,7 @@ data: enabled: true connectorBuildOptions: baseImage: docker.io/airbyte/python-connector-base:1.2.0@sha256:c22a9d97464b69d6ef01898edf3f8612dc11614f05a84984451dde195f337db9 - connectorSubtype: api + connectorSubtype: file connectorType: source definitionId: 01d1c685-fd4a-4837-8f4c-93fe5a0d2188 dockerImageTag: 0.1.8 @@ -31,4 +31,5 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/microsoft-onedrive tags: - language:python + - cdk:python-file-based metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-microsoft-sharepoint/metadata.yaml b/airbyte-integrations/connectors/source-microsoft-sharepoint/metadata.yaml index fe6b0d8b8763..a96fa802e95f 100644 --- a/airbyte-integrations/connectors/source-microsoft-sharepoint/metadata.yaml +++ b/airbyte-integrations/connectors/source-microsoft-sharepoint/metadata.yaml @@ -13,7 +13,7 @@ data: enabled: true connectorBuildOptions: baseImage: docker.io/airbyte/python-connector-base:1.2.0@sha256:c22a9d97464b69d6ef01898edf3f8612dc11614f05a84984451dde195f337db9 - connectorSubtype: api + connectorSubtype: file connectorType: source definitionId: 59353119-f0f2-4e5a-a8ba-15d887bc34f6 dockerImageTag: 0.1.0 @@ -28,4 +28,5 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/microsoft-sharepoint tags: - language:python + - cdk:python-file-based metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-microsoft-teams/metadata.yaml b/airbyte-integrations/connectors/source-microsoft-teams/metadata.yaml index f9307d6d500c..ce68d172788c 100644 --- a/airbyte-integrations/connectors/source-microsoft-teams/metadata.yaml +++ b/airbyte-integrations/connectors/source-microsoft-teams/metadata.yaml @@ -29,6 +29,7 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/microsoft-teams tags: - language:python + - cdk:python ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-mixpanel/metadata.yaml b/airbyte-integrations/connectors/source-mixpanel/metadata.yaml index c8d1c570c5dc..cf59460ef035 100644 --- a/airbyte-integrations/connectors/source-mixpanel/metadata.yaml +++ b/airbyte-integrations/connectors/source-mixpanel/metadata.yaml @@ -57,4 +57,5 @@ data: supportLevel: certified tags: - language:python + - cdk:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-monday/metadata.yaml b/airbyte-integrations/connectors/source-monday/metadata.yaml index f93fca602b41..1586899f7dea 100644 --- a/airbyte-integrations/connectors/source-monday/metadata.yaml +++ b/airbyte-integrations/connectors/source-monday/metadata.yaml @@ -46,5 +46,6 @@ data: releaseStage: generally_available supportLevel: certified tags: - - 
language:low-code + - language:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-mssql/metadata.yaml b/airbyte-integrations/connectors/source-mssql/metadata.yaml index e9aaafc6a40d..cc4941df3037 100644 --- a/airbyte-integrations/connectors/source-mssql/metadata.yaml +++ b/airbyte-integrations/connectors/source-mssql/metadata.yaml @@ -29,7 +29,6 @@ data: supportLevel: community tags: - language:java - - language:python releases: breakingChanges: 3.0.0: diff --git a/airbyte-integrations/connectors/source-my-hours/metadata.yaml b/airbyte-integrations/connectors/source-my-hours/metadata.yaml index 27da449d698a..577a4c1c4360 100644 --- a/airbyte-integrations/connectors/source-my-hours/metadata.yaml +++ b/airbyte-integrations/connectors/source-my-hours/metadata.yaml @@ -21,6 +21,7 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/my-hours tags: - language:python + - cdk:python ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-n8n/metadata.yaml b/airbyte-integrations/connectors/source-n8n/metadata.yaml index afabe0ee24aa..9ee87aff083f 100644 --- a/airbyte-integrations/connectors/source-n8n/metadata.yaml +++ b/airbyte-integrations/connectors/source-n8n/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/n8n tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-nasa/metadata.yaml b/airbyte-integrations/connectors/source-nasa/metadata.yaml index 8bc0a54a45a0..6268d90d956b 100644 --- a/airbyte-integrations/connectors/source-nasa/metadata.yaml +++ b/airbyte-integrations/connectors/source-nasa/metadata.yaml @@ -25,5 +25,6 @@ data: supportLevel: community documentationUrl: https://docs.airbyte.com/integrations/sources/nasa tags: - - language:low-code + - language:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-netsuite/metadata.yaml b/airbyte-integrations/connectors/source-netsuite/metadata.yaml index 2ce3fb426c0c..a3a9b1c84551 100644 --- a/airbyte-integrations/connectors/source-netsuite/metadata.yaml +++ b/airbyte-integrations/connectors/source-netsuite/metadata.yaml @@ -25,4 +25,5 @@ data: supportLevel: community tags: - language:python + - cdk:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-news-api/metadata.yaml b/airbyte-integrations/connectors/source-news-api/metadata.yaml index e4f2c6d7e45b..78ffd720b21d 100644 --- a/airbyte-integrations/connectors/source-news-api/metadata.yaml +++ b/airbyte-integrations/connectors/source-news-api/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/news-api tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-newsdata/metadata.yaml b/airbyte-integrations/connectors/source-newsdata/metadata.yaml index 1ae1a6bb448c..309b7119dfba 100644 --- a/airbyte-integrations/connectors/source-newsdata/metadata.yaml +++ b/airbyte-integrations/connectors/source-newsdata/metadata.yaml @@ -19,8 +19,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/newsdata tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-notion/metadata.yaml 
b/airbyte-integrations/connectors/source-notion/metadata.yaml index 58b329ffe793..3c6c34dd6638 100644 --- a/airbyte-integrations/connectors/source-notion/metadata.yaml +++ b/airbyte-integrations/connectors/source-notion/metadata.yaml @@ -45,4 +45,5 @@ data: supportLevel: certified tags: - language:python + - cdk:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-nytimes/metadata.yaml b/airbyte-integrations/connectors/source-nytimes/metadata.yaml index abb5e9e358cc..44a70bd16cbe 100644 --- a/airbyte-integrations/connectors/source-nytimes/metadata.yaml +++ b/airbyte-integrations/connectors/source-nytimes/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/nytimes tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-okta/metadata.yaml b/airbyte-integrations/connectors/source-okta/metadata.yaml index 2a7b9541606e..2818c18a77c5 100644 --- a/airbyte-integrations/connectors/source-okta/metadata.yaml +++ b/airbyte-integrations/connectors/source-okta/metadata.yaml @@ -25,4 +25,5 @@ data: supportLevel: community tags: - language:python + - cdk:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-omnisend/metadata.yaml b/airbyte-integrations/connectors/source-omnisend/metadata.yaml index 3b27557209fa..79793f70de45 100644 --- a/airbyte-integrations/connectors/source-omnisend/metadata.yaml +++ b/airbyte-integrations/connectors/source-omnisend/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/omnisend tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-onesignal/metadata.yaml b/airbyte-integrations/connectors/source-onesignal/metadata.yaml index 1c57a5eebb55..da23f2d26b1e 100644 --- a/airbyte-integrations/connectors/source-onesignal/metadata.yaml +++ b/airbyte-integrations/connectors/source-onesignal/metadata.yaml @@ -1,7 +1,7 @@ data: allowedHosts: hosts: - - "onesignal.com" + - onesignal.com remoteRegistries: pypi: enabled: true @@ -24,6 +24,7 @@ data: releaseDate: 2023-08-31 releaseStage: alpha tags: - - language:low-code + - language:python + - cdk:low-code supportLevel: community metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-open-exchange-rates/metadata.yaml b/airbyte-integrations/connectors/source-open-exchange-rates/metadata.yaml index 4cce2373e1b4..89379cff51dc 100644 --- a/airbyte-integrations/connectors/source-open-exchange-rates/metadata.yaml +++ b/airbyte-integrations/connectors/source-open-exchange-rates/metadata.yaml @@ -25,5 +25,6 @@ data: supportLevel: community documentationUrl: https://docs.airbyte.com/integrations/sources/open-exchange-rates tags: - - language:low-code + - language:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-openweather/metadata.yaml b/airbyte-integrations/connectors/source-openweather/metadata.yaml index e92518c4f92b..ff3d57350d29 100644 --- a/airbyte-integrations/connectors/source-openweather/metadata.yaml +++ b/airbyte-integrations/connectors/source-openweather/metadata.yaml @@ -24,5 +24,6 @@ data: supportLevel: community documentationUrl: https://docs.airbyte.com/integrations/sources/openweather tags: - - language:low-code + - language:python + - cdk:low-code metadataSpecVersion: "1.0" 
diff --git a/airbyte-integrations/connectors/source-opsgenie/metadata.yaml b/airbyte-integrations/connectors/source-opsgenie/metadata.yaml index 1c7f9af426a1..e429de1725ea 100644 --- a/airbyte-integrations/connectors/source-opsgenie/metadata.yaml +++ b/airbyte-integrations/connectors/source-opsgenie/metadata.yaml @@ -19,7 +19,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/opsgenie tags: - - language:low-code + - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-oracle/metadata.yaml b/airbyte-integrations/connectors/source-oracle/metadata.yaml index 56a6d42d4ff3..6bc49d822dce 100644 --- a/airbyte-integrations/connectors/source-oracle/metadata.yaml +++ b/airbyte-integrations/connectors/source-oracle/metadata.yaml @@ -27,5 +27,4 @@ data: supportLevel: community tags: - language:java - - language:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-orb/metadata.yaml b/airbyte-integrations/connectors/source-orb/metadata.yaml index 1c5c21ce8624..80071c06f772 100644 --- a/airbyte-integrations/connectors/source-orb/metadata.yaml +++ b/airbyte-integrations/connectors/source-orb/metadata.yaml @@ -21,6 +21,7 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/orb tags: - language:python + - cdk:python ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-orbit/metadata.yaml b/airbyte-integrations/connectors/source-orbit/metadata.yaml index c4ce50c056f2..90f77c2e17c1 100644 --- a/airbyte-integrations/connectors/source-orbit/metadata.yaml +++ b/airbyte-integrations/connectors/source-orbit/metadata.yaml @@ -25,7 +25,8 @@ data: supportLevel: community documentationUrl: https://docs.airbyte.com/integrations/sources/orbit tags: - - language:low-code + - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-oura/metadata.yaml b/airbyte-integrations/connectors/source-oura/metadata.yaml index a8279f08a933..70955e279a08 100644 --- a/airbyte-integrations/connectors/source-oura/metadata.yaml +++ b/airbyte-integrations/connectors/source-oura/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/oura tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-outbrain-amplify/metadata.yaml b/airbyte-integrations/connectors/source-outbrain-amplify/metadata.yaml index 06fd36aa0e7d..9cbeedacbf6f 100644 --- a/airbyte-integrations/connectors/source-outbrain-amplify/metadata.yaml +++ b/airbyte-integrations/connectors/source-outbrain-amplify/metadata.yaml @@ -20,7 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/outbrain-amplify tags: - - language:low-code + - language:python + - cdk:python ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-outreach/metadata.yaml b/airbyte-integrations/connectors/source-outreach/metadata.yaml index 3e5d2a105c5e..6387ce85b176 100644 --- a/airbyte-integrations/connectors/source-outreach/metadata.yaml +++ b/airbyte-integrations/connectors/source-outreach/metadata.yaml @@ -25,4 +25,5 @@ data: supportLevel: community tags: - language:python + - cdk:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-pagerduty/metadata.yaml 
b/airbyte-integrations/connectors/source-pagerduty/metadata.yaml index 1d1bce47b87e..4136cdf128fd 100644 --- a/airbyte-integrations/connectors/source-pagerduty/metadata.yaml +++ b/airbyte-integrations/connectors/source-pagerduty/metadata.yaml @@ -25,7 +25,8 @@ data: supportLevel: community documentationUrl: https://docs.airbyte.com/integrations/sources/pagerduty tags: - - language:low-code + - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-pardot/metadata.yaml b/airbyte-integrations/connectors/source-pardot/metadata.yaml index 6bc3843e5f5c..593082b8a4b2 100644 --- a/airbyte-integrations/connectors/source-pardot/metadata.yaml +++ b/airbyte-integrations/connectors/source-pardot/metadata.yaml @@ -21,6 +21,7 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/pardot tags: - language:python + - cdk:python ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-partnerstack/metadata.yaml b/airbyte-integrations/connectors/source-partnerstack/metadata.yaml index cf6483ba4899..c7054818429d 100644 --- a/airbyte-integrations/connectors/source-partnerstack/metadata.yaml +++ b/airbyte-integrations/connectors/source-partnerstack/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/partnerstack tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-paypal-transaction/metadata.yaml b/airbyte-integrations/connectors/source-paypal-transaction/metadata.yaml index a1974a83fbe7..da379fa29475 100644 --- a/airbyte-integrations/connectors/source-paypal-transaction/metadata.yaml +++ b/airbyte-integrations/connectors/source-paypal-transaction/metadata.yaml @@ -41,5 +41,6 @@ data: - list_payments supportLevel: certified tags: - - language:low-code + - language:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-paystack/metadata.yaml b/airbyte-integrations/connectors/source-paystack/metadata.yaml index 203dd02b642c..0ff0d015e031 100644 --- a/airbyte-integrations/connectors/source-paystack/metadata.yaml +++ b/airbyte-integrations/connectors/source-paystack/metadata.yaml @@ -24,6 +24,7 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/paystack tags: - language:python + - cdk:python ab_internal: sl: 100 ql: 300 diff --git a/airbyte-integrations/connectors/source-pendo/metadata.yaml b/airbyte-integrations/connectors/source-pendo/metadata.yaml index be8343b55fa1..b0ed1db6ccf1 100644 --- a/airbyte-integrations/connectors/source-pendo/metadata.yaml +++ b/airbyte-integrations/connectors/source-pendo/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/pendo tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-persistiq/metadata.yaml b/airbyte-integrations/connectors/source-persistiq/metadata.yaml index 90a020fc773b..2b03a496f47b 100644 --- a/airbyte-integrations/connectors/source-persistiq/metadata.yaml +++ b/airbyte-integrations/connectors/source-persistiq/metadata.yaml @@ -25,5 +25,6 @@ data: supportLevel: community documentationUrl: https://docs.airbyte.com/integrations/sources/persistiq tags: - - language:low-code + - language:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git 
a/airbyte-integrations/connectors/source-pexels-api/metadata.yaml b/airbyte-integrations/connectors/source-pexels-api/metadata.yaml index 9e5e8cab8fa3..f59992c81ce4 100644 --- a/airbyte-integrations/connectors/source-pexels-api/metadata.yaml +++ b/airbyte-integrations/connectors/source-pexels-api/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/pexels-api tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-pinterest/metadata.yaml b/airbyte-integrations/connectors/source-pinterest/metadata.yaml index e541750bdf51..0ba688fc8f1b 100644 --- a/airbyte-integrations/connectors/source-pinterest/metadata.yaml +++ b/airbyte-integrations/connectors/source-pinterest/metadata.yaml @@ -42,6 +42,7 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/pinterest tags: - language:python + - cdk:python ab_internal: sl: 200 ql: 400 diff --git a/airbyte-integrations/connectors/source-pipedrive/metadata.yaml b/airbyte-integrations/connectors/source-pipedrive/metadata.yaml index b52b392e7a85..45dd7f2eec89 100644 --- a/airbyte-integrations/connectors/source-pipedrive/metadata.yaml +++ b/airbyte-integrations/connectors/source-pipedrive/metadata.yaml @@ -33,5 +33,6 @@ data: releaseStage: alpha supportLevel: community tags: - - language:low-code + - cdk:low-code + - language:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-pivotal-tracker/metadata.yaml b/airbyte-integrations/connectors/source-pivotal-tracker/metadata.yaml index a41a1713b016..96f6fe69aeab 100644 --- a/airbyte-integrations/connectors/source-pivotal-tracker/metadata.yaml +++ b/airbyte-integrations/connectors/source-pivotal-tracker/metadata.yaml @@ -21,6 +21,7 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/pivotal-tracker tags: - language:python + - cdk:python ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-plaid/metadata.yaml b/airbyte-integrations/connectors/source-plaid/metadata.yaml index f15195710ac3..2adeb5b50f9d 100644 --- a/airbyte-integrations/connectors/source-plaid/metadata.yaml +++ b/airbyte-integrations/connectors/source-plaid/metadata.yaml @@ -20,7 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/plaid tags: - - language:low-code + - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-plausible/metadata.yaml b/airbyte-integrations/connectors/source-plausible/metadata.yaml index 086d1c33a3c0..226bddfab7cb 100644 --- a/airbyte-integrations/connectors/source-plausible/metadata.yaml +++ b/airbyte-integrations/connectors/source-plausible/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/plausible tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-pocket/metadata.yaml b/airbyte-integrations/connectors/source-pocket/metadata.yaml index 219e92e32df2..a4aeb73321b9 100644 --- a/airbyte-integrations/connectors/source-pocket/metadata.yaml +++ b/airbyte-integrations/connectors/source-pocket/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/pocket tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 
100 diff --git a/airbyte-integrations/connectors/source-pokeapi/metadata.yaml b/airbyte-integrations/connectors/source-pokeapi/metadata.yaml index 2f6a410ea49b..39ba5bc80833 100644 --- a/airbyte-integrations/connectors/source-pokeapi/metadata.yaml +++ b/airbyte-integrations/connectors/source-pokeapi/metadata.yaml @@ -25,5 +25,6 @@ data: supportLevel: community documentationUrl: https://docs.airbyte.com/integrations/sources/pokeapi tags: - - language:low-code + - language:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-polygon-stock-api/metadata.yaml b/airbyte-integrations/connectors/source-polygon-stock-api/metadata.yaml index 897591536c31..4ca05fdef49e 100644 --- a/airbyte-integrations/connectors/source-polygon-stock-api/metadata.yaml +++ b/airbyte-integrations/connectors/source-polygon-stock-api/metadata.yaml @@ -23,8 +23,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/polygon-stock-api tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-posthog/metadata.yaml b/airbyte-integrations/connectors/source-posthog/metadata.yaml index cfd5c0c74779..8fe507a91922 100644 --- a/airbyte-integrations/connectors/source-posthog/metadata.yaml +++ b/airbyte-integrations/connectors/source-posthog/metadata.yaml @@ -35,6 +35,6 @@ data: To apply this change, refresh the schema for the `events` stream and reset your data. For more information [visit](https://docs.airbyte.com/integrations/sources/posthog-migrations) upgradeDeadline: "2024-01-15" tags: - - language:low-code + - cdk:low-code - language:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-postmarkapp/metadata.yaml b/airbyte-integrations/connectors/source-postmarkapp/metadata.yaml index 58849623dd1c..41ed301fc890 100644 --- a/airbyte-integrations/connectors/source-postmarkapp/metadata.yaml +++ b/airbyte-integrations/connectors/source-postmarkapp/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/postmarkapp tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-prestashop/metadata.yaml b/airbyte-integrations/connectors/source-prestashop/metadata.yaml index dbbf6240c187..b416de7dc8b1 100644 --- a/airbyte-integrations/connectors/source-prestashop/metadata.yaml +++ b/airbyte-integrations/connectors/source-prestashop/metadata.yaml @@ -27,6 +27,6 @@ data: releaseStage: beta supportLevel: community tags: - - language:low-code - language:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-primetric/metadata.yaml b/airbyte-integrations/connectors/source-primetric/metadata.yaml index 83bd3b3ecda9..41e8b3f8f801 100644 --- a/airbyte-integrations/connectors/source-primetric/metadata.yaml +++ b/airbyte-integrations/connectors/source-primetric/metadata.yaml @@ -21,6 +21,7 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/primetric tags: - language:python + - cdk:python ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-public-apis/metadata.yaml b/airbyte-integrations/connectors/source-public-apis/metadata.yaml index fe8481eaf1f1..b187f9919e0a 100644 --- a/airbyte-integrations/connectors/source-public-apis/metadata.yaml +++ 
b/airbyte-integrations/connectors/source-public-apis/metadata.yaml @@ -27,5 +27,6 @@ data: releaseStage: alpha supportLevel: community tags: - - language:low-code + - language:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-punk-api/metadata.yaml b/airbyte-integrations/connectors/source-punk-api/metadata.yaml index 80d6d03f5a1a..14ba3d09b64a 100644 --- a/airbyte-integrations/connectors/source-punk-api/metadata.yaml +++ b/airbyte-integrations/connectors/source-punk-api/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/punk-api tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-pypi/metadata.yaml b/airbyte-integrations/connectors/source-pypi/metadata.yaml index 978205dca0d5..fe73c5aba323 100644 --- a/airbyte-integrations/connectors/source-pypi/metadata.yaml +++ b/airbyte-integrations/connectors/source-pypi/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/pypi tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-qonto/metadata.yaml b/airbyte-integrations/connectors/source-qonto/metadata.yaml index 05c0393aa6ec..2375865ab633 100644 --- a/airbyte-integrations/connectors/source-qonto/metadata.yaml +++ b/airbyte-integrations/connectors/source-qonto/metadata.yaml @@ -13,5 +13,6 @@ data: supportLevel: community documentationUrl: https://docs.airbyte.com/integrations/sources/qonto tags: - - language:low-code + - language:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-qualaroo/metadata.yaml b/airbyte-integrations/connectors/source-qualaroo/metadata.yaml index f33f9ef7bdec..dbe3263f59af 100644 --- a/airbyte-integrations/connectors/source-qualaroo/metadata.yaml +++ b/airbyte-integrations/connectors/source-qualaroo/metadata.yaml @@ -25,5 +25,6 @@ data: supportLevel: community documentationUrl: https://docs.airbyte.com/integrations/sources/qualaroo tags: - - language:low-code + - language:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-quickbooks/metadata.yaml b/airbyte-integrations/connectors/source-quickbooks/metadata.yaml index ec82e2ce8d74..fa9cdf172a2c 100644 --- a/airbyte-integrations/connectors/source-quickbooks/metadata.yaml +++ b/airbyte-integrations/connectors/source-quickbooks/metadata.yaml @@ -32,7 +32,7 @@ data: upgradeDeadline: 2023-10-04 documentationUrl: https://docs.airbyte.com/integrations/sources/quickbooks tags: - - language:low-code + - cdk:low-code - language:python ab_internal: sl: 100 diff --git a/airbyte-integrations/connectors/source-railz/metadata.yaml b/airbyte-integrations/connectors/source-railz/metadata.yaml index eb0870a1779a..f3a21f3c79df 100644 --- a/airbyte-integrations/connectors/source-railz/metadata.yaml +++ b/airbyte-integrations/connectors/source-railz/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/railz tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/metadata.yaml b/airbyte-integrations/connectors/source-rd-station-marketing/metadata.yaml index dbfc299e01bb..b42cfbc9af39 100644 --- 
a/airbyte-integrations/connectors/source-rd-station-marketing/metadata.yaml +++ b/airbyte-integrations/connectors/source-rd-station-marketing/metadata.yaml @@ -21,6 +21,7 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/rd-station-marketing tags: - language:python + - cdk:python ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-recharge/metadata.yaml b/airbyte-integrations/connectors/source-recharge/metadata.yaml index 31dbd5c73fc4..eddf63305e54 100644 --- a/airbyte-integrations/connectors/source-recharge/metadata.yaml +++ b/airbyte-integrations/connectors/source-recharge/metadata.yaml @@ -26,6 +26,7 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/recharge tags: - language:python + - cdk:python ab_internal: sl: 200 ql: 400 diff --git a/airbyte-integrations/connectors/source-recreation/metadata.yaml b/airbyte-integrations/connectors/source-recreation/metadata.yaml index 821554d94e1d..6df88413eed1 100644 --- a/airbyte-integrations/connectors/source-recreation/metadata.yaml +++ b/airbyte-integrations/connectors/source-recreation/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/recreation tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-recruitee/metadata.yaml b/airbyte-integrations/connectors/source-recruitee/metadata.yaml index 71061b162cbd..a573357b3b25 100644 --- a/airbyte-integrations/connectors/source-recruitee/metadata.yaml +++ b/airbyte-integrations/connectors/source-recruitee/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/recruitee tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-recurly/metadata.yaml b/airbyte-integrations/connectors/source-recurly/metadata.yaml index ff6a08c27d33..7bcf01d74add 100644 --- a/airbyte-integrations/connectors/source-recurly/metadata.yaml +++ b/airbyte-integrations/connectors/source-recurly/metadata.yaml @@ -34,4 +34,5 @@ data: supportLevel: community tags: - language:python + - cdk:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-redshift/metadata.yaml b/airbyte-integrations/connectors/source-redshift/metadata.yaml index b8908d9443b5..4653870a0ca0 100644 --- a/airbyte-integrations/connectors/source-redshift/metadata.yaml +++ b/airbyte-integrations/connectors/source-redshift/metadata.yaml @@ -21,5 +21,4 @@ data: supportLevel: community tags: - language:java - - language:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-reply-io/metadata.yaml b/airbyte-integrations/connectors/source-reply-io/metadata.yaml index d57e6cb62bdf..31a3230625bb 100644 --- a/airbyte-integrations/connectors/source-reply-io/metadata.yaml +++ b/airbyte-integrations/connectors/source-reply-io/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/reply-io tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-retently/metadata.yaml b/airbyte-integrations/connectors/source-retently/metadata.yaml index 77e0ea8d3fd7..f18815e379af 100644 --- a/airbyte-integrations/connectors/source-retently/metadata.yaml +++ 
b/airbyte-integrations/connectors/source-retently/metadata.yaml @@ -24,7 +24,8 @@ data: supportLevel: community documentationUrl: https://docs.airbyte.com/integrations/sources/retently tags: - - language:low-code + - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-ringcentral/metadata.yaml b/airbyte-integrations/connectors/source-ringcentral/metadata.yaml index 62c18c12ec9d..2e46b68c5344 100644 --- a/airbyte-integrations/connectors/source-ringcentral/metadata.yaml +++ b/airbyte-integrations/connectors/source-ringcentral/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/ringcentral tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-rki-covid/metadata.yaml b/airbyte-integrations/connectors/source-rki-covid/metadata.yaml index d11317580f3d..9a648f35d46c 100644 --- a/airbyte-integrations/connectors/source-rki-covid/metadata.yaml +++ b/airbyte-integrations/connectors/source-rki-covid/metadata.yaml @@ -21,6 +21,7 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/rki-covid tags: - language:python + - cdk:python ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-rocket-chat/metadata.yaml b/airbyte-integrations/connectors/source-rocket-chat/metadata.yaml index 35229f131695..e086ebddb61a 100644 --- a/airbyte-integrations/connectors/source-rocket-chat/metadata.yaml +++ b/airbyte-integrations/connectors/source-rocket-chat/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/rocket-chat tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-rss/metadata.yaml b/airbyte-integrations/connectors/source-rss/metadata.yaml index 8e7d7b8fdc91..35ff213a67c0 100644 --- a/airbyte-integrations/connectors/source-rss/metadata.yaml +++ b/airbyte-integrations/connectors/source-rss/metadata.yaml @@ -21,6 +21,7 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/rss tags: - language:python + - cdk:python ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-s3/metadata.yaml b/airbyte-integrations/connectors/source-s3/metadata.yaml index d7602ad78871..a9378beb99b4 100644 --- a/airbyte-integrations/connectors/source-s3/metadata.yaml +++ b/airbyte-integrations/connectors/source-s3/metadata.yaml @@ -42,4 +42,5 @@ data: supportLevel: certified tags: - language:python + - cdk:python-file-based metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-salesforce/metadata.yaml b/airbyte-integrations/connectors/source-salesforce/metadata.yaml index bc92039fd1ee..ecb9c09e7278 100644 --- a/airbyte-integrations/connectors/source-salesforce/metadata.yaml +++ b/airbyte-integrations/connectors/source-salesforce/metadata.yaml @@ -30,4 +30,5 @@ data: supportLevel: certified tags: - language:python + - cdk:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-salesloft/metadata.yaml b/airbyte-integrations/connectors/source-salesloft/metadata.yaml index a5c2cc049e9e..ed329f717fe0 100644 --- a/airbyte-integrations/connectors/source-salesloft/metadata.yaml +++ b/airbyte-integrations/connectors/source-salesloft/metadata.yaml @@ -28,4 +28,5 @@ data: supportLevel: community tags: - language:python + - 
cdk:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-sap-fieldglass/metadata.yaml b/airbyte-integrations/connectors/source-sap-fieldglass/metadata.yaml index 6362a99fd348..6ec6649d6e39 100644 --- a/airbyte-integrations/connectors/source-sap-fieldglass/metadata.yaml +++ b/airbyte-integrations/connectors/source-sap-fieldglass/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/sap-fieldglass tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-scaffold-source-http/metadata.yaml b/airbyte-integrations/connectors/source-scaffold-source-http/metadata.yaml index 2370fb40dbaf..75f8d8ee1f54 100644 --- a/airbyte-integrations/connectors/source-scaffold-source-http/metadata.yaml +++ b/airbyte-integrations/connectors/source-scaffold-source-http/metadata.yaml @@ -31,4 +31,5 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/scaffold-source-http tags: - language:python + - cdk:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-scaffold-source-python/metadata.yaml b/airbyte-integrations/connectors/source-scaffold-source-python/metadata.yaml index fff878b0cfda..a409f4bb4e20 100644 --- a/airbyte-integrations/connectors/source-scaffold-source-python/metadata.yaml +++ b/airbyte-integrations/connectors/source-scaffold-source-python/metadata.yaml @@ -31,4 +31,5 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/scaffold-source-python tags: - language:python + - cdk:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-search-metrics/metadata.yaml b/airbyte-integrations/connectors/source-search-metrics/metadata.yaml index 7258ec160d1b..550de0904204 100644 --- a/airbyte-integrations/connectors/source-search-metrics/metadata.yaml +++ b/airbyte-integrations/connectors/source-search-metrics/metadata.yaml @@ -21,6 +21,7 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/search-metrics tags: - language:python + - cdk:python ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-secoda/metadata.yaml b/airbyte-integrations/connectors/source-secoda/metadata.yaml index cc7a8383aefe..34cf84430491 100644 --- a/airbyte-integrations/connectors/source-secoda/metadata.yaml +++ b/airbyte-integrations/connectors/source-secoda/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/secoda tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-sendgrid/metadata.yaml b/airbyte-integrations/connectors/source-sendgrid/metadata.yaml index 9955875b363d..1902d258ce44 100644 --- a/airbyte-integrations/connectors/source-sendgrid/metadata.yaml +++ b/airbyte-integrations/connectors/source-sendgrid/metadata.yaml @@ -29,6 +29,6 @@ data: releaseStage: generally_available supportLevel: certified tags: - - language:low-code - language:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-sendinblue/metadata.yaml b/airbyte-integrations/connectors/source-sendinblue/metadata.yaml index 0318419f4d85..2d7cf712deb6 100644 --- a/airbyte-integrations/connectors/source-sendinblue/metadata.yaml +++ b/airbyte-integrations/connectors/source-sendinblue/metadata.yaml @@ -24,6 +24,6 @@ data: releaseStage: 
alpha supportLevel: community tags: - - language:low-code - language:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-senseforce/metadata.yaml b/airbyte-integrations/connectors/source-senseforce/metadata.yaml index c5d5d8dd7695..ddaf6136e11d 100644 --- a/airbyte-integrations/connectors/source-senseforce/metadata.yaml +++ b/airbyte-integrations/connectors/source-senseforce/metadata.yaml @@ -24,8 +24,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/senseforce tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-sentry/metadata.yaml b/airbyte-integrations/connectors/source-sentry/metadata.yaml index c0fc7fe02f15..4f69e04d253a 100644 --- a/airbyte-integrations/connectors/source-sentry/metadata.yaml +++ b/airbyte-integrations/connectors/source-sentry/metadata.yaml @@ -31,4 +31,5 @@ data: supportLevel: certified tags: - language:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-serpstat/metadata.yaml b/airbyte-integrations/connectors/source-serpstat/metadata.yaml index 849ad93a11da..cbd5a42ab207 100644 --- a/airbyte-integrations/connectors/source-serpstat/metadata.yaml +++ b/airbyte-integrations/connectors/source-serpstat/metadata.yaml @@ -22,5 +22,6 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/serpstat tags: - - language:low-code + - language:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-sftp-bulk/metadata.yaml b/airbyte-integrations/connectors/source-sftp-bulk/metadata.yaml index 6bddeaa476e5..3339cd083f9b 100644 --- a/airbyte-integrations/connectors/source-sftp-bulk/metadata.yaml +++ b/airbyte-integrations/connectors/source-sftp-bulk/metadata.yaml @@ -25,4 +25,5 @@ data: supportLevel: community tags: - language:python + - cdk:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-sftp/metadata.yaml b/airbyte-integrations/connectors/source-sftp/metadata.yaml index d3688c9bb8d2..9d6e9e792795 100644 --- a/airbyte-integrations/connectors/source-sftp/metadata.yaml +++ b/airbyte-integrations/connectors/source-sftp/metadata.yaml @@ -21,5 +21,4 @@ data: supportLevel: community tags: - language:java - - language:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-shopify/metadata.yaml b/airbyte-integrations/connectors/source-shopify/metadata.yaml index a1881ee5550b..195c39907928 100644 --- a/airbyte-integrations/connectors/source-shopify/metadata.yaml +++ b/airbyte-integrations/connectors/source-shopify/metadata.yaml @@ -97,4 +97,5 @@ data: supportLevel: certified tags: - language:python + - cdk:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-shortio/metadata.yaml b/airbyte-integrations/connectors/source-shortio/metadata.yaml index b14f05840fa2..f01591f2accf 100644 --- a/airbyte-integrations/connectors/source-shortio/metadata.yaml +++ b/airbyte-integrations/connectors/source-shortio/metadata.yaml @@ -27,7 +27,7 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/shortio tags: - language:python - - language:low-code + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-slack/metadata.yaml b/airbyte-integrations/connectors/source-slack/metadata.yaml index 06e16de8b167..3090e418f290 100644 --- 
a/airbyte-integrations/connectors/source-slack/metadata.yaml +++ b/airbyte-integrations/connectors/source-slack/metadata.yaml @@ -37,4 +37,5 @@ data: supportLevel: certified tags: - language:python + - cdk:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-smaily/metadata.yaml b/airbyte-integrations/connectors/source-smaily/metadata.yaml index 408ce79e8f32..8e45ead58c0e 100644 --- a/airbyte-integrations/connectors/source-smaily/metadata.yaml +++ b/airbyte-integrations/connectors/source-smaily/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/smaily tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-smartengage/metadata.yaml b/airbyte-integrations/connectors/source-smartengage/metadata.yaml index 4fd15c960d1a..9376cb9f0ed6 100644 --- a/airbyte-integrations/connectors/source-smartengage/metadata.yaml +++ b/airbyte-integrations/connectors/source-smartengage/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/smartengage tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-smartsheets/metadata.yaml b/airbyte-integrations/connectors/source-smartsheets/metadata.yaml index 00660e496826..d0474d7d6c89 100644 --- a/airbyte-integrations/connectors/source-smartsheets/metadata.yaml +++ b/airbyte-integrations/connectors/source-smartsheets/metadata.yaml @@ -29,4 +29,5 @@ data: supportLevel: community tags: - language:python + - cdk:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-snapchat-marketing/metadata.yaml b/airbyte-integrations/connectors/source-snapchat-marketing/metadata.yaml index a024f27d9973..82c281aeefce 100644 --- a/airbyte-integrations/connectors/source-snapchat-marketing/metadata.yaml +++ b/airbyte-integrations/connectors/source-snapchat-marketing/metadata.yaml @@ -27,6 +27,7 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/snapchat-marketing tags: - language:python + - cdk:python ab_internal: sl: 200 ql: 400 diff --git a/airbyte-integrations/connectors/source-snowflake/metadata.yaml b/airbyte-integrations/connectors/source-snowflake/metadata.yaml index 3015891a4599..6a1042c4357a 100644 --- a/airbyte-integrations/connectors/source-snowflake/metadata.yaml +++ b/airbyte-integrations/connectors/source-snowflake/metadata.yaml @@ -24,5 +24,4 @@ data: supportLevel: community tags: - language:java - - language:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-sonar-cloud/metadata.yaml b/airbyte-integrations/connectors/source-sonar-cloud/metadata.yaml index cc224211c7bd..f7e2e64c098a 100644 --- a/airbyte-integrations/connectors/source-sonar-cloud/metadata.yaml +++ b/airbyte-integrations/connectors/source-sonar-cloud/metadata.yaml @@ -23,8 +23,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/sonar-cloud tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-spacex-api/metadata.yaml b/airbyte-integrations/connectors/source-spacex-api/metadata.yaml index 6f1677c520b9..56eb8b8bbc55 100644 --- a/airbyte-integrations/connectors/source-spacex-api/metadata.yaml +++ 
b/airbyte-integrations/connectors/source-spacex-api/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/spacex-api tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-square/metadata.yaml b/airbyte-integrations/connectors/source-square/metadata.yaml index 7e85289fa520..f43b9dcec37a 100644 --- a/airbyte-integrations/connectors/source-square/metadata.yaml +++ b/airbyte-integrations/connectors/source-square/metadata.yaml @@ -28,6 +28,6 @@ data: releaseStage: beta supportLevel: community tags: - - language:low-code - language:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-statuspage/metadata.yaml b/airbyte-integrations/connectors/source-statuspage/metadata.yaml index 45dadd21b5cb..7619f1516e17 100644 --- a/airbyte-integrations/connectors/source-statuspage/metadata.yaml +++ b/airbyte-integrations/connectors/source-statuspage/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/statuspage tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-strava/metadata.yaml b/airbyte-integrations/connectors/source-strava/metadata.yaml index fd85c85ce0b1..7a1cbeddd6ad 100644 --- a/airbyte-integrations/connectors/source-strava/metadata.yaml +++ b/airbyte-integrations/connectors/source-strava/metadata.yaml @@ -25,7 +25,8 @@ data: supportLevel: community documentationUrl: https://docs.airbyte.com/integrations/sources/strava tags: - - language:low-code + - language:python + - cdk:low-code ab_internal: sl: 100 ql: 300 diff --git a/airbyte-integrations/connectors/source-stripe/metadata.yaml b/airbyte-integrations/connectors/source-stripe/metadata.yaml index 75c4e5b6eccb..cb75f6f68a4c 100644 --- a/airbyte-integrations/connectors/source-stripe/metadata.yaml +++ b/airbyte-integrations/connectors/source-stripe/metadata.yaml @@ -52,4 +52,5 @@ data: supportLevel: certified tags: - language:python + - cdk:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-survey-sparrow/metadata.yaml b/airbyte-integrations/connectors/source-survey-sparrow/metadata.yaml index 82c2fc3cb21a..55f0e1430873 100644 --- a/airbyte-integrations/connectors/source-survey-sparrow/metadata.yaml +++ b/airbyte-integrations/connectors/source-survey-sparrow/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/survey-sparrow tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-surveycto/metadata.yaml b/airbyte-integrations/connectors/source-surveycto/metadata.yaml index 119a22f9ed49..6480b2420c97 100644 --- a/airbyte-integrations/connectors/source-surveycto/metadata.yaml +++ b/airbyte-integrations/connectors/source-surveycto/metadata.yaml @@ -21,6 +21,7 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/surveycto tags: - language:python + - cdk:python ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-surveymonkey/metadata.yaml b/airbyte-integrations/connectors/source-surveymonkey/metadata.yaml index e3c8f55b0746..ad966800ff01 100644 --- a/airbyte-integrations/connectors/source-surveymonkey/metadata.yaml +++ 
b/airbyte-integrations/connectors/source-surveymonkey/metadata.yaml @@ -30,4 +30,5 @@ data: supportLevel: certified tags: - language:python + - cdk:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-talkdesk-explore/metadata.yaml b/airbyte-integrations/connectors/source-talkdesk-explore/metadata.yaml index 6d9238de6442..52007982e79c 100644 --- a/airbyte-integrations/connectors/source-talkdesk-explore/metadata.yaml +++ b/airbyte-integrations/connectors/source-talkdesk-explore/metadata.yaml @@ -21,6 +21,7 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/talkdesk-explore tags: - language:python + - cdk:python ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-tempo/metadata.yaml b/airbyte-integrations/connectors/source-tempo/metadata.yaml index b40b265b8b73..b691a604de9c 100644 --- a/airbyte-integrations/connectors/source-tempo/metadata.yaml +++ b/airbyte-integrations/connectors/source-tempo/metadata.yaml @@ -27,6 +27,6 @@ data: releaseStage: beta supportLevel: community tags: - - language:low-code - language:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-teradata/metadata.yaml b/airbyte-integrations/connectors/source-teradata/metadata.yaml index 108450bfe625..30c31f9cb10b 100644 --- a/airbyte-integrations/connectors/source-teradata/metadata.yaml +++ b/airbyte-integrations/connectors/source-teradata/metadata.yaml @@ -20,7 +20,6 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/teradata tags: - language:java - - language:python ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-the-guardian-api/metadata.yaml b/airbyte-integrations/connectors/source-the-guardian-api/metadata.yaml index f6a56606c61e..7537b9c3156b 100644 --- a/airbyte-integrations/connectors/source-the-guardian-api/metadata.yaml +++ b/airbyte-integrations/connectors/source-the-guardian-api/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/the-guardian-api tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-tidb/metadata.yaml b/airbyte-integrations/connectors/source-tidb/metadata.yaml index 1d277882a8e0..4fb220e593fa 100644 --- a/airbyte-integrations/connectors/source-tidb/metadata.yaml +++ b/airbyte-integrations/connectors/source-tidb/metadata.yaml @@ -21,7 +21,6 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/tidb tags: - language:java - - language:python ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-tiktok-marketing/metadata.yaml b/airbyte-integrations/connectors/source-tiktok-marketing/metadata.yaml index e0eae9d0c7e5..7a7377d41d17 100644 --- a/airbyte-integrations/connectors/source-tiktok-marketing/metadata.yaml +++ b/airbyte-integrations/connectors/source-tiktok-marketing/metadata.yaml @@ -31,6 +31,7 @@ data: supportLevel: certified tags: - language:python + - cdk:python suggestedStreams: streams: - ads_reports_daily diff --git a/airbyte-integrations/connectors/source-timely/metadata.yaml b/airbyte-integrations/connectors/source-timely/metadata.yaml index 0b371b513fd3..c82759cf5781 100644 --- a/airbyte-integrations/connectors/source-timely/metadata.yaml +++ b/airbyte-integrations/connectors/source-timely/metadata.yaml @@ -25,7 +25,8 @@ data: supportLevel: community documentationUrl: 
https://docs.airbyte.com/integrations/sources/timely tags: - - language:low-code + - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-tmdb/metadata.yaml b/airbyte-integrations/connectors/source-tmdb/metadata.yaml index eb0da1758fad..f8f526f47408 100644 --- a/airbyte-integrations/connectors/source-tmdb/metadata.yaml +++ b/airbyte-integrations/connectors/source-tmdb/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/tmdb tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-todoist/metadata.yaml b/airbyte-integrations/connectors/source-todoist/metadata.yaml index a7e8abc8a55d..1e3bb3393a38 100644 --- a/airbyte-integrations/connectors/source-todoist/metadata.yaml +++ b/airbyte-integrations/connectors/source-todoist/metadata.yaml @@ -30,5 +30,6 @@ data: supportLevel: community documentationUrl: https://docs.airbyte.com/integrations/sources/todoist tags: - - language:low-code + - language:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-toggl/metadata.yaml b/airbyte-integrations/connectors/source-toggl/metadata.yaml index 4edc40421588..c8fdf14a7696 100644 --- a/airbyte-integrations/connectors/source-toggl/metadata.yaml +++ b/airbyte-integrations/connectors/source-toggl/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/toggl tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-tplcentral/metadata.yaml b/airbyte-integrations/connectors/source-tplcentral/metadata.yaml index cb14f697ec39..efa50100b631 100644 --- a/airbyte-integrations/connectors/source-tplcentral/metadata.yaml +++ b/airbyte-integrations/connectors/source-tplcentral/metadata.yaml @@ -20,6 +20,7 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/tplcentral tags: - language:python + - cdk:python ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-trello/metadata.yaml b/airbyte-integrations/connectors/source-trello/metadata.yaml index 2f7a89819189..6c845b0665d5 100644 --- a/airbyte-integrations/connectors/source-trello/metadata.yaml +++ b/airbyte-integrations/connectors/source-trello/metadata.yaml @@ -33,5 +33,6 @@ data: releaseStage: beta supportLevel: community tags: - - language:low-code + - language:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-trustpilot/metadata.yaml b/airbyte-integrations/connectors/source-trustpilot/metadata.yaml index 7ac069f60bdd..34dfb5f4a1b3 100644 --- a/airbyte-integrations/connectors/source-trustpilot/metadata.yaml +++ b/airbyte-integrations/connectors/source-trustpilot/metadata.yaml @@ -21,6 +21,7 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/trustpilot tags: - language:python + - cdk:python ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-tvmaze-schedule/metadata.yaml b/airbyte-integrations/connectors/source-tvmaze-schedule/metadata.yaml index a51eed571207..f4526bd3cc03 100644 --- a/airbyte-integrations/connectors/source-tvmaze-schedule/metadata.yaml +++ b/airbyte-integrations/connectors/source-tvmaze-schedule/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: 
https://docs.airbyte.com/integrations/sources/tvmaze-schedule tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-twilio-taskrouter/metadata.yaml b/airbyte-integrations/connectors/source-twilio-taskrouter/metadata.yaml index e8a42439382d..a15bf9abe327 100644 --- a/airbyte-integrations/connectors/source-twilio-taskrouter/metadata.yaml +++ b/airbyte-integrations/connectors/source-twilio-taskrouter/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/twilio-taskrouter tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-twilio/metadata.yaml b/airbyte-integrations/connectors/source-twilio/metadata.yaml index 0037ed37d93e..627130c6cbc0 100644 --- a/airbyte-integrations/connectors/source-twilio/metadata.yaml +++ b/airbyte-integrations/connectors/source-twilio/metadata.yaml @@ -33,4 +33,5 @@ data: supportLevel: certified tags: - language:python + - cdk:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-twitter/metadata.yaml b/airbyte-integrations/connectors/source-twitter/metadata.yaml index 8e4b662909de..155d17b92119 100644 --- a/airbyte-integrations/connectors/source-twitter/metadata.yaml +++ b/airbyte-integrations/connectors/source-twitter/metadata.yaml @@ -27,6 +27,6 @@ data: releaseStage: beta supportLevel: community tags: - - language:low-code - language:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-tyntec-sms/metadata.yaml b/airbyte-integrations/connectors/source-tyntec-sms/metadata.yaml index 3a5278d572b4..3eaa47d1f19d 100644 --- a/airbyte-integrations/connectors/source-tyntec-sms/metadata.yaml +++ b/airbyte-integrations/connectors/source-tyntec-sms/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/tyntec-sms tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-typeform/metadata.yaml b/airbyte-integrations/connectors/source-typeform/metadata.yaml index c8c2bb6379e4..186fe506f38c 100644 --- a/airbyte-integrations/connectors/source-typeform/metadata.yaml +++ b/airbyte-integrations/connectors/source-typeform/metadata.yaml @@ -40,5 +40,6 @@ data: upgradeDeadline: "2023-09-25" supportLevel: certified tags: - - language:low-code + - language:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-unleash/metadata.yaml b/airbyte-integrations/connectors/source-unleash/metadata.yaml index 9fe4ffaea750..51912f18eb2f 100644 --- a/airbyte-integrations/connectors/source-unleash/metadata.yaml +++ b/airbyte-integrations/connectors/source-unleash/metadata.yaml @@ -21,7 +21,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/unleash tags: - - language:low-code + - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-us-census/metadata.yaml b/airbyte-integrations/connectors/source-us-census/metadata.yaml index 81721da1cc16..84c44eece652 100644 --- a/airbyte-integrations/connectors/source-us-census/metadata.yaml +++ b/airbyte-integrations/connectors/source-us-census/metadata.yaml @@ -21,6 +21,7 @@ data: documentationUrl: 
https://docs.airbyte.com/integrations/sources/us-census tags: - language:python + - cdk:python ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-vantage/metadata.yaml b/airbyte-integrations/connectors/source-vantage/metadata.yaml index 1c76b2dcf45e..16ee7d2f6a34 100644 --- a/airbyte-integrations/connectors/source-vantage/metadata.yaml +++ b/airbyte-integrations/connectors/source-vantage/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/vantage tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-visma-economic/metadata.yaml b/airbyte-integrations/connectors/source-visma-economic/metadata.yaml index 9fb3c2bce09b..b960076d5b1f 100644 --- a/airbyte-integrations/connectors/source-visma-economic/metadata.yaml +++ b/airbyte-integrations/connectors/source-visma-economic/metadata.yaml @@ -1,7 +1,7 @@ data: allowedHosts: hosts: - - "restapi.e-conomic.com" + - restapi.e-conomic.com remoteRegistries: pypi: enabled: true @@ -25,5 +25,6 @@ data: supportLevel: community documentationUrl: https://docs.airbyte.com/integrations/sources/visma-economic tags: - - language:low-code + - language:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-vitally/metadata.yaml b/airbyte-integrations/connectors/source-vitally/metadata.yaml index 3098c58d83eb..3f89679d0402 100644 --- a/airbyte-integrations/connectors/source-vitally/metadata.yaml +++ b/airbyte-integrations/connectors/source-vitally/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/vitally tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-waiteraid/metadata.yaml b/airbyte-integrations/connectors/source-waiteraid/metadata.yaml index 36ae2230265c..ba8fdd60d624 100644 --- a/airbyte-integrations/connectors/source-waiteraid/metadata.yaml +++ b/airbyte-integrations/connectors/source-waiteraid/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/waiteraid tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-weatherstack/metadata.yaml b/airbyte-integrations/connectors/source-weatherstack/metadata.yaml index d7583620b2c9..beadf66616fb 100644 --- a/airbyte-integrations/connectors/source-weatherstack/metadata.yaml +++ b/airbyte-integrations/connectors/source-weatherstack/metadata.yaml @@ -21,6 +21,7 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/weatherstack tags: - language:python + - cdk:python ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-webflow/metadata.yaml b/airbyte-integrations/connectors/source-webflow/metadata.yaml index c88f34c1a656..5e94791a830a 100644 --- a/airbyte-integrations/connectors/source-webflow/metadata.yaml +++ b/airbyte-integrations/connectors/source-webflow/metadata.yaml @@ -21,6 +21,7 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/webflow tags: - language:python + - cdk:python ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-whisky-hunter/metadata.yaml b/airbyte-integrations/connectors/source-whisky-hunter/metadata.yaml index b82c59277318..f90689782450 100644 --- 
a/airbyte-integrations/connectors/source-whisky-hunter/metadata.yaml +++ b/airbyte-integrations/connectors/source-whisky-hunter/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/whisky-hunter tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-wikipedia-pageviews/metadata.yaml b/airbyte-integrations/connectors/source-wikipedia-pageviews/metadata.yaml index f32b4074975a..b6a37d4a78c0 100644 --- a/airbyte-integrations/connectors/source-wikipedia-pageviews/metadata.yaml +++ b/airbyte-integrations/connectors/source-wikipedia-pageviews/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/wikipedia-pageviews tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-woocommerce/metadata.yaml b/airbyte-integrations/connectors/source-woocommerce/metadata.yaml index d66ba0449d4a..131f58ab2e88 100644 --- a/airbyte-integrations/connectors/source-woocommerce/metadata.yaml +++ b/airbyte-integrations/connectors/source-woocommerce/metadata.yaml @@ -27,6 +27,6 @@ data: releaseStage: beta supportLevel: community tags: - - language:low-code - language:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-workable/metadata.yaml b/airbyte-integrations/connectors/source-workable/metadata.yaml index 0257dc2802d5..cb42d3e561ad 100644 --- a/airbyte-integrations/connectors/source-workable/metadata.yaml +++ b/airbyte-integrations/connectors/source-workable/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/workable tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-workramp/metadata.yaml b/airbyte-integrations/connectors/source-workramp/metadata.yaml index d704e70619cb..72869f7da9cf 100644 --- a/airbyte-integrations/connectors/source-workramp/metadata.yaml +++ b/airbyte-integrations/connectors/source-workramp/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/workramp tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-wrike/metadata.yaml b/airbyte-integrations/connectors/source-wrike/metadata.yaml index d8c02979a9aa..0b412d807e62 100644 --- a/airbyte-integrations/connectors/source-wrike/metadata.yaml +++ b/airbyte-integrations/connectors/source-wrike/metadata.yaml @@ -27,5 +27,6 @@ data: supportLevel: community documentationUrl: https://docs.airbyte.com/integrations/sources/wrike tags: - - language:low-code + - language:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-xero/metadata.yaml b/airbyte-integrations/connectors/source-xero/metadata.yaml index 164c3987663f..87a99df487fc 100644 --- a/airbyte-integrations/connectors/source-xero/metadata.yaml +++ b/airbyte-integrations/connectors/source-xero/metadata.yaml @@ -24,6 +24,7 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/xero tags: - language:python + - cdk:python ab_internal: sl: 100 ql: 300 diff --git a/airbyte-integrations/connectors/source-xkcd/metadata.yaml 
b/airbyte-integrations/connectors/source-xkcd/metadata.yaml index cd6153d0b8da..47586f983fe2 100644 --- a/airbyte-integrations/connectors/source-xkcd/metadata.yaml +++ b/airbyte-integrations/connectors/source-xkcd/metadata.yaml @@ -21,6 +21,7 @@ data: documentationUrl: https://docs.airbyte.com/integrations/sources/xkcd tags: - language:python + - cdk:python ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-yahoo-finance-price/metadata.yaml b/airbyte-integrations/connectors/source-yahoo-finance-price/metadata.yaml index 081e0d18c685..7a694b22cfb4 100644 --- a/airbyte-integrations/connectors/source-yahoo-finance-price/metadata.yaml +++ b/airbyte-integrations/connectors/source-yahoo-finance-price/metadata.yaml @@ -20,7 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/yahoo-finance-price tags: - - language:low-code + - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-yandex-metrica/metadata.yaml b/airbyte-integrations/connectors/source-yandex-metrica/metadata.yaml index c2cb2e985886..8dcfc589c7f5 100644 --- a/airbyte-integrations/connectors/source-yandex-metrica/metadata.yaml +++ b/airbyte-integrations/connectors/source-yandex-metrica/metadata.yaml @@ -28,4 +28,5 @@ data: supportLevel: community tags: - language:python + - cdk:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-yotpo/metadata.yaml b/airbyte-integrations/connectors/source-yotpo/metadata.yaml index efb0e929ba31..a97bdd36d6e6 100644 --- a/airbyte-integrations/connectors/source-yotpo/metadata.yaml +++ b/airbyte-integrations/connectors/source-yotpo/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/yotpo tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-younium/metadata.yaml b/airbyte-integrations/connectors/source-younium/metadata.yaml index c1b23187fcec..d9b36e7aa69d 100644 --- a/airbyte-integrations/connectors/source-younium/metadata.yaml +++ b/airbyte-integrations/connectors/source-younium/metadata.yaml @@ -27,5 +27,6 @@ data: supportLevel: community documentationUrl: https://docs.airbyte.com/integrations/sources/younium tags: - - language:low-code + - language:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-youtube-analytics/metadata.yaml b/airbyte-integrations/connectors/source-youtube-analytics/metadata.yaml index 6168a00de77f..f80eb088461e 100644 --- a/airbyte-integrations/connectors/source-youtube-analytics/metadata.yaml +++ b/airbyte-integrations/connectors/source-youtube-analytics/metadata.yaml @@ -28,4 +28,5 @@ data: supportLevel: community tags: - language:python + - cdk:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-zapier-supported-storage/metadata.yaml b/airbyte-integrations/connectors/source-zapier-supported-storage/metadata.yaml index c3a192df8c1a..f3068650273d 100644 --- a/airbyte-integrations/connectors/source-zapier-supported-storage/metadata.yaml +++ b/airbyte-integrations/connectors/source-zapier-supported-storage/metadata.yaml @@ -20,8 +20,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/zapier-supported-storage tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git 
a/airbyte-integrations/connectors/source-zendesk-chat/metadata.yaml b/airbyte-integrations/connectors/source-zendesk-chat/metadata.yaml index 1aa58d5146b5..3657e930c7c4 100644 --- a/airbyte-integrations/connectors/source-zendesk-chat/metadata.yaml +++ b/airbyte-integrations/connectors/source-zendesk-chat/metadata.yaml @@ -30,4 +30,5 @@ data: supportLevel: certified tags: - language:python + - cdk:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-zendesk-sell/metadata.yaml b/airbyte-integrations/connectors/source-zendesk-sell/metadata.yaml index 511885e7ea86..33c51c4c9aff 100644 --- a/airbyte-integrations/connectors/source-zendesk-sell/metadata.yaml +++ b/airbyte-integrations/connectors/source-zendesk-sell/metadata.yaml @@ -23,7 +23,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/zendesk-sell tags: - - language:low-code + - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-zendesk-sunshine/metadata.yaml b/airbyte-integrations/connectors/source-zendesk-sunshine/metadata.yaml index f226bcd50be1..d02428073d81 100644 --- a/airbyte-integrations/connectors/source-zendesk-sunshine/metadata.yaml +++ b/airbyte-integrations/connectors/source-zendesk-sunshine/metadata.yaml @@ -24,7 +24,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/zendesk-sunshine tags: - - language:low-code + - language:python + - cdk:low-code ab_internal: sl: 100 ql: 100 diff --git a/airbyte-integrations/connectors/source-zendesk-support/metadata.yaml b/airbyte-integrations/connectors/source-zendesk-support/metadata.yaml index 8f6493cfd6c7..130c5f9ad8db 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/metadata.yaml +++ b/airbyte-integrations/connectors/source-zendesk-support/metadata.yaml @@ -57,4 +57,5 @@ data: supportLevel: certified tags: - language:python + - cdk:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-zendesk-talk/metadata.yaml b/airbyte-integrations/connectors/source-zendesk-talk/metadata.yaml index 8c9d2e0dddd2..212e2a43ea26 100644 --- a/airbyte-integrations/connectors/source-zendesk-talk/metadata.yaml +++ b/airbyte-integrations/connectors/source-zendesk-talk/metadata.yaml @@ -31,4 +31,5 @@ data: supportLevel: certified tags: - language:python + - cdk:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-zenefits/metadata.yaml b/airbyte-integrations/connectors/source-zenefits/metadata.yaml index 7fcc31df2d8b..7ae08d859d96 100644 --- a/airbyte-integrations/connectors/source-zenefits/metadata.yaml +++ b/airbyte-integrations/connectors/source-zenefits/metadata.yaml @@ -28,5 +28,6 @@ data: ql: 100 supportLevel: community tags: - - language:low-code + - language:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-zenloop/metadata.yaml b/airbyte-integrations/connectors/source-zenloop/metadata.yaml index 33d3698900e9..e586ad077d1e 100644 --- a/airbyte-integrations/connectors/source-zenloop/metadata.yaml +++ b/airbyte-integrations/connectors/source-zenloop/metadata.yaml @@ -23,8 +23,8 @@ data: releaseStage: beta documentationUrl: https://docs.airbyte.com/integrations/sources/zenloop tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 300 diff --git a/airbyte-integrations/connectors/source-zoho-crm/metadata.yaml 
b/airbyte-integrations/connectors/source-zoho-crm/metadata.yaml index 275e22b752cd..4e0bec010f6f 100644 --- a/airbyte-integrations/connectors/source-zoho-crm/metadata.yaml +++ b/airbyte-integrations/connectors/source-zoho-crm/metadata.yaml @@ -25,4 +25,5 @@ data: supportLevel: community tags: - language:python + - cdk:python metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-zoom/metadata.yaml b/airbyte-integrations/connectors/source-zoom/metadata.yaml index 7f8e9be8b7c3..e8d75508659c 100644 --- a/airbyte-integrations/connectors/source-zoom/metadata.yaml +++ b/airbyte-integrations/connectors/source-zoom/metadata.yaml @@ -21,8 +21,8 @@ data: releaseStage: alpha documentationUrl: https://docs.airbyte.com/integrations/sources/zoom tags: - - language:low-code - language:python + - cdk:low-code ab_internal: sl: 100 ql: 200 diff --git a/airbyte-integrations/connectors/source-zuora/metadata.yaml b/airbyte-integrations/connectors/source-zuora/metadata.yaml index 7a4d4b53dfb6..1a5dfabd9901 100644 --- a/airbyte-integrations/connectors/source-zuora/metadata.yaml +++ b/airbyte-integrations/connectors/source-zuora/metadata.yaml @@ -25,4 +25,5 @@ data: supportLevel: archived tags: - language:python + - cdk:python metadataSpecVersion: "1.0" From 72eb8a6b5c803bf0069663767dcb718166ee3898 Mon Sep 17 00:00:00 2001 From: Daryna Ishchenko <80129833+darynaishchenko@users.noreply.github.com> Date: Thu, 7 Mar 2024 14:06:50 +0200 Subject: [PATCH 118/172] :sparkles: Source Bing Ads: new streams: Audience Performance Report, Goals And Funnels Report, Product Search Query Performance Report (#35812) --- .../acceptance-test-config.yml | 48 +++++ .../integration_tests/configured_catalog.json | 120 ++++++++++++ .../connectors/source-bing-ads/metadata.yaml | 2 +- .../connectors/source-bing-ads/pyproject.toml | 2 +- .../source_bing_ads/report_streams.py | 106 +++++++++++ .../schemas/audience_performance_report.json | 152 +++++++++++++++ .../audience_performance_report_hourly.json | 153 +++++++++++++++ .../schemas/goals_and_funnels_report.json | 86 +++++++++ .../goals_and_funnels_report_hourly.json | 87 +++++++++ ...oduct_search_query_performance_report.json | 175 +++++++++++++++++ ...earch_query_performance_report_hourly.json | 176 ++++++++++++++++++ .../source-bing-ads/source_bing_ads/source.py | 15 ++ .../test_audience_performance_report.py | 44 +++++ .../test_goals_and_funnels_report.py | 44 +++++ ...product_search_query_performance_report.py | 44 +++++ .../integrations/test_report_stream.py | 8 +- .../audience_performance_report_daily.csv | 9 + ...e_performance_report_daily_incremental.csv | 9 + .../audience_performance_report_hourly.csv | 25 +++ ..._performance_report_hourly_incremental.csv | 25 +++ .../audience_performance_report_monthly.csv | 7 + ...performance_report_monthly_incremental.csv | 7 + .../audience_performance_report_weekly.csv | 4 + ..._performance_report_weekly_incremental.csv | 6 + .../goals_and_funnels_report_daily.csv | 9 + ...s_and_funnels_report_daily_incremental.csv | 9 + .../goals_and_funnels_report_hourly.csv | 25 +++ ..._and_funnels_report_hourly_incremental.csv | 25 +++ .../goals_and_funnels_report_monthly.csv | 7 + ...and_funnels_report_monthly_incremental.csv | 7 + .../goals_and_funnels_report_weekly.csv | 4 + ..._and_funnels_report_weekly_incremental.csv | 6 + ..._search_query_performance_report_daily.csv | 9 + ...y_performance_report_daily_incremental.csv | 9 + ...search_query_performance_report_hourly.csv | 25 +++ ..._performance_report_hourly_incremental.csv | 
25 +++ ...earch_query_performance_report_monthly.csv | 7 + ...performance_report_monthly_incremental.csv | 7 + ...search_query_performance_report_weekly.csv | 4 + ..._performance_report_weekly_incremental.csv | 6 + ...dience_performance_report_daily_state.json | 5 + ...ience_performance_report_hourly_state.json | 5 + ...ence_performance_report_monthly_state.json | 5 + ...ience_performance_report_weekly_state.json | 5 + .../goals_and_funnels_report_daily_state.json | 5 + ...goals_and_funnels_report_hourly_state.json | 5 + ...oals_and_funnels_report_monthly_state.json | 5 + ...goals_and_funnels_report_weekly_state.json | 5 + ..._query_performance_report_daily_state.json | 5 + ...query_performance_report_hourly_state.json | 5 + ...uery_performance_report_monthly_state.json | 5 + ...query_performance_report_weekly_state.json | 5 + .../source-bing-ads/unit_tests/test_source.py | 2 +- docs/integrations/sources/bing-ads.md | 23 ++- 54 files changed, 1617 insertions(+), 6 deletions(-) create mode 100644 airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/audience_performance_report.json create mode 100644 airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/audience_performance_report_hourly.json create mode 100644 airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/goals_and_funnels_report.json create mode 100644 airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/goals_and_funnels_report_hourly.json create mode 100644 airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/product_search_query_performance_report.json create mode 100644 airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/product_search_query_performance_report_hourly.json create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_audience_performance_report.py create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_goals_and_funnels_report.py create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_product_search_query_performance_report.py create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_daily.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_daily_incremental.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_hourly.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_hourly_incremental.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_monthly.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_monthly_incremental.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_weekly.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_weekly_incremental.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_daily.csv create mode 100644 
airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_daily_incremental.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_hourly.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_hourly_incremental.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_monthly.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_monthly_incremental.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_weekly.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_weekly_incremental.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_daily.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_daily_incremental.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_hourly.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_hourly_incremental.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_monthly.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_monthly_incremental.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_weekly.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_weekly_incremental.csv create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/audience_performance_report_daily_state.json create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/audience_performance_report_hourly_state.json create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/audience_performance_report_monthly_state.json create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/audience_performance_report_weekly_state.json create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/goals_and_funnels_report_daily_state.json create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/goals_and_funnels_report_hourly_state.json create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/goals_and_funnels_report_monthly_state.json create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/goals_and_funnels_report_weekly_state.json create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_search_query_performance_report_daily_state.json create mode 100644 
airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_search_query_performance_report_hourly_state.json create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_search_query_performance_report_monthly_state.json create mode 100644 airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_search_query_performance_report_weekly_state.json diff --git a/airbyte-integrations/connectors/source-bing-ads/acceptance-test-config.yml b/airbyte-integrations/connectors/source-bing-ads/acceptance-test-config.yml index 8a89d4511f33..93c943f6e769 100644 --- a/airbyte-integrations/connectors/source-bing-ads/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-bing-ads/acceptance-test-config.yml @@ -33,6 +33,30 @@ acceptance_tests: bypass_reason: "Test Account doesn't have Merchant Center configured to add Products, testing in integration test" - name: product_dimension_performance_report_monthly bypass_reason: "Test Account doesn't have Merchant Center configured to add Products, testing in integration test" + - name: product_search_query_performance_report_hourly + bypass_reason: "Test Account doesn't have Merchant Center configured to add Products, testing in integration test" + - name: product_search_query_performance_report_daily + bypass_reason: "Test Account doesn't have Merchant Center configured to add Products, testing in integration test" + - name: product_search_query_performance_report_weekly + bypass_reason: "Test Account doesn't have Merchant Center configured to add Products, testing in integration test" + - name: product_search_query_performance_report_monthly + bypass_reason: "Test Account doesn't have Merchant Center configured to add Products, testing in integration test" + - name: audience_performance_report_daily + bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign." + - name: audience_performance_report_hourly + bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign." + - name: audience_performance_report_weekly + bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign." + - name: audience_performance_report_monthly + bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign." + - name: goals_and_funnels_report_daily + bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign." + - name: goals_and_funnels_report_hourly + bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign." + - name: goals_and_funnels_report_weekly + bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign." + - name: goals_and_funnels_report_monthly + bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign." 
- name: account_performance_report_hourly bypass_reason: "Hourly reports are disabled, because sync is too long" - name: ad_group_performance_report_hourly @@ -110,6 +134,30 @@ acceptance_tests: bypass_reason: "Test Account doesn't have Merchant Center configured to add Products, testing in integration test" - name: product_dimension_performance_report_monthly bypass_reason: "Test Account doesn't have Merchant Center configured to add Products, testing in integration test" + - name: product_search_query_performance_report_hourly + bypass_reason: "Test Account doesn't have Merchant Center configured to add Products, testing in integration test" + - name: product_search_query_performance_report_daily + bypass_reason: "Test Account doesn't have Merchant Center configured to add Products, testing in integration test" + - name: product_search_query_performance_report_weekly + bypass_reason: "Test Account doesn't have Merchant Center configured to add Products, testing in integration test" + - name: product_search_query_performance_report_monthly + bypass_reason: "Test Account doesn't have Merchant Center configured to add Products, testing in integration test" + - name: audience_performance_report_daily + bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign." + - name: audience_performance_report_hourly + bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign." + - name: audience_performance_report_weekly + bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign." + - name: audience_performance_report_monthly + bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign." + - name: goals_and_funnels_report_daily + bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign." + - name: goals_and_funnels_report_hourly + bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign." + - name: goals_and_funnels_report_weekly + bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign." + - name: goals_and_funnels_report_monthly + bypass_reason: "Test Account doesn't have audiences associated with any ad groups in the campaign." 
- name: app_install_ads bypass_reason: "Can not populate; new campaign with link to app needed; feature is not available yet" - name: app_install_ad_labels diff --git a/airbyte-integrations/connectors/source-bing-ads/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-bing-ads/integration_tests/configured_catalog.json index 83f21c6ff1c4..decc20422805 100644 --- a/airbyte-integrations/connectors/source-bing-ads/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-bing-ads/integration_tests/configured_catalog.json @@ -635,6 +635,126 @@ "sync_mode": "incremental", "cursor_field": ["TimePeriod"], "destination_sync_mode": "append" + }, + { + "stream": { + "name": "audience_performance_report_daily", + "json_schema": {}, + "supported_sync_modes": ["incremental", "full_refresh"] + }, + "sync_mode": "incremental", + "cursor_field": ["TimePeriod"], + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "audience_performance_report_hourly", + "json_schema": {}, + "supported_sync_modes": ["incremental", "full_refresh"] + }, + "sync_mode": "incremental", + "cursor_field": ["TimePeriod"], + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "audience_performance_report_weekly", + "json_schema": {}, + "supported_sync_modes": ["incremental", "full_refresh"] + }, + "sync_mode": "incremental", + "cursor_field": ["TimePeriod"], + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "audience_performance_report_monthly", + "json_schema": {}, + "supported_sync_modes": ["incremental", "full_refresh"] + }, + "sync_mode": "incremental", + "cursor_field": ["TimePeriod"], + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "goals_and_funnels_report_daily", + "json_schema": {}, + "supported_sync_modes": ["incremental", "full_refresh"] + }, + "sync_mode": "incremental", + "cursor_field": ["TimePeriod"], + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "goals_and_funnels_report_hourly", + "json_schema": {}, + "supported_sync_modes": ["incremental", "full_refresh"] + }, + "sync_mode": "incremental", + "cursor_field": ["TimePeriod"], + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "goals_and_funnels_report_weekly", + "json_schema": {}, + "supported_sync_modes": ["incremental", "full_refresh"] + }, + "sync_mode": "incremental", + "cursor_field": ["TimePeriod"], + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "goals_and_funnels_report_monthly", + "json_schema": {}, + "supported_sync_modes": ["incremental", "full_refresh"] + }, + "sync_mode": "incremental", + "cursor_field": ["TimePeriod"], + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "product_search_query_performance_report_daily", + "json_schema": {}, + "supported_sync_modes": ["incremental", "full_refresh"] + }, + "sync_mode": "incremental", + "cursor_field": ["TimePeriod"], + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "product_search_query_performance_report_hourly", + "json_schema": {}, + "supported_sync_modes": ["incremental", "full_refresh"] + }, + "sync_mode": "incremental", + "cursor_field": ["TimePeriod"], + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "product_search_query_performance_report_weekly", + "json_schema": {}, + "supported_sync_modes": ["incremental", "full_refresh"] + }, + "sync_mode": "incremental", + "cursor_field": ["TimePeriod"], + "destination_sync_mode": "append" + }, + { + "stream": { + 
"name": "product_search_query_performance_report_monthly", + "json_schema": {}, + "supported_sync_modes": ["incremental", "full_refresh"] + }, + "sync_mode": "incremental", + "cursor_field": ["TimePeriod"], + "destination_sync_mode": "append" } ] } diff --git a/airbyte-integrations/connectors/source-bing-ads/metadata.yaml b/airbyte-integrations/connectors/source-bing-ads/metadata.yaml index 77384fe6ea3a..127fe231e9f9 100644 --- a/airbyte-integrations/connectors/source-bing-ads/metadata.yaml +++ b/airbyte-integrations/connectors/source-bing-ads/metadata.yaml @@ -16,7 +16,7 @@ data: connectorSubtype: api connectorType: source definitionId: 47f25999-dd5e-4636-8c39-e7cea2453331 - dockerImageTag: 2.2.0 + dockerImageTag: 2.3.0 dockerRepository: airbyte/source-bing-ads documentationUrl: https://docs.airbyte.com/integrations/sources/bing-ads githubIssueLabel: source-bing-ads diff --git a/airbyte-integrations/connectors/source-bing-ads/pyproject.toml b/airbyte-integrations/connectors/source-bing-ads/pyproject.toml index 043177bac838..9da2f525ae07 100644 --- a/airbyte-integrations/connectors/source-bing-ads/pyproject.toml +++ b/airbyte-integrations/connectors/source-bing-ads/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "2.2.0" +version = "2.3.0" name = "source-bing-ads" description = "Source implementation for Bing Ads." authors = [ "Airbyte ",] diff --git a/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/report_streams.py b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/report_streams.py index f6e84836c099..5106ac52cfa4 100644 --- a/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/report_streams.py +++ b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/report_streams.py @@ -709,6 +709,112 @@ class ProductDimensionPerformanceReportMonthly(ProductDimensionPerformanceReport report_aggregation = "Monthly" +class ProductSearchQueryPerformanceReport(BingAdsReportingServicePerformanceStream, ABC): + """ + https://learn.microsoft.com/en-us/advertising/reporting-service/productsearchqueryperformancereportrequest?view=bingads-13 + """ + + report_name: str = "ProductSearchQueryPerformanceReport" + report_schema_name = "product_search_query_performance_report" + primary_key = [ + "AccountId", + "TimePeriod", + "CampaignId", + "AdId", + "AdGroupId", + "SearchQuery", + "DeviceType", + "DeviceOS", + "Language", + "Network", + ] + + +class ProductSearchQueryPerformanceReportHourly(HourlyReportTransformerMixin, ProductSearchQueryPerformanceReport): + report_aggregation = "Hourly" + report_schema_name = "product_search_query_performance_report_hourly" + + +class ProductSearchQueryPerformanceReportDaily(ProductSearchQueryPerformanceReport): + report_aggregation = "Daily" + + +class ProductSearchQueryPerformanceReportWeekly(ProductSearchQueryPerformanceReport): + report_aggregation = "Weekly" + + +class ProductSearchQueryPerformanceReportMonthly(ProductSearchQueryPerformanceReport): + report_aggregation = "Monthly" + + +class GoalsAndFunnelsReport(BingAdsReportingServicePerformanceStream, ABC): + """ + https://learn.microsoft.com/en-us/advertising/reporting-service/goalsandfunnelsreportrequest?view=bingads-13 + """ + + report_name: str = "GoalsAndFunnelsReport" + report_schema_name = "goals_and_funnels_report" + primary_key = [ + "GoalId", + "TimePeriod", + "AccountId", + "CampaignId", + "DeviceType", + "DeviceOS", + "AdGroupId", + ] + + +class 
GoalsAndFunnelsReportHourly(HourlyReportTransformerMixin, GoalsAndFunnelsReport): + report_aggregation = "Hourly" + report_schema_name = "goals_and_funnels_report_hourly" + + +class GoalsAndFunnelsReportDaily(GoalsAndFunnelsReport): + report_aggregation = "Daily" + + +class GoalsAndFunnelsReportWeekly(GoalsAndFunnelsReport): + report_aggregation = "Weekly" + + +class GoalsAndFunnelsReportMonthly(GoalsAndFunnelsReport): + report_aggregation = "Monthly" + + +class AudiencePerformanceReport(BingAdsReportingServicePerformanceStream, ABC): + """ + https://learn.microsoft.com/en-us/advertising/reporting-service/audienceperformancereportrequest?view=bingads-13 + """ + + report_name: str = "AudiencePerformanceReport" + report_schema_name = "audience_performance_report" + primary_key = [ + "AudienceId", + "TimePeriod", + "AccountId", + "CampaignId", + "AdGroupId", + ] + + +class AudiencePerformanceReportHourly(HourlyReportTransformerMixin, AudiencePerformanceReport): + report_aggregation = "Hourly" + report_schema_name = "audience_performance_report_hourly" + + +class AudiencePerformanceReportDaily(AudiencePerformanceReport): + report_aggregation = "Daily" + + +class AudiencePerformanceReportWeekly(AudiencePerformanceReport): + report_aggregation = "Weekly" + + +class AudiencePerformanceReportMonthly(AudiencePerformanceReport): + report_aggregation = "Monthly" + + class CustomReport(BingAdsReportingServicePerformanceStream, ABC): transformer: TypeTransformer = TypeTransformer(TransformConfig.DefaultSchemaNormalization) custom_report_columns = [] diff --git a/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/audience_performance_report.json b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/audience_performance_report.json new file mode 100644 index 000000000000..38131feb2a1d --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/audience_performance_report.json @@ -0,0 +1,152 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "additionalProperties": true, + "properties": { + "AccountName": { + "type": ["null", "string"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "AccountId": { + "type": ["null", "integer"] + }, + "TimePeriod": { + "type": ["null", "string"], + "format": "date" + }, + "CampaignName": { + "type": ["null", "string"] + }, + "CampaignId": { + "type": ["null", "integer"] + }, + "AdGroupName": { + "type": ["null", "string"] + }, + "AdGroupId": { + "type": ["null", "integer"] + }, + "AudienceId": { + "type": ["null", "integer"] + }, + "AudienceName": { + "type": ["null", "string"] + }, + "AssociationStatus": { + "type": ["null", "string"] + }, + "BidAdjustment": { + "type": ["null", "number"] + }, + "TargetingSetting": { + "type": ["null", "string"] + }, + "Impressions": { + "type": ["null", "integer"] + }, + "Clicks": { + "type": ["null", "integer"] + }, + "Ctr": { + "type": ["null", "number"] + }, + "AverageCpc": { + "type": ["null", "number"] + }, + "Spend": { + "type": ["null", "number"] + }, + "AveragePosition": { + "type": ["null", "number"] + }, + "Conversions": { + "type": ["null", "integer"] + }, + "ConversionRate": { + "type": ["null", "number"] + }, + "CostPerConversion": { + "type": ["null", "number"] + }, + "Revenue": { + "type": ["null", "number"] + }, + "ReturnOnAdSpend": { + "type": ["null", "number"] + }, + "RevenuePerConversion": { + "type": ["null", "number"] + }, + "AccountStatus": { + "type": ["null", "string"] + }, + "CampaignStatus": { 
+ "type": ["null", "string"] + }, + "AdGroupStatus": { + "type": ["null", "string"] + }, + "AudienceType": { + "type": ["null", "string"] + }, + "BaseCampaignId": { + "type": ["null", "integer"] + }, + "AllConversions": { + "type": ["null", "integer"] + }, + "AllRevenue": { + "type": ["null", "number"] + }, + "AllConversionRate": { + "type": ["null", "number"] + }, + "AllCostPerConversion": { + "type": ["null", "number"] + }, + "AllReturnOnAdSpend": { + "type": ["null", "number"] + }, + "AllRevenuePerConversion": { + "type": ["null", "number"] + }, + "AssociationId": { + "type": ["null", "integer"] + }, + "AssociationLevel": { + "type": ["null", "string"] + }, + "ViewThroughConversions": { + "type": ["null", "integer"] + }, + "Goal": { + "type": ["null", "string"] + }, + "GoalType": { + "type": ["null", "string"] + }, + "AbsoluteTopImpressionRatePercent": { + "type": ["null", "number"] + }, + "TopImpressionRatePercent": { + "type": ["null", "number"] + }, + "AverageCpm": { + "type": ["null", "number"] + }, + "ConversionsQualified": { + "type": ["null", "number"] + }, + "AllConversionsQualified": { + "type": ["null", "number"] + }, + "ViewThroughConversionsQualified": { + "type": ["null", "number"] + }, + "ViewThroughRevenue": { + "type": ["null", "number"] + } + } +} diff --git a/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/audience_performance_report_hourly.json b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/audience_performance_report_hourly.json new file mode 100644 index 000000000000..ec478257887f --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/audience_performance_report_hourly.json @@ -0,0 +1,153 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "additionalProperties": true, + "properties": { + "AccountName": { + "type": ["null", "string"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "AccountId": { + "type": ["null", "integer"] + }, + "TimePeriod": { + "type": ["null", "string"], + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" + }, + "CampaignName": { + "type": ["null", "string"] + }, + "CampaignId": { + "type": ["null", "integer"] + }, + "AdGroupName": { + "type": ["null", "string"] + }, + "AdGroupId": { + "type": ["null", "integer"] + }, + "AudienceId": { + "type": ["null", "integer"] + }, + "AudienceName": { + "type": ["null", "string"] + }, + "AssociationStatus": { + "type": ["null", "string"] + }, + "BidAdjustment": { + "type": ["null", "number"] + }, + "TargetingSetting": { + "type": ["null", "string"] + }, + "Impressions": { + "type": ["null", "integer"] + }, + "Clicks": { + "type": ["null", "integer"] + }, + "Ctr": { + "type": ["null", "number"] + }, + "AverageCpc": { + "type": ["null", "number"] + }, + "Spend": { + "type": ["null", "number"] + }, + "AveragePosition": { + "type": ["null", "number"] + }, + "Conversions": { + "type": ["null", "integer"] + }, + "ConversionRate": { + "type": ["null", "number"] + }, + "CostPerConversion": { + "type": ["null", "number"] + }, + "Revenue": { + "type": ["null", "number"] + }, + "ReturnOnAdSpend": { + "type": ["null", "number"] + }, + "RevenuePerConversion": { + "type": ["null", "number"] + }, + "AccountStatus": { + "type": ["null", "string"] + }, + "CampaignStatus": { + "type": ["null", "string"] + }, + "AdGroupStatus": { + "type": ["null", "string"] + }, + "AudienceType": { + "type": ["null", "string"] + }, + "BaseCampaignId": { + "type": ["null", "integer"] + }, + 
"AllConversions": { + "type": ["null", "integer"] + }, + "AllRevenue": { + "type": ["null", "number"] + }, + "AllConversionRate": { + "type": ["null", "number"] + }, + "AllCostPerConversion": { + "type": ["null", "number"] + }, + "AllReturnOnAdSpend": { + "type": ["null", "number"] + }, + "AllRevenuePerConversion": { + "type": ["null", "number"] + }, + "AssociationId": { + "type": ["null", "integer"] + }, + "AssociationLevel": { + "type": ["null", "string"] + }, + "ViewThroughConversions": { + "type": ["null", "integer"] + }, + "Goal": { + "type": ["null", "string"] + }, + "GoalType": { + "type": ["null", "string"] + }, + "AbsoluteTopImpressionRatePercent": { + "type": ["null", "number"] + }, + "TopImpressionRatePercent": { + "type": ["null", "number"] + }, + "AverageCpm": { + "type": ["null", "number"] + }, + "ConversionsQualified": { + "type": ["null", "number"] + }, + "AllConversionsQualified": { + "type": ["null", "number"] + }, + "ViewThroughConversionsQualified": { + "type": ["null", "number"] + }, + "ViewThroughRevenue": { + "type": ["null", "number"] + } + } +} diff --git a/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/goals_and_funnels_report.json b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/goals_and_funnels_report.json new file mode 100644 index 000000000000..b2f538c12a67 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/goals_and_funnels_report.json @@ -0,0 +1,86 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "additionalProperties": true, + "properties": { + "AccountName": { + "type": ["null", "string"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "AccountId": { + "type": ["null", "string"] + }, + "TimePeriod": { + "type": ["null", "string"], + "format": "date" + }, + "CampaignName": { + "type": ["null", "string"] + }, + "CampaignId": { + "type": ["null", "integer"] + }, + "AdGroupName": { + "type": ["null", "string"] + }, + "AdGroupId": { + "type": ["null", "integer"] + }, + "Keyword": { + "type": ["null", "string"] + }, + "KeywordId": { + "type": ["null", "integer"] + }, + "Goal": { + "type": ["null", "string"] + }, + "AllConversions": { + "type": ["null", "integer"] + }, + "Assists": { + "type": ["null", "integer"] + }, + "AllRevenue": { + "type": ["null", "number"] + }, + "GoalId": { + "type": ["null", "integer"] + }, + "DeviceType": { + "type": ["null", "string"] + }, + "DeviceOS": { + "type": ["null", "string"] + }, + "AccountStatus": { + "type": ["null", "string"] + }, + "CampaignStatus": { + "type": ["null", "string"] + }, + "AdGroupStatus": { + "type": ["null", "string"] + }, + "KeywordStatus": { + "type": ["null", "string"] + }, + "GoalType": { + "type": ["null", "string"] + }, + "ViewThroughConversions": { + "type": ["null", "integer"] + }, + "AllConversionsQualified": { + "type": ["null", "number"] + }, + "ViewThroughConversionsQualified": { + "type": ["null", "number"] + }, + "ViewThroughRevenue": { + "type": ["null", "number"] + } + } +} diff --git a/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/goals_and_funnels_report_hourly.json b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/goals_and_funnels_report_hourly.json new file mode 100644 index 000000000000..e8fdc772db10 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/goals_and_funnels_report_hourly.json @@ -0,0 +1,87 @@ +{ + "$schema": 
"http://json-schema.org/draft-07/schema#", + "type": "object", + "additionalProperties": true, + "properties": { + "AccountName": { + "type": ["null", "string"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "AccountId": { + "type": ["null", "string"] + }, + "TimePeriod": { + "type": ["null", "string"], + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" + }, + "CampaignName": { + "type": ["null", "string"] + }, + "CampaignId": { + "type": ["null", "integer"] + }, + "AdGroupName": { + "type": ["null", "string"] + }, + "AdGroupId": { + "type": ["null", "integer"] + }, + "Keyword": { + "type": ["null", "string"] + }, + "KeywordId": { + "type": ["null", "integer"] + }, + "Goal": { + "type": ["null", "string"] + }, + "AllConversions": { + "type": ["null", "integer"] + }, + "Assists": { + "type": ["null", "integer"] + }, + "AllRevenue": { + "type": ["null", "number"] + }, + "GoalId": { + "type": ["null", "integer"] + }, + "DeviceType": { + "type": ["null", "string"] + }, + "DeviceOS": { + "type": ["null", "string"] + }, + "AccountStatus": { + "type": ["null", "string"] + }, + "CampaignStatus": { + "type": ["null", "string"] + }, + "AdGroupStatus": { + "type": ["null", "string"] + }, + "KeywordStatus": { + "type": ["null", "string"] + }, + "GoalType": { + "type": ["null", "string"] + }, + "ViewThroughConversions": { + "type": ["null", "integer"] + }, + "AllConversionsQualified": { + "type": ["null", "number"] + }, + "ViewThroughConversionsQualified": { + "type": ["null", "number"] + }, + "ViewThroughRevenue": { + "type": ["null", "number"] + } + } +} diff --git a/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/product_search_query_performance_report.json b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/product_search_query_performance_report.json new file mode 100644 index 000000000000..4a02d08c251b --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/product_search_query_performance_report.json @@ -0,0 +1,175 @@ +{ + "$schema": "https://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "TimePeriod": { + "type": ["null", "string"], + "format": "date" + }, + "AccountId": { + "type": ["null", "integer"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "AccountName": { + "type": ["null", "string"] + }, + "AdId": { + "type": ["null", "integer"] + }, + "AdGroupId": { + "type": ["null", "integer"] + }, + "AdGroupName": { + "type": ["null", "string"] + }, + "CampaignId": { + "type": ["null", "integer"] + }, + "CampaignName": { + "type": ["null", "string"] + }, + "DestinationUrl": { + "type": ["null", "string"] + }, + "DeviceType": { + "type": ["null", "string"] + }, + "DeviceOS": { + "type": ["null", "string"] + }, + "Language": { + "type": ["null", "string"] + }, + "SearchQuery": { + "type": ["null", "string"] + }, + "Network": { + "type": ["null", "string"] + }, + "MerchantProductId": { + "type": ["null", "string"] + }, + "Title": { + "type": ["null", "string"] + }, + "ClickTypeId": { + "type": ["null", "string"] + }, + "TotalClicksOnAdElements": { + "type": ["null", "number"] + }, + "ClickType": { + "type": ["null", "string"] + }, + "AdGroupCriterionId": { + "type": ["null", "string"] + }, + "ProductGroup": { + "type": ["null", "string"] + }, + "PartitionType": { + "type": ["null", "string"] + }, + "Impressions": { + "type": ["null", "integer"] + }, + "Clicks": { + "type": ["null", "integer"] + }, + "Ctr": { + "type": ["null", "number"] + }, + 
"AverageCpc": { + "type": ["null", "number"] + }, + "Spend": { + "type": ["null", "number"] + }, + "Conversions": { + "type": ["null", "integer"] + }, + "ConversionRate": { + "type": ["null", "number"] + }, + "Assists": { + "type": ["null", "integer"] + }, + "CostPerAssist": { + "type": ["null", "number"] + }, + "Revenue": { + "type": ["null", "number"] + }, + "CostPerConversion": { + "type": ["null", "number"] + }, + "RevenuePerConversion": { + "type": ["null", "number"] + }, + "RevenuePerAssist": { + "type": ["null", "number"] + }, + "CustomerId": { + "type": ["null", "integer"] + }, + "CustomerName": { + "type": ["null", "string"] + }, + "AssistedImpressions": { + "type": ["null", "integer"] + }, + "AssistedClicks": { + "type": ["null", "integer"] + }, + "AssistedConversions": { + "type": ["null", "integer"] + }, + "AllConversions": { + "type": ["null", "integer"] + }, + "AllRevenue": { + "type": ["null", "number"] + }, + "AllConversionRate": { + "type": ["null", "number"] + }, + "AllCostPerConversion": { + "type": ["null", "number"] + }, + "AllRevenuePerConversion": { + "type": ["null", "number"] + }, + "Goal": { + "type": ["null", "string"] + }, + "GoalType": { + "type": ["null", "string"] + }, + "AbsoluteTopImpressionRatePercent": { + "type": ["null", "number"] + }, + "AverageCpm": { + "type": ["null", "number"] + }, + "ConversionsQualified": { + "type": ["null", "number"] + }, + "AssistedConversionsQualified": { + "type": ["null", "number"] + }, + "AllConversionsQualified": { + "type": ["null", "number"] + }, + "CampaignType": { + "type": ["null", "string"] + }, + "AssetGroupId": { + "type": ["null", "integer"] + }, + "AssetGroupName": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/product_search_query_performance_report_hourly.json b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/product_search_query_performance_report_hourly.json new file mode 100644 index 000000000000..0111f1f037f8 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/schemas/product_search_query_performance_report_hourly.json @@ -0,0 +1,176 @@ +{ + "$schema": "https://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "TimePeriod": { + "type": ["null", "string"], + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" + }, + "AccountId": { + "type": ["null", "integer"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "AccountName": { + "type": ["null", "string"] + }, + "AdId": { + "type": ["null", "integer"] + }, + "AdGroupId": { + "type": ["null", "integer"] + }, + "AdGroupName": { + "type": ["null", "string"] + }, + "CampaignId": { + "type": ["null", "integer"] + }, + "CampaignName": { + "type": ["null", "string"] + }, + "DestinationUrl": { + "type": ["null", "string"] + }, + "DeviceType": { + "type": ["null", "string"] + }, + "DeviceOS": { + "type": ["null", "string"] + }, + "Language": { + "type": ["null", "string"] + }, + "SearchQuery": { + "type": ["null", "string"] + }, + "Network": { + "type": ["null", "string"] + }, + "MerchantProductId": { + "type": ["null", "string"] + }, + "Title": { + "type": ["null", "string"] + }, + "ClickTypeId": { + "type": ["null", "string"] + }, + "TotalClicksOnAdElements": { + "type": ["null", "number"] + }, + "ClickType": { + "type": ["null", "string"] + }, + "AdGroupCriterionId": { + "type": ["null", "string"] + }, + "ProductGroup": { + "type": ["null", "string"] + }, + "PartitionType": { + 
"type": ["null", "string"] + }, + "Impressions": { + "type": ["null", "integer"] + }, + "Clicks": { + "type": ["null", "integer"] + }, + "Ctr": { + "type": ["null", "number"] + }, + "AverageCpc": { + "type": ["null", "number"] + }, + "Spend": { + "type": ["null", "number"] + }, + "Conversions": { + "type": ["null", "integer"] + }, + "ConversionRate": { + "type": ["null", "number"] + }, + "Assists": { + "type": ["null", "integer"] + }, + "CostPerAssist": { + "type": ["null", "number"] + }, + "Revenue": { + "type": ["null", "number"] + }, + "CostPerConversion": { + "type": ["null", "number"] + }, + "RevenuePerConversion": { + "type": ["null", "number"] + }, + "RevenuePerAssist": { + "type": ["null", "number"] + }, + "CustomerId": { + "type": ["null", "integer"] + }, + "CustomerName": { + "type": ["null", "string"] + }, + "AssistedImpressions": { + "type": ["null", "integer"] + }, + "AssistedClicks": { + "type": ["null", "integer"] + }, + "AssistedConversions": { + "type": ["null", "integer"] + }, + "AllConversions": { + "type": ["null", "integer"] + }, + "AllRevenue": { + "type": ["null", "number"] + }, + "AllConversionRate": { + "type": ["null", "number"] + }, + "AllCostPerConversion": { + "type": ["null", "number"] + }, + "AllRevenuePerConversion": { + "type": ["null", "number"] + }, + "Goal": { + "type": ["null", "string"] + }, + "GoalType": { + "type": ["null", "string"] + }, + "AbsoluteTopImpressionRatePercent": { + "type": ["null", "number"] + }, + "AverageCpm": { + "type": ["null", "number"] + }, + "ConversionsQualified": { + "type": ["null", "number"] + }, + "AssistedConversionsQualified": { + "type": ["null", "number"] + }, + "AllConversionsQualified": { + "type": ["null", "number"] + }, + "CampaignType": { + "type": ["null", "string"] + }, + "AssetGroupId": { + "type": ["null", "integer"] + }, + "AssetGroupName": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/source.py b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/source.py index 7323a89c46c5..78cfe0a24d58 100644 --- a/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/source.py +++ b/airbyte-integrations/connectors/source-bing-ads/source_bing_ads/source.py @@ -46,6 +46,10 @@ AgeGenderAudienceReportHourly, AgeGenderAudienceReportMonthly, AgeGenderAudienceReportWeekly, + AudiencePerformanceReportDaily, + AudiencePerformanceReportHourly, + AudiencePerformanceReportMonthly, + AudiencePerformanceReportWeekly, BingAdsReportingServiceStream, BudgetSummaryReport, CampaignImpressionPerformanceReportDaily, @@ -61,6 +65,10 @@ GeographicPerformanceReportHourly, GeographicPerformanceReportMonthly, GeographicPerformanceReportWeekly, + GoalsAndFunnelsReportDaily, + GoalsAndFunnelsReportHourly, + GoalsAndFunnelsReportMonthly, + GoalsAndFunnelsReportWeekly, KeywordPerformanceReportDaily, KeywordPerformanceReportHourly, KeywordPerformanceReportMonthly, @@ -69,6 +77,10 @@ ProductDimensionPerformanceReportHourly, ProductDimensionPerformanceReportMonthly, ProductDimensionPerformanceReportWeekly, + ProductSearchQueryPerformanceReportDaily, + ProductSearchQueryPerformanceReportHourly, + ProductSearchQueryPerformanceReportMonthly, + ProductSearchQueryPerformanceReportWeekly, SearchQueryPerformanceReportDaily, SearchQueryPerformanceReportHourly, SearchQueryPerformanceReportMonthly, @@ -157,6 +169,7 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: "AgeGenderAudienceReport", "AccountImpressionPerformanceReport", 
"AccountPerformanceReport", + "AudiencePerformanceReport", "KeywordPerformanceReport", "AdGroupPerformanceReport", "AdPerformanceReport", @@ -164,7 +177,9 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: "CampaignPerformanceReport", "CampaignImpressionPerformanceReport", "GeographicPerformanceReport", + "GoalsAndFunnelsReport", "ProductDimensionPerformanceReport", + "ProductSearchQueryPerformanceReport", "SearchQueryPerformanceReport", "UserLocationPerformanceReport", ) diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_audience_performance_report.py b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_audience_performance_report.py new file mode 100644 index 000000000000..f8075e01c2be --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_audience_performance_report.py @@ -0,0 +1,44 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from test_report_stream import TestSuiteReportStream + + +class TestAudiencePerformanceReportDailyStream(TestSuiteReportStream): + stream_name = "audience_performance_report_daily" + report_file = "audience_performance_report_daily" + records_number = 8 + state_file = "audience_performance_report_daily_state" + incremental_report_file = "audience_performance_report_daily_incremental" + first_read_state = {"audience_performance_report_daily": {"180535609": {"TimePeriod": "2023-12-17"}}} + second_read_state = {"audience_performance_report_daily": {"180535609": {"TimePeriod": "2023-12-24"}}} + + +class TestAudiencePerformanceReportHourlyStream(TestSuiteReportStream): + stream_name = "audience_performance_report_hourly" + report_file = "audience_performance_report_hourly" + records_number = 24 + state_file = "audience_performance_report_hourly_state" + incremental_report_file = "audience_performance_report_hourly_incremental" + first_read_state = {"audience_performance_report_hourly": {"180535609": {"TimePeriod": "2023-11-12T00:00:00+00:00"}}} + second_read_state = {"audience_performance_report_hourly": {"180535609": {"TimePeriod": "2023-11-13T00:00:00+00:00"}}} + + +class TestAudiencePerformanceReportWeeklyStream(TestSuiteReportStream): + stream_name = "audience_performance_report_weekly" + report_file = "audience_performance_report_weekly" + records_number = 3 + second_read_records_number = 5 + state_file = "audience_performance_report_weekly_state" + incremental_report_file = "audience_performance_report_weekly_incremental" + first_read_state = {"audience_performance_report_weekly": {"180535609": {"TimePeriod": "2023-12-25"}}} + second_read_state = {"audience_performance_report_weekly": {"180535609": {"TimePeriod": "2024-01-29"}}} + + +class TestAudiencePerformanceReportMonthlyStream(TestSuiteReportStream): + stream_name = "audience_performance_report_monthly" + report_file = "audience_performance_report_monthly" + records_number = 6 + state_file = "audience_performance_report_monthly_state" + incremental_report_file = "audience_performance_report_monthly_incremental" + first_read_state = {"audience_performance_report_monthly": {"180535609": {"TimePeriod": "2023-09-01"}}} + second_read_state = {"audience_performance_report_monthly": {"180535609": {"TimePeriod": "2024-03-01"}}} diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_goals_and_funnels_report.py b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_goals_and_funnels_report.py new file mode 100644 index 
000000000000..7affe69b4f4c --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_goals_and_funnels_report.py @@ -0,0 +1,44 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from test_report_stream import TestSuiteReportStream + + +class TestGoalsAndFunnelsReportDailyStream(TestSuiteReportStream): + stream_name = "goals_and_funnels_report_daily" + report_file = "goals_and_funnels_report_daily" + records_number = 8 + state_file = "goals_and_funnels_report_daily_state" + incremental_report_file = "goals_and_funnels_report_daily_incremental" + first_read_state = {"goals_and_funnels_report_daily": {"180535609": {"TimePeriod": "2023-12-17"}}} + second_read_state = {"goals_and_funnels_report_daily": {"180535609": {"TimePeriod": "2023-12-24"}}} + + +class TestGoalsAndFunnelsReportHourlyStream(TestSuiteReportStream): + stream_name = "goals_and_funnels_report_hourly" + report_file = "goals_and_funnels_report_hourly" + records_number = 24 + state_file = "goals_and_funnels_report_hourly_state" + incremental_report_file = "goals_and_funnels_report_hourly_incremental" + first_read_state = {"goals_and_funnels_report_hourly": {"180535609": {"TimePeriod": "2023-11-12T00:00:00+00:00"}}} + second_read_state = {"goals_and_funnels_report_hourly": {"180535609": {"TimePeriod": "2023-11-13T00:00:00+00:00"}}} + + +class TestGoalsAndFunnelsReportWeeklyStream(TestSuiteReportStream): + stream_name = "goals_and_funnels_report_weekly" + report_file = "goals_and_funnels_report_weekly" + records_number = 3 + second_read_records_number = 5 + state_file = "goals_and_funnels_report_weekly_state" + incremental_report_file = "goals_and_funnels_report_weekly_incremental" + first_read_state = {"goals_and_funnels_report_weekly": {"180535609": {"TimePeriod": "2023-12-25"}}} + second_read_state = {"goals_and_funnels_report_weekly": {"180535609": {"TimePeriod": "2024-01-29"}}} + + +class TestGoalsAndFunnelsReportMonthlyStream(TestSuiteReportStream): + stream_name = "goals_and_funnels_report_monthly" + report_file = "goals_and_funnels_report_monthly" + records_number = 6 + state_file = "goals_and_funnels_report_monthly_state" + incremental_report_file = "goals_and_funnels_report_monthly_incremental" + first_read_state = {"goals_and_funnels_report_monthly": {"180535609": {"TimePeriod": "2023-09-01"}}} + second_read_state = {"goals_and_funnels_report_monthly": {"180535609": {"TimePeriod": "2024-03-01"}}} diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_product_search_query_performance_report.py b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_product_search_query_performance_report.py new file mode 100644 index 000000000000..a7fabbf1e1c4 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_product_search_query_performance_report.py @@ -0,0 +1,44 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
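+# Each class below parameterizes TestSuiteReportStream for one aggregation of the product search query performance report: stream_name, report_file, state_file and incremental_report_file wire the suite to its fixtures, while first_read_state and second_read_state are the cursor values expected after the initial and the incremental read.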
+ +from test_report_stream import TestSuiteReportStream + + +class TestProductSearchQueryPerformanceReportDailyStream(TestSuiteReportStream): + stream_name = "product_search_query_performance_report_daily" + report_file = "product_search_query_performance_report_daily" + records_number = 8 + state_file = "product_search_query_performance_report_daily_state" + incremental_report_file = "product_search_query_performance_report_daily_incremental" + first_read_state = {"product_search_query_performance_report_daily": {"180535609": {"TimePeriod": "2023-12-17"}}} + second_read_state = {"product_search_query_performance_report_daily": {"180535609": {"TimePeriod": "2023-12-24"}}} + + +class TestProductSearchQueryPerformanceReportHourlyStream(TestSuiteReportStream): + stream_name = "product_search_query_performance_report_hourly" + report_file = "product_search_query_performance_report_hourly" + records_number = 24 + state_file = "product_search_query_performance_report_hourly_state" + incremental_report_file = "product_search_query_performance_report_hourly_incremental" + first_read_state = {"product_search_query_performance_report_hourly": {"180535609": {"TimePeriod": "2023-11-12T00:00:00+00:00"}}} + second_read_state = {"product_search_query_performance_report_hourly": {"180535609": {"TimePeriod": "2023-11-13T00:00:00+00:00"}}} + + +class TestProductSearchQueryPerformanceReportWeeklyStream(TestSuiteReportStream): + stream_name = "product_search_query_performance_report_weekly" + report_file = "product_search_query_performance_report_weekly" + records_number = 3 + second_read_records_number = 5 + state_file = "product_search_query_performance_report_weekly_state" + incremental_report_file = "product_search_query_performance_report_weekly_incremental" + first_read_state = {"product_search_query_performance_report_weekly": {"180535609": {"TimePeriod": "2023-12-25"}}} + second_read_state = {"product_search_query_performance_report_weekly": {"180535609": {"TimePeriod": "2024-01-29"}}} + + +class TestProductSearchQueryPerformanceReportMonthlyStream(TestSuiteReportStream): + stream_name = "product_search_query_performance_report_monthly" + report_file = "product_search_query_performance_report_monthly" + records_number = 6 + state_file = "product_search_query_performance_report_monthly_state" + incremental_report_file = "product_search_query_performance_report_monthly_incremental" + first_read_state = {"product_search_query_performance_report_monthly": {"180535609": {"TimePeriod": "2023-09-01"}}} + second_read_state = {"product_search_query_performance_report_monthly": {"180535609": {"TimePeriod": "2024-03-01"}}} diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_report_stream.py b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_report_stream.py index 0678ec45aed8..a0d7feb18261 100644 --- a/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_report_stream.py +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/integrations/test_report_stream.py @@ -34,9 +34,10 @@ def _download_file(self, file: Optional[str] = None) -> Path: class TestSuiteReportStream(TestReportStream): - stream_name: str = None + stream_name: Optional[str] = None report_file: str records_number: int + second_read_records_number: Optional[int] = None state_file: str incremental_report_file: str first_read_state: dict @@ -97,7 +98,10 @@ def test_incremental_read_with_state_returns_records(self, http_mocker: HttpMocker):
self.incremental_report_file, state ) - assert len(output.records) == self.records_number + if not self.second_read_records_number: + assert len(output.records) == self.records_number + else: + assert len(output.records) == self.second_read_records_number actual_cursor = output.most_recent_state.get(self.stream_name).get(self.account_id) expected_cursor = self.second_read_state.get(self.stream_name).get(self.account_id) diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_daily.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_daily.csv new file mode 100644 index 000000000000..248800fe45f8 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_daily.csv @@ -0,0 +1,9 @@ +AccountName,AccountNumber,AccountId,TimePeriod,CampaignName,CampaignId,AdGroupName,AdGroupId,AudienceId,AudienceName,AssociationStatus,BidAdjustment,TargetingSetting,Impressions,Clicks,Ctr,AverageCpc,Spend,AveragePosition,Conversions,ConversionRate,CostPerConversion,Revenue,ReturnOnAdSpend,RevenuePerConversion,AccountStatus,CampaignStatus,AdGroupStatus,AudienceType,BaseCampaignId,AllConversions,AllRevenue,AllConversionRate,AllCostPerConversion,AllReturnOnAdSpend,AllRevenuePerConversion,AssociationId,AssociationLevel,ViewThroughConversions,Goal,GoalType,AbsoluteTopImpressionRatePercent,TopImpressionRatePercent,AverageCpm,ConversionsQualified,AllConversionsQualified,ViewThroughConversionsQualified,ViewThroughRevenue,,,,,,,, +Test Account,180535609,180535609,2023-12-10,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-12-11,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-12-12,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-12-13,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-12-14,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-12-15,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-12-16,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test 
Account,180535609,180535609,2023-12-17,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_daily_incremental.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_daily_incremental.csv new file mode 100644 index 000000000000..aff59f99d684 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_daily_incremental.csv @@ -0,0 +1,9 @@ +AccountName,AccountNumber,AccountId,TimePeriod,CampaignName,CampaignId,AdGroupName,AdGroupId,AudienceId,AudienceName,AssociationStatus,BidAdjustment,TargetingSetting,Impressions,Clicks,Ctr,AverageCpc,Spend,AveragePosition,Conversions,ConversionRate,CostPerConversion,Revenue,ReturnOnAdSpend,RevenuePerConversion,AccountStatus,CampaignStatus,AdGroupStatus,AudienceType,BaseCampaignId,AllConversions,AllRevenue,AllConversionRate,AllCostPerConversion,AllReturnOnAdSpend,AllRevenuePerConversion,AssociationId,AssociationLevel,ViewThroughConversions,Goal,GoalType,AbsoluteTopImpressionRatePercent,TopImpressionRatePercent,AverageCpm,ConversionsQualified,AllConversionsQualified,ViewThroughConversionsQualified,ViewThroughRevenue,,,,,,,, +Test Account,180535609,180535609,2023-12-17,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-12-18,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-12-19,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-12-20,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-12-21,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-12-22,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-12-23,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-12-24,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid 
only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_hourly.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_hourly.csv new file mode 100644 index 000000000000..af459a8c2f12 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_hourly.csv @@ -0,0 +1,25 @@ +AccountName,AccountNumber,AccountId,TimePeriod,CampaignName,CampaignId,AdGroupName,AdGroupId,AudienceId,AudienceName,AssociationStatus,BidAdjustment,TargetingSetting,Impressions,Clicks,Ctr,AverageCpc,Spend,AveragePosition,Conversions,ConversionRate,CostPerConversion,Revenue,ReturnOnAdSpend,RevenuePerConversion,AccountStatus,CampaignStatus,AdGroupStatus,AudienceType,BaseCampaignId,AllConversions,AllRevenue,AllConversionRate,AllCostPerConversion,AllReturnOnAdSpend,AllRevenuePerConversion,AssociationId,AssociationLevel,ViewThroughConversions,Goal,GoalType,AbsoluteTopImpressionRatePercent,TopImpressionRatePercent,AverageCpm,ConversionsQualified,AllConversionsQualified,ViewThroughConversionsQualified,ViewThroughRevenue,,,,,,,, +Test Account,180535609,180535609,2023-11-11|01,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-11|02,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-11|03,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-11|04,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-11|05,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-11|06,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-11|07,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-11|08,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, 
+Test Account,180535609,180535609,2023-11-11|09,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-11|10,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-11|11,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-11|12,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-11|13,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-11|14,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-11|15,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-11|16,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-11|17,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-11|18,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-11|19,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-11|20,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-11|21,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid 
only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-11|22,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-11|23,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-12|00,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_hourly_incremental.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_hourly_incremental.csv new file mode 100644 index 000000000000..7ffe7356345e --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_hourly_incremental.csv @@ -0,0 +1,25 @@ +AccountName,AccountNumber,AccountId,TimePeriod,CampaignName,CampaignId,AdGroupName,AdGroupId,AudienceId,AudienceName,AssociationStatus,BidAdjustment,TargetingSetting,Impressions,Clicks,Ctr,AverageCpc,Spend,AveragePosition,Conversions,ConversionRate,CostPerConversion,Revenue,ReturnOnAdSpend,RevenuePerConversion,AccountStatus,CampaignStatus,AdGroupStatus,AudienceType,BaseCampaignId,AllConversions,AllRevenue,AllConversionRate,AllCostPerConversion,AllReturnOnAdSpend,AllRevenuePerConversion,AssociationId,AssociationLevel,ViewThroughConversions,Goal,GoalType,AbsoluteTopImpressionRatePercent,TopImpressionRatePercent,AverageCpm,ConversionsQualified,AllConversionsQualified,ViewThroughConversionsQualified,ViewThroughRevenue,,,,,,,, +Test Account,180535609,180535609,2023-11-12|01,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-12|02,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-12|03,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-12|04,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-12|05,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid 
only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-12|06,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-12|07,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-12|08,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-12|09,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-12|10,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-12|11,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-12|12,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-12|13,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-12|14,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-12|15,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-12|16,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-12|17,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,, +Test Account,180535609,180535609,2023-11-12|18,Test Campaign,2334534,Test Ad 
Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,,
+Test Account,180535609,180535609,2023-11-12|19,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,,
+Test Account,180535609,180535609,2023-11-12|20,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,,
+Test Account,180535609,180535609,2023-11-12|21,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,,
+Test Account,180535609,180535609,2023-11-12|22,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,,
+Test Account,180535609,180535609,2023-11-12|23,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,,
+Test Account,180535609,180535609,2023-11-13|00,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,,
diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_monthly.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_monthly.csv
new file mode 100644
index 000000000000..5286ded5373d
--- /dev/null
+++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_monthly.csv
@@ -0,0 +1,7 @@
+AccountName,AccountNumber,AccountId,TimePeriod,CampaignName,CampaignId,AdGroupName,AdGroupId,AudienceId,AudienceName,AssociationStatus,BidAdjustment,TargetingSetting,Impressions,Clicks,Ctr,AverageCpc,Spend,AveragePosition,Conversions,ConversionRate,CostPerConversion,Revenue,ReturnOnAdSpend,RevenuePerConversion,AccountStatus,CampaignStatus,AdGroupStatus,AudienceType,BaseCampaignId,AllConversions,AllRevenue,AllConversionRate,AllCostPerConversion,AllReturnOnAdSpend,AllRevenuePerConversion,AssociationId,AssociationLevel,ViewThroughConversions,Goal,GoalType,AbsoluteTopImpressionRatePercent,TopImpressionRatePercent,AverageCpm,ConversionsQualified,AllConversionsQualified,ViewThroughConversionsQualified,ViewThroughRevenue,,,,,,,,
+Test Account,180535609,180535609,2023-04-01,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,,
+Test Account,180535609,180535609,2023-05-01,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,,
+Test Account,180535609,180535609,2023-06-01,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,,
+Test Account,180535609,180535609,2023-07-01,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,,
+Test Account,180535609,180535609,2023-08-01,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,,
+Test Account,180535609,180535609,2023-09-01,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,,
diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_monthly_incremental.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_monthly_incremental.csv
new file mode 100644
index 000000000000..e36b3a011e5a
--- /dev/null
+++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_monthly_incremental.csv
@@ -0,0 +1,7 @@
+AccountName,AccountNumber,AccountId,TimePeriod,CampaignName,CampaignId,AdGroupName,AdGroupId,AudienceId,AudienceName,AssociationStatus,BidAdjustment,TargetingSetting,Impressions,Clicks,Ctr,AverageCpc,Spend,AveragePosition,Conversions,ConversionRate,CostPerConversion,Revenue,ReturnOnAdSpend,RevenuePerConversion,AccountStatus,CampaignStatus,AdGroupStatus,AudienceType,BaseCampaignId,AllConversions,AllRevenue,AllConversionRate,AllCostPerConversion,AllReturnOnAdSpend,AllRevenuePerConversion,AssociationId,AssociationLevel,ViewThroughConversions,Goal,GoalType,AbsoluteTopImpressionRatePercent,TopImpressionRatePercent,AverageCpm,ConversionsQualified,AllConversionsQualified,ViewThroughConversionsQualified,ViewThroughRevenue,,,,,,,,
+Test Account,180535609,180535609,2023-10-01,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,,
+Test Account,180535609,180535609,2023-11-01,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,,
+Test Account,180535609,180535609,2023-12-01,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,,
+Test Account,180535609,180535609,2024-01-01,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,,
+Test Account,180535609,180535609,2024-02-01,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,,
+Test Account,180535609,180535609,2024-03-01,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,,
diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_weekly.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_weekly.csv
new file mode 100644
index 000000000000..001600c822b2
--- /dev/null
+++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_weekly.csv
@@ -0,0 +1,4 @@
+AccountName,AccountNumber,AccountId,TimePeriod,CampaignName,CampaignId,AdGroupName,AdGroupId,AudienceId,AudienceName,AssociationStatus,BidAdjustment,TargetingSetting,Impressions,Clicks,Ctr,AverageCpc,Spend,AveragePosition,Conversions,ConversionRate,CostPerConversion,Revenue,ReturnOnAdSpend,RevenuePerConversion,AccountStatus,CampaignStatus,AdGroupStatus,AudienceType,BaseCampaignId,AllConversions,AllRevenue,AllConversionRate,AllCostPerConversion,AllReturnOnAdSpend,AllRevenuePerConversion,AssociationId,AssociationLevel,ViewThroughConversions,Goal,GoalType,AbsoluteTopImpressionRatePercent,TopImpressionRatePercent,AverageCpm,ConversionsQualified,AllConversionsQualified,ViewThroughConversionsQualified,ViewThroughRevenue,,,,,,,,
+Test Account,180535609,180535609,2023-12-04,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,,
+Test Account,180535609,180535609,2023-12-11,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,,
+Test Account,180535609,180535609,2023-12-25,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,,
diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_weekly_incremental.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_weekly_incremental.csv
new file mode 100644
index 000000000000..bf9ebdcaff93
--- /dev/null
+++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/audience_performance_report_weekly_incremental.csv
@@ -0,0 +1,6 @@
+AccountName,AccountNumber,AccountId,TimePeriod,CampaignName,CampaignId,AdGroupName,AdGroupId,AudienceId,AudienceName,AssociationStatus,BidAdjustment,TargetingSetting,Impressions,Clicks,Ctr,AverageCpc,Spend,AveragePosition,Conversions,ConversionRate,CostPerConversion,Revenue,ReturnOnAdSpend,RevenuePerConversion,AccountStatus,CampaignStatus,AdGroupStatus,AudienceType,BaseCampaignId,AllConversions,AllRevenue,AllConversionRate,AllCostPerConversion,AllReturnOnAdSpend,AllRevenuePerConversion,AssociationId,AssociationLevel,ViewThroughConversions,Goal,GoalType,AbsoluteTopImpressionRatePercent,TopImpressionRatePercent,AverageCpm,ConversionsQualified,AllConversionsQualified,ViewThroughConversionsQualified,ViewThroughRevenue,,,,,,,,
+Test Account,180535609,180535609,2024-01-01,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,,
+Test Account,180535609,180535609,2024-01-08,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,,
+Test Account,180535609,180535609,2024-01-15,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,,
+Test Account,180535609,180535609,2024-01-22,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,,
+Test Account,180535609,180535609,2024-01-29,Test Campaign,2334534,Test Ad Group,38528372,873246,Test Audience,Active,78.9,Bid only,43,345,3,2,45,34,456,46,21,45,86,4,Active,Active,Active,Bid,23545,75,24,76,23,65,65,56352,1,34,Clicks,Clicks,43,567,56,345,56,76,4656,,,,,,,,
diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_daily.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_daily.csv
new file mode 100644
index 000000000000..d4a45f503a87
--- /dev/null
+++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_daily.csv
@@ -0,0 +1,9 @@
+AccountName,AccountNumber,AccountId,TimePeriod,CampaignName,CampaignId,AdGroupName,AdGroupId,Keyword,KeywordId,Goal,AllConversions,Assists,AllRevenue,GoalId,DeviceType,DeviceOS,AccountStatus,CampaignStatus,AdGroupStatus,KeywordStatus,GoalType,ViewThroughConversions,AllConversionsQualified,ViewThroughConversionsQualified,ViewThroughRevenue,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-12-10,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-12-11,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-12-12,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-12-13,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-12-14,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-12-15,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-12-16,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-12-17,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_daily_incremental.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_daily_incremental.csv
new file mode 100644
index 000000000000..f3361fff483e
--- /dev/null
+++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_daily_incremental.csv
@@ -0,0 +1,9 @@
+AccountName,AccountNumber,AccountId,TimePeriod,CampaignName,CampaignId,AdGroupName,AdGroupId,Keyword,KeywordId,Goal,AllConversions,Assists,AllRevenue,GoalId,DeviceType,DeviceOS,AccountStatus,CampaignStatus,AdGroupStatus,KeywordStatus,GoalType,ViewThroughConversions,AllConversionsQualified,ViewThroughConversionsQualified,ViewThroughRevenue,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-12-17,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-12-18,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-12-19,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-12-20,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-12-21,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-12-22,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-12-23,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-12-24,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_hourly.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_hourly.csv
new file mode 100644
index 000000000000..3ad817f9468a
--- /dev/null
+++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_hourly.csv
@@ -0,0 +1,25 @@
+AccountName,AccountNumber,AccountId,TimePeriod,CampaignName,CampaignId,AdGroupName,AdGroupId,Keyword,KeywordId,Goal,AllConversions,Assists,AllRevenue,GoalId,DeviceType,DeviceOS,AccountStatus,CampaignStatus,AdGroupStatus,KeywordStatus,GoalType,ViewThroughConversions,AllConversionsQualified,ViewThroughConversionsQualified,ViewThroughRevenue,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-11|01,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-11|02,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-11|03,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-11|04,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-11|05,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-11|06,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-11|07,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-11|08,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-11|09,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-11|10,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-11|11,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-11|12,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-11|13,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-11|14,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-11|15,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-11|16,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-11|17,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-11|18,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-11|19,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-11|20,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-11|21,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-11|22,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-11|23,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-12|00,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_hourly_incremental.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_hourly_incremental.csv
new file mode 100644
index 000000000000..56ab7ddd39e5
--- /dev/null
+++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_hourly_incremental.csv
@@ -0,0 +1,25 @@
+AccountName,AccountNumber,AccountId,TimePeriod,CampaignName,CampaignId,AdGroupName,AdGroupId,Keyword,KeywordId,Goal,AllConversions,Assists,AllRevenue,GoalId,DeviceType,DeviceOS,AccountStatus,CampaignStatus,AdGroupStatus,KeywordStatus,GoalType,ViewThroughConversions,AllConversionsQualified,ViewThroughConversionsQualified,ViewThroughRevenue,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-12|01,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-12|02,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-12|03,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-12|04,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-12|05,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-12|06,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-12|07,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-12|08,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-12|09,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-12|10,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-12|11,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-12|12,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-12|13,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-12|14,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-12|15,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-12|16,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-12|17,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-12|18,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-12|19,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-12|20,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-12|21,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-12|22,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-12|23,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-13|00,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_monthly.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_monthly.csv
new file mode 100644
index 000000000000..b6092c8b08f3
--- /dev/null
+++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_monthly.csv
@@ -0,0 +1,7 @@
+AccountName,AccountNumber,AccountId,TimePeriod,CampaignName,CampaignId,AdGroupName,AdGroupId,Keyword,KeywordId,Goal,AllConversions,Assists,AllRevenue,GoalId,DeviceType,DeviceOS,AccountStatus,CampaignStatus,AdGroupStatus,KeywordStatus,GoalType,ViewThroughConversions,AllConversionsQualified,ViewThroughConversionsQualified,ViewThroughRevenue,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-04-01,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-05-01,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-06-01,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-07-01,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-08-01,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-09-01,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_monthly_incremental.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_monthly_incremental.csv
new file mode 100644
index 000000000000..6f9622606196
--- /dev/null
+++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_monthly_incremental.csv
@@ -0,0 +1,7 @@
+AccountName,AccountNumber,AccountId,TimePeriod,CampaignName,CampaignId,AdGroupName,AdGroupId,Keyword,KeywordId,Goal,AllConversions,Assists,AllRevenue,GoalId,DeviceType,DeviceOS,AccountStatus,CampaignStatus,AdGroupStatus,KeywordStatus,GoalType,ViewThroughConversions,AllConversionsQualified,ViewThroughConversionsQualified,ViewThroughRevenue,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-10-01,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-11-01,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-12-01,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2024-01-01,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2024-02-01,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2024-03-01,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_weekly.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_weekly.csv
new file mode 100644
index 000000000000..f5e2673ffadb
--- /dev/null
+++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_weekly.csv
@@ -0,0 +1,4 @@
+AccountName,AccountNumber,AccountId,TimePeriod,CampaignName,CampaignId,AdGroupName,AdGroupId,Keyword,KeywordId,Goal,AllConversions,Assists,AllRevenue,GoalId,DeviceType,DeviceOS,AccountStatus,CampaignStatus,AdGroupStatus,KeywordStatus,GoalType,ViewThroughConversions,AllConversionsQualified,ViewThroughConversionsQualified,ViewThroughRevenue,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-12-04,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-12-11,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2023-12-25,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_weekly_incremental.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_weekly_incremental.csv
new file mode 100644
index 000000000000..44901162cb83
--- /dev/null
+++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/goals_and_funnels_report_weekly_incremental.csv
@@ -0,0 +1,6 @@
+AccountName,AccountNumber,AccountId,TimePeriod,CampaignName,CampaignId,AdGroupName,AdGroupId,Keyword,KeywordId,Goal,AllConversions,Assists,AllRevenue,GoalId,DeviceType,DeviceOS,AccountStatus,CampaignStatus,AdGroupStatus,KeywordStatus,GoalType,ViewThroughConversions,AllConversionsQualified,ViewThroughConversionsQualified,ViewThroughRevenue,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2024-01-01,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2024-01-08,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2024-01-15,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2024-01-22,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
+Test Account,180535609,180535609,2024-01-29,Test Campaign,234354,Test Ad Group,325435,Keyword,76325,Audience,21,4,43,341212,Computer,Windows,Active,Active,Active,Active,Audience,23,43,34,31,23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_daily.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_daily.csv
new file mode 100644
index 000000000000..375ca5a3c55a
--- /dev/null
+++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_daily.csv
@@ -0,0 +1,9 @@
+TimePeriod,AccountId,AccountNumber,AccountName,AdId,AdGroupId,AdGroupName,CampaignId,CampaignName,DestinationUrl,DeviceType,DeviceOS,Language,SearchQuery,Network,MerchantProductId,Title,ClickTypeId,TotalClicksOnAdElements,ClickType,AdGroupCriterionId,ProductGroup,PartitionType,Impressions,Clicks,Ctr,AverageCpc,Spend,Conversions,ConversionRate,Assists,CostPerAssist,Revenue,CostPerConversion,RevenuePerConversion,RevenuePerAssist,CustomerId,CustomerName,AssistedImpressions,AssistedClicks,AssistedConversions,AllConversions,AllRevenue,AllConversionRate,AllCostPerConversion,AllRevenuePerConversion,Goal,GoalType,AbsoluteTopImpressionRatePercent,AverageCpm,ConversionsQualified,AssistedConversionsQualified,AllConversionsQualified,CampaignType,AssetGroupId,AssetGroupName
+2023-12-10,180535609,180535609,Test Account,123456,123456,Test Group,12345678,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345657,Title,1,12,Image,72365472,Test Group Product,PartitionType,9,2,0,3,2,3,3,3,5,3,4,34,5,23423,Customer Name,4,3,4,43,4,4,4,3,Click,1,23,32,23,32,32,Audience,123123,Test Asset Group
+2023-12-11,180535609,180535610,Test Account,123457,123457,Test Group,12345679,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345658,Title,2,13,Image,72365473,Test Group Product,PartitionType,10,3,1,4,3,4,4,4,6,4,5,35,6,23424,Customer Name,5,4,5,44,5,5,5,4,Click,2,24,33,24,33,33,Audience,123124,Test Asset Group
+2023-12-12,180535609,180535611,Test Account,123458,123458,Test Group,12345680,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345659,Title,3,14,Image,72365474,Test Group Product,PartitionType,11,4,2,5,4,5,5,5,7,5,6,36,7,23425,Customer Name,6,5,6,45,6,6,6,5,Click,3,25,34,25,34,34,Audience,123125,Test Asset Group
+2023-12-13,180535609,180535612,Test Account,123459,123459,Test Group,12345681,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345660,Title,4,15,Image,72365475,Test Group Product,PartitionType,12,5,3,6,5,6,6,6,8,6,7,37,8,23426,Customer Name,7,6,7,46,7,7,7,6,Click,4,26,35,26,35,35,Audience,123126,Test Asset Group
+2023-12-14,180535609,180535613,Test Account,123460,123460,Test Group,12345682,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345661,Title,5,16,Image,72365476,Test Group Product,PartitionType,13,6,4,7,6,7,7,7,9,7,8,38,9,23427,Customer Name,8,7,8,47,8,8,8,7,Click,5,27,36,27,36,36,Audience,123127,Test Asset Group
+2023-12-15,180535609,180535614,Test Account,123461,123461,Test Group,12345683,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345662,Title,6,17,Image,72365477,Test Group Product,PartitionType,14,7,5,8,7,8,8,8,10,8,9,39,10,23428,Customer Name,9,8,9,48,9,9,9,8,Click,6,28,37,28,37,37,Audience,123128,Test Asset Group
+2023-12-16,180535609,180535615,Test Account,123462,123462,Test Group,12345684,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345663,Title,7,18,Image,72365478,Test Group Product,PartitionType,15,8,6,9,8,9,9,9,11,9,10,40,11,23429,Customer Name,10,9,10,49,10,10,10,9,Click,7,29,38,29,38,38,Audience,123129,Test Asset Group
+2023-12-17,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_daily_incremental.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_daily_incremental.csv
new file mode 100644
index 000000000000..7601690a58f9
--- /dev/null
+++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_daily_incremental.csv
@@ -0,0 +1,9 @@
+TimePeriod,AccountId,AccountNumber,AccountName,AdId,AdGroupId,AdGroupName,CampaignId,CampaignName,DestinationUrl,DeviceType,DeviceOS,Language,SearchQuery,Network,MerchantProductId,Title,ClickTypeId,TotalClicksOnAdElements,ClickType,AdGroupCriterionId,ProductGroup,PartitionType,Impressions,Clicks,Ctr,AverageCpc,Spend,Conversions,ConversionRate,Assists,CostPerAssist,Revenue,CostPerConversion,RevenuePerConversion,RevenuePerAssist,CustomerId,CustomerName,AssistedImpressions,AssistedClicks,AssistedConversions,AllConversions,AllRevenue,AllConversionRate,AllCostPerConversion,AllRevenuePerConversion,Goal,GoalType,AbsoluteTopImpressionRatePercent,AverageCpm,ConversionsQualified,AssistedConversionsQualified,AllConversionsQualified,CampaignType,AssetGroupId,AssetGroupName
+2023-12-17,180535609,180535609,Test Account,123456,123456,Test Group,12345678,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345657,Title,1,12,Image,72365472,Test Group Product,PartitionType,9,2,0,3,2,3,3,3,5,3,4,34,5,23423,Customer Name,4,3,4,43,4,4,4,3,Click,1,23,32,23,32,32,Audience,123123,Test Asset Group
+2023-12-18,180535609,180535610,Test Account,123457,123457,Test Group,12345679,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345658,Title,2,13,Image,72365473,Test Group Product,PartitionType,10,3,1,4,3,4,4,4,6,4,5,35,6,23424,Customer Name,5,4,5,44,5,5,5,4,Click,2,24,33,24,33,33,Audience,123124,Test Asset Group
+2023-12-19,180535609,180535611,Test Account,123458,123458,Test Group,12345680,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345659,Title,3,14,Image,72365474,Test Group Product,PartitionType,11,4,2,5,4,5,5,5,7,5,6,36,7,23425,Customer Name,6,5,6,45,6,6,6,5,Click,3,25,34,25,34,34,Audience,123125,Test Asset Group
+2023-12-20,180535609,180535612,Test Account,123459,123459,Test Group,12345681,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345660,Title,4,15,Image,72365475,Test Group Product,PartitionType,12,5,3,6,5,6,6,6,8,6,7,37,8,23426,Customer Name,7,6,7,46,7,7,7,6,Click,4,26,35,26,35,35,Audience,123126,Test Asset Group
+2023-12-21,180535609,180535613,Test Account,123460,123460,Test Group,12345682,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345661,Title,5,16,Image,72365476,Test Group Product,PartitionType,13,6,4,7,6,7,7,7,9,7,8,38,9,23427,Customer Name,8,7,8,47,8,8,8,7,Click,5,27,36,27,36,36,Audience,123127,Test Asset Group
+2023-12-22,180535609,180535614,Test Account,123461,123461,Test Group,12345683,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345662,Title,6,17,Image,72365477,Test Group Product,PartitionType,14,7,5,8,7,8,8,8,10,8,9,39,10,23428,Customer Name,9,8,9,48,9,9,9,8,Click,6,28,37,28,37,37,Audience,123128,Test Asset Group
+2023-12-23,180535609,180535615,Test Account,123462,123462,Test Group,12345684,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345663,Title,7,18,Image,72365478,Test Group Product,PartitionType,15,8,6,9,8,9,9,9,11,9,10,40,11,23429,Customer Name,10,9,10,49,10,10,10,9,Click,7,29,38,29,38,38,Audience,123129,Test Asset Group
+2023-12-24,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_hourly.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_hourly.csv
new file mode 100644
index 000000000000..779a5f1b37b1
--- /dev/null
+++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_hourly.csv
@@ -0,0 +1,25 @@
+TimePeriod,AccountId,AccountNumber,AccountName,AdId,AdGroupId,AdGroupName,CampaignId,CampaignName,DestinationUrl,DeviceType,DeviceOS,Language,SearchQuery,Network,MerchantProductId,Title,ClickTypeId,TotalClicksOnAdElements,ClickType,AdGroupCriterionId,ProductGroup,PartitionType,Impressions,Clicks,Ctr,AverageCpc,Spend,Conversions,ConversionRate,Assists,CostPerAssist,Revenue,CostPerConversion,RevenuePerConversion,RevenuePerAssist,CustomerId,CustomerName,AssistedImpressions,AssistedClicks,AssistedConversions,AllConversions,AllRevenue,AllConversionRate,AllCostPerConversion,AllRevenuePerConversion,Goal,GoalType,AbsoluteTopImpressionRatePercent,AverageCpm,ConversionsQualified,AssistedConversionsQualified,AllConversionsQualified,CampaignType,AssetGroupId,AssetGroupName
+2023-11-11|01,180535609,180535609,Test Account,123456,123456,Test Group,12345678,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345657,Title,1,12,Image,72365472,Test Group Product,PartitionType,9,2,0,3,2,3,3,3,5,3,4,34,5,23423,Customer Name,4,3,4,43,4,4,4,3,Click,1,23,32,23,32,32,Audience,123123,Test Asset Group
+2023-11-11|02,180535609,180535610,Test Account,123457,123457,Test Group,12345679,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345658,Title,2,13,Image,72365473,Test Group Product,PartitionType,10,3,1,4,3,4,4,4,6,4,5,35,6,23424,Customer Name,5,4,5,44,5,5,5,4,Click,2,24,33,24,33,33,Audience,123124,Test Asset Group
+2023-11-11|03,180535609,180535611,Test Account,123458,123458,Test Group,12345680,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345659,Title,3,14,Image,72365474,Test Group Product,PartitionType,11,4,2,5,4,5,5,5,7,5,6,36,7,23425,Customer Name,6,5,6,45,6,6,6,5,Click,3,25,34,25,34,34,Audience,123125,Test Asset Group
+2023-11-11|04,180535609,180535612,Test Account,123459,123459,Test Group,12345681,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345660,Title,4,15,Image,72365475,Test Group Product,PartitionType,12,5,3,6,5,6,6,6,8,6,7,37,8,23426,Customer Name,7,6,7,46,7,7,7,6,Click,4,26,35,26,35,35,Audience,123126,Test Asset Group
+2023-11-11|05,180535609,180535613,Test Account,123460,123460,Test Group,12345682,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345661,Title,5,16,Image,72365476,Test Group Product,PartitionType,13,6,4,7,6,7,7,7,9,7,8,38,9,23427,Customer Name,8,7,8,47,8,8,8,7,Click,5,27,36,27,36,36,Audience,123127,Test Asset Group
+2023-11-11|06,180535609,180535614,Test Account,123461,123461,Test Group,12345683,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345662,Title,6,17,Image,72365477,Test Group Product,PartitionType,14,7,5,8,7,8,8,8,10,8,9,39,10,23428,Customer Name,9,8,9,48,9,9,9,8,Click,6,28,37,28,37,37,Audience,123128,Test Asset Group
+2023-11-11|07,180535609,180535615,Test Account,123462,123462,Test Group,12345684,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345663,Title,7,18,Image,72365478,Test Group Product,PartitionType,15,8,6,9,8,9,9,9,11,9,10,40,11,23429,Customer Name,10,9,10,49,10,10,10,9,Click,7,29,38,29,38,38,Audience,123129,Test Asset Group
+2023-11-11|08,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-11|09,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-11|10,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-11|11,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-11|12,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-11|13,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-11|14,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-11|15,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-11|16,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-11|17,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-11|18,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-11|19,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-11|20,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-11|21,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-11|22,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-11|23,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-12|00,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_hourly_incremental.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_hourly_incremental.csv
new file mode 100644
index 000000000000..b185b3695bfb
--- /dev/null
+++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_hourly_incremental.csv
@@ -0,0 +1,25 @@
+TimePeriod,AccountId,AccountNumber,AccountName,AdId,AdGroupId,AdGroupName,CampaignId,CampaignName,DestinationUrl,DeviceType,DeviceOS,Language,SearchQuery,Network,MerchantProductId,Title,ClickTypeId,TotalClicksOnAdElements,ClickType,AdGroupCriterionId,ProductGroup,PartitionType,Impressions,Clicks,Ctr,AverageCpc,Spend,Conversions,ConversionRate,Assists,CostPerAssist,Revenue,CostPerConversion,RevenuePerConversion,RevenuePerAssist,CustomerId,CustomerName,AssistedImpressions,AssistedClicks,AssistedConversions,AllConversions,AllRevenue,AllConversionRate,AllCostPerConversion,AllRevenuePerConversion,Goal,GoalType,AbsoluteTopImpressionRatePercent,AverageCpm,ConversionsQualified,AssistedConversionsQualified,AllConversionsQualified,CampaignType,AssetGroupId,AssetGroupName
+2023-11-12|01,180535609,180535609,Test Account,123456,123456,Test Group,12345678,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345657,Title,1,12,Image,72365472,Test Group Product,PartitionType,9,2,0,3,2,3,3,3,5,3,4,34,5,23423,Customer Name,4,3,4,43,4,4,4,3,Click,1,23,32,23,32,32,Audience,123123,Test Asset Group
+2023-11-12|02,180535609,180535610,Test Account,123457,123457,Test Group,12345679,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345658,Title,2,13,Image,72365473,Test Group Product,PartitionType,10,3,1,4,3,4,4,4,6,4,5,35,6,23424,Customer Name,5,4,5,44,5,5,5,4,Click,2,24,33,24,33,33,Audience,123124,Test Asset Group
+2023-11-12|03,180535609,180535611,Test Account,123458,123458,Test Group,12345680,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345659,Title,3,14,Image,72365474,Test Group Product,PartitionType,11,4,2,5,4,5,5,5,7,5,6,36,7,23425,Customer Name,6,5,6,45,6,6,6,5,Click,3,25,34,25,34,34,Audience,123125,Test Asset Group
+2023-11-12|04,180535609,180535612,Test Account,123459,123459,Test Group,12345681,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345660,Title,4,15,Image,72365475,Test Group Product,PartitionType,12,5,3,6,5,6,6,6,8,6,7,37,8,23426,Customer Name,7,6,7,46,7,7,7,6,Click,4,26,35,26,35,35,Audience,123126,Test Asset Group
+2023-11-12|05,180535609,180535613,Test Account,123460,123460,Test Group,12345682,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345661,Title,5,16,Image,72365476,Test Group Product,PartitionType,13,6,4,7,6,7,7,7,9,7,8,38,9,23427,Customer Name,8,7,8,47,8,8,8,7,Click,5,27,36,27,36,36,Audience,123127,Test Asset Group
+2023-11-12|06,180535609,180535614,Test Account,123461,123461,Test Group,12345683,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345662,Title,6,17,Image,72365477,Test Group Product,PartitionType,14,7,5,8,7,8,8,8,10,8,9,39,10,23428,Customer Name,9,8,9,48,9,9,9,8,Click,6,28,37,28,37,37,Audience,123128,Test Asset Group
+2023-11-12|07,180535609,180535615,Test Account,123462,123462,Test Group,12345684,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345663,Title,7,18,Image,72365478,Test Group Product,PartitionType,15,8,6,9,8,9,9,9,11,9,10,40,11,23429,Customer Name,10,9,10,49,10,10,10,9,Click,7,29,38,29,38,38,Audience,123129,Test Asset Group
+2023-11-12|08,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-12|09,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-12|10,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-12|11,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-12|12,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-12|13,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-12|14,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-12|15,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-12|16,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-12|17,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-12|18,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-12|19,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-12|20,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-12|21,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-12|22,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-12|23,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
+2023-11-13|00,180535609,180535616,Test Account,123463,123463,Test Group,12345685,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345664,Title,8,19,Image,72365479,Test Group Product,PartitionType,16,9,7,10,9,10,10,10,12,10,11,41,12,23430,Customer Name,11,10,11,50,11,11,11,10,Click,8,30,39,30,39,39,Audience,123130,Test Asset Group
diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_monthly.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_monthly.csv
new file mode 100644
index 000000000000..c675a84b5e5d
--- /dev/null
+++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_monthly.csv
@@ -0,0 +1,7 @@
+TimePeriod,AccountId,AccountNumber,AccountName,AdId,AdGroupId,AdGroupName,CampaignId,CampaignName,DestinationUrl,DeviceType,DeviceOS,Language,SearchQuery,Network,MerchantProductId,Title,ClickTypeId,TotalClicksOnAdElements,ClickType,AdGroupCriterionId,ProductGroup,PartitionType,Impressions,Clicks,Ctr,AverageCpc,Spend,Conversions,ConversionRate,Assists,CostPerAssist,Revenue,CostPerConversion,RevenuePerConversion,RevenuePerAssist,CustomerId,CustomerName,AssistedImpressions,AssistedClicks,AssistedConversions,AllConversions,AllRevenue,AllConversionRate,AllCostPerConversion,AllRevenuePerConversion,Goal,GoalType,AbsoluteTopImpressionRatePercent,AverageCpm,ConversionsQualified,AssistedConversionsQualified,AllConversionsQualified,CampaignType,AssetGroupId,AssetGroupName
+2023-04-01,180535609,180535609,Test Account,123456,123456,Test Group,12345678,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345657,Title,1,12,Image,72365472,Test Group Product,PartitionType,9,2,0,3,2,3,3,3,5,3,4,34,5,23423,Customer Name,4,3,4,43,4,4,4,3,Click,1,23,32,23,32,32,Audience,123123,Test Asset Group
+2023-05-01,180535609,180535609,Test Account,123456,123456,Test Group,12345678,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345657,Title,1,12,Image,72365472,Test Group Product,PartitionType,9,2,0,3,2,3,3,3,5,3,4,34,5,23423,Customer Name,4,3,4,43,4,4,4,3,Click,1,23,32,23,32,32,Audience,123123,Test Asset Group
+2023-06-01,180535609,180535609,Test Account,123456,123456,Test Group,12345678,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345657,Title,1,12,Image,72365472,Test Group Product,PartitionType,9,2,0,3,2,3,3,3,5,3,4,34,5,23423,Customer Name,4,3,4,43,4,4,4,3,Click,1,23,32,23,32,32,Audience,123123,Test Asset Group
+2023-07-01,180535609,180535609,Test Account,123456,123456,Test Group,12345678,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345657,Title,1,12,Image,72365472,Test Group Product,PartitionType,9,2,0,3,2,3,3,3,5,3,4,34,5,23423,Customer Name,4,3,4,43,4,4,4,3,Click,1,23,32,23,32,32,Audience,123123,Test Asset Group
+2023-08-01,180535609,180535609,Test Account,123456,123456,Test Group,12345678,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345657,Title,1,12,Image,72365472,Test Group Product,PartitionType,9,2,0,3,2,3,3,3,5,3,4,34,5,23423,Customer Name,4,3,4,43,4,4,4,3,Click,1,23,32,23,32,32,Audience,123123,Test Asset Group
+2023-09-01,180535609,180535609,Test Account,123456,123456,Test Group,12345678,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345657,Title,1,12,Image,72365472,Test Group Product,PartitionType,9,2,0,3,2,3,3,3,5,3,4,34,5,23423,Customer Name,4,3,4,43,4,4,4,3,Click,1,23,32,23,32,32,Audience,123123,Test Asset Group
diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_monthly_incremental.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_monthly_incremental.csv
new file mode 100644
index 000000000000..a71ec1048149
--- /dev/null
+++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_monthly_incremental.csv
@@ -0,0 +1,7 @@
+TimePeriod,AccountId,AccountNumber,AccountName,AdId,AdGroupId,AdGroupName,CampaignId,CampaignName,DestinationUrl,DeviceType,DeviceOS,Language,SearchQuery,Network,MerchantProductId,Title,ClickTypeId,TotalClicksOnAdElements,ClickType,AdGroupCriterionId,ProductGroup,PartitionType,Impressions,Clicks,Ctr,AverageCpc,Spend,Conversions,ConversionRate,Assists,CostPerAssist,Revenue,CostPerConversion,RevenuePerConversion,RevenuePerAssist,CustomerId,CustomerName,AssistedImpressions,AssistedClicks,AssistedConversions,AllConversions,AllRevenue,AllConversionRate,AllCostPerConversion,AllRevenuePerConversion,Goal,GoalType,AbsoluteTopImpressionRatePercent,AverageCpm,ConversionsQualified,AssistedConversionsQualified,AllConversionsQualified,CampaignType,AssetGroupId,AssetGroupName
+2023-10-01,180535609,180535610,Test Account,123457,123457,Test Group,12345679,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345658,Title,2,13,Image,72365473,Test Group Product,PartitionType,10,3,1,4,3,4,4,4,6,4,5,35,6,23424,Customer Name,5,4,5,44,5,5,5,4,Click,2,24,33,24,33,33,Audience,123124,Test Asset Group
+2023-11-01,180535609,180535611,Test Account,123458,123458,Test Group,12345680,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345659,Title,3,14,Image,72365474,Test Group Product,PartitionType,11,4,2,5,4,5,5,5,7,5,6,36,7,23425,Customer Name,6,5,6,45,6,6,6,5,Click,3,25,34,25,34,34,Audience,123125,Test Asset Group
+2023-12-01,180535609,180535611,Test Account,123458,123458,Test Group,12345680,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345659,Title,3,14,Image,72365474,Test Group Product,PartitionType,11,4,2,5,4,5,5,5,7,5,6,36,7,23425,Customer Name,6,5,6,45,6,6,6,5,Click,3,25,34,25,34,34,Audience,123125,Test Asset Group
+2024-01-01,180535609,180535611,Test Account,123458,123458,Test Group,12345680,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345659,Title,3,14,Image,72365474,Test Group Product,PartitionType,11,4,2,5,4,5,5,5,7,5,6,36,7,23425,Customer Name,6,5,6,45,6,6,6,5,Click,3,25,34,25,34,34,Audience,123125,Test Asset Group
+2024-02-01,180535609,180535611,Test Account,123458,123458,Test Group,12345680,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345659,Title,3,14,Image,72365474,Test Group Product,PartitionType,11,4,2,5,4,5,5,5,7,5,6,36,7,23425,Customer Name,6,5,6,45,6,6,6,5,Click,3,25,34,25,34,34,Audience,123125,Test Asset Group
+2024-03-01,180535609,180535611,Test Account,123458,123458,Test Group,12345680,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345659,Title,3,14,Image,72365474,Test Group Product,PartitionType,11,4,2,5,4,5,5,5,7,5,6,36,7,23425,Customer Name,6,5,6,45,6,6,6,5,Click,3,25,34,25,34,34,Audience,123125,Test Asset Group
diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_weekly.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_weekly.csv
new file mode 100644
index 000000000000..45b1d47ab01d
--- /dev/null
+++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_weekly.csv
@@ -0,0 +1,4 @@
+TimePeriod,AccountId,AccountNumber,AccountName,AdId,AdGroupId,AdGroupName,CampaignId,CampaignName,DestinationUrl,DeviceType,DeviceOS,Language,SearchQuery,Network,MerchantProductId,Title,ClickTypeId,TotalClicksOnAdElements,ClickType,AdGroupCriterionId,ProductGroup,PartitionType,Impressions,Clicks,Ctr,AverageCpc,Spend,Conversions,ConversionRate,Assists,CostPerAssist,Revenue,CostPerConversion,RevenuePerConversion,RevenuePerAssist,CustomerId,CustomerName,AssistedImpressions,AssistedClicks,AssistedConversions,AllConversions,AllRevenue,AllConversionRate,AllCostPerConversion,AllRevenuePerConversion,Goal,GoalType,AbsoluteTopImpressionRatePercent,AverageCpm,ConversionsQualified,AssistedConversionsQualified,AllConversionsQualified,CampaignType,AssetGroupId,AssetGroupName +2023-12-04,180535609,180535609,Test Account,123456,123456,Test Group,12345678,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345657,Title,1,12,Image,72365472,Test Group Product,PartitionType,9,2,0,3,2,3,3,3,5,3,4,34,5,23423,Customer Name,4,3,4,43,4,4,4,3,Click,1,23,32,23,32,32,Audience,123123,Test Asset Group +2023-12-11,180535609,180535610,Test Account,123457,123457,Test Group,12345679,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345658,Title,2,13,Image,72365473,Test Group Product,PartitionType,10,3,1,4,3,4,4,4,6,4,5,35,6,23424,Customer Name,5,4,5,44,5,5,5,4,Click,2,24,33,24,33,33,Audience,123124,Test Asset Group +2023-12-25,180535609,180535611,Test Account,123458,123458,Test Group,12345680,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345659,Title,3,14,Image,72365474,Test Group Product,PartitionType,11,4,2,5,4,5,5,5,7,5,6,36,7,23425,Customer Name,6,5,6,45,6,6,6,5,Click,3,25,34,25,34,34,Audience,123125,Test Asset Group diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_weekly_incremental.csv b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_weekly_incremental.csv new file mode 100644 index 000000000000..c36c4d08d233 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/response/product_search_query_performance_report_weekly_incremental.csv @@ -0,0 +1,6 @@ +TimePeriod,AccountId,AccountNumber,AccountName,AdId,AdGroupId,AdGroupName,CampaignId,CampaignName,DestinationUrl,DeviceType,DeviceOS,Language,SearchQuery,Network,MerchantProductId,Title,ClickTypeId,TotalClicksOnAdElements,ClickType,AdGroupCriterionId,ProductGroup,PartitionType,Impressions,Clicks,Ctr,AverageCpc,Spend,Conversions,ConversionRate,Assists,CostPerAssist,Revenue,CostPerConversion,RevenuePerConversion,RevenuePerAssist,CustomerId,CustomerName,AssistedImpressions,AssistedClicks,AssistedConversions,AllConversions,AllRevenue,AllConversionRate,AllCostPerConversion,AllRevenuePerConversion,Goal,GoalType,AbsoluteTopImpressionRatePercent,AverageCpm,ConversionsQualified,AssistedConversionsQualified,AllConversionsQualified,CampaignType,AssetGroupId,AssetGroupName +2024-01-01,180535609,180535609,Test Account,123456,123456,Test Group,12345678,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345657,Title,1,12,Image,72365472,Test Group Product,PartitionType,9,2,0,3,2,3,3,3,5,3,4,34,5,23423,Customer Name,4,3,4,43,4,4,4,3,Click,1,23,32,23,32,32,Audience,123123,Test Asset Group +2024-01-08,180535609,180535610,Test Account,123457,123457,Test 
Group,12345679,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345658,Title,2,13,Image,72365473,Test Group Product,PartitionType,10,3,1,4,3,4,4,4,6,4,5,35,6,23424,Customer Name,5,4,5,44,5,5,5,4,Click,2,24,33,24,33,33,Audience,123124,Test Asset Group +2024-01-15,180535609,180535611,Test Account,123458,123458,Test Group,12345680,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345659,Title,3,14,Image,72365474,Test Group Product,PartitionType,11,4,2,5,4,5,5,5,7,5,6,36,7,23425,Customer Name,6,5,6,45,6,6,6,5,Click,3,25,34,25,34,34,Audience,123125,Test Asset Group +2024-01-22,180535609,180535612,Test Account,123459,123459,Test Group,12345681,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345660,Title,4,15,Image,72365475,Test Group Product,PartitionType,12,5,3,6,5,6,6,6,8,6,7,37,8,23426,Customer Name,7,6,7,46,7,7,7,6,Click,4,26,35,26,35,35,Audience,123126,Test Asset Group +2024-01-29,180535609,180535613,Test Account,123460,123460,Test Group,12345682,Test Campaign,https://destination.com,Computer,Windows,English,test query,Audience,12345661,Title,5,16,Image,72365476,Test Group Product,PartitionType,13,6,4,7,6,7,7,7,9,7,8,38,9,23427,Customer Name,8,7,8,47,8,8,8,7,Click,5,27,36,27,36,36,Audience,123127,Test Asset Group diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/audience_performance_report_daily_state.json b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/audience_performance_report_daily_state.json new file mode 100644 index 000000000000..4e4af135dedd --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/audience_performance_report_daily_state.json @@ -0,0 +1,5 @@ +{ + "180535609": { + "TimePeriod": "2023-12-17" + } +} diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/audience_performance_report_hourly_state.json b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/audience_performance_report_hourly_state.json new file mode 100644 index 000000000000..ceac901a39ac --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/audience_performance_report_hourly_state.json @@ -0,0 +1,5 @@ +{ + "180535609": { + "TimePeriod": "2023-11-12T00:00:00+00:00" + } +} diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/audience_performance_report_monthly_state.json b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/audience_performance_report_monthly_state.json new file mode 100644 index 000000000000..677eace46644 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/audience_performance_report_monthly_state.json @@ -0,0 +1,5 @@ +{ + "180535609": { + "TimePeriod": "2023-09-01" + } +} diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/audience_performance_report_weekly_state.json b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/audience_performance_report_weekly_state.json new file mode 100644 index 000000000000..2cd678d65a02 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/audience_performance_report_weekly_state.json @@ -0,0 +1,5 @@ +{ + "180535609": { + "TimePeriod": "2023-12-25" + } +} diff --git 
a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/goals_and_funnels_report_daily_state.json b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/goals_and_funnels_report_daily_state.json new file mode 100644 index 000000000000..4e4af135dedd --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/goals_and_funnels_report_daily_state.json @@ -0,0 +1,5 @@ +{ + "180535609": { + "TimePeriod": "2023-12-17" + } +} diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/goals_and_funnels_report_hourly_state.json b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/goals_and_funnels_report_hourly_state.json new file mode 100644 index 000000000000..ceac901a39ac --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/goals_and_funnels_report_hourly_state.json @@ -0,0 +1,5 @@ +{ + "180535609": { + "TimePeriod": "2023-11-12T00:00:00+00:00" + } +} diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/goals_and_funnels_report_monthly_state.json b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/goals_and_funnels_report_monthly_state.json new file mode 100644 index 000000000000..677eace46644 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/goals_and_funnels_report_monthly_state.json @@ -0,0 +1,5 @@ +{ + "180535609": { + "TimePeriod": "2023-09-01" + } +} diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/goals_and_funnels_report_weekly_state.json b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/goals_and_funnels_report_weekly_state.json new file mode 100644 index 000000000000..2cd678d65a02 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/goals_and_funnels_report_weekly_state.json @@ -0,0 +1,5 @@ +{ + "180535609": { + "TimePeriod": "2023-12-25" + } +} diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_search_query_performance_report_daily_state.json b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_search_query_performance_report_daily_state.json new file mode 100644 index 000000000000..4e4af135dedd --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_search_query_performance_report_daily_state.json @@ -0,0 +1,5 @@ +{ + "180535609": { + "TimePeriod": "2023-12-17" + } +} diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_search_query_performance_report_hourly_state.json b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_search_query_performance_report_hourly_state.json new file mode 100644 index 000000000000..ceac901a39ac --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_search_query_performance_report_hourly_state.json @@ -0,0 +1,5 @@ +{ + "180535609": { + "TimePeriod": "2023-11-12T00:00:00+00:00" + } +} diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_search_query_performance_report_monthly_state.json b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_search_query_performance_report_monthly_state.json new file mode 100644 index 000000000000..677eace46644 --- /dev/null +++ 
b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_search_query_performance_report_monthly_state.json @@ -0,0 +1,5 @@ +{ + "180535609": { + "TimePeriod": "2023-09-01" + } +} diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_search_query_performance_report_weekly_state.json b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_search_query_performance_report_weekly_state.json new file mode 100644 index 000000000000..2cd678d65a02 --- /dev/null +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/resource/state/product_search_query_performance_report_weekly_state.json @@ -0,0 +1,5 @@ +{ + "180535609": { + "TimePeriod": "2023-12-25" + } +} diff --git a/airbyte-integrations/connectors/source-bing-ads/unit_tests/test_source.py b/airbyte-integrations/connectors/source-bing-ads/unit_tests/test_source.py index 5293baef52b0..96d2c7c3ad67 100644 --- a/airbyte-integrations/connectors/source-bing-ads/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-bing-ads/unit_tests/test_source.py @@ -16,7 +16,7 @@ @patch.object(source_bing_ads.source, "Client") def test_streams_config_based(mocked_client, config): streams = SourceBingAds().streams(config) - assert len(streams) == 65 + assert len(streams) == 77 @patch.object(source_bing_ads.source, "Client") diff --git a/docs/integrations/sources/bing-ads.md b/docs/integrations/sources/bing-ads.md index 2214886b6137..5cdb8ad25123 100644 --- a/docs/integrations/sources/bing-ads.md +++ b/docs/integrations/sources/bing-ads.md @@ -122,6 +122,14 @@ The Bing Ads source connector supports the following streams. For more informati ### Report Streams +:::note + +Be careful when deselecting fields you don't want to sync in the Replication Stream Settings. +The report is generated by a request that includes all fields in the stream schema; deselecting fields in the settings does not change the actual report request. +The resulting report can therefore be inaccurate, because values in the deselected fields still affect it even though they are not visible. +If you face this issue, please use a custom report, where you can define only the fields you want to see in the report; no other fields will be used in the request. +::: + - [Account Performance Report Hourly](https://docs.microsoft.com/en-us/advertising/reporting-service/accountperformancereportrequest?view=bingads-13) - [Account Performance Report Daily](https://docs.microsoft.com/en-us/advertising/reporting-service/accountperformancereportrequest?view=bingads-13) - [Account Performance Report Weekly](https://docs.microsoft.com/en-us/advertising/reporting-service/accountperformancereportrequest?view=bingads-13) @@ -146,10 +154,18 @@ The Bing Ads source connector supports the following streams.
For more informati - [Age Gender Audience Report Daily](https://learn.microsoft.com/en-us/advertising/reporting-service/agegenderaudiencereportrequest?view=bingads-13) - [Age Gender Audience Report Weekly](https://learn.microsoft.com/en-us/advertising/reporting-service/agegenderaudiencereportrequest?view=bingads-13) - [Age Gender Audience Report Monthly](https://learn.microsoft.com/en-us/advertising/reporting-service/agegenderaudiencereportrequest?view=bingads-13) +- [Audience Performance Report Hourly](https://learn.microsoft.com/en-us/advertising/reporting-service/audienceperformancereportrequest?view=bingads-13) +- [Audience Performance Report Daily](https://learn.microsoft.com/en-us/advertising/reporting-service/audienceperformancereportrequest?view=bingads-13) +- [Audience Performance Report Weekly](https://learn.microsoft.com/en-us/advertising/reporting-service/audienceperformancereportrequest?view=bingads-13) +- [Audience Performance Report Monthly](https://learn.microsoft.com/en-us/advertising/reporting-service/audienceperformancereportrequest?view=bingads-13) - [Geographic Performance Report Hourly](https://learn.microsoft.com/en-us/advertising/reporting-service/geographicperformancereportrequest?view=bingads-13) - [Geographic Performance Report Daily](https://learn.microsoft.com/en-us/advertising/reporting-service/geographicperformancereportrequest?view=bingads-13) - [Geographic Performance Report Weekly](https://learn.microsoft.com/en-us/advertising/reporting-service/geographicperformancereportrequest?view=bingads-13) - [Geographic Performance Report Monthly](https://learn.microsoft.com/en-us/advertising/reporting-service/geographicperformancereportrequest?view=bingads-13) +- [Goals And Funnels Report Hourly](https://learn.microsoft.com/en-us/advertising/reporting-service/goalsandfunnelsreportrequest?view=bingads-13) +- [Goals And Funnels Report Daily](https://learn.microsoft.com/en-us/advertising/reporting-service/goalsandfunnelsreportrequest?view=bingads-13) +- [Goals And Funnels Report Weekly](https://learn.microsoft.com/en-us/advertising/reporting-service/goalsandfunnelsreportrequest?view=bingads-13) +- [Goals And Funnels Report Monthly](https://learn.microsoft.com/en-us/advertising/reporting-service/goalsandfunnelsreportrequest?view=bingads-13) - [Budget Summary Report](https://docs.microsoft.com/en-us/advertising/reporting-service/budgetsummaryreportrequest?view=bingads-13) - [Campaign Performance Report Hourly](https://docs.microsoft.com/en-us/advertising/reporting-service/campaignperformancereportrequest?view=bingads-13) - [Campaign Performance Report Daily](https://docs.microsoft.com/en-us/advertising/reporting-service/campaignperformancereportrequest?view=bingads-13) @@ -171,6 +187,10 @@ The Bing Ads source connector supports the following streams. 
For more informati - [Product Dimension Performance Report Daily](https://learn.microsoft.com/en-us/advertising/reporting-service/productdimensionperformancereportrequest?view=bingads-13) - [Product Dimension Performance Report Weekly](https://learn.microsoft.com/en-us/advertising/reporting-service/productdimensionperformancereportrequest?view=bingads-13) - [Product Dimension Performance Report Monthly](https://learn.microsoft.com/en-us/advertising/reporting-service/productdimensionperformancereportrequest?view=bingads-13) +- [Product Search Query Performance Report Hourly](https://learn.microsoft.com/en-us/advertising/reporting-service/productsearchqueryperformancereportrequest?view=bingads-13) +- [Product Search Query Performance Report Daily](https://learn.microsoft.com/en-us/advertising/reporting-service/productsearchqueryperformancereportrequest?view=bingads-13) +- [Product Search Query Performance Report Weekly](https://learn.microsoft.com/en-us/advertising/reporting-service/productsearchqueryperformancereportrequest?view=bingads-13) +- [Product Search Query Performance Report Monthly](https://learn.microsoft.com/en-us/advertising/reporting-service/productsearchqueryperformancereportrequest?view=bingads-13) - [Search Query Performance Report Hourly](https://learn.microsoft.com/en-us/advertising/reporting-service/searchqueryperformancereportrequest?view=bingads-13) - [Search Query Performance Report Daily](https://learn.microsoft.com/en-us/advertising/reporting-service/searchqueryperformancereportrequest?view=bingads-13) - [Search Query Performance Report Weekly](https://learn.microsoft.com/en-us/advertising/reporting-service/searchqueryperformancereportrequest?view=bingads-13) @@ -231,7 +251,8 @@ The Bing Ads API limits the number of requests for all Microsoft Advertising cli | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------| -| 2.2.0 | 2024-02-13 | [35201](https://github.com/airbytehq/airbyte/pull/35201) | New streams Budget and | +| 2.3.0 | 2024-03-05 | [35812](https://github.com/airbytehq/airbyte/pull/35812) | New streams: Audience Performance Report, Goals And Funnels Report, Product Dimension Performance Report. | +| 2.2.0 | 2024-02-13 | [35201](https://github.com/airbytehq/airbyte/pull/35201) | New streams: Budget and Product Dimension Performance. | | 2.1.4 | 2024-02-12 | [35179](https://github.com/airbytehq/airbyte/pull/35179) | Manage dependencies with Poetry. 
| | 2.1.3 | 2024-01-31 | [34712](https://github.com/airbytehq/airbyte/pull/34712) | Fix duplicated records for report-based streams | | 2.1.2 | 2024-01-09 | [34045](https://github.com/airbytehq/airbyte/pull/34045) | Speed up record transformation | From 0965ebd2e1958eec70ceffaa88346ee74d1c7ba4 Mon Sep 17 00:00:00 2001 From: Augustin Date: Thu, 7 Mar 2024 15:48:10 +0100 Subject: [PATCH 119/172] live-tests: add regression tests suite (#35837) --- airbyte-ci/connectors/live-tests/README.md | 13 + airbyte-ci/connectors/live-tests/poetry.lock | 44 +- .../connectors/live-tests/pyproject.toml | 10 +- .../live-tests/src/live_tests/cli.py | 2 +- .../commons/backends/file_backend.py | 8 +- .../live_tests/commons/connector_runner.py | 11 +- .../src/live_tests/commons/models.py | 72 +-- .../src/live_tests/debug/__init__.py | 4 +- .../live-tests/src/live_tests/debug/cli.py | 2 +- .../live_tests/regression_tests/.gitignore | 1 + .../live_tests/regression_tests/__init__.py | 1 + .../live_tests/regression_tests/conftest.py | 584 ++++++++++++++++++ .../live_tests/regression_tests/pytest.ini | 4 + .../regression_tests/test_expected_records.py | 20 + .../src/live_tests/regression_tests/utils.py | 19 + 15 files changed, 720 insertions(+), 75 deletions(-) create mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/.gitignore create mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/__init__.py create mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/conftest.py create mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/pytest.ini create mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/test_expected_records.py create mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/utils.py diff --git a/airbyte-ci/connectors/live-tests/README.md b/airbyte-ci/connectors/live-tests/README.md index 6e2999617724..0723dd868f39 100644 --- a/airbyte-ci/connectors/live-tests/README.md +++ b/airbyte-ci/connectors/live-tests/README.md @@ -102,7 +102,20 @@ And run: mitmweb --rfile=http_dump.mitm ``` +## Regression tests +We created a regression test suite that compares the outputs of connector commands across different versions of the same connector. +You can run the existing test suites with the following command: + +```bash +cd src/live_tests/regression_tests +poetry run pytest --connector-image=airbyte/source-pokeapi --config-path= --catalog-path= +``` + + ## Changelog +### 0.2.0 +Declare the regression test suite. + ### 0.1.0 Implement initial primitives and a `debug` command to run connector commands and persist the outputs to local storage. diff --git a/airbyte-ci/connectors/live-tests/poetry.lock b/airbyte-ci/connectors/live-tests/poetry.lock index 29dab322799d..9200049d8e31 100644 --- a/airbyte-ci/connectors/live-tests/poetry.lock +++ b/airbyte-ci/connectors/live-tests/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "airbyte-protocol-models" -version = "0.6.0" +version = "0.7.0" description = "Declares the Airbyte Protocol."
optional = false python-versions = ">=3.8" files = [ - {file = "airbyte_protocol_models-0.6.0-py3-none-any.whl", hash = "sha256:dda91403c9731ecbadffaf05dbe8d24f0d318a189d26fcb727627291837a085c"}, - {file = "airbyte_protocol_models-0.6.0.tar.gz", hash = "sha256:84a0bb0fbedc777f8066295960461ab4a8ab6af63985c21c39bb589569786bc2"}, + {file = "airbyte_protocol_models-0.7.0-py3-none-any.whl", hash = "sha256:0b038134f12eff2c5f8265751a6915f5d247fb15d62c878bdeb1a6fefe1eb59a"}, + {file = "airbyte_protocol_models-0.7.0.tar.gz", hash = "sha256:e084970365ff5c245d3dbfa58d0d2134e8f97455835e5a08dfd9be77b4be016c"}, ] [package.dependencies] @@ -851,28 +851,28 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "ruff" -version = "0.3.0" +version = "0.3.1" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.3.0-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:7deb528029bacf845bdbb3dbb2927d8ef9b4356a5e731b10eef171e3f0a85944"}, - {file = "ruff-0.3.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:e1e0d4381ca88fb2b73ea0766008e703f33f460295de658f5467f6f229658c19"}, - {file = "ruff-0.3.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f7dbba46e2827dfcb0f0cc55fba8e96ba7c8700e0a866eb8cef7d1d66c25dcb"}, - {file = "ruff-0.3.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:23dbb808e2f1d68eeadd5f655485e235c102ac6f12ad31505804edced2a5ae77"}, - {file = "ruff-0.3.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ef655c51f41d5fa879f98e40c90072b567c666a7114fa2d9fe004dffba00932"}, - {file = "ruff-0.3.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:d0d3d7ef3d4f06433d592e5f7d813314a34601e6c5be8481cccb7fa760aa243e"}, - {file = "ruff-0.3.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b08b356d06a792e49a12074b62222f9d4ea2a11dca9da9f68163b28c71bf1dd4"}, - {file = "ruff-0.3.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9343690f95710f8cf251bee1013bf43030072b9f8d012fbed6ad702ef70d360a"}, - {file = "ruff-0.3.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1f3ed501a42f60f4dedb7805fa8d4534e78b4e196f536bac926f805f0743d49"}, - {file = "ruff-0.3.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:cc30a9053ff2f1ffb505a585797c23434d5f6c838bacfe206c0e6cf38c921a1e"}, - {file = "ruff-0.3.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:5da894a29ec018a8293d3d17c797e73b374773943e8369cfc50495573d396933"}, - {file = "ruff-0.3.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:755c22536d7f1889be25f2baf6fedd019d0c51d079e8417d4441159f3bcd30c2"}, - {file = "ruff-0.3.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:dd73fe7f4c28d317855da6a7bc4aa29a1500320818dd8f27df95f70a01b8171f"}, - {file = "ruff-0.3.0-py3-none-win32.whl", hash = "sha256:19eacceb4c9406f6c41af806418a26fdb23120dfe53583df76d1401c92b7c14b"}, - {file = "ruff-0.3.0-py3-none-win_amd64.whl", hash = "sha256:128265876c1d703e5f5e5a4543bd8be47c73a9ba223fd3989d4aa87dd06f312f"}, - {file = "ruff-0.3.0-py3-none-win_arm64.whl", hash = "sha256:e3a4a6d46aef0a84b74fcd201a4401ea9a6cd85614f6a9435f2d33dd8cefbf83"}, - {file = "ruff-0.3.0.tar.gz", hash = "sha256:0886184ba2618d815067cf43e005388967b67ab9c80df52b32ec1152ab49f53a"}, + {file = "ruff-0.3.1-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = 
"sha256:6b82e3937d0d76554cd5796bc3342a7d40de44494d29ff490022d7a52c501744"}, + {file = "ruff-0.3.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ae7954c8f692b70e6a206087ae3988acc9295d84c550f8d90b66c62424c16771"}, + {file = "ruff-0.3.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b730f56ccf91225da0f06cfe421e83b8cc27b2a79393db9c3df02ed7e2bbc01"}, + {file = "ruff-0.3.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c78bfa85637668f47bd82aa2ae17de2b34221ac23fea30926f6409f9e37fc927"}, + {file = "ruff-0.3.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6abaad602d6e6daaec444cbf4d9364df0a783e49604c21499f75bb92237d4af"}, + {file = "ruff-0.3.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5f0c21b6914c3c9a25a59497cbb1e5b6c2d8d9beecc9b8e03ee986e24eee072e"}, + {file = "ruff-0.3.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:434c3fc72e6311c85cd143c4c448b0e60e025a9ac1781e63ba222579a8c29200"}, + {file = "ruff-0.3.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78a7025e6312cbba496341da5062e7cdd47d95f45c1b903e635cdeb1ba5ec2b9"}, + {file = "ruff-0.3.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52b02bb46f1a79b0c1fa93f6495bc7e77e4ef76e6c28995b4974a20ed09c0833"}, + {file = "ruff-0.3.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:11b5699c42f7d0b771c633d620f2cb22e727fb226273aba775a91784a9ed856c"}, + {file = "ruff-0.3.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:54e5dca3e411772b51194b3102b5f23b36961e8ede463776b289b78180df71a0"}, + {file = "ruff-0.3.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:951efb610c5844e668bbec4f71cf704f8645cf3106e13f283413969527ebfded"}, + {file = "ruff-0.3.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:09c7333b25e983aabcf6e38445252cff0b4745420fc3bda45b8fce791cc7e9ce"}, + {file = "ruff-0.3.1-py3-none-win32.whl", hash = "sha256:d937f9b99ebf346e0606c3faf43c1e297a62ad221d87ef682b5bdebe199e01f6"}, + {file = "ruff-0.3.1-py3-none-win_amd64.whl", hash = "sha256:c0318a512edc9f4e010bbaab588b5294e78c5cdc9b02c3d8ab2d77c7ae1903e3"}, + {file = "ruff-0.3.1-py3-none-win_arm64.whl", hash = "sha256:d3b60e44240f7e903e6dbae3139a65032ea4c6f2ad99b6265534ff1b83c20afa"}, + {file = "ruff-0.3.1.tar.gz", hash = "sha256:d30db97141fc2134299e6e983a6727922c9e03c031ae4883a6d69461de722ae7"}, ] [[package]] @@ -1058,4 +1058,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "da1743991debf2a5c9565ffd3494ebbf6f55b36f9279fd74e9697262e813d2db" +content-hash = "98437771d3bb81792186c952105888fb44215dc87f2a0004db79db8f58dd2814" diff --git a/airbyte-ci/connectors/live-tests/pyproject.toml b/airbyte-ci/connectors/live-tests/pyproject.toml index d88aed385a92..d3e0f2276a85 100644 --- a/airbyte-ci/connectors/live-tests/pyproject.toml +++ b/airbyte-ci/connectors/live-tests/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "live-tests" -version = "0.1.0" +version = "0.2.0" description = "Contains utilities for testing connectors against live data." 
authors = ["Airbyte "] license = "MIT" @@ -20,7 +20,6 @@ airbyte-protocol-models = "<1.0.0" cachetools = "~=5.3.3" dagger-io = "==0.9.6" pydantic = "*" -pytest = "~=8.0.2" pytest-asyncio = "~=0.23.5" pydash = "~=7.0.7" docker = ">=6,<7" @@ -37,10 +36,9 @@ types-cachetools = "^5.3.0.7" [tool.poe.tasks] test = "pytest tests" lint = "ruff check src" -type_check = "mypy src" +format = "ruff format src" +type_check = "mypy src --disallow-untyped-defs" +pre-push = ["format", "lint", "test", "type_check"] [tool.airbyte_ci] poe_tasks = ["test", "lint", "type_check"] - -[tool.pytest.ini_options] -pythonpath = ["src"] diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/cli.py b/airbyte-ci/connectors/live-tests/src/live_tests/cli.py index 15a6ec2fe925..5c7e22e56dad 100644 --- a/airbyte-ci/connectors/live-tests/src/live_tests/cli.py +++ b/airbyte-ci/connectors/live-tests/src/live_tests/cli.py @@ -6,7 +6,7 @@ @click.group() @click.pass_context -async def live_tests(ctx): +async def live_tests(ctx: click.Context) -> None: pass diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/file_backend.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/file_backend.py index 5588322aac2c..a7bea3cb184d 100644 --- a/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/file_backend.py +++ b/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/file_backend.py @@ -2,7 +2,7 @@ import json from pathlib import Path -from typing import Iterable, TextIO, Tuple +from typing import Any, Iterable, TextIO, Tuple import pydash from airbyte_protocol.models import AirbyteMessage # type: ignore @@ -12,9 +12,9 @@ class FileDescriptorLRUCache(LRUCache): - def popitem(self): + def popitem(self) -> Tuple[Any, Any]: filepath, fd = LRUCache.popitem(self) - fd.close() # Close the file descriptor when it's evicted from the cache + fd.close() # type: ignore # Close the file descriptor when it's evicted from the cache return filepath, fd @@ -33,7 +33,7 @@ class FileBackend(BaseBackend): def __init__(self, output_directory: Path): self._output_directory = output_directory - async def write(self, airbyte_messages: Iterable[AirbyteMessage]): + async def write(self, airbyte_messages: Iterable[AirbyteMessage]) -> None: """ Write AirbyteMessages to the appropriate file. 
diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/connector_runner.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/connector_runner.py index 66b5dcc61e57..ff1bacaf23dd 100644 --- a/airbyte-ci/connectors/live-tests/src/live_tests/commons/connector_runner.py +++ b/airbyte-ci/connectors/live-tests/src/live_tests/commons/connector_runner.py @@ -30,7 +30,7 @@ async def get_container_from_id(dagger_client: dagger.Client, container_id: str) pytest.exit(f"Failed to load connector container: {e}") -async def get_container_from_tarball_path(dagger_client: dagger.Client, tarball_path: Path): +async def get_container_from_tarball_path(dagger_client: dagger.Client, tarball_path: Path) -> dagger.Container: if not tarball_path.exists(): pytest.exit(f"Connector image tarball {tarball_path} does not exist") container_under_test_tar_file = ( @@ -149,7 +149,7 @@ def __init__( def _connector_under_test_container(self) -> dagger.Container: return self.connector_under_test.container - def _get_full_command(self, command: Command): + def _get_full_command(self, command: Command) -> List[str]: if command is Command.SPEC: return ["spec"] elif command is Command.CHECK: @@ -180,11 +180,12 @@ def _get_full_command(self, command: Command): async def get_container_env_variable_value(self, name: str) -> Optional[str]: return await self._connector_under_test_container.env_variable(name) - async def get_container_label(self, label: str): + async def get_container_label(self, label: str) -> Optional[str]: return await self._connector_under_test_container.label(label) - async def get_container_entrypoint(self): + async def get_container_entrypoint(self) -> str: entrypoint = await self._connector_under_test_container.entrypoint() + assert entrypoint, "The connector container has no entrypoint" return " ".join(entrypoint) async def run( @@ -251,7 +252,7 @@ async def _get_proxy_container( return proxy_container.with_exec(command) - async def _bind_connector_container_to_proxy(self, container: dagger.Container): + async def _bind_connector_container_to_proxy(self, container: dagger.Container) -> dagger.Container: proxy_srv = await self._get_proxy_container() proxy_host, proxy_port = "proxy_server", 8080 cert_path_in_volume = "/mitmproxy_dir/mitmproxy-ca.pem" diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/models.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/models.py index 0b9bbd58eda5..5425fca704f4 100644 --- a/airbyte-ci/connectors/live-tests/src/live_tests/commons/models.py +++ b/airbyte-ci/connectors/live-tests/src/live_tests/commons/models.py @@ -4,7 +4,7 @@ from dataclasses import dataclass, field from enum import Enum from pathlib import Path -from typing import Dict, List, Optional, Tuple +from typing import Any, Dict, Iterable, Iterator, List, MutableMapping, Optional, Tuple import _collections_abc import dagger @@ -14,73 +14,73 @@ from pydantic import ValidationError -class UserDict(_collections_abc.MutableMapping): +class UserDict(_collections_abc.MutableMapping): # type: ignore # Start by filling-out the abstract methods - def __init__(self, dict=None, /, **kwargs): - self.data = {} - if dict is not None: - self.update(dict) + def __init__(self, _dict: Optional[MutableMapping] = None, **kwargs: Any): + self.data: MutableMapping = {} + if _dict is not None: + self.update(_dict) if kwargs: self.update(kwargs) - def __len__(self): + def __len__(self) -> int: return len(self.data) - def __getitem__(self, key): + def __getitem__(self, key: Any) -> Any: 
if key in self.data: return self.data[key] if hasattr(self.__class__, "__missing__"): return self.__class__.__missing__(self, key) raise KeyError(key) - def __setitem__(self, key, item): + def __setitem__(self, key: Any, item: Any) -> None: self.data[key] = item - def __delitem__(self, key): + def __delitem__(self, key: Any) -> None: del self.data[key] - def __iter__(self): + def __iter__(self) -> Iterator: return iter(self.data) # Modify __contains__ to work correctly when __missing__ is present - def __contains__(self, key): + def __contains__(self, key: Any) -> bool: return key in self.data # Now, add the methods in dicts but not in MutableMapping - def __repr__(self): + def __repr__(self) -> str: return repr(self.data) - def __or__(self, other): + def __or__(self, other: "UserDict" | dict) -> "UserDict": if isinstance(other, UserDict): - return self.__class__(self.data | other.data) + return self.__class__(self.data | other.data) # type: ignore if isinstance(other, dict): - return self.__class__(self.data | other) + return self.__class__(self.data | other) # type: ignore return NotImplemented - def __ror__(self, other): + def __ror__(self, other: "UserDict" | dict) -> "UserDict": if isinstance(other, UserDict): - return self.__class__(other.data | self.data) + return self.__class__(other.data | self.data) # type: ignore if isinstance(other, dict): - return self.__class__(other | self.data) + return self.__class__(other | self.data) # type: ignore return NotImplemented - def __ior__(self, other): + def __ior__(self, other: "UserDict" | dict) -> "UserDict": if isinstance(other, UserDict): - self.data |= other.data + self.data |= other.data # type: ignore else: - self.data |= other + self.data |= other # type: ignore return self - def __copy__(self): + def __copy__(self) -> "UserDict": inst = self.__class__.__new__(self.__class__) inst.__dict__.update(self.__dict__) # Create a copy and avoid triggering descriptors inst.__dict__["data"] = self.__dict__["data"].copy() return inst - def copy(self): + def copy(self) -> "UserDict": if self.__class__ is UserDict: - return UserDict(self.data.copy()) + return UserDict(self.data.copy()) # type: ignore import copy data = self.data @@ -93,7 +93,7 @@ def copy(self): return c @classmethod - def fromkeys(cls, iterable, value=None): + def fromkeys(cls, iterable: Iterable, value: Optional[Any] = None) -> "UserDict": d = cls() for key in iterable: d[key] = value @@ -122,11 +122,11 @@ class ConnectorUnderTest: container: dagger.Container @property - def name(self): + def name(self) -> str: return self.image_name.replace("airbyte/", "").split(":")[0] @property - def version(self): + def version(self) -> str: return self.image_name.replace("airbyte/", "").split(":")[1] @@ -151,11 +151,11 @@ def to_dict(self) -> dict: "enable_http_cache": self.enable_http_cache, } - def raise_if_missing_attr_for_command(self, attribute: str): + def raise_if_missing_attr_for_command(self, attribute: str) -> None: if getattr(self, attribute) is None: raise ValueError(f"We need a {attribute} to run the {self.command.value} command") - def __post_init__(self): + def __post_init__(self) -> None: if self.command is Command.CHECK: self.raise_if_missing_attr_for_command("config") if self.command is Command.DISCOVER: @@ -178,7 +178,7 @@ class ExecutionResult: airbyte_messages: List[AirbyteMessage] = field(default_factory=list) airbyte_messages_parsing_errors: List[Tuple[Exception, str]] = field(default_factory=list) - def __post_init__(self): + def __post_init__(self) -> None: 
self.airbyte_messages, self.airbyte_messages_parsing_errors = self.parse_airbyte_messages_from_command_output(self.stdout) @staticmethod @@ -200,21 +200,22 @@ class ExecutionReport: execution_inputs: ExecutionInputs execution_result: ExecutionResult created_at: int = field(default_factory=lambda: int(time.time())) + saved_path: Optional[Path] = None @property def report_dir(self) -> str: - return f"{self.created_at}/{self.execution_inputs.connector_under_test.name}/{self.execution_inputs.command.value}/{self.execution_inputs.connector_under_test.version}/" + return f"{self.execution_inputs.connector_under_test.name}/{self.execution_inputs.command.value}/{self.execution_inputs.connector_under_test.version}/" @property - def stdout_filename(self): + def stdout_filename(self) -> str: return "stdout.log" @property - def stderr_filename(self): + def stderr_filename(self) -> str: return "stderr.log" @property - def http_dump_filename(self): + def http_dump_filename(self) -> str: return "http_dump.mitm" async def save_to_disk(self, output_dir: Path) -> None: @@ -233,3 +234,4 @@ async def save_to_disk(self, output_dir: Path) -> None: airbyte_messages_dir = final_dir / "airbyte_messages" airbyte_messages_dir.mkdir(parents=True, exist_ok=True) await FileBackend(airbyte_messages_dir).write(self.execution_result.airbyte_messages) + self.saved_path = final_dir diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/debug/__init__.py b/airbyte-ci/connectors/live-tests/src/live_tests/debug/__init__.py index 33baac2d2613..85a4c5094f0e 100644 --- a/airbyte-ci/connectors/live-tests/src/live_tests/debug/__init__.py +++ b/airbyte-ci/connectors/live-tests/src/live_tests/debug/__init__.py @@ -4,5 +4,7 @@ import os import sys -DAGGER_EXEC_TIMEOUT = dagger.Timeout(int(os.environ.get("DAGGER_EXEC_TIMEOUT", "3600"))) # One hour by default +DAGGER_EXEC_TIMEOUT = dagger.Timeout( + int(os.environ.get("DAGGER_EXEC_TIMEOUT", "3600")) +) # One hour by default DAGGER_CONFIG = dagger.Config(timeout=DAGGER_EXEC_TIMEOUT, log_output=sys.stderr) diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/debug/cli.py b/airbyte-ci/connectors/live-tests/src/live_tests/debug/cli.py index 9dc7fba9cfbc..f42633a87e95 100644 --- a/airbyte-ci/connectors/live-tests/src/live_tests/debug/cli.py +++ b/airbyte-ci/connectors/live-tests/src/live_tests/debug/cli.py @@ -73,7 +73,7 @@ async def debug_cmd( catalog_path: Optional[str], state_path: Optional[str], enable_http_cache: bool, -): +) -> None: output_directory.mkdir(parents=True, exist_ok=True) debug_session_start_time = int(time.time()) async with dagger.Connection(config=DAGGER_CONFIG) as dagger_client: diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/.gitignore b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/.gitignore new file mode 100644 index 000000000000..452eecef73dd --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/.gitignore @@ -0,0 +1 @@ +regression_tests_artifacts diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/__init__.py b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/__init__.py new file mode 100644 index 000000000000..f70ecfc3a89e --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/__init__.py @@ -0,0 +1 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
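The conftest.py added below coordinates its hooks and fixtures through `pytest.StashKey`: session-wide values (the session start timestamp, the artifacts directory, the Dagger log path) are computed once in `pytest_configure`, stashed on the config object, and read back from session-scoped fixtures. Here is a minimal sketch of that pattern; the key names and output path are illustrative, not the exact values used by the suite.

```python
import time
from pathlib import Path

import pytest

# Typed stash keys: pytest.StashKey carries the value type for type-checkers.
SESSION_START_TIMESTAMP = pytest.StashKey[int]()
TEST_ARTIFACT_DIRECTORY = pytest.StashKey[Path]()


def pytest_configure(config: pytest.Config) -> None:
    # Compute session-wide values once and stash them on the config object.
    start_timestamp = int(time.time())
    artifacts_dir = Path("./artifacts") / f"session_{start_timestamp}"
    artifacts_dir.mkdir(parents=True, exist_ok=True)
    config.stash[SESSION_START_TIMESTAMP] = start_timestamp
    config.stash[TEST_ARTIFACT_DIRECTORY] = artifacts_dir


@pytest.fixture(scope="session")
def test_artifacts_directory(request: pytest.FixtureRequest) -> Path:
    # Fixtures read the stashed values back through request.config.
    return request.config.stash[TEST_ARTIFACT_DIRECTORY]
```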
diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/conftest.py b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/conftest.py new file mode 100644 index 000000000000..ba60ce4a21b4 --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/conftest.py @@ -0,0 +1,584 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +from __future__ import annotations + +import logging +import os +import time +from pathlib import Path +from typing import TYPE_CHECKING, AsyncIterable, Callable, Dict, List, Optional + +import dagger +import pytest +from airbyte_protocol.models import ConfiguredAirbyteCatalog # type: ignore +from live_tests.commons.connector_runner import ConnectorRunner +from live_tests.commons.models import Command, ConnectorUnderTest, ExecutionInputs, ExecutionReport, ExecutionResult, SecretDict +from live_tests.commons.utils import get_connector_config, get_connector_under_test, get_state + +if TYPE_CHECKING: + from _pytest.config import Config + from _pytest.config.argparsing import Parser + from _pytest.fixtures import SubRequest + from pytest_sugar import SugarTerminalReporter # type: ignore + +LOGGER = logging.getLogger("regression_tests") +# It's used by Dagger and it's very verbose +logging.getLogger("httpx").setLevel(logging.ERROR) + +## STASH KEYS +SESSION_START_TIMESTAMP = pytest.StashKey[int]() +TEST_ARTIFACT_DIRECTORY = pytest.StashKey[Path]() +DAGGER_LOG_PATH = pytest.StashKey[Path]() + + +## PYTEST HOOKS +def pytest_addoption(parser: Parser) -> None: + parser.addoption( + "--output-directory", + default="./regression_tests_artifacts", + help="Path to a directory where the test execution reports will be stored", + ) + parser.addoption( + "--connector-image", + help="The connector image name on which the regression tests will run: e.g. airbyte/source-faker", + ) + parser.addoption( + "--control-version", + default="latest", + help="The control version used for regression testing. Defaults to latest", + ) + parser.addoption( + "--target-version", + default="dev", + help="The target version used for regression testing.
Defaults to latest", + ) + parser.addoption("--config-path") + parser.addoption("--catalog-path") + parser.addoption("--state-path") + + +def pytest_configure(config: Config) -> None: + start_timestamp = int(time.time()) + main_output_directory = Path(config.option.output_directory) + test_artifacts_directory = main_output_directory / f"session_{start_timestamp}" + test_artifacts_directory.mkdir(parents=True, exist_ok=True) + dagger_log_path = test_artifacts_directory / "dagger.log" + config.stash[SESSION_START_TIMESTAMP] = start_timestamp + config.stash[TEST_ARTIFACT_DIRECTORY] = test_artifacts_directory + dagger_log_path.touch() + config.stash[DAGGER_LOG_PATH] = dagger_log_path + + +def pytest_terminal_summary(terminalreporter: SugarTerminalReporter, exitstatus: int, config: Config) -> None: + terminalreporter.ensure_newline() + terminalreporter.section("Test artifacts", sep="=", bold=True, blue=True) + terminalreporter.line(f"All tests artifacts for this sessions should be available in {config.stash[TEST_ARTIFACT_DIRECTORY].resolve()}") + terminalreporter.section("Dagger logs", sep=".") + terminalreporter.line(f"Dagger logs are stored in {config.stash[DAGGER_LOG_PATH]}") + artifact_subsection: Dict[str, List[str]] = {} + for report in terminalreporter.reports: + properties_dict = { + record_property_key: record_property_value for record_property_key, record_property_value in report.user_properties + } + if "control_execution_report" in properties_dict or "target_execution_report" in properties_dict: + artifact_subsection[report.head_line] = [] + if "control_execution_report" in properties_dict: + artifact_subsection[report.head_line].append( + f"Control execution artifacts stored in {properties_dict['control_execution_report'].saved_path}" + ) + if "target_execution_report" in properties_dict: + artifact_subsection[report.head_line].append( + f"Target execution artifacts stored in {properties_dict['target_execution_report'].saved_path}" + ) + + if artifact_subsection: + terminalreporter.ensure_newline() + for section, artifact_lines in artifact_subsection.items(): + terminalreporter.ensure_newline() + terminalreporter.section(section, sep=".") + terminalreporter.line(os.linesep.join(artifact_lines)) + + +## HELPERS +async def persist_report( + request: SubRequest, + output_directory: Path, + execution_inputs: ExecutionInputs, + execution_result: ExecutionResult, + session_start_timestamp: int, +) -> ExecutionReport: + test_name = request.node.name + test_output_directory = Path(output_directory / test_name) + test_output_directory.mkdir(parents=True, exist_ok=True) + report = ExecutionReport(execution_inputs, execution_result, created_at=session_start_timestamp) + await report.save_to_disk(test_output_directory) + LOGGER.info(f"Execution report saved to {test_output_directory}") + return report + + +def get_option_or_fail(request: SubRequest, option: str) -> str: + if option_value := request.config.getoption(option): + return option_value + pytest.fail(f"Missing required option: {option}") + + +## FIXTURES + + +@pytest.fixture(scope="session") +def anyio_backend() -> str: + return "asyncio" + + +@pytest.fixture(scope="session") +def session_start_timestamp(request: SubRequest) -> int: + return request.config.stash[SESSION_START_TIMESTAMP] + + +@pytest.fixture(scope="session") +def test_artifacts_directory(request: SubRequest) -> Path: + return request.config.stash[TEST_ARTIFACT_DIRECTORY] + + +@pytest.fixture(scope="session") +def connector_image(request: SubRequest) -> str: + return 
get_option_or_fail(request, "--connector-image") + + +@pytest.fixture(scope="session") +def control_version(request: SubRequest) -> str: + return get_option_or_fail(request, "--control-version") + + +@pytest.fixture(scope="session") +def target_version(request: SubRequest) -> str: + return get_option_or_fail(request, "--target-version") + + +@pytest.fixture(scope="session") +def catalog(request: SubRequest) -> Optional[ConfiguredAirbyteCatalog]: + catalog_path = get_option_or_fail(request, "--catalog-path") + return ConfiguredAirbyteCatalog.parse_file(catalog_path) if catalog_path else None + + +@pytest.fixture(scope="session") +def connector_config(request: SubRequest) -> Optional[SecretDict]: + return get_connector_config(get_option_or_fail(request, "--config-path")) + + +@pytest.fixture(scope="session") +def state(request: SubRequest) -> Optional[dict]: + return get_state(get_option_or_fail(request, "--state-path")) + + +@pytest.fixture(scope="session") +def dagger_connection(request: SubRequest) -> dagger.Connection: + return dagger.Connection(dagger.Config(log_output=request.config.stash[DAGGER_LOG_PATH].open("w"))) + + +@pytest.fixture(scope="session") +async def dagger_client( + dagger_connection: dagger.Connection, +) -> AsyncIterable[dagger.Client]: + async with dagger_connection as client: + yield client + + +@pytest.fixture(scope="session") +async def control_connector(dagger_client: dagger.Client, connector_image: str, control_version: str) -> ConnectorUnderTest: + return await get_connector_under_test(dagger_client, f"{connector_image}:{control_version}") + + +@pytest.fixture(scope="session") +async def target_connector(dagger_client: dagger.Client, connector_image: str, target_version: str) -> ConnectorUnderTest: + return await get_connector_under_test(dagger_client, f"{connector_image}:{target_version}") + + +@pytest.fixture +def spec_control_execution_inputs( + control_connector: ConnectorUnderTest, +) -> ExecutionInputs: + return ExecutionInputs(connector_under_test=control_connector, command=Command.SPEC) + + +@pytest.fixture +def spec_control_connector_runner(dagger_client: dagger.Client, spec_control_execution_inputs: ExecutionInputs) -> ConnectorRunner: + return ConnectorRunner(dagger_client, **spec_control_execution_inputs.to_dict()) + + +@pytest.fixture +async def spec_control_execution_result( + record_property: Callable, + request: SubRequest, + test_artifacts_directory: Path, + spec_control_execution_inputs: ExecutionInputs, + spec_control_connector_runner: ConnectorRunner, + session_start_timestamp: int, +) -> ExecutionResult: + logging.info(f"Running spec for control connector {spec_control_execution_inputs.connector_under_test.name}") + execution_result = await spec_control_connector_runner.run() + execution_report = await persist_report( + request, + test_artifacts_directory, + spec_control_execution_inputs, + execution_result, + session_start_timestamp, + ) + record_property("control_execution_report", execution_report) + return execution_result + + +@pytest.fixture +def spec_target_execution_inputs( + target_connector: ConnectorUnderTest, +) -> ExecutionInputs: + return ExecutionInputs(connector_under_test=target_connector, command=Command.SPEC) + + +@pytest.fixture +def spec_target_connector_runner(dagger_client: dagger.Client, spec_target_execution_inputs: ExecutionInputs) -> ConnectorRunner: + return ConnectorRunner(dagger_client, **spec_target_execution_inputs.to_dict()) + + +@pytest.fixture +async def spec_target_execution_result( + record_property: 
Callable, + request: SubRequest, + test_artifacts_directory: Path, + spec_control_execution_result: ExecutionResult, + spec_target_execution_inputs: ExecutionInputs, + spec_target_connector_runner: ConnectorRunner, + session_start_timestamp: int, +) -> ExecutionResult: + logging.info(f"Running spec for target connector {spec_target_execution_inputs.connector_under_test.name}") + execution_result = await spec_target_connector_runner.run() + execution_report = await persist_report( + request, + test_artifacts_directory, + spec_target_execution_inputs, + execution_result, + session_start_timestamp, + ) + record_property("target_execution_report", execution_report) + return execution_result + + +@pytest.fixture +def check_control_execution_inputs(control_connector: ConnectorUnderTest, connector_config: SecretDict) -> ExecutionInputs: + return ExecutionInputs( + connector_under_test=control_connector, + command=Command.CHECK, + config=connector_config, + ) + + +@pytest.fixture +def check_control_connector_runner(dagger_client: dagger.Client, check_control_execution_inputs: ExecutionInputs) -> ConnectorRunner: + return ConnectorRunner(dagger_client, **check_control_execution_inputs.to_dict()) + + +@pytest.fixture +async def check_control_execution_result( + record_property: Callable, + request: SubRequest, + test_artifacts_directory: Path, + check_control_execution_inputs: ExecutionInputs, + check_control_connector_runner: ConnectorRunner, + session_start_timestamp: int, +) -> ExecutionResult: + logging.info(f"Running check for control connector {check_control_execution_inputs.connector_under_test.name}") + execution_result = await check_control_connector_runner.run() + execution_report = await persist_report( + request, + test_artifacts_directory, + check_control_execution_inputs, + execution_result, + session_start_timestamp, + ) + record_property("control_execution_report", execution_report) + return execution_result + + +@pytest.fixture +def check_target_execution_inputs(target_connector: ConnectorUnderTest, connector_config: SecretDict) -> ExecutionInputs: + return ExecutionInputs( + connector_under_test=target_connector, + command=Command.CHECK, + config=connector_config, + ) + + +@pytest.fixture +def check_target_connector_runner(dagger_client: dagger.Client, check_target_execution_inputs: ExecutionInputs) -> ConnectorRunner: + return ConnectorRunner(dagger_client, **check_target_execution_inputs.to_dict()) + + +@pytest.fixture +async def check_target_execution_result( + record_property: Callable, + request: SubRequest, + test_artifacts_directory: Path, + check_control_execution_result: ExecutionResult, + check_target_execution_inputs: ExecutionInputs, + check_target_connector_runner: ConnectorRunner, + session_start_timestamp: int, +) -> ExecutionResult: + logging.info(f"Running check for target connector {check_target_execution_inputs.connector_under_test.name}") + execution_result = await check_target_connector_runner.run() + execution_report = await persist_report( + request, + test_artifacts_directory, + check_target_execution_inputs, + execution_result, + session_start_timestamp, + ) + record_property("target_execution_report", execution_report) + return execution_result + + +@pytest.fixture +def discover_control_execution_inputs(control_connector: ConnectorUnderTest, connector_config: SecretDict) -> ExecutionInputs: + return ExecutionInputs( + connector_under_test=control_connector, + command=Command.DISCOVER, + config=connector_config, + ) + + +@pytest.fixture +async def 
discover_control_execution_result( + record_property: Callable, + request: SubRequest, + test_artifacts_directory: Path, + discover_control_execution_inputs: ExecutionInputs, + discover_control_connector_runner: ConnectorRunner, + session_start_timestamp: int, +) -> ExecutionResult: + logging.info(f"Running discover for control connector {discover_control_execution_inputs.connector_under_test.name}") + execution_result = await discover_control_connector_runner.run() + execution_report = await persist_report( + request, + test_artifacts_directory, + discover_control_execution_inputs, + execution_result, + session_start_timestamp, + ) + record_property("control_execution_report", execution_report) + return execution_result + + +@pytest.fixture +def discover_target_execution_inputs(target_connector: ConnectorUnderTest, connector_config: SecretDict) -> ExecutionInputs: + return ExecutionInputs( + connector_under_test=target_connector, + command=Command.DISCOVER, + config=connector_config, + ) + + +@pytest.fixture +def discover_control_connector_runner(dagger_client: dagger.Client, discover_control_execution_inputs: ExecutionInputs) -> ConnectorRunner: + return ConnectorRunner(dagger_client, **discover_control_execution_inputs.to_dict()) + + +@pytest.fixture +def discover_target_connector_runner(dagger_client: dagger.Client, discover_target_execution_inputs: ExecutionInputs) -> ConnectorRunner: + return ConnectorRunner(dagger_client, **discover_target_execution_inputs.to_dict()) + + +@pytest.fixture +async def discover_target_execution_result( + record_property: Callable, + request: SubRequest, + test_artifacts_directory: Path, + discover_control_execution_result: ExecutionResult, + discover_target_execution_inputs: ExecutionInputs, + discover_target_connector_runner: ConnectorRunner, + session_start_timestamp: int, +) -> ExecutionResult: + logging.info(f"Running discover for target connector {discover_target_execution_inputs.connector_under_test.name}") + execution_result = await discover_target_connector_runner.run() + execution_report = await persist_report( + request, + test_artifacts_directory, + discover_target_execution_inputs, + execution_result, + session_start_timestamp, + ) + record_property("target_execution_report", execution_report) + return execution_result + + +@pytest.fixture +def read_control_execution_inputs( + control_connector: ConnectorUnderTest, + connector_config: SecretDict, + catalog: ConfiguredAirbyteCatalog, +) -> ExecutionInputs: + return ExecutionInputs( + connector_under_test=control_connector, + command=Command.READ, + catalog=catalog, + config=connector_config, + ) + + +@pytest.fixture +def read_target_execution_inputs( + target_connector: ConnectorUnderTest, + connector_config: SecretDict, + catalog: ConfiguredAirbyteCatalog, +) -> ExecutionInputs: + return ExecutionInputs( + connector_under_test=target_connector, + command=Command.READ, + catalog=catalog, + config=connector_config, + ) + + +@pytest.fixture +def read_control_connector_runner(dagger_client: dagger.Client, read_control_execution_inputs: ExecutionInputs) -> ConnectorRunner: + return ConnectorRunner(dagger_client, **read_control_execution_inputs.to_dict()) + + +@pytest.fixture +async def read_control_execution_result( + record_property: Callable, + request: SubRequest, + test_artifacts_directory: Path, + read_control_execution_inputs: ExecutionInputs, + read_control_connector_runner: ConnectorRunner, + session_start_timestamp: int, +) -> ExecutionResult: + logging.info(f"Running read for control 
connector {read_control_execution_inputs.connector_under_test.name}") + execution_result = await read_control_connector_runner.run() + execution_report = await persist_report( + request, + test_artifacts_directory, + read_control_execution_inputs, + execution_result, + session_start_timestamp, + ) + record_property("control_execution_report", execution_report) + return execution_result + + +@pytest.fixture +def read_target_connector_runner(dagger_client: dagger.Client, read_target_execution_inputs: ExecutionInputs) -> ConnectorRunner: + return ConnectorRunner(dagger_client, **read_target_execution_inputs.to_dict()) + + +@pytest.fixture +async def read_target_execution_result( + record_property: Callable, + request: SubRequest, + test_artifacts_directory: Path, + read_control_execution_result: ExecutionResult, + read_target_execution_inputs: ExecutionInputs, + read_target_connector_runner: ConnectorRunner, + session_start_timestamp: int, +) -> ExecutionResult: + logging.info(f"Running read for target connector {read_target_execution_inputs.connector_under_test.name}") + execution_result = await read_target_connector_runner.run() + execution_report = await persist_report( + request, + test_artifacts_directory, + read_target_execution_inputs, + execution_result, + session_start_timestamp, + ) + record_property("target_execution_report", execution_report) + return execution_result + + +@pytest.fixture +def read_with_state_control_execution_inputs( + control_connector: ConnectorUnderTest, + connector_config: SecretDict, + catalog: ConfiguredAirbyteCatalog, + state: dict, +) -> ExecutionInputs: + return ExecutionInputs( + connector_under_test=control_connector, + command=Command.READ_WITH_STATE, + catalog=catalog, + config=connector_config, + state=state, + ) + + +@pytest.fixture +def read_with_state_target_execution_inputs( + target_connector: ConnectorUnderTest, + connector_config: SecretDict, + catalog: ConfiguredAirbyteCatalog, + state: dict, +) -> ExecutionInputs: + return ExecutionInputs( + connector_under_test=target_connector, + command=Command.READ_WITH_STATE, + catalog=catalog, + config=connector_config, + state=state, + ) + + +@pytest.fixture +def read_with_state_control_connector_runner( + dagger_client: dagger.Client, + read_with_state_control_execution_inputs: ExecutionInputs, +) -> ConnectorRunner: + return ConnectorRunner(dagger_client, **read_with_state_control_execution_inputs.to_dict()) + + +@pytest.fixture +async def read_with_state_control_execution_result( + record_property: Callable, + request: SubRequest, + test_artifacts_directory: Path, + read_with_state_control_execution_inputs: ExecutionInputs, + read_with_state_control_connector_runner: ConnectorRunner, + session_start_timestamp: int, +) -> ExecutionResult: + logging.info(f"Running read with state for control connector {read_with_state_control_execution_inputs.connector_under_test.name}") + execution_result = await read_with_state_control_connector_runner.run() + execution_report = await persist_report( + request, + test_artifacts_directory, + read_with_state_control_execution_inputs, + execution_result, + session_start_timestamp, + ) + record_property("control_execution_report", execution_report) + return execution_result + + +@pytest.fixture +def read_with_state_target_connector_runner( + dagger_client: dagger.Client, + read_with_state_target_execution_inputs: ExecutionInputs, +) -> ConnectorRunner: + return ConnectorRunner(dagger_client, **read_with_state_target_execution_inputs.to_dict()) + + +@pytest.fixture 
+async def read_with_state_target_execution_result( + record_property: Callable, + request: SubRequest, + test_artifacts_directory: Path, + read_with_state_control_execution_result: ExecutionResult, + read_with_state_target_execution_inputs: ExecutionInputs, + read_with_state_target_connector_runner: ConnectorRunner, + session_start_timestamp: int, +) -> ExecutionResult: + logging.info(f"Running read with state for target connector {read_with_state_target_execution_inputs.connector_under_test.name}") + execution_result = await read_with_state_target_connector_runner.run() + execution_report = await persist_report( + request, + test_artifacts_directory, + read_with_state_target_execution_inputs, + execution_result, + session_start_timestamp, + ) + record_property("target_execution_report", execution_report) + return execution_result diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/pytest.ini b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/pytest.ini new file mode 100644 index 000000000000..060aaa5a285f --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/pytest.ini @@ -0,0 +1,4 @@ +[pytest] +console_output_style = progress +log_cli = True +log_cli_level= INFO diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/test_expected_records.py b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/test_expected_records.py new file mode 100644 index 000000000000..74a8c26db977 --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/test_expected_records.py @@ -0,0 +1,20 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +import pytest +from live_tests.commons.models import ExecutionResult + +from .utils import filter_records, make_comparable_records + +pytestmark = [ + pytest.mark.anyio, +] + + +# This test is very basic and just used as a demonstration before porting the "real" expected records tests from VA +async def test_all_records_are_produced_in_target_version( + read_control_execution_result: ExecutionResult, + read_target_execution_result: ExecutionResult, +) -> None: + control_records = list(make_comparable_records(filter_records(read_control_execution_result.airbyte_messages))) + target_records = list(make_comparable_records(filter_records(read_target_execution_result.airbyte_messages))) + assert target_records == control_records diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/utils.py b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/utils.py new file mode 100644 index 000000000000..e8b26038b0d9 --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/utils.py @@ -0,0 +1,19 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
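For orientation, a minimal sketch (not part of the patch) of how this regression suite could be driven programmatically rather than from the pytest CLI. The option names mirror the `parser.addoption` calls in conftest.py above; `--output-directory` is assumed from `config.option.output_directory` in `pytest_configure`, and the image name, versions, and file paths below are placeholders.

```python
# Hypothetical programmatic invocation of the regression test suite.
# Option names come from conftest.py's parser.addoption calls; all values
# below are placeholders, and "--output-directory" is an assumption based
# on config.option.output_directory in pytest_configure.
import sys

import pytest

if __name__ == "__main__":
    sys.exit(
        pytest.main(
            [
                "src/live_tests/regression_tests",
                "--connector-image=airbyte/source-pokeapi",
                "--control-version=0.1.0",
                "--target-version=dev",
                "--config-path=secrets/config.json",
                "--catalog-path=integration_tests/configured_catalog.json",
                "--state-path=integration_tests/state.json",
                "--output-directory=/tmp/live_tests_artifacts",
            ]
        )
    )
```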
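In the same spirit as test_expected_records.py below, here is a hedged sketch of a companion test that reuses the `check_control_execution_result` and `check_target_execution_result` fixtures defined above; the `connectionStatus` field access assumes the standard `airbyte_protocol` message model rather than anything confirmed by this patch.

```python
# A hypothetical companion test (not part of the patch) comparing CHECK
# outputs between the control and target connector versions.
import pytest
from airbyte_protocol.models import Type  # type: ignore
from live_tests.commons.models import ExecutionResult

pytestmark = [pytest.mark.anyio]


def connection_statuses(result: ExecutionResult) -> list:
    # Assumes the standard protocol model: CONNECTION_STATUS messages carry
    # a connectionStatus payload with a `status` field.
    return [m.connectionStatus.status for m in result.airbyte_messages if m.type is Type.CONNECTION_STATUS]


async def test_check_status_is_unchanged_in_target_version(
    check_control_execution_result: ExecutionResult,
    check_target_execution_result: ExecutionResult,
) -> None:
    assert connection_statuses(check_target_execution_result) == connection_statuses(check_control_execution_result)
```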
+ +from typing import Iterable + +from airbyte_protocol.models import AirbyteMessage, Type # type: ignore + + +def filter_records(messages: Iterable[AirbyteMessage]) -> Iterable[AirbyteMessage]: + for message in messages: + if message.type is Type.RECORD: + yield message + + +def make_comparable_records( + record_messages: Iterable[AirbyteMessage], +) -> Iterable[AirbyteMessage]: + for message in record_messages: + message.record.emitted_at = 0 + yield message From 666a3a3a73880c6e2dda56e9662a720376317876 Mon Sep 17 00:00:00 2001 From: Stephane Geneix <147216312+stephane-airbyte@users.noreply.github.com> Date: Thu, 7 Mar 2024 07:04:36 -0800 Subject: [PATCH 120/172] source-mssql: reduce the number of containers used in tests (#35855) --- .../source/mssql/AbstractSshMssqlSourceAcceptanceTest.java | 3 +-- .../airbyte/integrations/source/mssql/MssqlSslSourceTest.java | 2 +- .../integrations/source/mssql/MsSQLContainerFactory.java | 1 + .../airbyte/integrations/source/mssql/MsSQLTestDatabase.java | 2 -- 4 files changed, 3 insertions(+), 5 deletions(-) diff --git a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/AbstractSshMssqlSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/AbstractSshMssqlSourceAcceptanceTest.java index b5d1c5468e90..a1b5deafcc8e 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/AbstractSshMssqlSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/AbstractSshMssqlSourceAcceptanceTest.java @@ -18,7 +18,6 @@ import io.airbyte.commons.functional.CheckedFunction; import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.source.mssql.MsSQLTestDatabase.BaseImage; -import io.airbyte.integrations.source.mssql.MsSQLTestDatabase.ContainerModifier; import io.airbyte.protocol.models.Field; import io.airbyte.protocol.models.JsonSchemaType; import io.airbyte.protocol.models.v0.CatalogHelpers; @@ -97,7 +96,7 @@ private static Database getDatabaseFromConfig(final JsonNode config) { @Override protected void setupEnvironment(final TestDestinationEnv environment) throws Exception { - testdb = MsSQLTestDatabase.in(BaseImage.MSSQL_2017, ContainerModifier.NETWORK); + testdb = MsSQLTestDatabase.in(BaseImage.MSSQL_2022); LOGGER.info("starting bastion"); bastion.initAndStartBastion(testdb.getContainer().getNetwork()); LOGGER.info("bastion started"); diff --git a/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/MssqlSslSourceTest.java b/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/MssqlSslSourceTest.java index 3b45cb7e8210..026e18a6a1c8 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/MssqlSslSourceTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/MssqlSslSourceTest.java @@ -29,7 +29,7 @@ public class MssqlSslSourceTest { @BeforeEach void setup() { - testDb = MsSQLTestDatabase.in(BaseImage.MSSQL_2022, ContainerModifier.WITH_SSL_CERTIFICATES); + testDb = MsSQLTestDatabase.in(BaseImage.MSSQL_2022, ContainerModifier.AGENT, ContainerModifier.WITH_SSL_CERTIFICATES); } @AfterEach diff --git 
a/airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSQLContainerFactory.java b/airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSQLContainerFactory.java index 98477dcf47d2..22314da513f9 100644 --- a/airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSQLContainerFactory.java +++ b/airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSQLContainerFactory.java @@ -17,6 +17,7 @@ protected MSSQLServerContainer<?> createNewContainer(DockerImageName imageName) imageName = imageName.asCompatibleSubstituteFor("mcr.microsoft.com/mssql/server"); var container = new MSSQLServerContainer<>(imageName).acceptLicense(); container.addEnv("MSSQL_MEMORY_LIMIT_MB", "384"); + withNetwork(container); return container; } diff --git a/airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSQLTestDatabase.java b/airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSQLTestDatabase.java index 2d3cf26fe9a4..b36301f0ed13 100644 --- a/airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSQLTestDatabase.java +++ b/airbyte-integrations/connectors/source-mssql/src/testFixtures/java/io/airbyte/integrations/source/mssql/MsSQLTestDatabase.java @@ -44,7 +44,6 @@ public class MsSQLTestDatabase extends TestDatabase<MSSQLServerContainer<?>, MsS public enum BaseImage { MSSQL_2022("mcr.microsoft.com/mssql/server:2022-latest"), - MSSQL_2017("mcr.microsoft.com/mssql/server:2017-latest"), ; public final String reference; @@ -57,7 +56,6 @@ public enum BaseImage { public enum ContainerModifier implements NamedContainerModifier<MSSQLServerContainer<?>> { - NETWORK(MsSQLContainerFactory::withNetwork), AGENT(MsSQLContainerFactory::withAgent), WITH_SSL_CERTIFICATES(MsSQLContainerFactory::withSslCertificates), ; From 6b26b2770a2636ec13fe670652c0bb02b39f8332 Mon Sep 17 00:00:00 2001 From: Stephane Geneix <147216312+stephane-airbyte@users.noreply.github.com> Date: Thu, 7 Mar 2024 07:16:02 -0800 Subject: [PATCH 121/172] speed up source-mssql tests (#35799) --- .../mssql/CdcMssqlSourceAcceptanceTest.java | 6 ++ .../mssql/CdcMssqlSourceDatatypeTest.java | 39 +++++++- .../source/mssql/CdcMssqlSourceTest.java | 5 +- .../source/mssql/CdcMssqlSslSourceTest.java | 6 +- .../source/mssql/CdcStateCompressionTest.java | 90 ++++++++++++------- 5 files changed, 106 insertions(+), 40 deletions(-) diff --git a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceAcceptanceTest.java index 152c36614141..0db3f1eb31cb 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceAcceptanceTest.java @@ -30,7 +30,13 @@ import java.util.List; import java.util.stream.Collectors; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInstance; +import org.junit.jupiter.api.TestInstance.Lifecycle; +import org.junit.jupiter.api.parallel.Execution; +import org.junit.jupiter.api.parallel.ExecutionMode;
+@TestInstance(Lifecycle.PER_METHOD) +@Execution(ExecutionMode.CONCURRENT) public class CdcMssqlSourceAcceptanceTest extends SourceAcceptanceTest { private static final String SCHEMA_NAME = "dbo"; diff --git a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceDatatypeTest.java b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceDatatypeTest.java index 62eec21314cb..892ef1593572 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceDatatypeTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test-integration/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceDatatypeTest.java @@ -8,9 +8,23 @@ import io.airbyte.cdk.db.Database; import io.airbyte.integrations.source.mssql.MsSQLTestDatabase.BaseImage; import io.airbyte.integrations.source.mssql.MsSQLTestDatabase.ContainerModifier; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import org.junit.jupiter.api.TestInstance; +import org.junit.jupiter.api.TestInstance.Lifecycle; +import org.junit.jupiter.api.parallel.Execution; +import org.junit.jupiter.api.parallel.ExecutionMode; +@TestInstance(Lifecycle.PER_METHOD) +@Execution(ExecutionMode.CONCURRENT) public class CdcMssqlSourceDatatypeTest extends AbstractMssqlSourceDatatypeTest { + private final ExecutorService executor = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors()); + @Override protected JsonNode getConfig() { return testdb.integrationTestConfigBuilder() @@ -27,17 +41,34 @@ protected Database setupDatabase() { } protected void createTables() throws Exception { - super.createTables(); + List<Callable<Object>> createTableTasks = new ArrayList<>(); + List<Callable<Object>> enableCdcForTableTasks = new ArrayList<>(); for (var test : testDataHolders) { - testdb.withCdcForTable(test.getNameSpace(), test.getNameWithTestPrefix(), null); + createTableTasks.add(() -> testdb.with(test.getCreateSqlQuery())); + enableCdcForTableTasks.add(() -> testdb.withCdcForTable(test.getNameSpace(), test.getNameWithTestPrefix(), null)); } + executor.invokeAll(createTableTasks); + executor.invokeAll(enableCdcForTableTasks); } protected void populateTables() throws Exception { - super.populateTables(); + List<Callable<Object>> insertTasks = new ArrayList<>(); + List<Callable<Object>> waitForCdcRecordsTasks = new ArrayList<>(); for (var test : testDataHolders) { - testdb.waitForCdcRecords(test.getNameSpace(), test.getNameWithTestPrefix(), test.getValues().size()); + insertTasks.add(() -> { + this.database.query((ctx) -> { + List<String> sql = test.getInsertSqlQueries(); + Objects.requireNonNull(ctx); + sql.forEach(ctx::fetch); + return null; + }); + return null; + }); + waitForCdcRecordsTasks.add(() -> testdb.waitForCdcRecords(test.getNameSpace(), test.getNameWithTestPrefix(), test.getExpectedValues().size())); } + // Run all inserts in parallel first, then wait for the CDC records to land. + executor.invokeAll(insertTasks); + executor.invokeAll(waitForCdcRecordsTasks); } @Override diff --git a/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceTest.java b/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceTest.java index 3ef4988cbe07..222f9e1c4fa5 100644 ---
a/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceTest.java @@ -70,10 +70,13 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestInstance; import org.junit.jupiter.api.TestInstance.Lifecycle; +import org.junit.jupiter.api.parallel.Execution; +import org.junit.jupiter.api.parallel.ExecutionMode; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@TestInstance(Lifecycle.PER_CLASS) +@TestInstance(Lifecycle.PER_METHOD) +@Execution(ExecutionMode.CONCURRENT) public class CdcMssqlSourceTest extends CdcSourceTest<MssqlSource, MsSQLTestDatabase> { private static final Logger LOGGER = LoggerFactory.getLogger(CdcSourceTest.class); diff --git a/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcMssqlSslSourceTest.java b/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcMssqlSslSourceTest.java index c16ac61805d0..06cb43739815 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcMssqlSslSourceTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcMssqlSslSourceTest.java @@ -18,8 +18,12 @@ import java.util.Map; import javax.sql.DataSource; import org.junit.jupiter.api.TestInstance; +import org.junit.jupiter.api.TestInstance.Lifecycle; +import org.junit.jupiter.api.parallel.Execution; +import org.junit.jupiter.api.parallel.ExecutionMode; -@TestInstance(TestInstance.Lifecycle.PER_CLASS) +@TestInstance(Lifecycle.PER_METHOD) +@Execution(ExecutionMode.CONCURRENT) public class CdcMssqlSslSourceTest extends CdcMssqlSourceTest { @Override diff --git a/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcStateCompressionTest.java b/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcStateCompressionTest.java index 3701e8237ff5..f919478751fc 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcStateCompressionTest.java +++ b/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcStateCompressionTest.java @@ -36,24 +36,45 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; import java.util.stream.Collectors; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class CdcStateCompressionTest { + private static final Logger LOGGER = LoggerFactory.getLogger(CdcStateCompressionTest.class); + static private final String CDC_ROLE_NAME = "cdc_selector"; static private final String TEST_USER_NAME_PREFIX = "cdc_test_user"; static private final String TEST_SCHEMA = "test_schema"; - static private final int TEST_TABLES = 10; + static private final int TEST_TABLES = 4; + // SQLServer tables can't have more than 1024 columns.
static private final int ADDED_COLUMNS = 1000; private MsSQLTestDatabase testdb; + private final ExecutorService executor = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors()); + private static final String ALTER_TABLE_ADD_COLUMN_SQL; + static { + StringBuilder sb = new StringBuilder(); + sb.append("ALTER TABLE ").append(TEST_SCHEMA).append(".%s ADD"); + for (int j = 0; j < ADDED_COLUMNS; j++) { + sb.append((j > 0) ? ", " : " ") + // Sqlserver column names can't be longer than 128 characters + .append("rather_long_column_name_________________________________________________________________________________________").append(j) + .append(" INT NULL"); + } + ALTER_TABLE_ADD_COLUMN_SQL = sb.toString(); + } @BeforeEach public void setup() throws Exception { @@ -64,20 +85,32 @@ public void setup() throws Exception { // Create a test schema and a bunch of test tables with CDC enabled. // Insert one row in each table so that they're not empty. testdb.with("CREATE SCHEMA %s;", TEST_SCHEMA); + List<Callable<Object>> createAndPopulateTableTasks = new ArrayList<>(); + List<Callable<Object>> waitForCdcRecordTasks = new ArrayList<>(); + List<Callable<Object>> alterTableTasks = new ArrayList<>(); + List<Callable<Object>> enableTableCdcTasks = new ArrayList<>(); + List<Callable<Object>> disableTableCdcTasks = new ArrayList<>(); + for (int i = 0; i < TEST_TABLES; i++) { String tableName = "test_table_%d".formatted(i); - String cdcInstanceName = "capture_instance_%d_%d".formatted(i, 1); - testdb + String initialCdcInstanceName = "capture_instance_%d_%d".formatted(i, 1); + String finalCdcInstanceName = "capture_instance_%d_%d".formatted(i, 2); + createAndPopulateTableTasks.add(() -> testdb .with("CREATE TABLE %s.%s (id INT IDENTITY(1,1) PRIMARY KEY);", TEST_SCHEMA, tableName) - .withCdcForTable(TEST_SCHEMA, tableName, CDC_ROLE_NAME, cdcInstanceName) - .with("INSERT INTO %s.%s DEFAULT VALUES", TEST_SCHEMA, tableName); - } + .withCdcForTable(TEST_SCHEMA, tableName, CDC_ROLE_NAME, initialCdcInstanceName) + .with("INSERT INTO %s.%s DEFAULT VALUES", TEST_SCHEMA, tableName)); + waitForCdcRecordTasks.add(() -> testdb.waitForCdcRecords(TEST_SCHEMA, tableName, initialCdcInstanceName, 1)); - for (int i = 0; i < TEST_TABLES; i++) { - String tableName = "test_table_%d".formatted(i); - String cdcInstanceName = "capture_instance_%d_%d".formatted(i, 1); - testdb.waitForCdcRecords(TEST_SCHEMA, tableName, cdcInstanceName, 1); + // Increase schema history size to trigger state compression. + // We do this by adding lots of columns with long names, + // then migrating to a new CDC capture instance for each table. + // This is admittedly somewhat awkward and perhaps could be improved. + alterTableTasks.add(() -> testdb.with(ALTER_TABLE_ADD_COLUMN_SQL.formatted(tableName))); + enableTableCdcTasks.add(() -> testdb.withCdcForTable(TEST_SCHEMA, tableName, CDC_ROLE_NAME, finalCdcInstanceName)); + disableTableCdcTasks.add(() -> testdb.withCdcDisabledForTable(TEST_SCHEMA, tableName, initialCdcInstanceName)); } + executor.invokeAll(createAndPopulateTableTasks); + executor.invokeAll(waitForCdcRecordTasks); // Create a test user to be used by the source, with proper permissions. testdb @@ -91,28 +124,9 @@ public void setup() throws Exception { .with("GRANT VIEW SERVER STATE TO %s", testUserName()) .with("USE [%s]", testdb.getDatabaseName()) .with("EXEC sp_addrolemember N'%s', N'%s';", CDC_ROLE_NAME, testUserName()); -
- // We do this by adding lots of columns with long names, - // then migrating to a new CDC capture instance for each table. - // This is admittedly somewhat awkward and perhaps could be improved. - - for (int i = 0; i < TEST_TABLES; i++) { - String tableName = "test_table_%d".formatted(i); - String cdcInstanceName = "capture_instance_%d_%d".formatted(i, 2); - String oldCdcInstanceName = "capture_instance_%d_%d".formatted(i, 1); - final var sb = new StringBuilder(); - sb.append("ALTER TABLE ").append(TEST_SCHEMA).append(".").append(tableName).append(" ADD"); - for (int j = 0; j < ADDED_COLUMNS; j++) { - sb.append((j > 0) ? ", " : " ") - .append("rather_long_column_name_________________________________________________________________________________________").append(j) - .append(" INT NULL"); - } - testdb - .with(sb.toString()) - .withCdcForTable(TEST_SCHEMA, tableName, CDC_ROLE_NAME, cdcInstanceName) - .withCdcDisabledForTable(TEST_SCHEMA, tableName, oldCdcInstanceName); - } + executor.invokeAll(alterTabletasks); + executor.invokeAll(enableTableCdctasks); + executor.invokeAll(disableTableCdctasks); } private AirbyteCatalog getCatalog() { @@ -151,7 +165,7 @@ private JsonNode config() { .with("is_test", true) .with("replication_method", Map.of( "method", "CDC", - "initial_waiting_seconds", 60)) + "initial_waiting_seconds", 20)) .build(); } @@ -182,11 +196,19 @@ public void testCompressedSchemaHistory() throws Exception { assertEquals("1", record.getData().get("id").toString()); } + LOGGER.info("inserting new data into test tables"); + List> waitForCdcTasks = new ArrayList<>(); // Insert a bunch of records (1 per table, again). for (int i = 0; i < TEST_TABLES; i++) { - testdb.with("INSERT %s.test_table_%d DEFAULT VALUES;", TEST_SCHEMA, i); + String tableName = "test_table_%d".formatted(i); + String cdcInstanceName = "capture_instance_%d_%d".formatted(i, 2); + testdb.with("INSERT %s.%s DEFAULT VALUES;", TEST_SCHEMA, tableName); + waitForCdcTasks.add(() -> testdb.waitForCdcRecords(TEST_SCHEMA, tableName, cdcInstanceName, 1)); } + LOGGER.info("waiting for CDC records"); + executor.invokeAll(waitForCdcTasks); + LOGGER.info("starting second sync"); // Second sync. 
final var secondBatchStateForRead = Jsons.jsonNode(Collections.singletonList(Iterables.getLast(extractStateMessages(dataFromFirstBatch)))); final var secondBatchIterator = source().read(config(), getConfiguredCatalog(), secondBatchStateForRead); From 88314dd003c9d60f26dad4d27f08b30031c94b3a Mon Sep 17 00:00:00 2001 From: Patrick Nilan Date: Thu, 7 Mar 2024 07:35:25 -0800 Subject: [PATCH 122/172] Source Metabase: Updates `dashboards` stream, migrates to poetry (#35680) --- .../connectors/source-metabase/Dockerfile | 16 - .../connectors/source-metabase/README.md | 101 +- .../acceptance-test-config.yml | 6 +- .../integration_tests/configured_catalog.json | 40 + .../connectors/source-metabase/metadata.yaml | 20 +- .../connectors/source-metabase/poetry.lock | 1048 +++++++++++++++++ .../connectors/source-metabase/pyproject.toml | 31 + .../source-metabase/requirements.txt | 1 - .../connectors/source-metabase/setup.py | 45 - .../source_metabase/manifest.yaml | 26 +- .../source_metabase/schemas/dashboards.json | 64 +- .../source_metabase/schemas/databases.json | 12 +- .../schemas/native_query_snippets.json | 4 +- .../sources/metabase-migrations.md | 55 + docs/integrations/sources/metabase.md | 17 +- 15 files changed, 1325 insertions(+), 161 deletions(-) delete mode 100644 airbyte-integrations/connectors/source-metabase/Dockerfile create mode 100644 airbyte-integrations/connectors/source-metabase/poetry.lock create mode 100644 airbyte-integrations/connectors/source-metabase/pyproject.toml delete mode 100644 airbyte-integrations/connectors/source-metabase/requirements.txt delete mode 100644 airbyte-integrations/connectors/source-metabase/setup.py create mode 100644 docs/integrations/sources/metabase-migrations.md diff --git a/airbyte-integrations/connectors/source-metabase/Dockerfile b/airbyte-integrations/connectors/source-metabase/Dockerfile deleted file mode 100644 index f076026e7910..000000000000 --- a/airbyte-integrations/connectors/source-metabase/Dockerfile +++ /dev/null @@ -1,16 +0,0 @@ -FROM python:3.9-slim - -# Bash is installed for more convenient debugging. -RUN apt-get update && apt-get install -y bash && rm -rf /var/lib/apt/lists/* - -WORKDIR /airbyte/integration_code -COPY source_metabase ./source_metabase -COPY main.py ./ -COPY setup.py ./ -RUN pip install . - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=1.1.0 -LABEL io.airbyte.name=airbyte/source-metabase diff --git a/airbyte-integrations/connectors/source-metabase/README.md b/airbyte-integrations/connectors/source-metabase/README.md index e6519fbb23f8..86bf75320d09 100644 --- a/airbyte-integrations/connectors/source-metabase/README.md +++ b/airbyte-integrations/connectors/source-metabase/README.md @@ -1,76 +1,91 @@ -# Metabase Source +# Metabase source connector + This is the repository for the Metabase source connector, written in Python. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/metabase). +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/sources/metabase). 
## Local development ### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** +* Python (~=3.7) +* Poetry (~=1.7) - installation instructions [here](https://python-poetry.org/docs/#installation) -#### Minimum Python version required `= 3.7.0` -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` - -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt +### Installing the connector +From this connector directory, run: +```bash +poetry install --with dev ``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. -#### Create credentials -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/metabase) +### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/metabase) to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_metabase/spec.yaml` file. Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. -See `sample_files/sample_config.json` for a sample config file. - -**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source metabase test creds` -and place them into `secrets/config.json`. +See `sample_files/config.json` for a sample config file. ### Locally running the connector ``` -python main.py spec -python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog sample_files/configured_catalog.json +poetry run source-metabase spec +poetry run source-metabase check --config secrets/config.json +poetry run source-metabase discover --config secrets/config.json +poetry run source-metabase read --config secrets/config.json --catalog sample_files/configured_catalog.json ``` -### Unit Tests +### Running unit tests To run unit tests locally, from the connector directory run: ``` -python -m pytest unit_tests +poetry run pytest unit_tests ``` -#### Acceptance Tests -Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. -If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. -To run your integration tests with acceptance tests, from the connector root, run +### Building the docker image +1. 
Install [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) +2. Run the following command to build the docker image: +```bash +airbyte-ci connectors --name=source-metabase build +``` + +An image will be available on your host with the tag `airbyte/source-metabase:dev`. + + +### Running as a docker container +Then run any of the connector commands as follows: ``` -python -m pytest integration_tests -p integration_tests.acceptance +docker run --rm airbyte/source-metabase:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-metabase:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-metabase:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-metabase:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json ``` -## Dependency Management -All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +### Running our CI test suite +You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): +```bash +airbyte-ci connectors --name=source-metabase test +``` + +### Customizing acceptance Tests +Customize `acceptance-test-config.yml` file to configure acceptance tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. -### Publishing a new version of the connector +### Dependency Management +All of your dependencies should be managed via Poetry. +To add a new dependency, run: +```bash +poetry add +``` + +Please commit the changes to `pyproject.toml` and `poetry.lock` files. + +## Publishing a new version of the connector You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? 1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-metabase test` -2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors). +2. Bump the connector version (please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors)): + - bump the `dockerImageTag` value in in `metadata.yaml` + - bump the `version` value in `pyproject.toml` 3. Make sure the `metadata.yaml` content is up to date. -4. Make the connector documentation and its changelog is up to date (`docs/integrations/sources/metabase.md`). +4. Make sure the connector documentation and its changelog is up to date (`docs/integrations/sources/metabase.md`). 5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). 6. Pat yourself on the back for being an awesome contributor. 7. 
Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. - +8. Once your PR is merged, the new version of the connector will be automatically published to Docker Hub and our connector registry. \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-metabase/acceptance-test-config.yml b/airbyte-integrations/connectors/source-metabase/acceptance-test-config.yml index 327271ee1ef0..1e02dd309a11 100644 --- a/airbyte-integrations/connectors/source-metabase/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-metabase/acceptance-test-config.yml @@ -23,12 +23,14 @@ acceptance_tests: empty_streams: - name: cards bypass_reason: "data changes very fast" - - name: collections + - name: databases bypass_reason: "data changes very fast" - - name: dashboards + - name: collections bypass_reason: "data changes very fast" - name: users bypass_reason: "data changes very fast" + - name: native_query_snippets + bypass_reason: "data changes very fast" fail_on_extra_columns: false full_refresh: tests: diff --git a/airbyte-integrations/connectors/source-metabase/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-metabase/integration_tests/configured_catalog.json index 3bfb3cea5fc9..ad73f5b0eca4 100644 --- a/airbyte-integrations/connectors/source-metabase/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-metabase/integration_tests/configured_catalog.json @@ -1,5 +1,15 @@ { "streams": [ + { + "stream": { + "name": "cards", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"], + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, { "stream": { "name": "collections", @@ -19,6 +29,36 @@ }, "sync_mode": "full_refresh", "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "databases", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"], + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "native_query_snippets", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"], + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "dashboards", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"], + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" } ] } diff --git a/airbyte-integrations/connectors/source-metabase/metadata.yaml b/airbyte-integrations/connectors/source-metabase/metadata.yaml index 18f35409e27e..3dc96fcef836 100644 --- a/airbyte-integrations/connectors/source-metabase/metadata.yaml +++ b/airbyte-integrations/connectors/source-metabase/metadata.yaml @@ -5,26 +5,36 @@ data: allowedHosts: hosts: - "*" + connectorBuildOptions: + baseImage: docker.io/airbyte/python-connector-base:1.2.0@sha256:c22a9d97464b69d6ef01898edf3f8612dc11614f05a84984451dde195f337db9 connectorSubtype: api connectorType: source definitionId: c7cb421b-942e-4468-99ee-e369bcabaec5 - dockerImageTag: 1.1.0 + dockerImageTag: 2.0.0 dockerRepository: airbyte/source-metabase documentationUrl: https://docs.airbyte.com/integrations/sources/metabase githubIssueLabel: source-metabase icon: metabase.svg license: MIT name: Metabase - remoteRegistries: - pypi: - enabled: true - packageName: airbyte-source-metabase registries: cloud: 
enabled: true oss: enabled: true + releases: + breakingChanges: + 2.0.0: + message: "The `dashboard` stream's endpoint has been updated due to the previous endpoint being deprecated by the service. The new version no longer returns the `creator` field. After upgrading, please reset the schema for the stream. Additionally, the destination may require updating to handle the updated schema." + upgradeDeadline: "2024-04-01" + scopedImpact: + - scopeType: stream + impactedScopes: ["dashboards"] releaseStage: beta + remoteRegistries: + pypi: + enabled: true + packageName: airbyte-source-metabase supportLevel: community tags: - language:python diff --git a/airbyte-integrations/connectors/source-metabase/poetry.lock b/airbyte-integrations/connectors/source-metabase/poetry.lock new file mode 100644 index 000000000000..003d217b9849 --- /dev/null +++ b/airbyte-integrations/connectors/source-metabase/poetry.lock @@ -0,0 +1,1048 @@ +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. + +[[package]] +name = "airbyte-cdk" +version = "0.67.1" +description = "A framework for writing Airbyte Connectors." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte-cdk-0.67.1.tar.gz", hash = "sha256:3f82be93ae6f574c70d7ad5352d34f9235e86bd74c0db14a0aa7d246f3a403c2"}, + {file = "airbyte_cdk-0.67.1-py3-none-any.whl", hash = "sha256:b1de0f004441a2ae6e2928e55f7ac31bd160af30e928ffda90eb75b5e3c56bf3"}, +] + +[package.dependencies] +airbyte-protocol-models = "0.5.1" +backoff = "*" +cachetools = "*" +Deprecated = ">=1.2,<2.0" +dpath = ">=2.0.1,<2.1.0" +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<1.0" +jsonschema = ">=3.2.0,<3.3.0" +pendulum = "<3.0.0" +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +python-dateutil = "*" +PyYAML = ">=6.0.1" +requests = "*" +requests-cache = "*" +wcmatch = "8.4" + +[package.extras] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.5.1" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "atomicwrites" +version = "1.4.1" +description = "Atomic file writes." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." 
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"},
+    {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"},
+]
+
+[[package]]
+name = "charset-normalizer"
+version = "3.3.2"
+description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
+optional = false
+python-versions = ">=3.7.0"
+files = [
+    {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"},
+    {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"},
+    {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"},
+    {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"},
+    {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"},
+]
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+description = "Cross-platform colored terminal text."
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+files = [
+    {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+    {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+
+[[package]]
+name = "deprecated"
+version = "1.2.14"
+description = "Python @deprecated decorator to deprecate old python classes, functions or methods."
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+    {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"},
+    {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"},
+]
+
+[package.dependencies]
+wrapt = ">=1.10,<2"
+
+[package.extras]
+dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"]
+
+[[package]]
+name = "dpath"
+version = "2.0.8"
+description = "Filesystem-like pathing and searching for dictionaries"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"},
+    {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"},
+]
+
+[[package]]
+name = "exceptiongroup"
+version = "1.2.0"
+description = "Backport of PEP 654 (exception groups)"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"},
+    {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"},
+]
+
+[package.extras]
+test = ["pytest (>=6)"]
+
+[[package]]
+name = "genson"
+version = "1.2.2"
+description = "GenSON is a powerful, user-friendly JSON Schema generator."
+optional = false
+python-versions = "*"
+files = [
+    {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"},
+]
+
+[[package]]
+name = "idna"
+version = "3.6"
+description = "Internationalized Domain Names in Applications (IDNA)"
+optional = false
+python-versions = ">=3.5"
+files = [
+    {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"},
+    {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"},
+]
+
+[[package]]
+name = "iniconfig"
+version = "2.0.0"
+description = "brain-dead simple config-ini parsing"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
+    {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
+]
+
+[[package]]
+name = "isodate"
+version = "0.6.1"
+description = "An ISO 8601 date/time/duration parser and formatter"
+optional = false
+python-versions = "*"
+files = [
+    {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"},
+    {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"},
+]
+
+[package.dependencies]
+six = "*"
+
+[[package]]
+name = "jinja2"
+version = "3.1.3"
+description = "A very fast and expressive template engine."
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"},
+    {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"},
+]
+
+[package.dependencies]
+MarkupSafe = ">=2.0"
+
+[package.extras]
+i18n = ["Babel (>=2.7)"]
+
+[[package]]
+name = "jsonref"
+version = "0.3.0"
+description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python."
+optional = false
+python-versions = ">=3.3,<4.0"
+files = [
+    {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"},
+    {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"},
+]
+
+[[package]]
+name = "jsonschema"
+version = "3.2.0"
+description = "An implementation of JSON Schema validation for Python"
+optional = false
+python-versions = "*"
+files = [
+    {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"},
+    {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"},
+]
+
+[package.dependencies]
+attrs = ">=17.4.0"
+pyrsistent = ">=0.14.0"
+setuptools = "*"
+six = ">=1.11.0"
+
+[package.extras]
+format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"]
+format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"]
+
+[[package]]
+name = "markupsafe"
+version = "2.1.5"
+description = "Safely add untrusted strings to HTML/XML markup."
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"},
+    {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"},
+    {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"},
+    {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"},
+    {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"},
+]
+
+[[package]]
+name = "packaging"
+version = "23.2"
+description = "Core utilities for Python packages"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
+    {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
+]
+
+[[package]]
+name = "pendulum"
+version = "2.1.2"
+description = "Python datetimes made easy"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+files = [
+    {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"},
+    {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"},
+    {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"},
+    {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"},
+    {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"},
+    {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"},
+    {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"},
+    {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"},
+    {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"},
+    {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"},
+    {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"},
+    {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"},
+    {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"},
+    {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"},
+    {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"},
+    {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"},
+    {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"},
+    {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"},
+    {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"},
+    {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"},
+    {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"},
+]
+
+[package.dependencies]
+python-dateutil = ">=2.6,<3.0"
+pytzdata = ">=2020.1"
+
+[[package]]
+name = "platformdirs"
+version = "4.2.0"
+description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"},
+    {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"},
+]
+
+[package.extras]
+docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"]
+
+[[package]]
+name = "pluggy"
+version = "1.4.0"
+description = "plugin and hook calling mechanisms for python"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"},
+    {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"},
+]
+
+[package.extras]
+dev = ["pre-commit", "tox"]
+testing = ["pytest", "pytest-benchmark"]
+
+[[package]]
+name = "py"
+version = "1.11.0"
+description = "library with cross-python path, ini-parsing, io, code, log facilities"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+files = [
+    {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
+    {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
+]
+
+[[package]]
+name = "pydantic"
+version = "1.10.14"
+description = "Data validation and settings management using python type hints"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"},
+    {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"},
+    {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"},
+    {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"},
+    {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"},
+    {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"},
+    {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"},
+    {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"},
+    {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"},
+    {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"},
+    {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"},
+    {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"},
+    {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"},
+    {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"},
+    {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"},
+    {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"},
+    {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"},
+    {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"},
+    {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"},
+    {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"},
+    {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"},
+    {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"},
+    {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"},
+    {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"},
+    {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"},
+    {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"},
+    {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"},
+    {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"},
+    {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"},
+    {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"},
+    {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"},
+    {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"},
+    {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"},
+    {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"},
+    {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"},
+    {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"},
+]
+
+[package.dependencies]
+typing-extensions = ">=4.2.0"
+
+[package.extras]
+dotenv = ["python-dotenv (>=0.10.4)"]
+email = ["email-validator (>=1.0.3)"]
+
+[[package]]
+name = "pyrate-limiter"
+version = "3.1.1"
+description = "Python Rate-Limiter using Leaky-Bucket Algorithm"
+optional = false
+python-versions = ">=3.8,<4.0"
+files = [
+    {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"},
+    {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"},
+]
+
+[package.extras]
+all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"]
+docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"]
+
+[[package]]
+name = "pyrsistent"
+version = "0.20.0"
+description = "Persistent/Functional/Immutable data structures"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"},
+    {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"},
+    {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"},
+    {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"},
+    {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"},
+    {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"},
+    {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"},
+    {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"},
+    {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"},
+    {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"},
+    {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"},
+    {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"},
+    {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"},
+    {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"},
+    {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"},
+    {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"},
+    {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"},
+    {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"},
+    {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"},
+    {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"},
+    {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"},
+    {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"},
+    {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"},
+    {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"},
+    {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"},
+    {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"},
+    {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"},
+    {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"},
+    {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"},
+    {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"},
+    {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"},
+    {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"},
+]
+
+[[package]]
+name = "pytest"
+version = "6.2.5"
+description = "pytest: simple powerful testing with Python"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"},
+    {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"},
+]
+
+[package.dependencies]
+atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""}
+attrs = ">=19.2.0"
+colorama = {version = "*", markers = "sys_platform == \"win32\""}
+iniconfig = "*"
+packaging = "*"
+pluggy = ">=0.12,<2.0"
+py = ">=1.8.2"
+toml = "*"
+
+[package.extras]
+testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"]
+
+[[package]]
+name = "pytest-mock"
+version = "3.12.0"
+description = "Thin-wrapper around the mock package for easier use with pytest"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"},
+    {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"},
+]
+
+[package.dependencies]
+pytest = ">=5.0"
+
+[package.extras]
+dev = ["pre-commit", "pytest-asyncio", "tox"]
+
+[[package]]
+name = "python-dateutil"
+version = "2.9.0.post0"
+description = "Extensions to the standard Python datetime module"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
+files = [
+    {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
+    {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
+]
+
+[package.dependencies]
+six = ">=1.5"
+
+[[package]]
+name = "pytzdata"
+version = "2020.1"
+description = "The Olson timezone database for Python."
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+    {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"},
+    {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"},
+]
+
+[[package]]
+name = "pyyaml"
+version = "6.0.1"
+description = "YAML parser and emitter for Python"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"},
+    {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"},
+    {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
+    {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
+    {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
+    {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
+    {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
+    {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
+    {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
+    {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"},
+    {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
+    {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
+    {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
+    {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
+    {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
+    {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
+    {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
+    {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
+    {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
+    {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
+    {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
+    {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
+    {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
+    {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
+    {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
+    {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
+    {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"},
+    {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"},
+    {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"},
+    {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"},
+    {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"},
+    {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"},
+    {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"},
+    {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"},
+    {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"},
+    {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"},
+    {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
+    {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
+    {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
+    {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
+    {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
+    {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
+    {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
+    {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"},
+    {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
+    {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
+    {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
+    {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
+    {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
+    {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
+    {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
+]
+
+[[package]]
+name = "requests"
+version = "2.31.0"
+description = "Python HTTP for Humans."
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
+    {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
+]
+
+[package.dependencies]
+certifi = ">=2017.4.17"
+charset-normalizer = ">=2,<4"
+idna = ">=2.5,<4"
+urllib3 = ">=1.21.1,<3"
+
+[package.extras]
+socks = ["PySocks (>=1.5.6,!=1.5.7)"]
+use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
+
+[[package]]
+name = "requests-cache"
+version = "1.2.0"
+description = "A persistent cache for python requests"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"},
+    {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"},
+]
+
+[package.dependencies]
+attrs = ">=21.2"
+cattrs = ">=22.2"
+platformdirs = ">=2.5"
+requests = ">=2.22"
+url-normalize = ">=1.4"
+urllib3 = ">=1.25.5"
+
+[package.extras]
+all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"]
+bson = ["bson (>=0.5)"]
+docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"]
+dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"]
+json = ["ujson (>=5.4)"]
+mongodb = ["pymongo (>=3)"]
+redis = ["redis (>=3)"]
+security = ["itsdangerous (>=2.0)"]
+yaml = ["pyyaml (>=6.0.1)"]
+
+[[package]]
+name = "requests-mock"
+version = "1.11.0"
+description = "Mock out responses from the requests package"
+optional = false
+python-versions = "*"
+files = [
+    {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"},
+    {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"},
+]
+
+[package.dependencies]
+requests = ">=2.3,<3"
+six = "*"
+
+[package.extras]
+fixture = ["fixtures"]
+test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"]
+
+[[package]]
+name = "setuptools"
+version = "69.1.1"
+description = "Easily download, build, install, upgrade, and uninstall Python packages"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56"},
+    {file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8"},
+]
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
+testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
+testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
+
+[[package]]
+name = "six"
+version = "1.16.0"
+description = "Python 2 and 3 compatibility utilities"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+files = [
+    {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
+    {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
+]
+
+[[package]]
+name = "toml"
+version = "0.10.2"
+description = "Python Library for Tom's Obvious, Minimal Language"
+optional = false
+python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
+files = [
+    {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
+    {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
+]
+
+[[package]]
+name = "types-requests"
+version = "2.31.0.20240218"
+description = "Typing stubs for requests"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "types-requests-2.31.0.20240218.tar.gz", hash = "sha256:f1721dba8385958f504a5386240b92de4734e047a08a40751c1654d1ac3349c5"},
+    {file = "types_requests-2.31.0.20240218-py3-none-any.whl", hash = "sha256:a82807ec6ddce8f00fe0e949da6d6bc1fbf1715420218a9640d695f70a9e5a9b"},
+]
+
+[package.dependencies]
+urllib3 = ">=2"
+
+[[package]]
+name = "typing-extensions"
+version = "4.10.0"
+description = "Backported and Experimental Type Hints for Python 3.8+"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"},
+    {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"},
+]
+
+[[package]]
+name = "url-normalize"
+version = "1.4.3"
+description = "URL normalization for Python"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
+files = [
+    {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"},
+    {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"},
+]
+
+[package.dependencies]
+six = "*"
+
+[[package]]
+name = "urllib3"
+version = "2.2.1"
+description = "HTTP library with thread-safe connection pooling, file post, and more."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"},
+    {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"},
+]
+
+[package.extras]
+brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
+h2 = ["h2 (>=4,<5)"]
+socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
+zstd = ["zstandard (>=0.18.0)"]
+
+[[package]]
+name = "wcmatch"
+version = "8.4"
+description = "Wildcard/glob file name matcher."
+optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = 
"wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = 
"wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "30f49dbb6146e5992f34d9f0563ab6440d5cf19827b65b48589cb1d106a6a4e3" diff --git a/airbyte-integrations/connectors/source-metabase/pyproject.toml b/airbyte-integrations/connectors/source-metabase/pyproject.toml new file mode 100644 index 000000000000..73a7165c81ba --- /dev/null +++ b/airbyte-integrations/connectors/source-metabase/pyproject.toml @@ -0,0 +1,31 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +version = "2.0.0" +name = "source-metabase" +description = "Source implementation for Metabase." +authors = [ "Airbyte ",] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/sources/metabase" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" +[[tool.poetry.packages]] +include = "source_metabase" + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +airbyte-cdk = "^0.67.1" +requests = "^2.28.0" +types-requests = "^2.27.30" + +[tool.poetry.scripts] +source-metabase = "source_metabase.run:run" + +[tool.poetry.group.dev.dependencies] +requests-mock = "^1.9.3" +requests_mock = "^1.8.0" +pytest-mock = "^3.6.1" +pytest = "^6.1" diff --git a/airbyte-integrations/connectors/source-metabase/requirements.txt b/airbyte-integrations/connectors/source-metabase/requirements.txt deleted file mode 100644 index d6e1198b1ab1..000000000000 --- a/airbyte-integrations/connectors/source-metabase/requirements.txt +++ /dev/null @@ -1 +0,0 @@ --e . diff --git a/airbyte-integrations/connectors/source-metabase/setup.py b/airbyte-integrations/connectors/source-metabase/setup.py deleted file mode 100644 index e6772737da24..000000000000 --- a/airbyte-integrations/connectors/source-metabase/setup.py +++ /dev/null @@ -1,45 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = ["airbyte-cdk", "requests>=2.28.0", "types-requests>=2.27.30"] - -TEST_REQUIREMENTS = [ - "pytest-mock~=3.6.1", - "pytest~=6.1", - "requests-mock", - "requests_mock~=1.8", -] - -setup( - entry_points={ - "console_scripts": [ - "source-metabase=source_metabase.run:run", - ], - }, - name="source_metabase", - description="Source implementation for Metabase.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={ - "": [ - # Include yaml files in the package (if any) - "*.yml", - "*.yaml", - # Include all json files in the package, up to 4 levels deep - "*.json", - "*/*.json", - "*/*/*.json", - "*/*/*/*.json", - "*/*/*/*/*.json", - ] - }, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/source-metabase/source_metabase/manifest.yaml b/airbyte-integrations/connectors/source-metabase/source_metabase/manifest.yaml index 10fa59aef347..bfd8017cf51f 100644 --- a/airbyte-integrations/connectors/source-metabase/source_metabase/manifest.yaml +++ b/airbyte-integrations/connectors/source-metabase/source_metabase/manifest.yaml @@ -51,10 +51,32 @@ definitions: path: "collection" dashboards_stream: $ref: "#/definitions/base_stream" + retriever: + $ref: "#/definitions/retriever" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + stream: "#/definitions/collections_items_dashboards_stream" + parent_key: "id" + partition_field: "id" $parameters: name: "dashboards" - path: "dashboard" - + path: "/dashboard/{{stream_slice.id}}" + collections_items_dashboards_stream: + $ref: "#/definitions/base_stream" + retriever: + $ref: "#/definitions/data_field_retriever" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + stream: "#/definitions/collections_stream" + parent_key: "id" + partition_field: "id" + $parameters: + name: "collections_items_dashboards" + path: "collection/{{stream_slice.id}}/items?models=dashboard" databases_stream: primary_key: "id" retriever: diff --git a/airbyte-integrations/connectors/source-metabase/source_metabase/schemas/dashboards.json b/airbyte-integrations/connectors/source-metabase/source_metabase/schemas/dashboards.json index e04926b38aa2..93e167c3457e 100644 --- a/airbyte-integrations/connectors/source-metabase/source_metabase/schemas/dashboards.json +++ b/airbyte-integrations/connectors/source-metabase/source_metabase/schemas/dashboards.json @@ -12,38 +12,6 @@ "collection_position": { "type": ["null", "integer"] }, - "creator": { - "type": ["null", "object"], - "properties": { - "email": { - "type": ["null", "string"] - }, - "first_name": { - "type": ["null", "string"] - }, - "last_login": { - "type": ["null", "string"] - }, - "is_qbnewb": { - "type": ["null", "boolean"] - }, - "is_superuser": { - "type": ["null", "boolean"] - }, - "id": { - "type": ["null", "integer"] - }, - "last_name": { - "type": ["null", "string"] - }, - "date_joined": { - "type": ["null", "string"] - }, - "common_name": { - "type": ["null", "string"] - } - } - }, "enable_embedding": { "type": ["null", "boolean"] }, @@ -136,6 +104,38 @@ }, "points_of_interest": { "type": ["null", "string"] + }, + "dashcards": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": {} + } + }, + "param_fields": { + "type": ["null", "object"], + "properties": {} + }, + 
"param_values": { + "type": ["null", "object"], + "properties": {} + }, + "can_write": { + "type": ["null", "boolean"] + }, + "tabs": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": {} + } + }, + "collection_authority_level": { + "type": ["null", "string"] + }, + "collection": { + "type": ["null", "object"], + "properties": {} } } } diff --git a/airbyte-integrations/connectors/source-metabase/source_metabase/schemas/databases.json b/airbyte-integrations/connectors/source-metabase/source_metabase/schemas/databases.json index b7a330d045f9..bd61cd35793a 100644 --- a/airbyte-integrations/connectors/source-metabase/source_metabase/schemas/databases.json +++ b/airbyte-integrations/connectors/source-metabase/source_metabase/schemas/databases.json @@ -4,7 +4,7 @@ "additionalProperties": true, "properties": { "description": { - "type": "string" + "type": ["string", "null"] }, "features": { "type": ["null", "array"], @@ -49,7 +49,7 @@ "type": ["null", "integer"] }, "details": { - "type": "object", + "type": ["object", "null"], "properties": { "project-id": { "type": ["null", "string"] @@ -93,7 +93,7 @@ "type": ["null", "boolean"] }, "id": { - "type": "integer" + "type": ["integer", "null"] }, "is_on_demand": { "type": ["null", "boolean"] @@ -111,7 +111,7 @@ "type": ["null", "boolean"] }, "dbms_version": { - "type": "object", + "type": ["object", "null"], "properties": { "flavor": { "type": ["null", "string"] @@ -120,9 +120,9 @@ "type": ["null", "string"] }, "semantic-version": { - "type": "array", + "type": ["array", "null"], "items": { - "type": "integer" + "type": ["integer", "null"] } } } diff --git a/airbyte-integrations/connectors/source-metabase/source_metabase/schemas/native_query_snippets.json b/airbyte-integrations/connectors/source-metabase/source_metabase/schemas/native_query_snippets.json index 4ff870c7c48d..ab12db033a5a 100644 --- a/airbyte-integrations/connectors/source-metabase/source_metabase/schemas/native_query_snippets.json +++ b/airbyte-integrations/connectors/source-metabase/source_metabase/schemas/native_query_snippets.json @@ -28,7 +28,7 @@ "type": ["null", "boolean"] }, "id": { - "type": "integer" + "type": ["integer", "null"] }, "last_name": { "type": ["null", "string"] @@ -57,7 +57,7 @@ "type": ["null", "string"] }, "id": { - "type": "integer" + "type": ["integer", "null"] }, "entity_id": { "type": ["null", "string"] diff --git a/docs/integrations/sources/metabase-migrations.md b/docs/integrations/sources/metabase-migrations.md new file mode 100644 index 000000000000..d07440860878 --- /dev/null +++ b/docs/integrations/sources/metabase-migrations.md @@ -0,0 +1,55 @@ +# Metabase Migration Guide + +## Upgrading to 2.0.0 + +Source Metabase has updated the `dashboards` stream's endpoint due to the previous endpoint being deprecated by the service. The new version no longer returns the `creator` field for the `dashboards` stream. + +## Connector Upgrade Guide + +### For Airbyte Open Source: Update the local connector image + +Airbyte Open Source users must manually update the connector image in their local registry before proceeding with the migration. To do so: + +1. Select **Settings** in the main navbar. + 1. Select **Sources**. +2. Find Metabase in the list of connectors. + +:::note +You will see two versions listed, the current in-use version and the latest version available. +::: + +3. Select **Change** to update your OSS version to the latest available version. + +### Update the connector version + +1. 
Select **Sources** in the main navbar.
+2. Select the instance of the connector you wish to upgrade.
+
+:::note
+Each instance of the connector must be updated separately. If you have created multiple instances of a connector, updating one will not affect the others.
+:::
+
+3. Select **Upgrade**
+   1. Follow the prompt to confirm you are ready to upgrade to the new version.
+
+### Refresh affected schemas and reset data
+
+1. Select **Connections** in the main navbar.
+   1. Select the connection(s) affected by the update.
+2. Select the **Replication** tab.
+   1. Select **Refresh source schema**.
+   2. Select **OK**.
+:::note
+Any detected schema changes will be listed for your review.
+:::
+3. Select **Save changes** at the bottom of the page.
+   1. Ensure the **Reset affected streams** option is checked.
+:::note
+Depending on the destination type, you may not be prompted to reset your data.
+:::
+4. Select **Save connection**.
+:::note
+This will reset the data in your destination and initiate a fresh sync.
+:::
+
+For more information on resetting your data in Airbyte, see [this page](https://docs.airbyte.com/operator-guides/reset).
\ No newline at end of file
diff --git a/docs/integrations/sources/metabase.md b/docs/integrations/sources/metabase.md
index a09eacaa239d..156f753768bf 100644
--- a/docs/integrations/sources/metabase.md
+++ b/docs/integrations/sources/metabase.md
@@ -4,9 +4,9 @@ This page contains the setup guide and reference information for the Metabase so
 
 ## Prerequisites
 
 To set up Metabase you need:
- * `username` and `password` - Credential pairs to authenticate with Metabase instance. This may be used to generate a new `session_token` if necessary. An email from Metabase may be sent to the owner's account everytime this is being used to open a new session.
- * `session_token` - Credential token to authenticate requests sent to Metabase API. Usually expires after 14 days.
- * `instance_api_url` - URL to interact with metabase instance API, that uses https.
+ * `username` and `password` - Credential pair used to authenticate with the Metabase instance. This may be used to generate a new `session_token` if necessary. An email from Metabase may be sent to the owner's account every time this is used to open a new session.
+ * `session_token` - Credential token used to authenticate requests sent to the Metabase API. Usually expires after 14 days.
+ * `instance_api_url` - URL to interact with the Metabase instance API, which uses https.
 
 ## Setup guide
 
@@ -28,10 +28,10 @@ If you’re working with a remote server, you’ll need to replace localhost:300
 
 You can use this id value as your `session_token` when configuring the connector. Note that these credentials tokens may expire after 14 days by default, and you might need to update your connector configuration with a new value when that happens (The connector should throw exceptions about Invalid and expired session tokens and return a 401 (Unauthorized) status code in that scenario).
 
-If you are hosting your own metabase instance, you can configure this session duration on your metabase server by setting the environment variable MAX_SESSION_AGE (value is in minutes).
+If you are hosting your own Metabase instance, you can configure this session duration on your Metabase server by setting the environment variable MAX_SESSION_AGE (value is in minutes).
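
To make the token flow above concrete, here is a minimal sketch of generating and using a `session_token` against the Metabase API. It is an illustration, not part of the connector: it assumes a locally hosted instance at `localhost:3000` (substitute your own `instance_api_url`), and it uses Metabase's public `/api/session` endpoint and `X-Metabase-Session` header.

```python
# Minimal sketch: create and use a Metabase session token.
# Assumption: a Metabase instance is reachable at BASE_URL below.
import requests

BASE_URL = "http://localhost:3000"  # hypothetical; replace with your instance URL


def create_session_token(username: str, password: str) -> str:
    """POST /api/session returns a JSON body like {"id": "<session-token>"}."""
    response = requests.post(
        f"{BASE_URL}/api/session",
        json={"username": username, "password": password},
        timeout=30,
    )
    response.raise_for_status()
    return response.json()["id"]


def current_user(session_token: str) -> dict:
    """Authenticated requests pass the token in the X-Metabase-Session header."""
    response = requests.get(
        f"{BASE_URL}/api/user/current",
        headers={"X-Metabase-Session": session_token},
        timeout=30,
    )
    response.raise_for_status()
    return response.json()
```

A token created this way is subject to the same default 14-day expiry described above, so expect to refresh it (or fall back to username/password) once the connector starts receiving 401 responses.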
-If the connector is supplied with only username and password, a session_token will be generated everytime an
-authenticated query is running, which might trigger security alerts on the user's metabase account.
+If the connector is supplied with only username and password, a session_token will be generated every time an
+authenticated query is running, which might trigger security alerts on the user's Metabase account.
 
 ## Supported sync modes
 
@@ -44,6 +44,8 @@ The Metabase source connector supports the following [sync modes](https://docs.a
 * [Collections](https://www.metabase.com/docs/latest/api/collection.html#get-apicollection)
 * [Dashboard](https://www.metabase.com/docs/latest/api/dashboard.html#get-apidashboard)
 * [User](https://www.metabase.com/docs/latest/api/user.html#get-apiuser)
+* [Databases](https://www.metabase.com/docs/latest/api/database.html#get-apidatabase)
+* [Native Query Snippet](https://www.metabase.com/docs/latest/api/native-query-snippet#get-apinative-query-snippetid)
 
 ## Tutorials
 
@@ -62,7 +64,7 @@ The Metabase source connector supports the following [sync modes](https://docs.a
 |:------------------|:---------------------|:------|
 | Full Refresh Sync | Yes | |
 | Incremental Sync | No | |
-| SSL connection | Yes |
+| SSL connection | Yes | |
 | Namespaces | No | |
 
@@ -70,6 +72,7 @@ The Metabase source connector supports the following [sync modes](https://docs.a
 | Version | Date | Pull Request | Subject |
 |:--------|:-----------|:---------------------------------------------------------|:---------------------------|
+| 2.0.0 | 2024-03-01 | [35680](https://github.com/airbytehq/airbyte/pull/35680) | Update `dashboards` stream; remove Dockerfile in favor of the python-connector-base image; migrate to poetry |
 | 1.1.0 | 2023-10-31 | [31909](https://github.com/airbytehq/airbyte/pull/31909) | Add `databases` and `native_query_snippets` streams |
 | 1.0.1 | 2023-07-20 | [28470](https://github.com/airbytehq/airbyte/pull/28470) | Update CDK to 0.47.0 |
 | 1.0.0 | 2023-06-27 | [27777](https://github.com/airbytehq/airbyte/pull/27777) | Remove Activity Stream |

From 858e61d67aa05139e16361ccc119e350a7bd3686 Mon Sep 17 00:00:00 2001
From: Maxime Carbonneau-Leclerc <3360483+maxi297@users.noreply.github.com>
Date: Thu, 7 Mar 2024 10:43:58 -0500
Subject: [PATCH 123/172] Declarative CDK: Fix None error on stream_slice
 (#35879)

---
 .../airbyte_cdk/sources/declarative/declarative_stream.py | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_stream.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_stream.py
index d56e7c99a545..aaca24dc610d 100644
--- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_stream.py
+++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_stream.py
@@ -101,6 +101,13 @@ def read_records(
         """
         :param: stream_state We knowingly avoid using stream_state as we want cursors to manage their own state.
         """
+        if stream_slice is None:
+            # As the parameter is Optional, many would just call `read_records(sync_mode)` during testing without specifying the field
+            # As part of the declarative model without custom components, this should never happen as the CDK would wire up a
+            # SinglePartitionRouter that would create this StreamSlice properly
+            # As part of the declarative model with custom components, a user that would return a `None` slice would now have the default
+            # empty slice which seems to make sense.
+            stream_slice = StreamSlice(partition={}, cursor_slice={})
         if not isinstance(stream_slice, StreamSlice):
             raise ValueError(f"DeclarativeStream does not support stream_slices that are not StreamSlice. Got {stream_slice}")
         yield from self.retriever.read_records(self.get_json_schema(), stream_slice)

From 106102c52cf88c28ef50567f22d977c67a40c924 Mon Sep 17 00:00:00 2001
From: maxi297
Date: Thu, 7 Mar 2024 15:51:14 +0000
Subject: [PATCH 124/172] =?UTF-8?q?=F0=9F=A4=96=20Bump=20patch=20version?=
 =?UTF-8?q?=20of=20Python=20CDK?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 airbyte-cdk/python/.bumpversion.cfg | 2 +-
 airbyte-cdk/python/CHANGELOG.md | 3 +++
 airbyte-cdk/python/Dockerfile | 4 ++--
 airbyte-cdk/python/setup.py | 2 +-
 4 files changed, 7 insertions(+), 4 deletions(-)

diff --git a/airbyte-cdk/python/.bumpversion.cfg b/airbyte-cdk/python/.bumpversion.cfg
index f050d088b6a3..28ff883014bb 100644
--- a/airbyte-cdk/python/.bumpversion.cfg
+++ b/airbyte-cdk/python/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.68.2
+current_version = 0.68.3
 commit = False
 
 [bumpversion:file:setup.py]
diff --git a/airbyte-cdk/python/CHANGELOG.md b/airbyte-cdk/python/CHANGELOG.md
index ceb58df39fc6..bacd79640127 100644
--- a/airbyte-cdk/python/CHANGELOG.md
+++ b/airbyte-cdk/python/CHANGELOG.md
@@ -1,5 +1,8 @@
 # Changelog
 
+## 0.68.3
+Low-code: adding a default value if a stream slice is None during read_records
+
 ## 0.68.2
 low-code: remove parent cursor component from incremental substreams' state message
 
diff --git a/airbyte-cdk/python/Dockerfile b/airbyte-cdk/python/Dockerfile
index e79934ce9c55..b183704b6114 100644
--- a/airbyte-cdk/python/Dockerfile
+++ b/airbyte-cdk/python/Dockerfile
@@ -10,7 +10,7 @@ RUN apk --no-cache upgrade \
     && apk --no-cache add tzdata build-base
 
 # install airbyte-cdk
-RUN pip install --prefix=/install airbyte-cdk==0.68.2
+RUN pip install --prefix=/install airbyte-cdk==0.68.3
 
 # build a clean environment
 FROM base
@@ -32,5 +32,5 @@ ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py"
 ENTRYPOINT ["python", "/airbyte/integration_code/main.py"]
 
 # needs to be the same as CDK
-LABEL io.airbyte.version=0.68.2
+LABEL io.airbyte.version=0.68.3
 LABEL io.airbyte.name=airbyte/source-declarative-manifest
diff --git a/airbyte-cdk/python/setup.py b/airbyte-cdk/python/setup.py
index b3754c7859e8..c41710e7da35 100644
--- a/airbyte-cdk/python/setup.py
+++ b/airbyte-cdk/python/setup.py
@@ -36,7 +36,7 @@
     name="airbyte-cdk",
     # The version of the airbyte-cdk package is used at runtime to validate manifests. That validation must be
     # updated if our semver format changes such as using release candidate versions.
- version="0.68.2", + version="0.68.3", description="A framework for writing Airbyte Connectors.", long_description=README, long_description_content_type="text/markdown", From 4a808ee17880fbff49ac7cd2a1aae660ef26b3b9 Mon Sep 17 00:00:00 2001 From: Alexandre Girard Date: Thu, 7 Mar 2024 08:20:02 -0800 Subject: [PATCH 125/172] =?UTF-8?q?=F0=9F=90=9B=20follow=20up=20to=20#3547?= =?UTF-8?q?1:=20update=20the=20cartesian=20stream=20slicer=20(#35865)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../cartesian_product_stream_slicer.py | 22 +++-- .../test_cartesian_product_stream_slicer.py | 83 +++++++++++++++---- 2 files changed, 83 insertions(+), 22 deletions(-) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/stream_slicers/cartesian_product_stream_slicer.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/stream_slicers/cartesian_product_stream_slicer.py index ea57fe4fcf66..8fff2b5346a8 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/stream_slicers/cartesian_product_stream_slicer.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/stream_slicers/cartesian_product_stream_slicer.py @@ -44,7 +44,7 @@ def get_request_params( ) -> Mapping[str, Any]: return dict( ChainMap( - *[ + *[ # type: ignore # ChainMap expects a MutableMapping[Never, Never] for reasons s.get_request_params(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token) for s in self.stream_slicers ] @@ -60,7 +60,7 @@ def get_request_headers( ) -> Mapping[str, Any]: return dict( ChainMap( - *[ + *[ # type: ignore # ChainMap expects a MutableMapping[Never, Never] for reasons s.get_request_headers(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token) for s in self.stream_slicers ] @@ -76,7 +76,7 @@ def get_request_body_data( ) -> Mapping[str, Any]: return dict( ChainMap( - *[ + *[ # type: ignore # ChainMap expects a MutableMapping[Never, Never] for reasons s.get_request_body_data(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token) for s in self.stream_slicers ] @@ -89,10 +89,10 @@ def get_request_body_json( stream_state: Optional[StreamState] = None, stream_slice: Optional[StreamSlice] = None, next_page_token: Optional[Mapping[str, Any]] = None, - ) -> Optional[Mapping]: + ) -> Mapping[str, Any]: return dict( ChainMap( - *[ + *[ # type: ignore # ChainMap expects a MutableMapping[Never, Never] for reasons s.get_request_body_json(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token) for s in self.stream_slicers ] @@ -101,4 +101,14 @@ def get_request_body_json( def stream_slices(self) -> Iterable[StreamSlice]: sub_slices = (s.stream_slices() for s in self.stream_slicers) - return (dict(ChainMap(*a)) for a in itertools.product(*sub_slices)) + product = itertools.product(*sub_slices) + for stream_slice_tuple in product: + partition = dict(ChainMap(*[s.partition for s in stream_slice_tuple])) + cursor_slices = [s.cursor_slice for s in stream_slice_tuple if s.cursor_slice] + if len(cursor_slices) > 1: + raise ValueError(f"There should only be a single cursor slice. 
Found {cursor_slices}") + if cursor_slices: + cursor_slice = cursor_slices[0] + else: + cursor_slice = {} + yield StreamSlice(partition=partition, cursor_slice=cursor_slice) diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/stream_slicers/test_cartesian_product_stream_slicer.py b/airbyte-cdk/python/unit_tests/sources/declarative/stream_slicers/test_cartesian_product_stream_slicer.py index e91574f86d1e..74b1cc6ec313 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/stream_slicers/test_cartesian_product_stream_slicer.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/stream_slicers/test_cartesian_product_stream_slicer.py @@ -9,6 +9,7 @@ from airbyte_cdk.sources.declarative.partition_routers.list_partition_router import ListPartitionRouter from airbyte_cdk.sources.declarative.requesters.request_option import RequestOption, RequestOptionType from airbyte_cdk.sources.declarative.stream_slicers.cartesian_product_stream_slicer import CartesianProductStreamSlicer +from airbyte_cdk.sources.declarative.types import StreamSlice @pytest.mark.parametrize( @@ -17,7 +18,9 @@ ( "test_single_stream_slicer", [ListPartitionRouter(values=["customer", "store", "subscription"], cursor_field="owner_resource", config={}, parameters={})], - [{"owner_resource": "customer"}, {"owner_resource": "store"}, {"owner_resource": "subscription"}], + [StreamSlice(partition={"owner_resource": "customer"}, cursor_slice={}), + StreamSlice(partition={"owner_resource": "store"}, cursor_slice={}), + StreamSlice(partition={"owner_resource": "subscription"}, cursor_slice={})], ), ( "test_two_stream_slicers", @@ -26,14 +29,34 @@ ListPartitionRouter(values=["A", "B"], cursor_field="letter", config={}, parameters={}), ], [ - {"owner_resource": "customer", "letter": "A"}, - {"owner_resource": "customer", "letter": "B"}, - {"owner_resource": "store", "letter": "A"}, - {"owner_resource": "store", "letter": "B"}, - {"owner_resource": "subscription", "letter": "A"}, - {"owner_resource": "subscription", "letter": "B"}, + StreamSlice(partition={"owner_resource": "customer", "letter": "A"}, cursor_slice={}), + StreamSlice(partition={"owner_resource": "customer", "letter": "B"}, cursor_slice={}), + StreamSlice(partition={"owner_resource": "store", "letter": "A"}, cursor_slice={}), + StreamSlice(partition={"owner_resource": "store", "letter": "B"}, cursor_slice={}), + StreamSlice(partition={"owner_resource": "subscription", "letter": "A"}, cursor_slice={}), + StreamSlice(partition={"owner_resource": "subscription", "letter": "B"}, cursor_slice={}), ], ), + ( + "test_singledatetime", + [ + DatetimeBasedCursor( + start_datetime=MinMaxDatetime(datetime="2021-01-01", datetime_format="%Y-%m-%d", parameters={}), + end_datetime=MinMaxDatetime(datetime="2021-01-03", datetime_format="%Y-%m-%d", parameters={}), + step="P1D", + cursor_field=InterpolatedString.create("", parameters={}), + datetime_format="%Y-%m-%d", + cursor_granularity="P1D", + config={}, + parameters={}, + ), + ], + [ + StreamSlice(partition={}, cursor_slice={"start_time": "2021-01-01", "end_time": "2021-01-01"}), + StreamSlice(partition={}, cursor_slice={"start_time": "2021-01-02", "end_time": "2021-01-02"}), + StreamSlice(partition={}, cursor_slice={"start_time": "2021-01-03", "end_time": "2021-01-03"}), + ], + ), ( "test_list_and_datetime", [ @@ -50,15 +73,15 @@ ), ], [ - {"owner_resource": "customer", "start_time": "2021-01-01", "end_time": "2021-01-01"}, - {"owner_resource": "customer", "start_time": "2021-01-02", "end_time": "2021-01-02"}, - 
{"owner_resource": "customer", "start_time": "2021-01-03", "end_time": "2021-01-03"}, - {"owner_resource": "store", "start_time": "2021-01-01", "end_time": "2021-01-01"}, - {"owner_resource": "store", "start_time": "2021-01-02", "end_time": "2021-01-02"}, - {"owner_resource": "store", "start_time": "2021-01-03", "end_time": "2021-01-03"}, - {"owner_resource": "subscription", "start_time": "2021-01-01", "end_time": "2021-01-01"}, - {"owner_resource": "subscription", "start_time": "2021-01-02", "end_time": "2021-01-02"}, - {"owner_resource": "subscription", "start_time": "2021-01-03", "end_time": "2021-01-03"}, + StreamSlice(partition={"owner_resource": "customer"}, cursor_slice={"start_time": "2021-01-01", "end_time": "2021-01-01"}), + StreamSlice(partition={"owner_resource": "customer"}, cursor_slice={"start_time": "2021-01-02", "end_time": "2021-01-02"}), + StreamSlice(partition={"owner_resource": "customer"}, cursor_slice={"start_time": "2021-01-03", "end_time": "2021-01-03"}), + StreamSlice(partition={"owner_resource": "store"}, cursor_slice={"start_time": "2021-01-01", "end_time": "2021-01-01"}), + StreamSlice(partition={"owner_resource": "store"}, cursor_slice={"start_time": "2021-01-02", "end_time": "2021-01-02"}), + StreamSlice(partition={"owner_resource": "store"}, cursor_slice={"start_time": "2021-01-03", "end_time": "2021-01-03"}), + StreamSlice(partition={"owner_resource": "subscription"}, cursor_slice={"start_time": "2021-01-01", "end_time": "2021-01-01"}), + StreamSlice(partition={"owner_resource": "subscription"}, cursor_slice={"start_time": "2021-01-02", "end_time": "2021-01-02"}), + StreamSlice(partition={"owner_resource": "subscription"}, cursor_slice={"start_time": "2021-01-03", "end_time": "2021-01-03"}), ], ), ], @@ -69,6 +92,34 @@ def test_substream_slicer(test_name, stream_slicers, expected_slices): assert slices == expected_slices +def test_stream_slices_raises_exception_if_multiple_cursor_slice_components(): + stream_slicers = [ + DatetimeBasedCursor( + start_datetime=MinMaxDatetime(datetime="2021-01-01", datetime_format="%Y-%m-%d", parameters={}), + end_datetime=MinMaxDatetime(datetime="2021-01-03", datetime_format="%Y-%m-%d", parameters={}), + step="P1D", + cursor_field=InterpolatedString.create("", parameters={}), + datetime_format="%Y-%m-%d", + cursor_granularity="P1D", + config={}, + parameters={}, + ), + DatetimeBasedCursor( + start_datetime=MinMaxDatetime(datetime="2021-01-01", datetime_format="%Y-%m-%d", parameters={}), + end_datetime=MinMaxDatetime(datetime="2021-01-03", datetime_format="%Y-%m-%d", parameters={}), + step="P1D", + cursor_field=InterpolatedString.create("", parameters={}), + datetime_format="%Y-%m-%d", + cursor_granularity="P1D", + config={}, + parameters={}, + ), + ] + slicer = CartesianProductStreamSlicer(stream_slicers=stream_slicers, parameters={}) + with pytest.raises(ValueError): + list(slicer.stream_slices()) + + @pytest.mark.parametrize( "test_name, stream_1_request_option, stream_2_request_option, expected_req_params, expected_headers,expected_body_json, expected_body_data", [ From 7d3a5f86d8989728906f70411b2dce47637bbbc8 Mon Sep 17 00:00:00 2001 From: girarda Date: Thu, 7 Mar 2024 16:27:35 +0000 Subject: [PATCH 126/172] =?UTF-8?q?=F0=9F=A4=96=20Bump=20patch=20version?= =?UTF-8?q?=20of=20Python=20CDK?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- airbyte-cdk/python/.bumpversion.cfg | 2 +- airbyte-cdk/python/CHANGELOG.md | 3 +++ airbyte-cdk/python/Dockerfile | 4 ++-- 
airbyte-cdk/python/setup.py | 2 +- 4 files changed, 7 insertions(+), 4 deletions(-) diff --git a/airbyte-cdk/python/.bumpversion.cfg b/airbyte-cdk/python/.bumpversion.cfg index 28ff883014bb..e23d05b9a01f 100644 --- a/airbyte-cdk/python/.bumpversion.cfg +++ b/airbyte-cdk/python/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.68.3 +current_version = 0.68.4 commit = False [bumpversion:file:setup.py] diff --git a/airbyte-cdk/python/CHANGELOG.md b/airbyte-cdk/python/CHANGELOG.md index bacd79640127..3e0ad06f11c5 100644 --- a/airbyte-cdk/python/CHANGELOG.md +++ b/airbyte-cdk/python/CHANGELOG.md @@ -1,5 +1,8 @@ # Changelog +## 0.68.4 +low-code: update cartesian stream slice to emit typed StreamSlice + ## 0.68.3 Low-code: adding a default value if a stream slice is None during read_records diff --git a/airbyte-cdk/python/Dockerfile b/airbyte-cdk/python/Dockerfile index b183704b6114..fdf812003904 100644 --- a/airbyte-cdk/python/Dockerfile +++ b/airbyte-cdk/python/Dockerfile @@ -10,7 +10,7 @@ RUN apk --no-cache upgrade \ && apk --no-cache add tzdata build-base # install airbyte-cdk -RUN pip install --prefix=/install airbyte-cdk==0.68.3 +RUN pip install --prefix=/install airbyte-cdk==0.68.4 # build a clean environment FROM base @@ -32,5 +32,5 @@ ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] # needs to be the same as CDK -LABEL io.airbyte.version=0.68.3 +LABEL io.airbyte.version=0.68.4 LABEL io.airbyte.name=airbyte/source-declarative-manifest diff --git a/airbyte-cdk/python/setup.py b/airbyte-cdk/python/setup.py index c41710e7da35..af776b923b92 100644 --- a/airbyte-cdk/python/setup.py +++ b/airbyte-cdk/python/setup.py @@ -36,7 +36,7 @@ name="airbyte-cdk", # The version of the airbyte-cdk package is used at runtime to validate manifests. That validation must be # updated if our semver format changes such as using release candidate versions. 
- version="0.68.3", + version="0.68.4", description="A framework for writing Airbyte Connectors.", long_description=README, long_description_content_type="text/markdown", From 6f16ac5c1127dea1487e5dada21a1ea2b783347e Mon Sep 17 00:00:00 2001 From: Anush Date: Thu, 7 Mar 2024 22:28:00 +0530 Subject: [PATCH 127/172] =?UTF-8?q?=F0=9F=93=9D=20Updated=20Qdrant=20desti?= =?UTF-8?q?nation=20icon=20(#35104)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Marcos Marx Co-authored-by: marcosmarxm --- .../connectors/destination-qdrant/icon.svg | 38 +++++++++---------- 1 file changed, 17 insertions(+), 21 deletions(-) diff --git a/airbyte-integrations/connectors/destination-qdrant/icon.svg b/airbyte-integrations/connectors/destination-qdrant/icon.svg index fbbf0f1d49fd..521d2dda8831 100644 --- a/airbyte-integrations/connectors/destination-qdrant/icon.svg +++ b/airbyte-integrations/connectors/destination-qdrant/icon.svg @@ -1,21 +1,17 @@ - - - qdrant - - - - - - - - - - - - - - - - - - \ No newline at end of file + + + + + + + + + + + + + + + + + From be6849f571f106152bb238d2deb331632ef8c6ef Mon Sep 17 00:00:00 2001 From: Shruti Mantri Date: Thu, 7 Mar 2024 22:29:35 +0530 Subject: [PATCH 128/172] Source Ringcentral: add logo (#34242) Co-authored-by: Marcos Marx Co-authored-by: marcosmarxm --- airbyte-integrations/connectors/source-ringcentral/icon.svg | 5 +++++ .../connectors/source-ringcentral/metadata.yaml | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) create mode 100644 airbyte-integrations/connectors/source-ringcentral/icon.svg diff --git a/airbyte-integrations/connectors/source-ringcentral/icon.svg b/airbyte-integrations/connectors/source-ringcentral/icon.svg new file mode 100644 index 000000000000..36e4f398466b --- /dev/null +++ b/airbyte-integrations/connectors/source-ringcentral/icon.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/airbyte-integrations/connectors/source-ringcentral/metadata.yaml b/airbyte-integrations/connectors/source-ringcentral/metadata.yaml index 2e46b68c5344..e9341f929b3a 100644 --- a/airbyte-integrations/connectors/source-ringcentral/metadata.yaml +++ b/airbyte-integrations/connectors/source-ringcentral/metadata.yaml @@ -5,7 +5,7 @@ data: dockerImageTag: 0.1.0 dockerRepository: airbyte/source-ringcentral githubIssueLabel: source-ringcentral - icon: ringcentral.svg + icon: icon.svg license: MIT name: Ringcentral remoteRegistries: From 1bb7a1c0bbbaf63fa51c02d5c2b8a1c34cb9636a Mon Sep 17 00:00:00 2001 From: Xiaohan Song Date: Thu, 7 Mar 2024 09:45:26 -0800 Subject: [PATCH 129/172] [source-postgres] State counter on postgres (#34724) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Aaron ("AJ") Steers Co-authored-by: Joe Bell Co-authored-by: Marcos Marx Co-authored-by: SatishChGit Co-authored-by: evantahler Co-authored-by: Rodi Reich Zilberman <867491+rodireich@users.noreply.github.com> Co-authored-by: Joe Reuter Co-authored-by: Catherine Noll Co-authored-by: Anton Karpets Co-authored-by: Christo Grabowski <108154848+ChristoGrab@users.noreply.github.com> Co-authored-by: Akash Kulkarni Co-authored-by: Akash Kulkarni <113392464+akashkulk@users.noreply.github.com> Co-authored-by: Gireesh Sreepathi Co-authored-by: Artem Inzhyyants <36314070+artem1205@users.noreply.github.com> Co-authored-by: Ella Rohm-Ensing Co-authored-by: Daryna Ishchenko <80129833+darynaishchenko@users.noreply.github.com> Co-authored-by: Baz Co-authored-by: Patrick Nilan Co-authored-by: Serhii 
Lazebnyi <53845333+lazebnyi@users.noreply.github.com> Co-authored-by: Marius Posta Co-authored-by: Chandler Prall Co-authored-by: pmossman Co-authored-by: Anatolii Yatsuk <35109939+tolik0@users.noreply.github.com> Co-authored-by: Cole Snodgrass Co-authored-by: Anatolii Yatsuk Co-authored-by: bgroff Co-authored-by: Maxime Carbonneau-Leclerc <3360483+maxi297@users.noreply.github.com> Co-authored-by: Ryan Waskewich <156025126+rwask@users.noreply.github.com> Co-authored-by: maxi297 Co-authored-by: Pedro S. Lopez Co-authored-by: Håkon Åmdal Co-authored-by: Roman Yermilov [GL] <86300758+roman-yermilov-gl@users.noreply.github.com> Co-authored-by: Augustin Co-authored-by: Lake Mossman Co-authored-by: lmossman Co-authored-by: Brian Lai <51336873+brianjlai@users.noreply.github.com> Co-authored-by: Subodh Kant Chaturvedi Co-authored-by: Stephane Geneix <147216312+stephane-airbyte@users.noreply.github.com> Co-authored-by: Sajarin Co-authored-by: Edward Gao Co-authored-by: nguyenaiden Co-authored-by: Natalie Kwong <38087517+nataliekwong@users.noreply.github.com> Co-authored-by: terencecho <3916587+terencecho@users.noreply.github.com> Co-authored-by: Alex Birdsall --- .../src/main/resources/version.properties | 2 +- .../debezium/DebeziumIteratorConstants.java | 2 + .../integrations/debezium/CdcSourceTest.java | 16 +++ .../connectors/source-postgres/build.gradle | 2 +- .../connectors/source-postgres/metadata.yaml | 2 +- .../postgres/ctid/CtidStateIterator.java | 107 ------------------ .../postgres/ctid/CtidStateManager.java | 77 ++++++++++++- .../postgres/ctid/PostgresCtidHandler.java | 23 ++-- .../postgres/xmin/PostgresXminHandler.java | 13 ++- .../postgres/xmin/XminStateIterator.java | 81 ------------- .../postgres/xmin/XminStateManager.java | 34 +++++- .../postgres/CdcPostgresSourceTest.java | 8 ++ .../postgres/XminPostgresSourceTest.java | 10 +- .../XminPostgresWithOldServerSourceTest.java | 5 +- ....java => XminSourceStateIteratorTest.java} | 47 +++++--- docs/integrations/sources/postgres.md | 2 + 16 files changed, 206 insertions(+), 225 deletions(-) delete mode 100644 airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/ctid/CtidStateIterator.java delete mode 100644 airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/xmin/XminStateIterator.java rename airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/xmin/{XminStateIteratorTest.java => XminSourceStateIteratorTest.java} (58%) diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties index ad61006b14c5..0d77af48d666 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties @@ -1 +1 @@ -version=0.23.16 +version=0.23.17 diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/debezium/DebeziumIteratorConstants.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/debezium/DebeziumIteratorConstants.java index 2e31f1ec7293..9148f93cdac4 100644 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/debezium/DebeziumIteratorConstants.java +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/debezium/DebeziumIteratorConstants.java @@ -11,6 +11,8 @@ public class 
DebeziumIteratorConstants { public static final String SYNC_CHECKPOINT_DURATION_PROPERTY = "sync_checkpoint_seconds"; public static final String SYNC_CHECKPOINT_RECORDS_PROPERTY = "sync_checkpoint_records"; + // TODO: Move these variables to a separate class IteratorConstants, as they will be used in state + // iterators for non debezium cases too. public static final Duration SYNC_CHECKPOINT_DURATION = Duration.ofMinutes(15); public static final Integer SYNC_CHECKPOINT_RECORDS = 10_000; diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/debezium/CdcSourceTest.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/debezium/CdcSourceTest.java index 62ab77544d4f..729d774f33a8 100644 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/debezium/CdcSourceTest.java +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/integrations/debezium/CdcSourceTest.java @@ -141,6 +141,13 @@ protected ConfiguredAirbyteCatalog getConfiguredCatalog() { protected abstract void assertExpectedStateMessages(final List stateMessages); + // TODO: this assertion should be added into test cases in this class, we will need to implement + // corresponding iterator for other connectors before + // doing so. + protected void assertExpectedStateMessageCountMatches(final List stateMessages, long totalCount) { + // Do nothing. + } + @BeforeEach protected void setup() { testdb = createTestDatabase(); @@ -350,6 +357,7 @@ void testExistingData() throws Exception { assertExpectedRecords(new HashSet<>(MODEL_RECORDS), recordMessages); assertExpectedStateMessages(stateMessages); + assertExpectedStateMessageCountMatches(stateMessages, MODEL_RECORDS.size()); } protected void compareTargetPositionFromTheRecordsWithTargetPostionGeneratedBeforeSync(final CdcTargetPosition targetPosition, @@ -377,6 +385,7 @@ public void testDelete() throws Exception { extractRecordMessages(actualRecords2)); final List stateMessages2 = extractStateMessages(actualRecords2); assertExpectedStateMessagesFromIncrementalSync(stateMessages2); + assertExpectedStateMessageCountMatches(stateMessages2, 1); assertEquals(1, recordMessages2.size()); assertEquals(11, recordMessages2.get(0).getData().get(COL_ID).asInt()); assertCdcMetaData(recordMessages2.get(0).getData(), false); @@ -411,6 +420,7 @@ public void testUpdate() throws Exception { assertEquals(11, recordMessages2.get(0).getData().get(COL_ID).asInt()); assertEquals(updatedModel, recordMessages2.get(0).getData().get(COL_MODEL).asText()); assertCdcMetaData(recordMessages2.get(0).getData(), true); + assertExpectedStateMessageCountMatches(stateMessages2, 1); } @SuppressWarnings({"BusyWait", "CodeBlock2Expr"}) @@ -534,6 +544,8 @@ public void testCdcAndFullRefreshInSameSync() throws Exception { final HashSet names = new HashSet<>(STREAM_NAMES); names.add(MODELS_STREAM_NAME + "_2"); assertExpectedStateMessages(stateMessages1); + // Full refresh does not get any state messages. 
+ assertExpectedStateMessageCountMatches(stateMessages1, MODEL_RECORDS_2.size()); assertExpectedRecords(Streams.concat(MODEL_RECORDS_2.stream(), MODEL_RECORDS.stream()) .collect(Collectors.toSet()), recordMessages1, @@ -554,6 +566,7 @@ public void testCdcAndFullRefreshInSameSync() throws Exception { final Set recordMessages2 = extractRecordMessages(actualRecords2); final List stateMessages2 = extractStateMessages(actualRecords2); assertExpectedStateMessagesFromIncrementalSync(stateMessages2); + assertExpectedStateMessageCountMatches(stateMessages2, 1); assertExpectedRecords( Streams.concat(MODEL_RECORDS_2.stream(), Stream.of(puntoRecord)) .collect(Collectors.toSet()), @@ -576,6 +589,7 @@ public void testNoData() throws Exception { final List stateMessages = extractStateMessages(actualRecords); assertExpectedRecords(Collections.emptySet(), recordMessages); assertExpectedStateMessagesForNoData(stateMessages); + assertExpectedStateMessageCountMatches(stateMessages, 0); } protected void assertExpectedStateMessagesForNoData(final List stateMessages) { @@ -600,6 +614,7 @@ public void testNoDataOnSecondSync() throws Exception { assertExpectedRecords(Collections.emptySet(), recordMessages2); assertExpectedStateMessagesFromIncrementalSync(stateMessages2); + assertExpectedStateMessageCountMatches(stateMessages2, 0); } @Test @@ -630,6 +645,7 @@ public void newTableSnapshotTest() throws Exception { dataFromFirstBatch); final List stateAfterFirstBatch = extractStateMessages(dataFromFirstBatch); assertExpectedStateMessages(stateAfterFirstBatch); + assertExpectedStateMessageCountMatches(stateAfterFirstBatch, MODEL_RECORDS.size()); final AirbyteStateMessage stateMessageEmittedAfterFirstSyncCompletion = stateAfterFirstBatch.get(stateAfterFirstBatch.size() - 1); assertEquals(AirbyteStateMessage.AirbyteStateType.GLOBAL, stateMessageEmittedAfterFirstSyncCompletion.getType()); diff --git a/airbyte-integrations/connectors/source-postgres/build.gradle b/airbyte-integrations/connectors/source-postgres/build.gradle index 76c9a0122c8c..df72ad0baa1b 100644 --- a/airbyte-integrations/connectors/source-postgres/build.gradle +++ b/airbyte-integrations/connectors/source-postgres/build.gradle @@ -12,7 +12,7 @@ java { } airbyteJavaConnector { - cdkVersionRequired = '0.23.16' + cdkVersionRequired = '0.23.17' features = ['db-sources', 'datastore-postgres'] useLocalCdk = false } diff --git a/airbyte-integrations/connectors/source-postgres/metadata.yaml b/airbyte-integrations/connectors/source-postgres/metadata.yaml index 0d9f4677fa22..f5bc3ca80c87 100644 --- a/airbyte-integrations/connectors/source-postgres/metadata.yaml +++ b/airbyte-integrations/connectors/source-postgres/metadata.yaml @@ -9,7 +9,7 @@ data: connectorSubtype: database connectorType: source definitionId: decd338e-5647-4c0b-adf4-da0e75f5a750 - dockerImageTag: 3.3.14 + dockerImageTag: 3.3.15 dockerRepository: airbyte/source-postgres documentationUrl: https://docs.airbyte.com/integrations/sources/postgres githubIssueLabel: source-postgres diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/ctid/CtidStateIterator.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/ctid/CtidStateIterator.java deleted file mode 100644 index 3b9e06e001b6..000000000000 --- a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/ctid/CtidStateIterator.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright (c) 2023 
Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.source.postgres.ctid; - -import static io.airbyte.integrations.source.postgres.ctid.CtidStateManager.CTID_STATUS_VERSION; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.AbstractIterator; -import io.airbyte.cdk.integrations.debezium.DebeziumIteratorConstants; -import io.airbyte.integrations.source.postgres.internal.models.CtidStatus; -import io.airbyte.integrations.source.postgres.internal.models.InternalModels.StateType; -import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteMessage.Type; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import java.time.Duration; -import java.time.Instant; -import java.time.OffsetDateTime; -import java.util.Iterator; -import java.util.Objects; -import javax.annotation.CheckForNull; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class CtidStateIterator extends AbstractIterator implements Iterator { - - private static final Logger LOGGER = LoggerFactory.getLogger(CtidStateIterator.class); - public static final Duration SYNC_CHECKPOINT_DURATION = DebeziumIteratorConstants.SYNC_CHECKPOINT_DURATION; - public static final Integer SYNC_CHECKPOINT_RECORDS = DebeziumIteratorConstants.SYNC_CHECKPOINT_RECORDS; - - private final Iterator messageIterator; - private final AirbyteStreamNameNamespacePair pair; - private boolean hasEmittedFinalState; - private String lastCtid; - private final JsonNode streamStateForIncrementalRun; - private final FileNodeHandler fileNodeHandler; - private final CtidStateManager stateManager; - private long recordCount = 0L; - private Instant lastCheckpoint = Instant.now(); - private final Duration syncCheckpointDuration; - private final Long syncCheckpointRecords; - - public CtidStateIterator(final Iterator messageIterator, - final AirbyteStreamNameNamespacePair pair, - final FileNodeHandler fileNodeHandler, - final CtidStateManager stateManager, - final JsonNode streamStateForIncrementalRun, - final Duration checkpointDuration, - final Long checkpointRecords) { - this.messageIterator = messageIterator; - this.pair = pair; - this.fileNodeHandler = fileNodeHandler; - this.stateManager = stateManager; - this.streamStateForIncrementalRun = streamStateForIncrementalRun; - this.syncCheckpointDuration = checkpointDuration; - this.syncCheckpointRecords = checkpointRecords; - } - - @CheckForNull - @Override - protected AirbyteMessage computeNext() { - if (messageIterator.hasNext()) { - if ((recordCount >= syncCheckpointRecords || Duration.between(lastCheckpoint, OffsetDateTime.now()).compareTo(syncCheckpointDuration) > 0) - && Objects.nonNull(lastCtid) - && StringUtils.isNotBlank(lastCtid)) { - final Long fileNode = fileNodeHandler.getFileNode(pair); - assert fileNode != null; - final CtidStatus ctidStatus = new CtidStatus() - .withVersion(CTID_STATUS_VERSION) - .withStateType(StateType.CTID) - .withCtid(lastCtid) - .withIncrementalState(streamStateForIncrementalRun) - .withRelationFilenode(fileNode); - LOGGER.info("Emitting ctid state for stream {}, state is {}", pair, ctidStatus); - recordCount = 0L; - lastCheckpoint = Instant.now(); - return new AirbyteMessage() - .withType(Type.STATE) - .withState(stateManager.createCtidStateMessage(pair, ctidStatus)); - } - // Use try-catch to catch Exception that could occur when connection to the database 
fails - try { - final AirbyteMessageWithCtid message = messageIterator.next(); - if (Objects.nonNull(message.ctid())) { - this.lastCtid = message.ctid(); - } - recordCount++; - return message.recordMessage(); - } catch (final Exception e) { - throw new RuntimeException(e); - } - } else if (!hasEmittedFinalState) { - hasEmittedFinalState = true; - final AirbyteStateMessage finalStateMessage = stateManager.createFinalStateMessage(pair, streamStateForIncrementalRun); - LOGGER.info("Finished initial sync of stream {}, Emitting final state, state is {}", pair, finalStateMessage); - return new AirbyteMessage() - .withType(Type.STATE) - .withState(finalStateMessage); - } else { - return endOfData(); - } - } - -} diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/ctid/CtidStateManager.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/ctid/CtidStateManager.java index 1b58a9ae2852..d3f89529fbe6 100644 --- a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/ctid/CtidStateManager.java +++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/ctid/CtidStateManager.java @@ -5,18 +5,32 @@ package io.airbyte.integrations.source.postgres.ctid; import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.cdk.integrations.source.relationaldb.state.SourceStateMessageProducer; import io.airbyte.integrations.source.postgres.internal.models.CtidStatus; +import io.airbyte.integrations.source.postgres.internal.models.InternalModels.StateType; import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; +import io.airbyte.protocol.models.v0.AirbyteMessage; import io.airbyte.protocol.models.v0.AirbyteStateMessage; +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; import java.util.Map; import java.util.Objects; +import java.util.function.Function; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -public abstract class CtidStateManager { +public abstract class CtidStateManager implements SourceStateMessageProducer { + + private static final Logger LOGGER = LoggerFactory.getLogger(CtidStateManager.class); public static final long CTID_STATUS_VERSION = 2; public static final String STATE_TYPE_KEY = "state_type"; protected final Map pairToCtidStatus; + private Function streamStateForIncrementalRunSupplier; + + private String lastCtid; + private FileNodeHandler fileNodeHandler; protected CtidStateManager(final Map pairToCtidStatus) { this.pairToCtidStatus = pairToCtidStatus; @@ -41,4 +55,65 @@ public static boolean validateRelationFileNode(final CtidStatus ctidstatus, public abstract AirbyteStateMessage createFinalStateMessage(final AirbyteStreamNameNamespacePair pair, final JsonNode streamStateForIncrementalRun); + public void setStreamStateIteratorFields(Function streamStateForIncrementalRunSupplier, + FileNodeHandler fileNodeHandler) { + this.streamStateForIncrementalRunSupplier = streamStateForIncrementalRunSupplier; + this.fileNodeHandler = fileNodeHandler; + } + + @Override + public AirbyteStateMessage generateStateMessageAtCheckpoint(final ConfiguredAirbyteStream stream) { + final AirbyteStreamNameNamespacePair pair = new AirbyteStreamNameNamespacePair(stream.getStream().getName(), + stream.getStream().getNamespace()); + final Long fileNode = fileNodeHandler.getFileNode(pair); + assert fileNode != null; + final 
CtidStatus ctidStatus = new CtidStatus() + .withVersion(CTID_STATUS_VERSION) + .withStateType(StateType.CTID) + .withCtid(lastCtid) + .withIncrementalState(getStreamState(pair)) + .withRelationFilenode(fileNode); + LOGGER.info("Emitting ctid state for stream {}, state is {}", pair, ctidStatus); + return createCtidStateMessage(pair, ctidStatus); + } + + /** + * Stores the latest CTID. + */ + @Override + public AirbyteMessage processRecordMessage(final ConfiguredAirbyteStream stream, AirbyteMessageWithCtid message) { + if (Objects.nonNull(message.ctid())) { + this.lastCtid = message.ctid(); + } + return message.recordMessage(); + } + + /** + * Creates a final state message for the stream. + */ + @Override + public AirbyteStateMessage createFinalStateMessage(final ConfiguredAirbyteStream stream) { + final AirbyteStreamNameNamespacePair pair = new AirbyteStreamNameNamespacePair(stream.getStream().getName(), + stream.getStream().getNamespace()); + + final AirbyteStateMessage finalStateMessage = createFinalStateMessage(pair, getStreamState(pair)); + LOGGER.info("Finished initial sync of stream {}, Emitting final state, state is {}", pair, finalStateMessage); + return finalStateMessage; + } + + /** + * Extra criteria (besides checking frequency) to check if we should emit a state message. + */ + @Override + public boolean shouldEmitStateMessage(final ConfiguredAirbyteStream stream) { + return Objects.nonNull(lastCtid) + && StringUtils.isNotBlank(lastCtid); + } + + private JsonNode getStreamState(final AirbyteStreamNameNamespacePair pair) { + final CtidStatus currentCtidStatus = getCtidStatus(pair); + return (currentCtidStatus == null || currentCtidStatus.getIncrementalState() == null) ? streamStateForIncrementalRunSupplier.apply(pair) + : currentCtidStatus.getIncrementalState(); + } + } diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/ctid/PostgresCtidHandler.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/ctid/PostgresCtidHandler.java index d8b255ac7185..8c18d1937706 100644 --- a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/ctid/PostgresCtidHandler.java +++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/ctid/PostgresCtidHandler.java @@ -10,15 +10,17 @@ import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.cdk.db.jdbc.JdbcDatabase; +import io.airbyte.cdk.integrations.debezium.DebeziumIteratorConstants; import io.airbyte.cdk.integrations.source.relationaldb.DbSourceDiscoverUtil; import io.airbyte.cdk.integrations.source.relationaldb.TableInfo; +import io.airbyte.cdk.integrations.source.relationaldb.state.SourceStateIterator; +import io.airbyte.cdk.integrations.source.relationaldb.state.StateEmitFrequency; import io.airbyte.commons.stream.AirbyteStreamUtils; import io.airbyte.commons.util.AutoCloseableIterator; import io.airbyte.commons.util.AutoCloseableIterators; import io.airbyte.integrations.source.postgres.PostgresQueryUtils.TableBlockSize; import io.airbyte.integrations.source.postgres.PostgresType; import io.airbyte.integrations.source.postgres.ctid.CtidPostgresSourceOperations.RowDataWithCtid; -import io.airbyte.integrations.source.postgres.internal.models.CtidStatus; import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.CommonField; import io.airbyte.protocol.models.v0.AirbyteMessage; @@ -109,7
+111,7 @@ public List> getInitialSyncCtidIterator( tablesMaxTuple.orElseGet(() -> Map.of(pair, -1)).get(pair)); final AutoCloseableIterator recordIterator = getRecordIterator(queryStream, streamName, namespace, emmitedAt.toEpochMilli()); - final AutoCloseableIterator recordAndMessageIterator = augmentWithState(recordIterator, pair); + final AutoCloseableIterator recordAndMessageIterator = augmentWithState(recordIterator, airbyteStream); final AutoCloseableIterator logAugmented = augmentWithLogs(recordAndMessageIterator, pair, streamName); iteratorList.add(logAugmented); @@ -165,21 +167,20 @@ private AutoCloseableIterator augmentWithLogs(final AutoCloseabl } private AutoCloseableIterator augmentWithState(final AutoCloseableIterator recordIterator, - final AirbyteStreamNameNamespacePair pair) { + final ConfiguredAirbyteStream airbyteStream) { - final CtidStatus currentCtidStatus = ctidStateManager.getCtidStatus(pair); - final JsonNode incrementalState = - (currentCtidStatus == null || currentCtidStatus.getIncrementalState() == null) ? streamStateForIncrementalRunSupplier.apply(pair) - : currentCtidStatus.getIncrementalState(); final Duration syncCheckpointDuration = config.get(SYNC_CHECKPOINT_DURATION_PROPERTY) != null ? Duration.ofSeconds(config.get(SYNC_CHECKPOINT_DURATION_PROPERTY).asLong()) - : CtidStateIterator.SYNC_CHECKPOINT_DURATION; + : DebeziumIteratorConstants.SYNC_CHECKPOINT_DURATION; final Long syncCheckpointRecords = config.get(SYNC_CHECKPOINT_RECORDS_PROPERTY) != null ? config.get(SYNC_CHECKPOINT_RECORDS_PROPERTY).asLong() - : CtidStateIterator.SYNC_CHECKPOINT_RECORDS; + : DebeziumIteratorConstants.SYNC_CHECKPOINT_RECORDS; + ctidStateManager.setStreamStateIteratorFields(streamStateForIncrementalRunSupplier, fileNodeHandler); + + final AirbyteStreamNameNamespacePair pair = + new AirbyteStreamNameNamespacePair(airbyteStream.getStream().getName(), airbyteStream.getStream().getNamespace()); return AutoCloseableIterators.transformIterator( - r -> new CtidStateIterator(r, pair, fileNodeHandler, ctidStateManager, incrementalState, - syncCheckpointDuration, syncCheckpointRecords), + r -> new SourceStateIterator(r, airbyteStream, ctidStateManager, new StateEmitFrequency(syncCheckpointRecords, syncCheckpointDuration)), recordIterator, pair); } diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/xmin/PostgresXminHandler.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/xmin/PostgresXminHandler.java index d58a0d07a2e8..6d93443aa4e6 100644 --- a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/xmin/PostgresXminHandler.java +++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/xmin/PostgresXminHandler.java @@ -13,6 +13,8 @@ import io.airbyte.cdk.integrations.source.relationaldb.DbSourceDiscoverUtil; import io.airbyte.cdk.integrations.source.relationaldb.RelationalDbQueryUtils; import io.airbyte.cdk.integrations.source.relationaldb.TableInfo; +import io.airbyte.cdk.integrations.source.relationaldb.state.SourceStateIterator; +import io.airbyte.cdk.integrations.source.relationaldb.state.StateEmitFrequency; import io.airbyte.commons.stream.AirbyteStreamUtils; import io.airbyte.commons.util.AutoCloseableIterator; import io.airbyte.commons.util.AutoCloseableIterators; @@ -31,6 +33,7 @@ import java.sql.Connection; import java.sql.PreparedStatement; import 
java.sql.SQLException; +import java.time.Duration; import java.time.Instant; import java.util.ArrayList; import java.util.List; @@ -99,7 +102,7 @@ public List> getIncrementalIterators( final AutoCloseableIterator queryStream = queryTableXmin(selectedDatabaseFields, table.getNameSpace(), table.getName()); final AutoCloseableIterator recordIterator = getRecordIterator(queryStream, streamName, namespace, emittedAt.toEpochMilli()); - final AutoCloseableIterator recordAndMessageIterator = augmentWithState(recordIterator, pair); + final AutoCloseableIterator recordAndMessageIterator = augmentWithState(recordIterator, airbyteStream, pair); iteratorList.add(augmentWithLogs(recordAndMessageIterator, pair, streamName)); } @@ -233,12 +236,14 @@ private AutoCloseableIterator augmentWithLogs(final AutoCloseabl } private AutoCloseableIterator augmentWithState(final AutoCloseableIterator recordIterator, + final ConfiguredAirbyteStream airbyteStream, final AirbyteStreamNameNamespacePair pair) { + xminStateManager.setCurrentXminStatus(currentXminStatus); return AutoCloseableIterators.transform( - autoCloseableIterator -> new XminStateIterator( + autoCloseableIterator -> new SourceStateIterator( autoCloseableIterator, - pair, - currentXminStatus), + airbyteStream, + xminStateManager, new StateEmitFrequency(0L, Duration.ofSeconds(1L))), recordIterator, AirbyteStreamUtils.convertFromNameAndNamespace(pair.getName(), pair.getNamespace())); } diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/xmin/XminStateIterator.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/xmin/XminStateIterator.java deleted file mode 100644 index 9b3a31da7067..000000000000 --- a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/xmin/XminStateIterator.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.source.postgres.xmin; - -import com.google.common.collect.AbstractIterator; -import io.airbyte.integrations.source.postgres.internal.models.XminStatus; -import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; -import io.airbyte.protocol.models.v0.AirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteStateMessage; -import java.util.Iterator; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class XminStateIterator extends AbstractIterator implements Iterator { - - private static final Logger LOGGER = LoggerFactory.getLogger(XminStateIterator.class); - - private final Iterator messageIterator; - private final AirbyteStreamNameNamespacePair pair; - private boolean hasEmittedFinalState; - - private boolean hasCaughtException = false; - private final XminStatus xminStatus; - - /** - * @param pair Stream Name and Namespace (e.g. public.users) - */ - public XminStateIterator(final Iterator messageIterator, - final AirbyteStreamNameNamespacePair pair, - final XminStatus xminStatus) { - this.messageIterator = messageIterator; - this.pair = pair; - this.xminStatus = xminStatus; - } - - /** - * Computes the next record retrieved from Source stream. Emits StateMessage containing data of the - * record that has been read so far - * - *
- * <p>
- * If this method throws an exception, it will propagate outward to the {@code hasNext} or - * {@code next} invocation that invoked this method. Any further attempts to use the iterator will - * result in an {@link IllegalStateException}. - * </p>
- * - * @return {@link AirbyteStateMessage} containing information of the records read so far - */ - @Override - protected AirbyteMessage computeNext() { - if (hasCaughtException) { - // Mark iterator as done since the next call to messageIterator will result in an - // IllegalArgumentException and resets exception caught state. - // This occurs when the previous iteration emitted state so this iteration cycle will indicate - // iteration is complete - hasCaughtException = false; - return endOfData(); - } - - if (messageIterator.hasNext()) { - // Use try-catch to catch Exception that could occur when connection to the database fails - try { - return messageIterator.next(); - } catch (final Exception e) { - hasCaughtException = true; - LOGGER.error("Message iterator failed to read next record.", e); - // We want to still continue attempting to sync future streams, so the exception is caught. When - // frequent state emission is introduced, this - // will result in a partial success. - return endOfData(); - } - } else if (!hasEmittedFinalState) { - hasEmittedFinalState = true; - return XminStateManager.createStateMessage(pair, xminStatus); - } else { - return endOfData(); - } - } - -} diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/xmin/XminStateManager.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/xmin/XminStateManager.java index 8ec2138a5880..ce36ed225636 100644 --- a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/xmin/XminStateManager.java +++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/xmin/XminStateManager.java @@ -4,6 +4,7 @@ package io.airbyte.integrations.source.postgres.xmin; +import io.airbyte.cdk.integrations.source.relationaldb.state.SourceStateMessageProducer; import io.airbyte.commons.exceptions.ConfigErrorException; import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.source.postgres.internal.models.XminStatus; @@ -13,21 +14,25 @@ import io.airbyte.protocol.models.v0.AirbyteStateMessage; import io.airbyte.protocol.models.v0.AirbyteStateMessage.AirbyteStateType; import io.airbyte.protocol.models.v0.AirbyteStreamState; +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; import io.airbyte.protocol.models.v0.StreamDescriptor; import java.util.HashMap; import java.util.List; import java.util.Map; +import org.apache.commons.lang3.NotImplementedException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Utility class to manage xmin state. */ -public class XminStateManager { +public class XminStateManager implements SourceStateMessageProducer { private static final Logger LOGGER = LoggerFactory.getLogger(XminStateManager.class); public static final long XMIN_STATE_VERSION = 2L; + private XminStatus currentXminStatus; + private final Map pairToXminStatus; private final static AirbyteStateMessage EMPTY_STATE = new AirbyteStateMessage() @@ -95,4 +100,31 @@ public static AirbyteStateMessage getAirbyteStateMessage(final AirbyteStreamName .withStream(airbyteStreamState); } + public void setCurrentXminStatus(final XminStatus currentXminStatus) { + this.currentXminStatus = currentXminStatus; + } + + @Override + public AirbyteStateMessage generateStateMessageAtCheckpoint(final ConfiguredAirbyteStream stream) { + // This is not expected to be called. 
+ throw new NotImplementedException(); + } + + @Override + public AirbyteMessage processRecordMessage(final ConfiguredAirbyteStream stream, AirbyteMessage message) { + return message; + } + + @Override + public AirbyteStateMessage createFinalStateMessage(final ConfiguredAirbyteStream stream) { + final AirbyteStreamNameNamespacePair pair = new AirbyteStreamNameNamespacePair(stream.getStream().getName(), stream.getStream().getNamespace()); + return XminStateManager.createStateMessage(pair, currentXminStatus).getState(); + } + + // We do not send state messages for xmin for checkpointing purposes. + @Override + public boolean shouldEmitStateMessage(final ConfiguredAirbyteStream stream) { + return false; + } + } diff --git a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceTest.java b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceTest.java index c51a2ad086d2..10e3ccec6541 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceTest.java @@ -66,6 +66,7 @@ import java.util.Objects; import java.util.Optional; import java.util.Set; +import java.util.concurrent.atomic.AtomicLong; import java.util.stream.Collectors; import javax.sql.DataSource; import org.junit.jupiter.api.BeforeEach; @@ -81,6 +82,13 @@ protected void setBaseImage() { this.postgresImage = getServerImage(); } + @Override + protected void assertExpectedStateMessageCountMatches(final List stateMessages, long totalCount) { + AtomicLong count = new AtomicLong(0L); + stateMessages.stream().forEach(stateMessage -> count.addAndGet(stateMessage.getSourceStats().getRecordCount().longValue())); + assertEquals(totalCount, count.get()); + } + @Override protected PostgresTestDatabase createTestDatabase() { setBaseImage(); diff --git a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/XminPostgresSourceTest.java b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/XminPostgresSourceTest.java index 7bf7f586918d..f31de1c63b44 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/XminPostgresSourceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/XminPostgresSourceTest.java @@ -27,6 +27,7 @@ import io.airbyte.protocol.models.v0.AirbyteMessage; import io.airbyte.protocol.models.v0.AirbyteMessage.Type; import io.airbyte.protocol.models.v0.AirbyteStateMessage; +import io.airbyte.protocol.models.v0.AirbyteStateStats; import io.airbyte.protocol.models.v0.AirbyteStream; import io.airbyte.protocol.models.v0.CatalogHelpers; import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; @@ -177,6 +178,10 @@ void testReadSuccess() throws Exception { // Since the third state message would be the final, it should be of xmin type assertEquals("xmin", stateTypeFromThirdStateMessage); + assertEquals(firstStateMessage.getSourceStats().getRecordCount(), 1.0); + assertEquals(secondStateMessage.getSourceStats().getRecordCount(), 1.0); + assertEquals(thirdStateMessage.getSourceStats().getRecordCount(), 1.0); + + // The ctid value from second state message should be bigger than first state message
assertEquals(1, ctidFromSecondStateMessage.compareTo(ctidFromFirstStateMessage)); @@ -212,6 +217,8 @@ void testReadSuccess() throws Exception { assertEquals(2, stateAfterSyncWithCtidState.size()); assertEquals(secondStateMessage, stateAfterSyncWithCtidState.get(0)); assertEquals(thirdStateMessage, stateAfterSyncWithCtidState.get(1)); + assertEquals(stateAfterSyncWithCtidState.get(0).getSourceStats().getRecordCount(), 1.0); + assertEquals(stateAfterSyncWithCtidState.get(1).getSourceStats().getRecordCount(), 1.0); assertMessageSequence(recordsFromSyncRunningWithACtidState); @@ -225,7 +232,8 @@ void testReadSuccess() throws Exception { // Even though no records were emitted, a state message is still expected final List stateAfterXminSync = extractStateMessage(syncWithXminStateType); assertEquals(1, stateAfterXminSync.size()); - // Since no records were returned so the state should be the same as before + // Since no records were returned, the state should be the same as before, without the count. + thirdStateMessage.setSourceStats(new AirbyteStateStats().withRecordCount(0.0)); assertEquals(thirdStateMessage, stateAfterXminSync.get(0)); // We add some data and perform a third read. We should verify that (i) a delete is not captured and diff --git a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/XminPostgresWithOldServerSourceTest.java b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/XminPostgresWithOldServerSourceTest.java index 2027365218fc..a41f308cd8c4 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/XminPostgresWithOldServerSourceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/XminPostgresWithOldServerSourceTest.java @@ -17,6 +17,7 @@ import io.airbyte.integrations.source.postgres.PostgresTestDatabase.BaseImage; import io.airbyte.protocol.models.v0.AirbyteMessage; import io.airbyte.protocol.models.v0.AirbyteStateMessage; +import io.airbyte.protocol.models.v0.AirbyteStateStats; import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; import java.util.Collections; import java.util.List; @@ -71,7 +72,9 @@ void testReadSuccess() throws Exception { // Even though no records were emitted, a state message is still expected final List stateAfterXminSync = extractStateMessage(syncWithXminStateType); assertEquals(1, stateAfterXminSync.size()); - // Since no records were returned so the state should be the same as before + // Since no records were returned, the state should be the same as before; just without the + // counts. + firstSyncStateMessage.setSourceStats(new AirbyteStateStats().withRecordCount(0.0)); assertEquals(firstSyncStateMessage, stateAfterXminSync.get(0)); // We add some data and perform a third read.
We should verify that (i) a delete is not captured and diff --git a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/xmin/XminStateIteratorTest.java b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/xmin/XminSourceStateIteratorTest.java similarity index 58% rename from airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/xmin/XminStateIteratorTest.java rename to airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/xmin/XminSourceStateIteratorTest.java index 85eab7b8e500..87df3553e7da 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/xmin/XminStateIteratorTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/xmin/XminSourceStateIteratorTest.java @@ -4,22 +4,30 @@ package io.airbyte.integrations.source.postgres.xmin; -import static io.airbyte.integrations.source.postgres.xmin.XminTestConstants.PAIR1; +import static io.airbyte.integrations.source.postgres.xmin.XminTestConstants.NAMESPACE; import static io.airbyte.integrations.source.postgres.xmin.XminTestConstants.RECORD_MESSAGE_1; import static io.airbyte.integrations.source.postgres.xmin.XminTestConstants.RECORD_MESSAGE_2; import static io.airbyte.integrations.source.postgres.xmin.XminTestConstants.RECORD_MESSAGE_3; +import static io.airbyte.integrations.source.postgres.xmin.XminTestConstants.STREAM_NAME1; import static io.airbyte.integrations.source.postgres.xmin.XminTestConstants.XMIN_STATE_MESSAGE_1; import static io.airbyte.integrations.source.postgres.xmin.XminTestConstants.XMIN_STATUS1; +import static org.junit.Assert.assertThrows; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; +import io.airbyte.cdk.integrations.source.relationaldb.state.SourceStateIterator; +import io.airbyte.cdk.integrations.source.relationaldb.state.StateEmitFrequency; import io.airbyte.commons.util.MoreIterators; import io.airbyte.protocol.models.v0.AirbyteMessage; +import io.airbyte.protocol.models.v0.AirbyteStateStats; +import io.airbyte.protocol.models.v0.AirbyteStream; +import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; import java.sql.SQLException; +import java.time.Duration; import java.util.Iterator; import org.junit.jupiter.api.Test; -public class XminStateIteratorTest { +public class XminSourceStateIteratorTest { private static Iterator messageIterator; @@ -52,34 +60,43 @@ public AirbyteMessage next() { @Test void testSuccessfulSync() { messageIterator = MoreIterators.of(RECORD_MESSAGE_1, RECORD_MESSAGE_2); - final XminStateIterator iterator = new XminStateIterator( + final XminStateManager manager = new XminStateManager(null); + manager.setCurrentXminStatus(XMIN_STATUS1); + final ConfiguredAirbyteStream stream = + new ConfiguredAirbyteStream().withStream(new AirbyteStream().withNamespace(NAMESPACE).withName(STREAM_NAME1)); + final SourceStateIterator iterator = new SourceStateIterator( messageIterator, - PAIR1, - XMIN_STATUS1); + stream, + manager, + new StateEmitFrequency(0L, Duration.ofSeconds(1L))); + + var expectedStateMessage = + XMIN_STATE_MESSAGE_1.withState(XMIN_STATE_MESSAGE_1.getState().withSourceStats(new AirbyteStateStats().withRecordCount(2.0))); assertEquals(RECORD_MESSAGE_1, iterator.next()); assertEquals(RECORD_MESSAGE_2, 
iterator.next()); - assertEquals(XMIN_STATE_MESSAGE_1, iterator.next()); + assertEquals(expectedStateMessage, iterator.next()); assertFalse(iterator.hasNext()); } @Test void testSyncFail() { messageIterator = MoreIterators.of(RECORD_MESSAGE_1, RECORD_MESSAGE_2); - final XminStateIterator iterator = new XminStateIterator( + final XminStateManager manager = new XminStateManager(null); + manager.setCurrentXminStatus(XMIN_STATUS1); + final ConfiguredAirbyteStream stream = + new ConfiguredAirbyteStream().withStream(new AirbyteStream().withNamespace(NAMESPACE).withName(STREAM_NAME1)); + final SourceStateIterator iterator = new SourceStateIterator( createExceptionIterator(), - PAIR1, - XMIN_STATUS1); + stream, + manager, + new StateEmitFrequency(0L, Duration.ofSeconds(1L))); assertEquals(RECORD_MESSAGE_1, iterator.next()); assertEquals(RECORD_MESSAGE_2, iterator.next()); assertEquals(RECORD_MESSAGE_3, iterator.next()); - // No state message is emitted at this point. - // Since there is no intermediate stateEmission, this will catch the error but not emit a state - // message - // but will prevent an exception from causing the iterator to fail by marking iterator as - // endOfData() - assertFalse(iterator.hasNext()); + // We want to throw an exception here. + assertThrows(RuntimeException.class, () -> iterator.hasNext()); } } diff --git a/docs/integrations/sources/postgres.md b/docs/integrations/sources/postgres.md index 5fd6de921fcc..6ac689d6cc57 100644 --- a/docs/integrations/sources/postgres.md +++ b/docs/integrations/sources/postgres.md @@ -292,8 +292,10 @@ According to Postgres [documentation](https://www.postgresql.org/docs/14/datatyp | Version | Date | Pull Request | Subject | |---------|------------|----------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 3.3.15 | 2024-02-29 | [34724](https://github.com/airbytehq/airbyte/pull/34724) | Add record count in state message. | | 3.3.14 | 2024-03-06 | [35842](https://github.com/airbytehq/airbyte/pull/35842) | Add logging to understand cases with a large number of records with the same LSN. | | 3.3.12 | 2024-02-22 | [35569](https://github.com/airbytehq/airbyte/pull/35569) | Fix logging bug. | +| 3.3.13 | 2024-02-27 | [35675](https://github.com/airbytehq/airbyte/pull/35675) | Fix invalid cdc error message. | | 3.3.11 | 2024-02-20 | [35304](https://github.com/airbytehq/airbyte/pull/35304) | Add config to throw an error on invalid CDC position and enable it by default. | | 3.3.10 | 2024-02-13 | [35036](https://github.com/airbytehq/airbyte/pull/34751) | Emit analytics message for invalid CDC cursor. 
| | 3.3.9 | 2024-02-13 | [35224](https://github.com/airbytehq/airbyte/pull/35224) | Adopt CDK 0.20.4 | From 6b8cc39de16f9996011fc8a97c6a642416f862a4 Mon Sep 17 00:00:00 2001 From: Christo Grabowski <108154848+ChristoGrab@users.noreply.github.com> Date: Thu, 7 Mar 2024 16:09:27 -0500 Subject: [PATCH 130/172] Docs: Fix link in Core Concepts page (#35900) --- docs/using-airbyte/core-concepts/readme.md | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/docs/using-airbyte/core-concepts/readme.md b/docs/using-airbyte/core-concepts/readme.md index 09398b25a618..85c47d4431d3 100644 --- a/docs/using-airbyte/core-concepts/readme.md +++ b/docs/using-airbyte/core-concepts/readme.md @@ -26,11 +26,11 @@ A connection is an automated data pipeline that replicates data from a source to | Concept | Description | |-----------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------| -| [Replication Frequency](/using-airbyte/core-concepts/sync-schedules.md) | When should a data sync be triggered? | -| [Destination Namespace and Stream Prefix](/using-airbyte/core-concepts/namespaces.md) | Where should the replicated data be written? | -| [Sync Mode](/using-airbyte/core-concepts/sync-modes/README.md) | How should the streams be replicated (read and written)? | -| [Schema Propagation](/cloud/managing-airbyte-cloud/manage-schema-changes.md) | How should Airbyte handle schema drift in sources? | -| [Catalog Selection](/cloud/managing-airbyte-cloud/configuring-connections.md#modify-streams-in-your-connection) | What data should be replicated from the source to the destination? | +| [Replication Frequency](/using-airbyte/core-concepts/sync-schedules.md) | When should a data sync be triggered? | +| [Destination Namespace and Stream Prefix](/using-airbyte/core-concepts/namespaces.md) | Where should the replicated data be written? | +| [Sync Mode](/using-airbyte/core-concepts/sync-modes/README.md) | How should the streams be replicated (read and written)? | +| [Schema Propagation](/cloud/managing-airbyte-cloud/manage-schema-changes.md) | How should Airbyte handle schema drift in sources? | +| [Catalog Selection](/cloud/managing-airbyte-cloud/configuring-connections.md#modify-streams-in-your-connection) | What data should be replicated from the source to the destination? | ## Stream @@ -53,7 +53,8 @@ Examples of fields: ## Sync Schedules -There are three options for scheduling a sync to run: +There are three options for scheduling a sync to run: + - Scheduled (ie. every 24 hours, every 2 hours) - [CRON schedule](https://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html) - Manual \(i.e: clicking the "Sync Now" button in the UI or through the API\) @@ -70,7 +71,7 @@ For more details, see our [Namespace documentation](namespaces.md). A sync mode governs how Airbyte reads from a source and writes to a destination. Airbyte provides different sync modes depending on what you want to accomplish. -Read more about each [sync mode](using-airbyte/core-concepts/sync-modes) and how they differ. +Read more about each [sync mode](/using-airbyte/core-concepts/sync-modes/README.md) and how they differ. ## Typing and Deduping From 55e6fbd191c7cdf7232a9e818dfa10cd835c0b92 Mon Sep 17 00:00:00 2001 From: Gireesh Sreepathi Date: Thu, 7 Mar 2024 14:25:38 -0800 Subject: [PATCH 131/172] Destination Snowflake: Handle NPE for state value. 
(#35899) --- airbyte-cdk/java/airbyte-cdk/README.md | 1 + .../airbyte-cdk/core/src/main/resources/version.properties | 2 +- .../jdbc/typing_deduping/JdbcDestinationHandler.java | 6 +++++- .../destination-postgres-strict-encrypt/build.gradle | 2 +- .../destination-postgres-strict-encrypt/metadata.yaml | 2 +- .../connectors/destination-postgres/build.gradle | 2 +- .../connectors/destination-postgres/metadata.yaml | 2 +- .../destination/postgres/PostgresDestinationTest.java | 4 ++++ .../connectors/destination-redshift/build.gradle | 2 +- .../connectors/destination-redshift/gradle.properties | 2 +- .../connectors/destination-redshift/metadata.yaml | 2 +- .../connectors/destination-snowflake/build.gradle | 2 +- .../connectors/destination-snowflake/gradle.properties | 1 + .../connectors/destination-snowflake/metadata.yaml | 2 +- docs/integrations/destinations/postgres.md | 1 + docs/integrations/destinations/redshift.md | 1 + docs/integrations/destinations/snowflake.md | 3 ++- 17 files changed, 25 insertions(+), 12 deletions(-) diff --git a/airbyte-cdk/java/airbyte-cdk/README.md b/airbyte-cdk/java/airbyte-cdk/README.md index 42432136091f..e158953abc5b 100644 --- a/airbyte-cdk/java/airbyte-cdk/README.md +++ b/airbyte-cdk/java/airbyte-cdk/README.md @@ -166,6 +166,7 @@ MavenLocal debugging steps: | Version | Date | Pull Request | Subject | |:--------|:-----------|:-----------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 0.23.18 | 2024-03-07 | [\#35899](https://github.com/airbytehq/airbyte/pull/35899) | Null check when retrieving destination state | | 0.23.16 | 2024-03-06 | [\#35842](https://github.com/airbytehq/airbyte/pull/35842) | Improve logging in debezium processing. | | 0.23.15 | 2024-03-05 | [\#35827](https://github.com/airbytehq/airbyte/pull/35827) | improving the Junit interceptor. | | 0.23.14 | 2024-03-05 | [\#35739](https://github.com/airbytehq/airbyte/pull/35739) | Add logging to the CDC queue size. Fix the ContainerFactory. | diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties index 0d77af48d666..c6b68bc8f36f 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties @@ -1 +1 @@ -version=0.23.17 +version=0.23.18 diff --git a/airbyte-cdk/java/airbyte-cdk/db-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/typing_deduping/JdbcDestinationHandler.java b/airbyte-cdk/java/airbyte-cdk/db-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/typing_deduping/JdbcDestinationHandler.java index f68595dd6ecf..b211d0add964 100644 --- a/airbyte-cdk/java/airbyte-cdk/db-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/typing_deduping/JdbcDestinationHandler.java +++ b/airbyte-cdk/java/airbyte-cdk/db-destinations/src/main/java/io/airbyte/cdk/integrations/destination/jdbc/typing_deduping/JdbcDestinationHandler.java @@ -225,7 +225,11 @@ record -> { nameNode != null ? nameNode.asText() : null, namespaceNode != null ? 
namespaceNode.asText() : null); }, - record -> toDestinationState(Jsons.deserialize(record.get(DESTINATION_STATE_TABLE_COLUMN_STATE).asText())))); + record -> { + final JsonNode stateNode = record.get(DESTINATION_STATE_TABLE_COLUMN_STATE); + JsonNode state = stateNode != null ? Jsons.deserialize(stateNode.asText()) : Jsons.emptyObject(); + return toDestinationState(state); + })); } private CompletionStage> retrieveState(final CompletableFuture> destinationStatesFuture, diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/build.gradle b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/build.gradle index 9277c61ead5f..211f167e7fbf 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/build.gradle +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/build.gradle @@ -4,7 +4,7 @@ plugins { } airbyteJavaConnector { - cdkVersionRequired = '0.23.11' + cdkVersionRequired = '0.23.18' features = ['db-destinations', 'typing-deduping', 'datastore-postgres'] useLocalCdk = false } diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/metadata.yaml b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/metadata.yaml index f748f77282b3..e788d32190ed 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/metadata.yaml +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/metadata.yaml @@ -2,7 +2,7 @@ data: connectorSubtype: database connectorType: destination definitionId: 25c5221d-dce2-4163-ade9-739ef790f503 - dockerImageTag: 2.0.3 + dockerImageTag: 2.0.4 dockerRepository: airbyte/destination-postgres-strict-encrypt documentationUrl: https://docs.airbyte.com/integrations/destinations/postgres githubIssueLabel: destination-postgres diff --git a/airbyte-integrations/connectors/destination-postgres/build.gradle b/airbyte-integrations/connectors/destination-postgres/build.gradle index 40d3cd9579f4..3ccc03586ae5 100644 --- a/airbyte-integrations/connectors/destination-postgres/build.gradle +++ b/airbyte-integrations/connectors/destination-postgres/build.gradle @@ -4,7 +4,7 @@ plugins { } airbyteJavaConnector { - cdkVersionRequired = '0.23.11' + cdkVersionRequired = '0.23.18' features = ['db-destinations', 'datastore-postgres', 'typing-deduping'] useLocalCdk = false } diff --git a/airbyte-integrations/connectors/destination-postgres/metadata.yaml b/airbyte-integrations/connectors/destination-postgres/metadata.yaml index 94a27c50b032..6d4b8efd9326 100644 --- a/airbyte-integrations/connectors/destination-postgres/metadata.yaml +++ b/airbyte-integrations/connectors/destination-postgres/metadata.yaml @@ -5,7 +5,7 @@ data: connectorSubtype: database connectorType: destination definitionId: 25c5221d-dce2-4163-ade9-739ef790f503 - dockerImageTag: 2.0.3 + dockerImageTag: 2.0.4 dockerRepository: airbyte/destination-postgres documentationUrl: https://docs.airbyte.com/integrations/destinations/postgres githubIssueLabel: destination-postgres diff --git a/airbyte-integrations/connectors/destination-postgres/src/test/java/io/airbyte/integrations/destination/postgres/PostgresDestinationTest.java b/airbyte-integrations/connectors/destination-postgres/src/test/java/io/airbyte/integrations/destination/postgres/PostgresDestinationTest.java index 84eb63509696..3861adec49aa 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/test/java/io/airbyte/integrations/destination/postgres/PostgresDestinationTest.java +++ 
b/airbyte-integrations/connectors/destination-postgres/src/test/java/io/airbyte/integrations/destination/postgres/PostgresDestinationTest.java @@ -36,6 +36,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.IntStream; import javax.sql.DataSource; @@ -44,9 +45,12 @@ import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.testcontainers.containers.PostgreSQLContainer; import org.testcontainers.utility.MountableFile; +@Timeout(value = 10, + unit = TimeUnit.MINUTES) public class PostgresDestinationTest { private static PostgreSQLContainer PSQL_DB; diff --git a/airbyte-integrations/connectors/destination-redshift/build.gradle b/airbyte-integrations/connectors/destination-redshift/build.gradle index c55b76b11eb9..bf3009392619 100644 --- a/airbyte-integrations/connectors/destination-redshift/build.gradle +++ b/airbyte-integrations/connectors/destination-redshift/build.gradle @@ -5,7 +5,7 @@ plugins { } airbyteJavaConnector { - cdkVersionRequired = '0.23.11' + cdkVersionRequired = '0.23.18' features = ['db-destinations', 's3-destinations', 'typing-deduping'] useLocalCdk = false } diff --git a/airbyte-integrations/connectors/destination-redshift/gradle.properties b/airbyte-integrations/connectors/destination-redshift/gradle.properties index 8b137891791f..4041680b9859 100644 --- a/airbyte-integrations/connectors/destination-redshift/gradle.properties +++ b/airbyte-integrations/connectors/destination-redshift/gradle.properties @@ -1 +1 @@ - +JunitMethodExecutionTimeout=15 m diff --git a/airbyte-integrations/connectors/destination-redshift/metadata.yaml b/airbyte-integrations/connectors/destination-redshift/metadata.yaml index 909685693541..de2f7e7d91ea 100644 --- a/airbyte-integrations/connectors/destination-redshift/metadata.yaml +++ b/airbyte-integrations/connectors/destination-redshift/metadata.yaml @@ -5,7 +5,7 @@ data: connectorSubtype: database connectorType: destination definitionId: f7a7d195-377f-cf5b-70a5-be6b819019dc - dockerImageTag: 2.1.9 + dockerImageTag: 2.1.10 dockerRepository: airbyte/destination-redshift documentationUrl: https://docs.airbyte.com/integrations/destinations/redshift githubIssueLabel: destination-redshift diff --git a/airbyte-integrations/connectors/destination-snowflake/build.gradle b/airbyte-integrations/connectors/destination-snowflake/build.gradle index 4cc747506746..77922be6f777 100644 --- a/airbyte-integrations/connectors/destination-snowflake/build.gradle +++ b/airbyte-integrations/connectors/destination-snowflake/build.gradle @@ -4,7 +4,7 @@ plugins { } airbyteJavaConnector { - cdkVersionRequired = '0.23.11' + cdkVersionRequired = '0.23.18' features = ['db-destinations', 's3-destinations', 'typing-deduping'] useLocalCdk = false } diff --git a/airbyte-integrations/connectors/destination-snowflake/gradle.properties b/airbyte-integrations/connectors/destination-snowflake/gradle.properties index 3ce49dd31e29..8d4afe7f29ca 100644 --- a/airbyte-integrations/connectors/destination-snowflake/gradle.properties +++ b/airbyte-integrations/connectors/destination-snowflake/gradle.properties @@ -1,3 +1,4 @@ # currently limit the number of parallel threads until further investigation into the issues \ # where Snowflake will fail to login using config credentials testExecutionConcurrency=4 +JunitMethodExecutionTimeout=15 m diff --git 
a/airbyte-integrations/connectors/destination-snowflake/metadata.yaml b/airbyte-integrations/connectors/destination-snowflake/metadata.yaml index fc0c46bd82cc..a72861cd1794 100644 --- a/airbyte-integrations/connectors/destination-snowflake/metadata.yaml +++ b/airbyte-integrations/connectors/destination-snowflake/metadata.yaml @@ -5,7 +5,7 @@ data: connectorSubtype: database connectorType: destination definitionId: 424892c4-daac-4491-b35d-c6688ba547ba - dockerImageTag: 3.6.0 + dockerImageTag: 3.6.1 dockerRepository: airbyte/destination-snowflake documentationUrl: https://docs.airbyte.com/integrations/destinations/snowflake githubIssueLabel: destination-snowflake diff --git a/docs/integrations/destinations/postgres.md b/docs/integrations/destinations/postgres.md index bc49991d756f..dd680839b959 100644 --- a/docs/integrations/destinations/postgres.md +++ b/docs/integrations/destinations/postgres.md @@ -193,6 +193,7 @@ Now that you have set up the Postgres destination connector, check out the follo | Version | Date | Pull Request | Subject | |:--------|:-----------|:-----------------------------------------------------------|:----------------------------------------------------------------------------------------------------| +| 2.0.4 | 2024-03-07 | [\#35899](https://github.com/airbytehq/airbyte/pull/35899) | Adopt CDK 0.23.18; Null safety check in state parsing | | 2.0.3 | 2024-03-01 | [\#35528](https://github.com/airbytehq/airbyte/pull/35528) | Adopt CDK 0.23.11; Use Migration framework | | 2.0.2 | 2024-03-01 | [\#35760](https://github.com/airbytehq/airbyte/pull/35760) | Mark as certified, add PSQL exception to deinterpolator | | 2.0.1 | 2024-02-22 | [\#35385](https://github.com/airbytehq/airbyte/pull/35385) | Upgrade CDK to 0.23.0; Gathering required initial state upfront | diff --git a/docs/integrations/destinations/redshift.md b/docs/integrations/destinations/redshift.md index 0df49130d9bf..3757cdb462e3 100644 --- a/docs/integrations/destinations/redshift.md +++ b/docs/integrations/destinations/redshift.md @@ -237,6 +237,7 @@ Each stream will be output into its own raw table in Redshift. 
Each table will c | Version | Date | Pull Request | Subject | |:--------|:-----------|:-----------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 2.1.10 | 2024-03-07 | [\#35899](https://github.com/airbytehq/airbyte/pull/35899) | Adopt CDK 0.23.18; Null safety check in state parsing | | 2.1.9 | 2024-03-04 | [\#35316](https://github.com/airbytehq/airbyte/pull/35316) | Update to CDK 0.23.11; Adopt migration framework | | 2.1.8 | 2024-02-09 | [\#35354](https://github.com/airbytehq/airbyte/pull/35354) | Update to CDK 0.23.0; Gather required initial state upfront, remove dependency on svv_table_info for table empty check | | 2.1.7 | 2024-02-09 | [\#34562](https://github.com/airbytehq/airbyte/pull/34562) | Switch back to jooq-based sql execution for standard insert | diff --git a/docs/integrations/destinations/snowflake.md b/docs/integrations/destinations/snowflake.md index 966224d41e13..b3f15eab7e60 100644 --- a/docs/integrations/destinations/snowflake.md +++ b/docs/integrations/destinations/snowflake.md @@ -246,7 +246,8 @@ Otherwise, make sure to grant the role the required permissions in the desired n | Version | Date | Pull Request | Subject | |:----------------|:-----------|:-----------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------| -| 3.6.0 | 2024-02-22 | [35308](https://github.com/airbytehq/airbyte/pull/35308) | Upgrade CDK; use utc tz for extracted_at; Migrate existing extracted_at to utc; | +| 3.6.1 | 2024-03-07 | [\#35899](https://github.com/airbytehq/airbyte/pull/35899) | Adopt CDK 0.23.18; Null safety check in state parsing | +| 3.6.0 | 2024-03-06 | [35308](https://github.com/airbytehq/airbyte/pull/35308) | Upgrade CDK; use utc tz for extracted_at; Migrate existing extracted_at to utc; | | 3.5.14 | 2024-02-22 | [35456](https://github.com/airbytehq/airbyte/pull/35456) | Adopt CDK 0.23.0; Gather initial state upfront, reduce information_schema calls | | 3.5.13 | 2024-02-22 | [35569](https://github.com/airbytehq/airbyte/pull/35569) | Fix logging bug. | | 3.5.12 | 2024-02-15 | [35240](https://github.com/airbytehq/airbyte/pull/35240) | Adopt CDK 0.20.9 | From 0a9e0fe84a5042ac124121d71793950ae1a5fe95 Mon Sep 17 00:00:00 2001 From: Alexandre Cuoci Date: Thu, 7 Mar 2024 17:51:42 -0500 Subject: [PATCH 132/172] updated instructions for external logging (#35903) --- docs/enterprise-setup/implementation-guide.md | 64 ++++++++----------- 1 file changed, 25 insertions(+), 39 deletions(-) diff --git a/docs/enterprise-setup/implementation-guide.md b/docs/enterprise-setup/implementation-guide.md index 1634853a8582..0806e590c33f 100644 --- a/docs/enterprise-setup/implementation-guide.md +++ b/docs/enterprise-setup/implementation-guide.md @@ -86,7 +86,7 @@ Follow these instructions to add the Airbyte helm repository: Template airbyte.yml file ``` -webapp-url: # example: localhost:8080 +webapp-url: # example: http://localhost:8080 initial-user: email: @@ -221,7 +221,6 @@ For Self-Managed Enterprise deployments, we recommend spinning up standalone log To do this, add external log storage details to your `airbyte.yml` file. 
This disables the default internal Minio instance (`airbyte/minio`) and configures the external log storage: - @@ -230,32 +229,21 @@ minio: enabled: false global: - log4jConfig: "log4j2-no-minio.xml" - logs: - storage: - type: "S3" - - minio: - enabled: false - - s3: - enabled: true - bucket: "" ## S3 bucket name that you've created. - bucketRegion: "" ## e.g. us-east-1 - - accessKey: ## AWS Access Key. - password: "" - existingSecret: "" ## The name of an existing Kubernetes secret containing the AWS Access Key. - existingSecretKey: "" ## The Kubernetes secret key containing the AWS Access Key. - - secretKey: ## AWS Secret Access Key - password: - existingSecret: "" ## The name of an existing Kubernetes secret containing the AWS Secret Access Key. - existingSecretKey: "" ## The name of an existing Kubernetes secret containing the AWS Secret Access Key. + storage: + type: "S3" + bucket: ## S3 bucket names that you've created. We recommend storing the following all in one bucket. + log: airbyte-bucket + state: airbyte-bucket + workloadOutput: airbyte-bucket + + s3: + region: "" ## e.g. us-east-1 + accessKeyExistingSecret: ## The name of an existing Kubernetes secret containing the AWS Access Key. + accessKeyExistingSecretKey: ## The Kubernetes secret key containing the AWS Access Key. + secretKeyExistingSecret: ## The name of an existing Kubernetes secret containing the AWS Secret Access Key. + secretKeyExistingSecretKey: ## The Kubernetes secret key containing the AWS Secret Access Key. ``` -For each of `accessKey` and `secretKey`, the `password` and `existingSecret` fields are mutually exclusive. - Then, ensure your access key is tied to an IAM user with the [following policies](https://docs.aws.amazon.com/AmazonS3/latest/userguide/example-policies-s3.html#iam-policy-ex0), allowing the user access to S3 storage: ```yaml @@ -288,7 +276,7 @@ Then, ensure your access key is tied to an IAM user with the [following policies ``` - + ```yaml
From eabf39c2e70345c7a438e6809b7ba891ee81b825 Mon Sep 17 00:00:00 2001 From: Rodi Reich Zilberman <867491+rodireich@users.noreply.github.com> Date: Thu, 7 Mar 2024 14:52:38 -0800 Subject: [PATCH 133/172] GA release 4.0.0 (#35873) --- .../connectors/source-mssql/metadata.yaml | 11 +++-- docs/integrations/sources/mssql-migrations.md | 4 ++ docs/integrations/sources/mssql.md | 42 +++++++------------ 3 files changed, 23 insertions(+), 34 deletions(-) diff --git a/airbyte-integrations/connectors/source-mssql/metadata.yaml b/airbyte-integrations/connectors/source-mssql/metadata.yaml index cc4941df3037..a9a11767436a 100644 --- a/airbyte-integrations/connectors/source-mssql/metadata.yaml +++ b/airbyte-integrations/connectors/source-mssql/metadata.yaml @@ -9,7 +9,7 @@ data: connectorSubtype: database connectorType: source definitionId: b5ea17b1-f170-46dc-bc31-cc744ca984c1 - dockerImageTag: 3.7.7 + dockerImageTag: 4.0.0 dockerRepository: airbyte/source-mssql documentationUrl: https://docs.airbyte.com/integrations/sources/mssql githubIssueLabel: source-mssql @@ -18,19 +18,18 @@ data: name: Microsoft SQL Server (MSSQL) registries: cloud: - # CI pipeline is broken for mssql - dockerImageTag: 3.6.1 enabled: true oss: - # CI pipeline is broken for mssql - dockerImageTag: 3.6.1 enabled: true - releaseStage: alpha + releaseStage: generally_available supportLevel: community tags: - language:java releases: breakingChanges: + 4.0.0: + message: "We have overhauled our MSSQL source connector and it is now supported by the Airbyte team! To benefit from new features, including terabyte-sized table support, reliability improvements, expanded datetime data types, and various bug fixes, please opt in to the 4.0.0 version." + upgradeDeadline: "2024-04-07" 3.0.0: message: "Remapped columns of types: date, datetime, datetime2, datetimeoffset, smalldatetime, and time from `String` to their appropriate Airbyte types. Customers whose streams have columns with the affected data types must take action with their connections." upgradeDeadline: "2023-12-07" diff --git a/docs/integrations/sources/mssql-migrations.md b/docs/integrations/sources/mssql-migrations.md index d637f94fe073..dc0c892f5d5b 100644 --- a/docs/integrations/sources/mssql-migrations.md +++ b/docs/integrations/sources/mssql-migrations.md @@ -1,5 +1,9 @@ # Microsoft SQL Server (MSSQL) Migration Guide +## Upgrading to 4.0.0 +Source MSSQL provides incremental sync that can read unlimited sized tables and can resume if the initial read has failed. +Upgrading from previous versions will be seamless and does not require any intervention. + ## Upgrading to 3.0.0 This change remapped date, datetime, datetime2, datetimeoffset, smalldatetime, and time data type to their correct Airbyte types. Customers whose streams have columns with the affected datatype must refresh their schema and reset their data. See chart below for the mapping change. diff --git a/docs/integrations/sources/mssql.md b/docs/integrations/sources/mssql.md index db10beb279bd..c28ecd6d1372 100644 --- a/docs/integrations/sources/mssql.md +++ b/docs/integrations/sources/mssql.md @@ -1,4 +1,9 @@ # Microsoft SQL Server (MSSQL) +Airbyte's certified MSSQL connector offers the following features: +* Multiple methods of keeping your data fresh, including [Change Data Capture (CDC)](https://docs.airbyte.com/understanding-airbyte/cdc) using the [binlog](https://dev.mysql.com/doc/refman/8.0/en/binary-log.html). 
+* Incremental as well as Full Refresh [sync modes](https://docs.airbyte.com/cloud/core-concepts#connection-sync-modes), providing flexibility in how data is delivered to your destination. +* Reliable replication at any table size with [checkpointing](https://docs.airbyte.com/understanding-airbyte/airbyte-protocol/#state--checkpointing) and chunking of database reads. + ## Features @@ -14,18 +19,7 @@ The MSSQL source does not alter the schema present in your database. Depending on the destination connected to this source, however, the schema may be altered. See the destination's documentation for more details. -## Troubleshooting - -You may run into an issue where the connector provides wrong values for some data types. See [discussion](https://github.com/airbytehq/airbyte/issues/4270) on unexpected behaviour for certain datatypes. - -Note: Currently hierarchyid and sql_variant are not processed in CDC migration type (not supported by debezium). For more details please check -[this ticket](https://github.com/airbytehq/airbyte/issues/14411) - -## Getting Started \(Airbyte Cloud\) - -On Airbyte Cloud, only TLS connections to your MSSQL instance are supported in source configuration. Other than that, you can proceed with the open-source instructions below. - -## Getting Started \(Airbyte Open Source\) +## Getting Started @@ -41,13 +35,14 @@ This is dependent on your networking setup. The easiest way to verify if Airbyte This step is optional but highly recommended to allow for better permission control and auditing. Alternatively, you can use Airbyte with an existing user in your database. -_Coming soon: suggestions on how to create this user._ - #### 3. Your database user should now be ready for use with Airbyte! +#### Airbyte Cloud +On Airbyte Cloud, only secured connections to your MSSQL instance are supported in source configuration. You may configure your connection using one of the supported SSL Methods or an SSH Tunnel. + ## Change Data Capture \(CDC\) -We use [SQL Server's change data capture feature](https://docs.microsoft.com/en-us/sql/relational-databases/track-changes/about-change-data-capture-sql-server?view=sql-server-2017) to capture row-level `INSERT`, `UPDATE` and `DELETE` operations that occur on cdc-enabled tables. +We use [SQL Server's change data capture feature](https://docs.microsoft.com/en-us/sql/relational-databases/track-changes/about-change-data-capture-sql-server?view=sql-server-2017) with transaction logs to capture row-level `INSERT`, `UPDATE` and `DELETE` operations that occur on CDC-enabled tables. Some extra setup, requiring at least _db_owner_ permissions on the database\(s\) you intend to sync from, is needed \(detailed [below](mssql.md#setting-up-cdc-for-mssql); a brief sketch follows the recommendations below\). Please read the [CDC docs](../../understanding-airbyte/cdc.md) for an overview of how Airbyte approaches CDC. @@ -60,27 +55,17 @@ Please read the [CDC docs](../../understanding-airbyte/cdc.md) for an overview o - If the limitations below prevent you from using CDC and your goal is to maintain a snapshot of your table in the destination, consider using non-CDC incremental and occasionally reset the data and re-sync. - If your table has a primary key but doesn't have a reasonable cursor field for incremental syncing \(e.g. `updated_at`\), CDC allows you to sync your table incrementally. 
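To make the setup above concrete, the sketch below runs SQL Server's CDC system procedures (`sys.sp_cdc_enable_db` and `sys.sp_cdc_enable_table`) through `pyodbc`. It is illustrative only, not part of the connector: the server, database, credentials, and table names are placeholders, and it must be run by a user with _db_owner_ permissions.

```python
# Illustrative sketch of the CDC setup referenced above; run with db_owner
# permissions. Server, database, credentials, and table names are placeholders.
import pyodbc

conn = pyodbc.connect(
    "DRIVER={ODBC Driver 18 for SQL Server};"
    "SERVER=my-sql-server;DATABASE=my_database;UID=my_user;PWD=my_password",
    autocommit=True,
)
cursor = conn.cursor()

# Enable CDC at the database level.
cursor.execute("EXEC sys.sp_cdc_enable_db")

# Enable CDC for one table; @role_name = NULL means access to the change
# tables is not gated behind a database role.
cursor.execute(
    "EXEC sys.sp_cdc_enable_table "
    "@source_schema = N'dbo', @source_name = N'my_table', @role_name = NULL"
)
conn.close()
```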
-### CDC Config - -| Parameter | Type | Default | Description | -| :----------------------- | :------------------------------------------: | :----------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| Data to Sync | Enum: `Existing and New`, `New Changes Only` | `Existing and New` | What data should be synced under the CDC. `Existing and New` will read existing data as a snapshot, and sync new changes through CDC. `New Changes Only` will skip the initial snapshot, and only sync new changes through CDC. See documentation [here](https://debezium.io/documentation/reference/stable/connectors/sqlserver.html#sqlserver-property-snapshot-mode) for details. Under the hood, this parameter sets the `snapshot.mode` in Debezium. | -| Snapshot Isolation Level | Enum: `Snapshot`, `Read Committed` | `Snapshot` | Mode to control which transaction isolation level is used and how long the connector locks tables that are designated for capture. If you don't know which one to choose, just use the default one. See documentation [here](https://debezium.io/documentation/reference/stable/connectors/sqlserver.html#sqlserver-property-snapshot-isolation-mode) for details. Under the hood, this parameter sets the `snapshot.isolation.mode` in Debezium. | - #### CDC Limitations - Make sure to read our [CDC docs](../../understanding-airbyte/cdc.md) to see limitations that impact all databases using CDC replication. -- There are some critical issues regarding certain datatypes. Please find detailed info in [this Github issue](https://github.com/airbytehq/airbyte/issues/4542). +- `hierarchyid` and `sql_variant` types are not processed in the CDC migration type (not supported by Debezium). For more details, please check +[this ticket](https://github.com/airbytehq/airbyte/issues/14411). - CDC is only available for SQL Server 2016 Service Pack 1 \(SP1\) and later. - _db_owner_ \(or higher\) permissions are required to perform the [necessary setup](mssql.md#setting-up-cdc-for-mssql) for CDC. -- If you set `Initial Snapshot Isolation Level` to `Snapshot`, you must enable [snapshot isolation mode](https://docs.microsoft.com/en-us/dotnet/framework/data/adonet/sql/snapshot-isolation-in-sql-server) on the database\(s\) you want to sync. This is used for retrieving an initial snapshot without locking tables. -- For SQL Server Always On read-only replica, only `Snapshot` initial snapshot isolation level is supported. - On Linux, CDC is not supported on versions earlier than SQL Server 2017 CU18 \(SQL Server 2019 is supported\). - Change data capture cannot be enabled on tables with a clustered columnstore index. \(It can be enabled on tables with a _non-clustered_ columnstore index\). - The SQL Server CDC feature processes changes that occur in user-created tables only. You cannot enable CDC on the SQL Server master database. - Using variables with partition switching on databases or tables with change data capture \(CDC\) is not supported for the `ALTER TABLE` ... `SWITCH TO` ... `PARTITION` ... 
statement -- Our implementation has not been tested with managed instances, such as Azure SQL Database \(we welcome any feedback from users who try this!\) - - If you do want to try this, CDC can only be enabled on Azure SQL databases tiers above Standard 3 \(S3+\). Basic, S0, S1 and S2 tiers are not supported for CDC. - Our CDC implementation uses at least once delivery for all change records. - Read more on CDC limitations in the [Microsoft docs](https://docs.microsoft.com/en-us/sql/relational-databases/track-changes/about-change-data-capture-sql-server?view=sql-server-2017#limitations). @@ -342,8 +327,9 @@ WHERE actor_definition_id ='b5ea17b1-f170-46dc-bc31-cc744ca984c1' AND (configura | Version | Date | Pull Request | Subject | |:--------|:-----------|:------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------| +| 4.0.0 | 2024-03-06 | [35873](https://github.com/airbytehq/airbyte/pull/35873) | Terabyte-sized tables support, reliability improvements, bug fixes. | | 3.7.7 | 2024-03-06 | [35816](https://github.com/airbytehq/airbyte/pull/35816) | Fix query that was failing on a case sensitive server. | -| 3.7.6 | 2024-03-04 | [35721](https://github.com/airbytehq/airbyte/pull/35721) | Fix tests | +| 3.7.6 | 2024-03-04 | [35721](https://github.com/airbytehq/airbyte/pull/35721) | Fix tests | | 3.7.5 | 2024-02-29 | [35739](https://github.com/airbytehq/airbyte/pull/35739) | Allow configuring the queue size used for cdc events. | | 3.7.4 | 2024-02-26 | [35566](https://github.com/airbytehq/airbyte/pull/35566) | Add config to throw an error on invalid CDC position. 
| | 3.7.3 | 2024-02-23 | [35596](https://github.com/airbytehq/airbyte/pull/35596) | Fix a logger issue | From f09c97cae3d9ecd67db79ffcba1208b44037e1e5 Mon Sep 17 00:00:00 2001 From: Augustin Date: Fri, 8 Mar 2024 09:00:08 +0100 Subject: [PATCH 134/172] connectors-qa: check cdk and language tags (#35685) --- .../src/connectors_qa/checks/metadata.py | 85 ++++++++++++++++--- .../unit_tests/test_checks/test_metadata.py | 53 ++++++++++++ .../resources/qa-checks.md | 10 +++ 3 files changed, 134 insertions(+), 14 deletions(-) diff --git a/airbyte-ci/connectors/connectors_qa/src/connectors_qa/checks/metadata.py b/airbyte-ci/connectors/connectors_qa/src/connectors_qa/checks/metadata.py index bb549f86d1cd..72ca27c6263f 100644 --- a/airbyte-ci/connectors/connectors_qa/src/connectors_qa/checks/metadata.py +++ b/airbyte-ci/connectors/connectors_qa/src/connectors_qa/checks/metadata.py @@ -3,7 +3,8 @@ import os -from connector_ops.utils import Connector # type: ignore +import toml +from connector_ops.utils import Connector, ConnectorLanguage # type: ignore from connectors_qa import consts from connectors_qa.models import Check, CheckCategory, CheckResult from metadata_service.validators.metadata_validator import PRE_UPLOAD_VALIDATORS, ValidatorOptions, validate_and_load # type: ignore @@ -56,7 +57,25 @@ class CheckConnectorLanguageTag(MetadataCheck): PYTHON_LANGUAGE_TAG = "language:python" JAVA_LANGUAGE_TAG = "language:java" + def get_expected_language_tag(self, connector: Connector) -> str: + if (connector.code_directory / consts.SETUP_PY_FILE_NAME).exists() or ( + connector.code_directory / consts.PYPROJECT_FILE_NAME + ).exists(): + return self.PYTHON_LANGUAGE_TAG + elif (connector.code_directory / consts.GRADLE_FILE_NAME).exists(): + return self.JAVA_LANGUAGE_TAG + else: + raise ValueError("Could not infer the language tag from the connector directory") + def _run(self, connector: Connector) -> CheckResult: + try: + expected_language_tag = self.get_expected_language_tag(connector) + except ValueError: + return self.fail( + connector=connector, + message="Could not infer the language tag from the connector directory", + ) + current_language_tags = [t for t in connector.metadata.get("tags", []) if t.startswith("language:")] if not current_language_tags: return self.fail( @@ -69,31 +88,69 @@ def _run(self, connector: Connector) -> CheckResult: message=f"Multiple language tags found in the metadata file: {current_language_tags}", ) current_language_tag = current_language_tags[0] + if current_language_tag != expected_language_tag: + return self.fail( + connector=connector, + message=f"Expected language tag '{expected_language_tag}' in the {consts.METADATA_FILE_NAME} file, but found '{current_language_tag}'", + ) + return self.pass_( + connector=connector, + message=f"Language tag {expected_language_tag} is present in the metadata file", + ) - if (connector.code_directory / consts.SETUP_PY_FILE_NAME).exists() or ( - connector.code_directory / consts.PYPROJECT_FILE_NAME - ).exists(): - expected_language = self.PYTHON_LANGUAGE_TAG - elif (connector.code_directory / consts.GRADLE_FILE_NAME).exists(): - expected_language = self.JAVA_LANGUAGE_TAG - else: + +class CheckConnectorCDKTag(MetadataCheck): + name = "Python connectors must have a CDK tag in metadata" + description = f"Python connectors must have a CDK tag in their metadata. It must be set in the `tags` field in {consts.METADATA_FILE_NAME}. The values can be `cdk:low-code`, `cdk:python`, or `cdk:file`." 
+ applies_to_connector_languages = [ConnectorLanguage.PYTHON, ConnectorLanguage.LOW_CODE] + + class CDKTag: + LOW_CODE = "cdk:low-code" + PYTHON = "cdk:python" + FILE = "cdk:python-file-based" + + def get_expected_cdk_tag(self, connector: Connector) -> str: + manifest_file = connector.code_directory / connector.technical_name.replace("-", "_") / consts.LOW_CODE_MANIFEST_FILE_NAME + pyproject_file = connector.code_directory / consts.PYPROJECT_FILE_NAME + setup_py_file = connector.code_directory / consts.SETUP_PY_FILE_NAME + if manifest_file.exists(): + return self.CDKTag.LOW_CODE + if pyproject_file.exists(): + pyproject = toml.load((connector.code_directory / consts.PYPROJECT_FILE_NAME)) + cdk_deps = pyproject["tool"]["poetry"]["dependencies"].get("airbyte-cdk", None) + if cdk_deps and isinstance(cdk_deps, dict) and "file-based" in cdk_deps.get("extras", []): + return self.CDKTag.FILE + if setup_py_file.exists(): + if "airbyte-cdk[file-based]" in (connector.code_directory / consts.SETUP_PY_FILE_NAME).read_text(): + return self.CDKTag.FILE + return self.CDKTag.PYTHON + + def _run(self, connector: Connector) -> CheckResult: + current_cdk_tags = [t for t in connector.metadata.get("tags", []) if t.startswith("cdk:")] + expected_cdk_tag = self.get_expected_cdk_tag(connector) + if not current_cdk_tags: return self.fail( connector=connector, - message="Could not infer the language tag from the connector directory", + message="CDK tag is missing in the metadata file", + ) + if len(current_cdk_tags) > 1: + return self.fail( + connector=connector, + message=f"Multiple CDK tags found in the metadata file: {current_cdk_tags}", ) - if current_language_tag != expected_language: + if current_cdk_tags[0] != expected_cdk_tag: return self.fail( connector=connector, - message=f"Expected language tag '{expected_language}' in the {consts.METADATA_FILE_NAME} file, but found '{current_language_tag}'", + message=f"Expected CDK tag '{self.get_expected_cdk_tag(connector)}' in the {consts.METADATA_FILE_NAME} file, but found '{current_cdk_tags[0]}'", ) return self.pass_( connector=connector, - message=f"Language tag {expected_language} is present in the metadata file", + message=f"CDK tag {self.get_expected_cdk_tag(connector)} is present in the metadata file", ) ENABLED_CHECKS = [ ValidateMetadata(), - # Disabled until metadata are globally cleaned up - # CheckConnectorLanguageTag() + CheckConnectorLanguageTag(), + CheckConnectorCDKTag(), ] diff --git a/airbyte-ci/connectors/connectors_qa/tests/unit_tests/test_checks/test_metadata.py b/airbyte-ci/connectors/connectors_qa/tests/unit_tests/test_checks/test_metadata.py index 360f3c72f4a5..c8ee9961baa9 100644 --- a/airbyte-ci/connectors/connectors_qa/tests/unit_tests/test_checks/test_metadata.py +++ b/airbyte-ci/connectors/connectors_qa/tests/unit_tests/test_checks/test_metadata.py @@ -155,3 +155,56 @@ def test_pass_when_java(self, mocker, tmp_path): # Assert assert result.status == CheckStatus.PASSED assert result.message == "Language tag language:java is present in the metadata file" + +class TestCheckConnectorCDKTag: + + def test_fail_when_no_cdk_tags(self, mocker): + # Arrange + connector = mocker.MagicMock(metadata={"tags": []}) + + # Act + result = metadata.CheckConnectorCDKTag()._run(connector) + + # Assert + assert result.status == CheckStatus.FAILED + assert result.message == "CDK tag is missing in the metadata file" + + def test_fail_when_multiple_cdk_tags(self, mocker): + # Arrange + connector = mocker.MagicMock(metadata={"tags": ["cdk:low-code", 
"cdk:python"]}) + + # Act + result = metadata.CheckConnectorCDKTag()._run(connector) + + # Assert + assert result.status == CheckStatus.FAILED + assert result.message == "Multiple CDK tags found in the metadata file: ['cdk:low-code', 'cdk:python']" + + def test_fail_when_low_code_tag_on_python_connector(self, mocker, tmp_path): + # Arrange + connector = mocker.MagicMock(metadata={"tags": ["cdk:low-code"]}, code_directory=tmp_path) + code_directory = tmp_path + (code_directory / consts.PYPROJECT_FILE_NAME).write_text("[tool.poetry.dependencies]\nairbyte-cdk = '^1.0.0'") + + # Act + result = metadata.CheckConnectorCDKTag()._run(connector) + + # Assert + assert result.status == CheckStatus.FAILED + assert "Expected CDK tag 'cdk:python'" in result.message + assert "but found 'cdk:low-code'" in result.message + + def test_fail_when_python_tag_on_low_code_connector(self, mocker, tmp_path): + # Arrange + connector = mocker.MagicMock(technical_name="source-test", metadata={"tags": ["cdk:python"]}, code_directory=tmp_path) + code_directory = tmp_path + (code_directory / "source_test").mkdir() + (code_directory / "source_test"/ consts.LOW_CODE_MANIFEST_FILE_NAME).touch() + + # Act + result = metadata.CheckConnectorCDKTag()._run(connector) + + # Assert + assert result.status == CheckStatus.FAILED + assert "Expected CDK tag 'cdk:low-code'" in result.message + assert "but found 'cdk:python'" in result.message diff --git a/docs/contributing-to-airbyte/resources/qa-checks.md b/docs/contributing-to-airbyte/resources/qa-checks.md index 640b3d458a1a..037b499e17c4 100644 --- a/docs/contributing-to-airbyte/resources/qa-checks.md +++ b/docs/contributing-to-airbyte/resources/qa-checks.md @@ -33,6 +33,16 @@ Each new version of a connector must have a changelog entry defined in the user *Applies to the following connector languages: java, low-code, python* Connectors must have a `metadata.yaml` file at the root of their directory. This file is used to build our connector registry. Its structure must follow our metadata schema. Field values are also validated. This is to ensure that all connectors have the required metadata fields and that the metadata is valid. More details in this [documentation](https://docs.airbyte.com/connector-development/connector-metadata-file). +### Connector must have a language tag in metadata +*Applies to the following connector types: source, destination* +*Applies to the following connector languages: java, low-code, python* + +Connectors must have a language tag in their metadata. It must be set in the `tags` field in metadata.yaml. The values can be `language:python` or `language:java`. This checks infers the correct language tag based on the presence of certain files in the connector directory. +### Python connectors must have a CDK tag in metadata +*Applies to the following connector types: source, destination* +*Applies to the following connector languages: python, low-code* + +Python connectors must have a CDK tag in their metadata. It must be set in the `tags` field in metadata.yaml. The values can be `cdk:low-code`, `cdk:python`, or `cdk:file`. 
## 📦 Packaging From 63091e502453a07b0a38bb71cd20d03843ff1505 Mon Sep 17 00:00:00 2001 From: Artem Inzhyyants <36314070+artem1205@users.noreply.github.com> Date: Fri, 8 Mar 2024 12:24:42 +0100 Subject: [PATCH 135/172] =?UTF-8?q?=F0=9F=9A=A8=F0=9F=9A=A8=20Source=20Hub?= =?UTF-8?q?spot:=20Update=20`Deals=20Property=20History`=20and=20`Companie?= =?UTF-8?q?s=20Property=20History`=20to=20API=20`v3`=20(#35662)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Artem Inzhyyants --- .../integration_tests/expected_records.jsonl | 8 +- .../connectors/source-hubspot/metadata.yaml | 18 ++- .../connectors/source-hubspot/pyproject.toml | 2 +- .../schemas/companies_property_history.json | 38 +---- .../schemas/deals_property_history.json | 38 +---- .../source-hubspot/source_hubspot/streams.py | 146 +++++------------- .../source-hubspot/unit_tests/conftest.py | 4 +- .../source-hubspot/unit_tests/test_source.py | 20 +-- .../source-hubspot/unit_tests/test_streams.py | 55 ++----- .../sources/hubspot-migrations.md | 40 +++++ docs/integrations/sources/hubspot.md | 1 + 11 files changed, 137 insertions(+), 233 deletions(-) diff --git a/airbyte-integrations/connectors/source-hubspot/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-hubspot/integration_tests/expected_records.jsonl index 852a890a91a5..402c6b34ad3a 100644 --- a/airbyte-integrations/connectors/source-hubspot/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-hubspot/integration_tests/expected_records.jsonl @@ -51,10 +51,10 @@ {"stream": "products", "data": {"id": "646176423", "properties": {"amount": null, "createdate": "2021-02-23T20:03:48.577000+00:00", "description": null, "discount": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_avatar_filemanager_key": null, "hs_cost_of_goods_sold": null, "hs_created_by_user_id": 12282590, "hs_createdate": null, "hs_discount_percentage": null, "hs_folder_id": 2430008, "hs_folder_name": "test folder", "hs_images": null, "hs_lastmodifieddate": "2021-02-23T20:03:48.577000+00:00", "hs_merged_object_ids": null, "hs_object_id": 646176423, "hs_object_source": "CRM_UI", "hs_object_source_detail_1": null, "hs_object_source_detail_2": null, "hs_object_source_detail_3": null, "hs_object_source_id": "userId:12282590", "hs_object_source_label": "CRM_UI", "hs_object_source_user_id": 12282590, "hs_product_type": null, "hs_read_only": null, "hs_recurring_billing_period": null, "hs_recurring_billing_start_date": null, "hs_sku": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": 12282590, "hs_url": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hs_was_imported": null, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "name": "Test product 1", "price": 123, "quantity": null, "recurringbillingfrequency": null, "tax": null, "test": null, "test_product_price": null}, "createdAt": "2021-02-23T20:03:48.577Z", "updatedAt": "2021-02-23T20:03:48.577Z", "archived": false, "properties_amount": null, "properties_createdate": "2021-02-23T20:03:48.577000+00:00", "properties_description": null, "properties_discount": null, "properties_hs_all_accessible_team_ids": null, "properties_hs_all_assigned_business_unit_ids": null, "properties_hs_all_owner_ids": null, "properties_hs_all_team_ids": 
null, "properties_hs_avatar_filemanager_key": null, "properties_hs_cost_of_goods_sold": null, "properties_hs_created_by_user_id": 12282590, "properties_hs_createdate": null, "properties_hs_discount_percentage": null, "properties_hs_folder_id": 2430008, "properties_hs_folder_name": "test folder", "properties_hs_images": null, "properties_hs_lastmodifieddate": "2021-02-23T20:03:48.577000+00:00", "properties_hs_merged_object_ids": null, "properties_hs_object_id": 646176423, "properties_hs_object_source": "CRM_UI", "properties_hs_object_source_detail_1": null, "properties_hs_object_source_detail_2": null, "properties_hs_object_source_detail_3": null, "properties_hs_object_source_id": "userId:12282590", "properties_hs_object_source_label": "CRM_UI", "properties_hs_object_source_user_id": 12282590, "properties_hs_product_type": null, "properties_hs_read_only": null, "properties_hs_recurring_billing_period": null, "properties_hs_recurring_billing_start_date": null, "properties_hs_sku": null, "properties_hs_unique_creation_key": null, "properties_hs_updated_by_user_id": 12282590, "properties_hs_url": null, "properties_hs_user_ids_of_all_notification_followers": null, "properties_hs_user_ids_of_all_notification_unfollowers": null, "properties_hs_user_ids_of_all_owners": null, "properties_hs_was_imported": null, "properties_hubspot_owner_assigneddate": null, "properties_hubspot_owner_id": null, "properties_hubspot_team_id": null, "properties_name": "Test product 1", "properties_price": 123, "properties_quantity": null, "properties_recurringbillingfrequency": null, "properties_tax": null, "properties_test": null, "properties_test_product_price": null}, "emitted_at": 1708014628643} {"stream": "products", "data": {"id": "646316535", "properties": {"amount": null, "createdate": "2021-02-23T20:11:54.030000+00:00", "description": "baseball hat, large", "discount": null, "hs_all_accessible_team_ids": null, "hs_all_assigned_business_unit_ids": null, "hs_all_owner_ids": null, "hs_all_team_ids": null, "hs_avatar_filemanager_key": null, "hs_cost_of_goods_sold": 5, "hs_created_by_user_id": null, "hs_createdate": null, "hs_discount_percentage": null, "hs_folder_id": null, "hs_folder_name": null, "hs_images": null, "hs_lastmodifieddate": "2021-02-23T20:11:54.030000+00:00", "hs_merged_object_ids": null, "hs_object_id": 646316535, "hs_object_source": "IMPORT", "hs_object_source_detail_1": null, "hs_object_source_detail_2": null, "hs_object_source_detail_3": null, "hs_object_source_id": null, "hs_object_source_label": "IMPORT", "hs_object_source_user_id": null, "hs_product_type": null, "hs_read_only": null, "hs_recurring_billing_period": null, "hs_recurring_billing_start_date": null, "hs_sku": null, "hs_unique_creation_key": null, "hs_updated_by_user_id": null, "hs_url": null, "hs_user_ids_of_all_notification_followers": null, "hs_user_ids_of_all_notification_unfollowers": null, "hs_user_ids_of_all_owners": null, "hs_was_imported": true, "hubspot_owner_assigneddate": null, "hubspot_owner_id": null, "hubspot_team_id": null, "name": "Green Hat", "price": 10, "quantity": null, "recurringbillingfrequency": null, "tax": null, "test": null, "test_product_price": null}, "createdAt": "2021-02-23T20:11:54.030Z", "updatedAt": "2021-02-23T20:11:54.030Z", "archived": false, "properties_amount": null, "properties_createdate": "2021-02-23T20:11:54.030000+00:00", "properties_description": "baseball hat, large", "properties_discount": null, "properties_hs_all_accessible_team_ids": null, 
"properties_hs_all_assigned_business_unit_ids": null, "properties_hs_all_owner_ids": null, "properties_hs_all_team_ids": null, "properties_hs_avatar_filemanager_key": null, "properties_hs_cost_of_goods_sold": 5, "properties_hs_created_by_user_id": null, "properties_hs_createdate": null, "properties_hs_discount_percentage": null, "properties_hs_folder_id": null, "properties_hs_folder_name": null, "properties_hs_images": null, "properties_hs_lastmodifieddate": "2021-02-23T20:11:54.030000+00:00", "properties_hs_merged_object_ids": null, "properties_hs_object_id": 646316535, "properties_hs_object_source": "IMPORT", "properties_hs_object_source_detail_1": null, "properties_hs_object_source_detail_2": null, "properties_hs_object_source_detail_3": null, "properties_hs_object_source_id": null, "properties_hs_object_source_label": "IMPORT", "properties_hs_object_source_user_id": null, "properties_hs_product_type": null, "properties_hs_read_only": null, "properties_hs_recurring_billing_period": null, "properties_hs_recurring_billing_start_date": null, "properties_hs_sku": null, "properties_hs_unique_creation_key": null, "properties_hs_updated_by_user_id": null, "properties_hs_url": null, "properties_hs_user_ids_of_all_notification_followers": null, "properties_hs_user_ids_of_all_notification_unfollowers": null, "properties_hs_user_ids_of_all_owners": null, "properties_hs_was_imported": true, "properties_hubspot_owner_assigneddate": null, "properties_hubspot_owner_id": null, "properties_hubspot_team_id": null, "properties_name": "Green Hat", "properties_price": 10, "properties_quantity": null, "properties_recurringbillingfrequency": null, "properties_tax": null, "properties_test": null, "properties_test_product_price": null}, "emitted_at": 1708014628645} {"stream": "contacts_property_history", "data": {"value": "testo", "source-type": "CRM_UI", "source-id": "userId:12282590", "source-label": null, "updated-by-user-id": 12282590, "timestamp": 1700681340515, "selected": false, "property": "firstname", "vid": 2501, "portal-id": 8727216, "is-contact": true, "canonical-vid": 2501}, "emitted_at": 1701905506064} -{"stream": "contacts_property_history", "data": {"value": "test", "source-type": "CRM_UI", "source-id": "userId:12282590", "source-label": null, "updated-by-user-id": 12282590, "timestamp": 1675120629904, "selected": false, "property": "firstname", "vid": 2501, "portal-id": 8727216, "is-contact": true, "canonical-vid": 2501}, "emitted_at": 1701905506064} -{"stream": "companies_property_history", "data": {"name": "hs_analytics_latest_source_data_2", "value": "CRM_UI", "timestamp": 1657222285656, "sourceId": "RollupProperties", "source": "MIGRATION", "sourceVid": [], "property": "hs_analytics_latest_source_data_2", "companyId": 5000526215, "portalId": 8727216, "isDeleted": false}, "emitted_at": 1701905731242} -{"stream": "companies_property_history", "data": {"name": "hs_analytics_latest_source_data_1", "value": "CONTACTS", "timestamp": 1657222285656, "sourceId": "RollupProperties", "source": "MIGRATION", "sourceVid": [], "property": "hs_analytics_latest_source_data_1", "companyId": 5000526215, "portalId": 8727216, "isDeleted": false}, "emitted_at": 1701905731242} -{"stream": "deals_property_history", "data": {"name": "dealname", "value": "Test Deal 2", "timestamp": 1610635080797, "source": "API", "sourceVid": [], "requestId": "cdc0501c-7d08-40e4-a937-953492b1a6c2", "property": "dealname", "dealId": 3986867076, "portalId": 8727216, "isDeleted": false}, "emitted_at": 1707258294359} 
+{"stream":"companies_property_history","data":{"value":"Test","timestamp":"2023-01-30T23:22:56.969Z","sourceType":"CRM_UI","sourceId":"userId:12282590","updatedByUserId":12282590,"property":"name","companyId":"11481383026","archived":false},"emitted_at":1709764013901} +{"stream":"companies_property_history","data":{"value":"CONTACTS","timestamp":"2022-07-07T19:31:25.656Z","sourceType":"MIGRATION","sourceId":"RollupProperties","property":"hs_analytics_latest_source_data_1","companyId":"5000526215","archived":false},"emitted_at":1709764012837} +{"stream":"companies_property_history","data":{"value":"CRM_UI","timestamp":"2022-07-07T19:31:25.656Z","sourceType":"MIGRATION","sourceId":"RollupProperties","property":"hs_analytics_latest_source_data_2","companyId":"5000526215","archived":false},"emitted_at":1709764012837} +{"stream":"deals_property_history","data":{"value":"Test Deal 2","timestamp":"2021-01-14T14:38:00.797Z","sourceType":"API","property":"dealname","dealId":"3986867076","archived":false},"emitted_at":1709751896693} {"stream": "subscription_changes", "data": {"timestamp": 1616173134301, "portalId": 8727216, "recipient": "0c90ecf5-629e-4fe4-8516-05f75636c3e3@gdpr-forgotten.hubspot.com", "normalizedEmailId": "0c90ecf5-629e-4fe4-8516-05f75636c3e3", "changes": [{"source": "SOURCE_HUBSPOT_CUSTOMER", "timestamp": 1616173134301, "portalId": 8727216, "causedByEvent": {"id": "d70b78b9-a411-4d3e-808b-fe931be35b43", "created": 1616173134301}, "changeType": "PORTAL_STATUS", "change": "SUBSCRIBED"}]}, "emitted_at": 1697714255435} {"stream": "subscription_changes", "data": {"timestamp": 1616173134301, "portalId": 8727216, "recipient": "0c90ecf5-629e-4fe4-8516-05f75636c3e3@gdpr-forgotten.hubspot.com", "normalizedEmailId": "0c90ecf5-629e-4fe4-8516-05f75636c3e3", "changes": [{"source": "SOURCE_HUBSPOT_CUSTOMER", "timestamp": 1616173134301, "subscriptionId": 10798197, "portalId": 8727216, "causedByEvent": {"id": "ff118718-786d-4a35-94f9-6bbd413654de", "created": 1616173134301}, "changeType": "SUBSCRIPTION_STATUS", "change": "SUBSCRIBED"}]}, "emitted_at": 1697714255436} {"stream": "subscription_changes", "data": {"timestamp": 1616173106737, "portalId": 8727216, "recipient": "0c90ecf5-629e-4fe4-8516-05f75636c3e3@gdpr-forgotten.hubspot.com", "normalizedEmailId": "0c90ecf5-629e-4fe4-8516-05f75636c3e3", "changes": [{"source": "SOURCE_HUBSPOT_CUSTOMER", "timestamp": 1616173106737, "portalId": 8727216, "causedByEvent": {"id": "24539f1f-0b20-4296-a5bf-6ba3bb9dc1b8", "created": 1616173106737}, "changeType": "PORTAL_STATUS", "change": "SUBSCRIBED"}]}, "emitted_at": 1697714255437} diff --git a/airbyte-integrations/connectors/source-hubspot/metadata.yaml b/airbyte-integrations/connectors/source-hubspot/metadata.yaml index e04330e89311..a42e4b8190f4 100644 --- a/airbyte-integrations/connectors/source-hubspot/metadata.yaml +++ b/airbyte-integrations/connectors/source-hubspot/metadata.yaml @@ -10,7 +10,7 @@ data: connectorSubtype: api connectorType: source definitionId: 36c891d9-4bd9-43ac-bad2-10e12756272c - dockerImageTag: 3.3.0 + dockerImageTag: 4.0.0 dockerRepository: airbyte/source-hubspot documentationUrl: https://docs.airbyte.com/integrations/sources/hubspot githubIssueLabel: source-hubspot @@ -29,11 +29,14 @@ data: releaseStage: generally_available releases: breakingChanges: - 2.0.0: + 4.0.0: message: >- - This version eliminates the Property History stream in favor of creating 3 different streams, Contacts, Companies, and Deals, which can now all fetch their property history. 
- It will affect only users who use Property History stream, who will need to fix schema conflicts and sync Contacts Property History stream instead of Property History. - upgradeDeadline: 2024-01-15 + This update brings extended schema with data type changes for the streams `Deals Property History` and `Companies Property History`. Users will need to refresh their schema and reset their streams after upgrading. + upgradeDeadline: 2024-03-10 + scopedImpact: + - scopeType: stream + impactedScopes: + ["deals_property_history", "companies_property_history"] 3.0.0: message: >- This update brings extended schema with data type changes for the Marketing Emails stream. @@ -42,6 +45,11 @@ data: scopedImpact: - scopeType: stream impactedScopes: ["marketing_emails"] + 2.0.0: + message: >- + This version replaces the `Property History` stream in favor of creating 3 different streams: `Contacts`, `Companies`, and `Deals`, which can now all fetch their property history. + It will affect only users who use `Property History` stream, who will need to fix schema conflicts and sync `Contacts Property History` stream instead of `Property History`. + upgradeDeadline: 2024-01-15 suggestedStreams: streams: - contacts diff --git a/airbyte-integrations/connectors/source-hubspot/pyproject.toml b/airbyte-integrations/connectors/source-hubspot/pyproject.toml index b36d715999e6..6d37d0ae1b2c 100644 --- a/airbyte-integrations/connectors/source-hubspot/pyproject.toml +++ b/airbyte-integrations/connectors/source-hubspot/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "3.3.0" +version = "4.0.0" name = "source-hubspot" description = "Source implementation for HubSpot." authors = [ "Airbyte ",] diff --git a/airbyte-integrations/connectors/source-hubspot/source_hubspot/schemas/companies_property_history.json b/airbyte-integrations/connectors/source-hubspot/source_hubspot/schemas/companies_property_history.json index 01697a1022fa..7e807dd52ee2 100644 --- a/airbyte-integrations/connectors/source-hubspot/source_hubspot/schemas/companies_property_history.json +++ b/airbyte-integrations/connectors/source-hubspot/source_hubspot/schemas/companies_property_history.json @@ -6,50 +6,28 @@ "updatedByUserId": { "type": ["null", "number"] }, - "requestId": { - "type": ["null", "string"] - }, - "source": { - "type": ["null", "string"] - }, - "portalId": { - "type": ["null", "number"] - }, - "isDeleted": { - "type": ["null", "boolean"] - }, "timestamp": { - "type": ["null", "number"] + "type": ["null", "string"], + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" }, "property": { "type": ["null", "string"] }, - "persistenceTimestamp": { - "type": ["null", "number"] - }, - "name": { - "type": ["null", "string"] - }, - "sourceVid": { - "type": ["null", "array"] - }, - "useTimestampAsPersistenceTimestamp": { - "type": ["null", "boolean"] - }, - "sourceMetadata": { + "companyId": { "type": ["null", "string"] }, - "dataSensitivity": { + "sourceType": { "type": ["null", "string"] }, - "companyId": { - "type": ["null", "number"] - }, "sourceId": { "type": ["null", "string"] }, "value": { "type": ["null", "string"] + }, + "archived": { + "type": ["null", "boolean"] } } } diff --git a/airbyte-integrations/connectors/source-hubspot/source_hubspot/schemas/deals_property_history.json b/airbyte-integrations/connectors/source-hubspot/source_hubspot/schemas/deals_property_history.json index 3c877a2ce929..5f4bb9e4987b 100644 --- 
a/airbyte-integrations/connectors/source-hubspot/source_hubspot/schemas/deals_property_history.json +++ b/airbyte-integrations/connectors/source-hubspot/source_hubspot/schemas/deals_property_history.json @@ -3,53 +3,31 @@ "type": ["null", "object"], "additionalProperties": true, "properties": { - "dataSensitivity": { - "type": ["null", "string"] - }, "updatedByUserId": { "type": ["null", "number"] }, - "requestId": { - "type": ["null", "string"] - }, - "source": { - "type": ["null", "string"] - }, - "portalId": { - "type": ["null", "number"] - }, - "isDeleted": { - "type": ["null", "boolean"] - }, "timestamp": { - "type": ["null", "number"] + "type": ["null", "string"], + "format": "date-time", + "airbyte_type": "timestamp_with_timezone" }, "property": { "type": ["null", "string"] }, - "persistenceTimestamp": { - "type": ["null", "number"] - }, - "name": { + "dealId": { "type": ["null", "string"] }, - "sourceVid": { - "type": ["null", "array"] - }, - "useTimestampAsPersistenceTimestamp": { - "type": ["null", "boolean"] - }, - "sourceMetadata": { + "sourceType": { "type": ["null", "string"] }, - "dealId": { - "type": ["null", "number"] - }, "sourceId": { "type": ["null", "string"] }, "value": { "type": ["null", "string"] + }, + "archived": { + "type": ["null", "boolean"] } } } diff --git a/airbyte-integrations/connectors/source-hubspot/source_hubspot/streams.py b/airbyte-integrations/connectors/source-hubspot/source_hubspot/streams.py index 3720bb68daae..7445af401cd1 100644 --- a/airbyte-integrations/connectors/source-hubspot/source_hubspot/streams.py +++ b/airbyte-integrations/connectors/source-hubspot/source_hubspot/streams.py @@ -8,7 +8,7 @@ import time from abc import ABC, abstractmethod from datetime import timedelta -from functools import cached_property, lru_cache, reduce +from functools import cached_property, lru_cache from http import HTTPStatus from typing import Any, Dict, Iterable, List, Mapping, MutableMapping, Optional, Set, Tuple, Union @@ -17,7 +17,6 @@ import requests from airbyte_cdk.entrypoint import logger from airbyte_cdk.models import FailureType, SyncMode -from airbyte_cdk.models.airbyte_protocol import SyncMode from airbyte_cdk.sources import Source from airbyte_cdk.sources.streams import IncrementalMixin, Stream from airbyte_cdk.sources.streams.availability_strategy import AvailabilityStrategy @@ -1963,66 +1962,53 @@ def url(self): return "/contacts/v1/lists/all/contacts/all" -class CompaniesPropertyHistory(PropertyHistory): +class PropertyHistoryV3(PropertyHistory): @cached_property def _property_wrapper(self) -> IURLPropertyRepresentation: properties = list(self.properties.keys()) return APIPropertiesWithHistory(properties=properties) - @property - def scopes(self) -> set: - return {"crm.objects.companies.read"} - - @property - def properties_scopes(self) -> set: - return {"crm.schemas.companies.read"} - - @property - def page_field(self) -> str: - return "offset" - - @property - def limit_field(self) -> str: - return "limit" - - @property - def page_filter(self) -> str: - return "offset" - - @property - def more_key(self) -> str: - return "has-more" + limit = 50 + more_key = page_filter = page_field = None + limit_field = "limit" + data_field = "results" + additional_keys = ["archived"] + last_modified_date_field_name = "hs_lastmodifieddate" - @property - def entity(self) -> str: - return "companies" + def update_request_properties(self, params: Mapping[str, Any], properties: IURLPropertyRepresentation) -> None: + pass - @property - def entity_primary_key(self) 
-> list: - return "companyId" + def _transform(self, records: Iterable) -> Iterable: + for record in records: + properties_with_history = record.get("propertiesWithHistory") + primary_key = record.get("id") + additional_keys = {additional_key: record.get(additional_key) for additional_key in self.additional_keys} - @property - def primary_key(self) -> list: - return ["companyId", "property", "timestamp"] + for property_name, value_dict in properties_with_history.items(): + if property_name == self.last_modified_date_field_name: + # Skipping the lastmodifieddate since it only returns the value + # when one field of a record was changed no matter which + # field was changed. It therefore creates overhead, since for + # every changed property there will be the date it was changed in itself + # and a change in the lastmodifieddate field. + continue + for version in value_dict: + version["property"] = property_name + version[self.entity_primary_key] = primary_key + yield version | additional_keys - @property - def additional_keys(self) -> list: - return ["portalId", "isDeleted"] - @property - def last_modified_date_field_name(self) -> str: - return "hs_lastmodifieddate" +class CompaniesPropertyHistory(PropertyHistoryV3): - @property - def data_field(self) -> str: - return "companies" + scopes = {"crm.objects.companies.read"} + properties_scopes = {"crm.schemas.companies.read"} + entity = "companies" + entity_primary_key = "companyId" + primary_key = ["companyId", "property", "timestamp"] @property def url(self) -> str: - return "/companies/v2/companies/paged" - - def update_request_properties(self, params: Mapping[str, Any], properties: IURLPropertyRepresentation) -> None: - pass + return "/crm/v3/objects/companies" def path( self, @@ -2035,66 +2021,16 @@ def path( return f"{self.url}?{properties.as_url_param()}" -class DealsPropertyHistory(PropertyHistory): - @cached_property - def _property_wrapper(self) -> IURLPropertyRepresentation: - properties = list(self.properties.keys()) - return APIPropertiesWithHistory(properties=properties) - - @property - def scopes(self) -> set: - return {"crm.objects.deals.read"} - - @property - def properties_scopes(self): - return {"crm.schemas.deals.read"} - - @property - def page_field(self) -> str: - return "offset" - - @property - def limit_field(self) -> str: - return "limit" - - @property - def page_filter(self) -> str: - return "offset" - - @property - def more_key(self) -> str: - return "hasMore" - - @property - def entity(self) -> set: - return "deals" - - @property - def entity_primary_key(self) -> list: - return "dealId" - - @property - def primary_key(self) -> list: - return ["dealId", "property", "timestamp"] - - @property - def additional_keys(self) -> list: - return ["portalId", "isDeleted"] - - @property - def last_modified_date_field_name(self) -> str: - return "hs_lastmodifieddate" - - @property - def data_field(self) -> str: - return "deals" +class DealsPropertyHistory(PropertyHistoryV3): + scopes = {"crm.objects.deals.read"} + properties_scopes = {"crm.schemas.deals.read"} + entity = "deals" + entity_primary_key = "dealId" + primary_key = ["dealId", "property", "timestamp"] @property def url(self) -> str: - return "/deals/v1/deal/paged" - - def update_request_properties(self, params: Mapping[str, Any], properties: IURLPropertyRepresentation) -> None: - pass + return "/crm/v3/objects/deals" def path( self, diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/conftest.py 
b/airbyte-integrations/connectors/source-hubspot/unit_tests/conftest.py index 08fe337e61c8..ea8070af9d87 100644 --- a/airbyte-integrations/connectors/source-hubspot/unit_tests/conftest.py +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/conftest.py @@ -51,7 +51,7 @@ def config_fixture(): return { "start_date": "2021-01-10T00:00:00Z", "credentials": {"credentials_title": "Private App Credentials", "access_token": "test_access_token"}, - "enable_experimental_streams": False + "enable_experimental_streams": False, } @@ -60,7 +60,7 @@ def config_eperimantal_fixture(): return { "start_date": "2021-01-10T00:00:00Z", "credentials": {"credentials_title": "Private App Credentials", "access_token": "test_access_token"}, - "enable_experimental_streams": True + "enable_experimental_streams": True, } diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/test_source.py b/airbyte-integrations/connectors/source-hubspot/unit_tests/test_source.py index 7ee8b639bbbd..c72fd75d5656 100644 --- a/airbyte-integrations/connectors/source-hubspot/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/test_source.py @@ -85,11 +85,11 @@ def test_streams(requests_mock, config): streams = SourceHubspot().streams(config) - assert len(streams) == 32 + assert len(streams) == 33 @mock.patch("source_hubspot.source.SourceHubspot.get_custom_object_streams") -def test_streams(requests_mock, config_experimental): +def test_streams_incremental(requests_mock, config_experimental): streams = SourceHubspot().streams(config_experimental) @@ -97,16 +97,10 @@ def test_streams(requests_mock, config_experimental): def test_custom_streams(config_experimental): - custom_object_stream_instances = [ - MagicMock() - ] + custom_object_stream_instances = [MagicMock()] streams = SourceHubspot().get_web_analytics_custom_objects_stream( custom_object_stream_instances=custom_object_stream_instances, - common_params={ - "api": MagicMock(), - "start_date": "2021-01-01T00:00:00Z", - "credentials": config_experimental["credentials"] - } + common_params={"api": MagicMock(), "start_date": "2021-01-01T00:00:00Z", "credentials": config_experimental["credentials"]}, ) assert len(list(streams)) == 1 @@ -481,7 +475,7 @@ def test_search_based_stream_should_not_attempt_to_get_more_than_10k_records(req requests_mock.register_uri( "POST", "/crm/v4/associations/company/contacts/batch/read", - [{"status_code": 200, "json": {"results": [{"from": {"id": "1"}, "to": [{"toObjectId": "2"}]}]}}] + [{"status_code": 200, "json": {"results": [{"from": {"id": "1"}, "to": [{"toObjectId": "2"}]}]}}], ) records, _ = read_incremental(test_stream, {}) @@ -709,13 +703,13 @@ def test_pagination_marketing_emails_stream(requests_mock, common_params): def test_get_granted_scopes(requests_mock, mocker): authenticator = mocker.Mock() authenticator.get_access_token.return_value = "the-token" - + expected_scopes = ["a", "b", "c"] response = [ {"json": {"scopes": expected_scopes}, "status_code": 200}, ] requests_mock.register_uri("GET", "https://api.hubapi.com/oauth/v1/access-tokens/the-token", response) - + actual_scopes = SourceHubspot().get_granted_scopes(authenticator) assert expected_scopes == actual_scopes diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-hubspot/unit_tests/test_streams.py index 24f7991badfb..27ddfa226e16 100644 --- a/airbyte-integrations/connectors/source-hubspot/unit_tests/test_streams.py +++ 
b/airbyte-integrations/connectors/source-hubspot/unit_tests/test_streams.py @@ -521,10 +521,7 @@ def test_web_analytics_stream_slices(common_params, mocker): assert len(slices) == 2 assert all(map(lambda slice: slice["objectId"] == 1, slices)) - assert [ - ("2021-01-10T00:00:00Z", "2021-02-09T00:00:00Z"), - ("2021-02-09T00:00:00Z", "2021-03-01T00:00:00Z") - ] == [ + assert [("2021-01-10T00:00:00Z", "2021-02-09T00:00:00Z"), ("2021-02-09T00:00:00Z", "2021-03-01T00:00:00Z")] == [ (s["occurredAfter"], s["occurredBefore"]) for s in slices ] @@ -542,7 +539,9 @@ def test_web_analytics_latest_state(common_params, mocker): stream = ContactsWebAnalytics(**common_params) stream.state = {"1": {"occurredAt": "2021-01-01T00:00:00Z"}} slices = list(stream.stream_slices(SyncMode.incremental, cursor_field="occurredAt")) - records = [list(stream.read_records(SyncMode.incremental, cursor_field="occurredAt", stream_slice=stream_slice)) for stream_slice in slices] + records = [ + list(stream.read_records(SyncMode.incremental, cursor_field="occurredAt", stream_slice=stream_slice)) for stream_slice in slices + ] assert len(slices) == 1 assert len(records) == 1 @@ -553,63 +552,33 @@ def test_property_history_transform(common_params): stream = ContactsPropertyHistory(**common_params) - versions = [ - { - "value": "Georgia", - "timestamp": 1645135236625 - } - ] + versions = [{"value": "Georgia", "timestamp": 1645135236625}] records = [ { "vid": 1, "canonical-vid": 1, "portal-id": 1, "is-contact": True, - "properties": { - "hs_country": {"versions": versions}, - "lastmodifieddate": {"value": 1645135236625} - } + "properties": {"hs_country": {"versions": versions}, "lastmodifieddate": {"value": 1645135236625}}, } ] assert [ - { - "vid": 1, - "canonical-vid": 1, - "portal-id": 1, - "is-contact": True, - "property": "hs_country", - **version - } for version in versions + {"vid": 1, "canonical-vid": 1, "portal-id": 1, "is-contact": True, "property": "hs_country", **version} for version in versions ] == list(stream._transform(records=records)) def test_contacts_membership_transform(common_params): stream = ContactsListMemberships(**common_params) - versions = [ - { - "value": "Georgia", - "timestamp": 1645135236625 - } - ] - memberships = [ - {"membership": 1} - ] + versions = [{"value": "Georgia", "timestamp": 1645135236625}] + memberships = [{"membership": 1}] records = [ { "vid": 1, "canonical-vid": 1, "portal-id": 1, "is-contact": True, - "properties": { - "hs_country": {"versions": versions}, - "lastmodifieddate": {"value": 1645135236625} - }, - "list-memberships": memberships + "properties": {"hs_country": {"versions": versions}, "lastmodifieddate": {"value": 1645135236625}}, + "list-memberships": memberships, } ] - assert [ - { - "membership": 1, - "canonical-vid": 1 - } for _ in versions - ] == list(stream._transform(records=records)) + assert [{"membership": 1, "canonical-vid": 1} for _ in versions] == list(stream._transform(records=records)) diff --git a/docs/integrations/sources/hubspot-migrations.md b/docs/integrations/sources/hubspot-migrations.md index 32c8f6ff9997..73219f9d9273 100644 --- a/docs/integrations/sources/hubspot-migrations.md +++ b/docs/integrations/sources/hubspot-migrations.md @@ -1,5 +1,45 @@ # HubSpot Migration Guide +## Upgrading to 4.0.0 + +:::note +This change is only breaking if you are syncing streams `Deals Property History` or `Companies Property History`. 
+::: + +This update brings an extended schema with data type changes for the `Deals Property History` and `Companies Property History` streams: for example, `timestamp` is now an ISO 8601 date-time string rather than an epoch value. + +Users should: + - Refresh the source schema for the `Deals Property History` and `Companies Property History` streams. + - Reset the streams after upgrading to ensure uninterrupted syncs. + +### Refresh affected schemas and reset data + +1. Select **Connections** in the main nav bar. + 1. Select the connection affected by the update. +2. Select the **Replication** tab. + 1. Select **Refresh source schema**. + 2. Select **OK**. + +:::note +Any detected schema changes will be listed for your review. +::: + +3. Select **Save changes** at the bottom of the page. + 1. Ensure the **Reset affected streams** option is checked. + +:::note +Depending on the destination type, you may not be prompted to reset your data. +::: + +4. Select **Save connection**. + +:::note +This will reset the data in your destination and initiate a fresh sync. +::: + +For more information on resetting your data in Airbyte, see [this page](https://docs.airbyte.com/operator-guides/reset). + + ## Upgrading to 3.0.0 :::note diff --git a/docs/integrations/sources/hubspot.md b/docs/integrations/sources/hubspot.md index 6c3cc01f2598..c23b52837753 100644 --- a/docs/integrations/sources/hubspot.md +++ b/docs/integrations/sources/hubspot.md @@ -322,6 +322,7 @@ The connector is restricted by normal HubSpot [rate limitations](https://legacyd | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 4.0.0 | 2024-03-10 | [35662](https://github.com/airbytehq/airbyte/pull/35662) | Update `Deals Property History` and `Companies Property History` schemas | | 3.3.0 | 2024-02-16 | [34597](https://github.com/airbytehq/airbyte/pull/34597) | Make start date not required, sync all data from default value if it's not provided | | 3.2.0 | 2024-02-15 | [35328](https://github.com/airbytehq/airbyte/pull/35328) | Add mailingIlsListsIncluded and mailingIlsListsExcluded fields to Marketing emails stream schema | 
| From e77901da46088ffdb7399893ae3bae8a55abdd12 Mon Sep 17 00:00:00 2001 From: Maxime Carbonneau-Leclerc <3360483+maxi297@users.noreply.github.com> Date: Fri, 8 Mar 2024 09:03:46 -0500 Subject: [PATCH 136/172] Attempt to fix transient nightly build errors: Remove poetry cache (#35894) --- .../pipelines/pipelines/dagger/actions/python/common.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py index fff7611c5ec5..821719aa554f 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py @@ -9,7 +9,7 @@ from dagger import Container, Directory from pipelines import hacks from pipelines.airbyte_ci.connectors.context import ConnectorContext, PipelineContext -from pipelines.dagger.containers.python import with_pip_cache, with_poetry_cache, with_python_base, with_testing_dependencies +from pipelines.dagger.containers.python import with_pip_cache, with_python_base, with_testing_dependencies from pipelines.helpers.utils import check_path_in_workdir, get_file_contents @@ -211,7 +211,9 @@ async def with_installed_python_package( has_pyproject_toml = await check_path_in_workdir(container, "pyproject.toml") if has_pyproject_toml: - container = with_poetry_cache(container, context.dagger_client) + # This is a temporary change in order to scope an issue. There should be following action items once we have more information. + # maxi297 has an action item on his calendar for 2024-03-21 to review this + # container = with_poetry_cache(container, context.dagger_client) container = _install_python_dependencies_from_poetry(container, additional_dependency_groups, install_root_package) elif has_setup_py: container = with_pip_cache(container, context.dagger_client) From 4dc8c4372fe766e7194a224f1171efc8cf65a04e Mon Sep 17 00:00:00 2001 From: Marcos Marx Date: Fri, 8 Mar 2024 12:57:26 -0300 Subject: [PATCH 137/172] =?UTF-8?q?=20=F0=9F=90=9B=20Source=20Orb:=20updat?= =?UTF-8?q?e=20enrich=20ledger=20entry=20with=20event=20stream=20to=20pass?= =?UTF-8?q?=20timeframe=20bounds=20(#35897)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Kshitij Grover Co-authored-by: Arsh --- .../connectors/source-orb/Dockerfile | 38 - .../source-orb/acceptance-test-config.yml | 11 +- .../integration_tests/abnormal_state.json | 10 - .../abnormal_state_credits.json | 7 + .../integration_tests/configured_catalog.json | 40 - .../configured_catalog_credits.json | 16 + .../integration_tests/sample_state.json | 62 +- .../connectors/source-orb/metadata.yaml | 4 +- .../connectors/source-orb/poetry.lock | 1080 +++++++++++++++++ .../connectors/source-orb/pyproject.toml | 33 + .../connectors/source-orb/setup.py | 40 - .../source-orb/source_orb/source.py | 11 +- .../unit_tests/test_incremental_streams.py | 16 +- docs/integrations/sources/orb.md | 15 +- 14 files changed, 1219 insertions(+), 164 deletions(-) delete mode 100644 airbyte-integrations/connectors/source-orb/Dockerfile create mode 100644 airbyte-integrations/connectors/source-orb/integration_tests/abnormal_state_credits.json create mode 100644 airbyte-integrations/connectors/source-orb/integration_tests/configured_catalog_credits.json create mode 100644 airbyte-integrations/connectors/source-orb/poetry.lock create mode 100644 
airbyte-integrations/connectors/source-orb/pyproject.toml delete mode 100644 airbyte-integrations/connectors/source-orb/setup.py diff --git a/airbyte-integrations/connectors/source-orb/Dockerfile b/airbyte-integrations/connectors/source-orb/Dockerfile deleted file mode 100644 index 35361e7105df..000000000000 --- a/airbyte-integrations/connectors/source-orb/Dockerfile +++ /dev/null @@ -1,38 +0,0 @@ -FROM python:3.9.11-alpine3.15 as base - -# build and load all requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -# upgrade pip to the latest version -RUN apk --no-cache upgrade \ - && pip install --upgrade pip \ - && apk --no-cache add tzdata build-base - - -COPY setup.py ./ -# install necessary packages to a temporary folder -RUN pip install --prefix=/install . - -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# bash is installed for more convenient debugging. -RUN apk --no-cache add bash - -# copy payload code only -COPY main.py ./ -COPY source_orb ./source_orb - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=1.1.0 -LABEL io.airbyte.name=airbyte/source-orb diff --git a/airbyte-integrations/connectors/source-orb/acceptance-test-config.yml b/airbyte-integrations/connectors/source-orb/acceptance-test-config.yml index b636b4e14d0a..4080daab6166 100644 --- a/airbyte-integrations/connectors/source-orb/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-orb/acceptance-test-config.yml @@ -18,10 +18,15 @@ tests: configured_catalog_path: "integration_tests/configured_catalog.json" fail_on_extra_columns: false empty_streams: ["credits_ledger_entries"] + - config_path: "secrets/config_credits_ledger_entries.json" + configured_catalog_path: "integration_tests/configured_catalog_credits.json" incremental: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" - future_state_path: "integration_tests/abnormal_state.json" + # - config_path: "secrets/config.json" + # configured_catalog_path: "integration_tests/configured_catalog.json" + # future_state_path: "integration_tests/abnormal_state.json" + - config_path: "secrets/config_credits_ledger_entries.json" + configured_catalog_path: "integration_tests/configured_catalog_credits.json" + future_state_path: "integration_tests/abnormal_state_credits.json" full_refresh: - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-orb/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-orb/integration_tests/abnormal_state.json index d655333ee877..2b2aeeeda4f3 100644 --- a/airbyte-integrations/connectors/source-orb/integration_tests/abnormal_state.json +++ b/airbyte-integrations/connectors/source-orb/integration_tests/abnormal_state.json @@ -8,16 +8,6 @@ "subscriptions": { "created_at": "2122-01-01T00:00:00Z" }, - "credits_ledger_entries": { - "hHQF5BT5jtyj9r7V": { - "created_at": "2122-01-01T00:00:00Z" - } - }, - "subscription_usage": { - "FDWRvxuBUiFfZech": { - "timeframe_start": "2122-01-01T00:00:00Z" - } - }, "invoices": { "invoice_date": "2122-01-01T00:00:00Z" } diff --git 
a/airbyte-integrations/connectors/source-orb/integration_tests/abnormal_state_credits.json b/airbyte-integrations/connectors/source-orb/integration_tests/abnormal_state_credits.json new file mode 100644 index 000000000000..5a9506bb8f20 --- /dev/null +++ b/airbyte-integrations/connectors/source-orb/integration_tests/abnormal_state_credits.json @@ -0,0 +1,7 @@ +{ + "credits_ledger_entries": { + "hHQF5BT5jtyj9r7V": { + "created_at": "2122-01-01T00:00:00Z" + } + } +} diff --git a/airbyte-integrations/connectors/source-orb/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-orb/integration_tests/configured_catalog.json index ab54424f7682..1ce1f7de7141 100644 --- a/airbyte-integrations/connectors/source-orb/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-orb/integration_tests/configured_catalog.json @@ -1,17 +1,5 @@ { "streams": [ - { - "stream": { - "name": "customers", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["created_at"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "destination_sync_mode": "overwrite" - }, { "stream": { "name": "subscriptions", @@ -47,34 +35,6 @@ }, "sync_mode": "incremental", "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "credits_ledger_entries", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["created_at"], - "source_defined_primary_key": [["id"]] - }, - "sync_mode": "incremental", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "subscription_usage", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["timeframe_start"], - "source_defined_primary_key": [ - ["subscription_id"], - ["billable_metric_id"], - ["timeframe_start"] - ] - }, - "sync_mode": "incremental", - "destination_sync_mode": "overwrite" } ] } diff --git a/airbyte-integrations/connectors/source-orb/integration_tests/configured_catalog_credits.json b/airbyte-integrations/connectors/source-orb/integration_tests/configured_catalog_credits.json new file mode 100644 index 000000000000..79b8bc3fcdc5 --- /dev/null +++ b/airbyte-integrations/connectors/source-orb/integration_tests/configured_catalog_credits.json @@ -0,0 +1,16 @@ +{ + "streams": [ + { + "stream": { + "name": "credits_ledger_entries", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["created_at"], + "source_defined_primary_key": [["id"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-orb/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-orb/integration_tests/sample_state.json index f87d36a5bc89..28a557d91780 100644 --- a/airbyte-integrations/connectors/source-orb/integration_tests/sample_state.json +++ b/airbyte-integrations/connectors/source-orb/integration_tests/sample_state.json @@ -1,24 +1,54 @@ -{ - "customers": { - "created_at": "2022-01-01T00:00:00Z" +[ + { + "streamDescriptor": { + "name": "credits_ledger_entries" + }, + "streamState": { + "hHQF5BT5jtyj9r7V": { + "created_at": "2022-02-03T18:59:01+00:00" + } + } }, - "plans": { - "created_at": "2022-01-01T00:00:00Z" + { + "streamDescriptor": { + "name": 
"subscription_usage" + }, + "streamState": { + "FDWRvxuBUiFfZech": { + "timeframe_start": "2022-02-02T00:00:00+00:00" + } + } }, - "subscriptions": { - "created_at": "2022-01-01T00:00:00Z" + { + "streamDescriptor": { + "name": "plans" + }, + "streamState": { + "created_at": "2022-02-02T10:03:57+00:00" + } }, - "credits_ledger_entries": { - "7c507794-7413-4467-8f1d-d3785a6c65ca": { - "created_at": "2022-01-01T00:00:00Z" + { + "streamDescriptor": { + "name": "invoices" + }, + "streamState": { + "invoice_date": "2022-03-01T08:00:00+00:00" } }, - "subscription_usage": { - "someId": { - "timeframe_start": "2022-01-01T00:00:00Z" + { + "streamDescriptor": { + "name": "customers" + }, + "streamState": { + "created_at": "2022-02-02T10:01:15+00:00" } }, - "invoices": { - "invoice_date": "2022-01-01T00:00:00Z" + { + "streamDescriptor": { + "name": "subscriptions" + }, + "streamState": { + "created_at": "2022-02-02T10:04:09+00:00" + } } -} +] diff --git a/airbyte-integrations/connectors/source-orb/metadata.yaml b/airbyte-integrations/connectors/source-orb/metadata.yaml index 80071c06f772..1e701b726ebf 100644 --- a/airbyte-integrations/connectors/source-orb/metadata.yaml +++ b/airbyte-integrations/connectors/source-orb/metadata.yaml @@ -1,8 +1,10 @@ data: + connectorBuildOptions: + baseImage: docker.io/airbyte/python-connector-base:1.2.0@sha256:c22a9d97464b69d6ef01898edf3f8612dc11614f05a84984451dde195f337db9 connectorSubtype: api connectorType: source definitionId: 7f0455fb-4518-4ec0-b7a3-d808bf8081cc - dockerImageTag: 1.1.0 + dockerImageTag: 1.1.1 dockerRepository: airbyte/source-orb githubIssueLabel: source-orb icon: orb.svg diff --git a/airbyte-integrations/connectors/source-orb/poetry.lock b/airbyte-integrations/connectors/source-orb/poetry.lock new file mode 100644 index 000000000000..26a785d5b49c --- /dev/null +++ b/airbyte-integrations/connectors/source-orb/poetry.lock @@ -0,0 +1,1080 @@ +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. + +[[package]] +name = "airbyte-cdk" +version = "0.68.4" +description = "A framework for writing Airbyte Connectors." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte-cdk-0.68.4.tar.gz", hash = "sha256:da4d923d9dac9f13fbd2e89a0094c58d440dac85552e8084d19cbb0a73efd9d7"}, + {file = "airbyte_cdk-0.68.4-py3-none-any.whl", hash = "sha256:3b6a9b6adf81a1d9c2d40acecfe9016e73197dd95f1e6027423aeee85d3a7ee1"}, +] + +[package.dependencies] +airbyte-protocol-models = "0.5.1" +backoff = "*" +cachetools = "*" +Deprecated = ">=1.2,<2.0" +dpath = ">=2.0.1,<2.1.0" +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<1.0" +jsonschema = ">=3.2.0,<3.3.0" +pendulum = "<3.0.0" +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +python-dateutil = "*" +PyYAML = ">=6.0.1" +requests = "*" +requests-cache = "*" +wcmatch = "8.4" + +[package.extras] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.5.1" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "atomicwrites" +version = "1.4.1" +description = "Atomic file writes." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash 
= "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = 
"charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "faker" +version = "24.0.0" +description = "Faker is a Python package that generates fake data for you." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "Faker-24.0.0-py3-none-any.whl", hash = "sha256:2456d674f40bd51eb3acbf85221277027822e529a90cc826453d9a25dff932b1"}, + {file = "Faker-24.0.0.tar.gz", hash = "sha256:ea6f784c40730de0f77067e49e78cdd590efb00bec3d33f577492262206c17fc"}, +] + +[package.dependencies] +python-dateutil = ">=2.4" + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonref" +version = "0.3.0" +description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." 
+optional = false +python-versions = ">=3.3,<4.0" +files = [ + {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, + {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = 
"MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = 
"sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = 
"pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + +[[package]] +name = "pydantic" +version = "1.10.14" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, + {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, + {file = 
"pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, + {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, + {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, + {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, + {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, + {file = 
"pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, + {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, + {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, + {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + 
{file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytest" +version = "6.2.5" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-6.2.5-py3-none-any.whl", hash = 
"sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, +] + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +toml = "*" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +name = "pytest-faker" +version = "2.0.0" +description = "Faker integration with the pytest framework." +optional = false +python-versions = "*" +files = [ + {file = "pytest-faker-2.0.0.tar.gz", hash = "sha256:6b37bb89d94f96552bfa51f8e8b89d32addded8ddb58a331488299ef0137d9b6"}, +] + +[package.dependencies] +Faker = ">=0.7.3" + +[[package]] +name = "pytest-mock" +version = "3.12.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, + {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, +] + +[package.dependencies] +pytest = ">=5.0" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = 
"PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file 
= "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "requests-mock" +version = "1.11.0" +description = "Mock out responses from the requests package" +optional = false +python-versions = "*" +files = [ + {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, + {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, +] + +[package.dependencies] +requests = ">=2.3,<3" +six = "*" + +[package.extras] +fixture = ["fixtures"] +test = ["fixtures", "mock", "purl", "pytest", 
"requests-futures", "sphinx", "testtools"] + +[[package]] +name = "responses" +version = "0.13.4" +description = "A utility library for mocking out the `requests` Python library." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "responses-0.13.4-py2.py3-none-any.whl", hash = "sha256:d8d0f655710c46fd3513b9202a7f0dcedd02ca0f8cf4976f27fa8ab5b81e656d"}, + {file = "responses-0.13.4.tar.gz", hash = "sha256:9476775d856d3c24ae660bbebe29fb6d789d4ad16acd723efbfb6ee20990b899"}, +] + +[package.dependencies] +requests = ">=2.0" +six = "*" +urllib3 = ">=1.25.10" + +[package.extras] +tests = ["coverage (>=3.7.1,<6.0.0)", "flake8", "mypy", "pytest (>=4.6)", "pytest (>=4.6,<5.0)", "pytest-cov", "pytest-localserver", "types-mock", "types-requests", "types-six"] + +[[package]] +name = "setuptools" +version = "69.1.1" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56"}, + {file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "typing-extensions" +version = "4.10.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = 
"typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, +] + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = 
"wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = 
"wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", 
hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "098ea5d9961a0222868250b2f49e4800c4166c74038591d896b564764a7b0703" diff --git a/airbyte-integrations/connectors/source-orb/pyproject.toml b/airbyte-integrations/connectors/source-orb/pyproject.toml new file mode 100644 index 000000000000..12519c1662f6 --- /dev/null +++ b/airbyte-integrations/connectors/source-orb/pyproject.toml @@ -0,0 +1,33 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +version = "1.1.1" +name = "source-orb" +description = "Source implementation for Orb." +authors = [ "Airbyte ",] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/sources/orb" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" +[[tool.poetry.packages]] +include = "source_orb" + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +airbyte-cdk = "==0.68.4" +pendulum = "==2.1.2" + +[tool.poetry.scripts] +source-orb = "source_orb.run:run" + +[tool.poetry.group.dev.dependencies] +pytest = "^6.1" +requests-mock = "^1.11.0" +pytest-faker = "==2.0.0" +pytest-mock = "^3.6.1" +pendulum = "==2.1.2" +responses = "^0.13.3" + diff --git a/airbyte-integrations/connectors/source-orb/setup.py b/airbyte-integrations/connectors/source-orb/setup.py deleted file mode 100644 index da07d9e21ecd..000000000000 --- a/airbyte-integrations/connectors/source-orb/setup.py +++ /dev/null @@ -1,40 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = ["airbyte-cdk~=0.1", "pendulum==2.1.2"] - -TEST_REQUIREMENTS = ["requests-mock~=1.9.3", "pytest~=6.1", "pytest-mock~=3.6.1", "responses~=0.13.3", "pendulum==2.1.2"] - -setup( - entry_points={ - "console_scripts": [ - "source-orb=source_orb.run:run", - ], - }, - name="source_orb", - description="Source implementation for Orb.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={ - "": [ - # Include yaml files in the package (if any) - "*.yml", - "*.yaml", - # Include all json files in the package, up to 4 levels deep - "*.json", - "*/*.json", - "*/*/*.json", - "*/*/*/*.json", - "*/*/*/*/*.json", - ] - }, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/source-orb/source_orb/source.py b/airbyte-integrations/connectors/source-orb/source_orb/source.py index 0005a9b70407..a33620be7c4a 100644 --- a/airbyte-integrations/connectors/source-orb/source_orb/source.py +++ b/airbyte-integrations/connectors/source-orb/source_orb/source.py @@ -590,9 +590,14 @@ def enrich_ledger_entries_with_event_data(self, ledger_entries): # Build up a list of the subset of ledger entries we are expected # to enrich with event metadata. 
event_id_to_ledger_entries = {} + min_created_at_timestamp = pendulum.now() + max_created_at_timestamp = pendulum.now() + for entry in ledger_entries: maybe_event_id: Optional[str] = entry.get("event_id") if maybe_event_id: + min_created_at_timestamp = min(min_created_at_timestamp, pendulum.parse(entry["created_at"])) + max_created_at_timestamp = max(max_created_at_timestamp, pendulum.parse(entry["created_at"])) # There can be multiple entries with the same event ID event_id_to_ledger_entries[maybe_event_id] = event_id_to_ledger_entries.get(maybe_event_id, []) + [entry] @@ -621,7 +626,11 @@ def modify_ledger_entry_schema(ledger_entry): # The events endpoint is a `POST` endpoint which expects a list of # event_ids to filter on - request_filter_json = {"event_ids": list(event_id_to_ledger_entries)} + request_filter_json = { + "event_ids": list(event_id_to_ledger_entries), + "timeframe_start": min_created_at_timestamp.to_iso8601_string(), + "timeframe_end": max_created_at_timestamp.add(minutes=1).to_iso8601_string(), + } # Prepare request with self._session, which should # automatically deal with the authentication header. diff --git a/airbyte-integrations/connectors/source-orb/unit_tests/test_incremental_streams.py b/airbyte-integrations/connectors/source-orb/unit_tests/test_incremental_streams.py index ccf3d2d63f15..a322784c5c03 100644 --- a/airbyte-integrations/connectors/source-orb/unit_tests/test_incremental_streams.py +++ b/airbyte-integrations/connectors/source-orb/unit_tests/test_incremental_streams.py @@ -256,7 +256,7 @@ def test_credits_ledger_entries_transform_record(mocker): @responses.activate def test_credits_ledger_entries_no_matching_events(mocker): stream = CreditsLedgerEntries(string_event_properties_keys=["ping"]) - ledger_entries = [{"event_id": "foo-event-id", "entry_type": "decrement"}, {"event_id": "bar-event-id", "entry_type": "decrement"}] + ledger_entries = [{"event_id": "foo-event-id", "entry_type": "decrement", "created_at": "2022-02-21T07:00:00+00:00"}, {"event_id": "bar-event-id", "entry_type": "decrement", "created_at": "2022-02-21T07:00:00+00:00"}] mock_response = { "data": [ { @@ -276,8 +276,8 @@ def test_credits_ledger_entries_no_matching_events(mocker): # We failed to enrich either event, but still check that the schema was # transformed as expected assert enriched_entries == [ - {"event": {"id": "foo-event-id"}, "entry_type": "decrement"}, - {"event": {"id": "bar-event-id"}, "entry_type": "decrement"}, + {"event": {"id": "foo-event-id"}, "entry_type": "decrement", "created_at": "2022-02-21T07:00:00+00:00"}, + {"event": {"id": "bar-event-id"}, "entry_type": "decrement", "created_at": "2022-02-21T07:00:00+00:00"}, ] @@ -300,7 +300,7 @@ def test_credits_ledger_entries_enriches_selected_property_keys( string_event_properties_keys=selected_string_property_keys, numeric_event_properties_keys=selected_numeric_property_keys ) original_entry_1 = {"entry_type": "increment"} - ledger_entries = [{"event_id": "foo-event-id", "entry_type": "decrement"}, original_entry_1] + ledger_entries = [{"event_id": "foo-event-id", "entry_type": "decrement", "created_at": "2022-02-21T07:00:00+00:00"}, original_entry_1] mock_response = { "data": [ { @@ -316,7 +316,7 @@ def test_credits_ledger_entries_enriches_selected_property_keys( responses.add(responses.POST, f"{stream.url_base}events", json=mock_response, status=200) enriched_entries = stream.enrich_ledger_entries_with_event_data(ledger_entries) - assert enriched_entries[0] == {"entry_type": "decrement", "event": {"id": 
"foo-event-id", "properties": resulting_properties}} + assert enriched_entries[0] == {"entry_type": "decrement", "created_at": "2022-02-21T07:00:00+00:00", "event": {"id": "foo-event-id", "properties": resulting_properties}} # Does not enrich, but still passes back, irrelevant (for enrichment purposes) ledger entry assert enriched_entries[1] == original_entry_1 @@ -324,7 +324,7 @@ def test_credits_ledger_entries_enriches_selected_property_keys( @responses.activate def test_credits_ledger_entries_enriches_with_multiple_entries_per_event(mocker): stream = CreditsLedgerEntries(string_event_properties_keys=["ping"]) - ledger_entries = [{"event_id": "foo-event-id", "entry_type": "decrement"}, {"event_id": "foo-event-id", "entry_type": "decrement"}] + ledger_entries = [{"event_id": "foo-event-id", "entry_type": "decrement", "created_at": "2022-02-21T07:00:00+00:00",}, {"event_id": "foo-event-id", "entry_type": "decrement", "created_at": "2022-02-21T07:00:00+00:00",}] mock_response = { "data": [ { @@ -342,8 +342,8 @@ def test_credits_ledger_entries_enriches_with_multiple_entries_per_event(mocker) # We expect both events are enriched correctly assert enriched_entries == [ - {"event": {"id": "foo-event-id", "properties": {"ping": "pong"}}, "entry_type": "decrement"}, - {"event": {"id": "foo-event-id", "properties": {"ping": "pong"}}, "entry_type": "decrement"}, + {"event": {"id": "foo-event-id", "properties": {"ping": "pong"}}, "entry_type": "decrement", "created_at": "2022-02-21T07:00:00+00:00",}, + {"event": {"id": "foo-event-id", "properties": {"ping": "pong"}}, "entry_type": "decrement", "created_at": "2022-02-21T07:00:00+00:00",}, ] diff --git a/docs/integrations/sources/orb.md b/docs/integrations/sources/orb.md index aa219864f4d3..9214043c4c7c 100644 --- a/docs/integrations/sources/orb.md +++ b/docs/integrations/sources/orb.md @@ -54,12 +54,13 @@ an Orb Account and API Key. 
| Version | Date | Pull Request | Subject | | --- |------------|----------------------------------------------------------| --- | -| 1.1.0 | 2023-03-03 | [24567](https://github.com/airbytehq/airbyte/pull/24567) | Add Invoices incremental stream (merged from [#24737](https://github.com/airbytehq/airbyte/pull/24737) -| 1.0.0 | 2023-02-02 | [21951](https://github.com/airbytehq/airbyte/pull/21951) | Add SubscriptionUsage stream, and made `start_date` a required field -| 0.1.4 | 2022-10-07 | [17761](https://github.com/airbytehq/airbyte/pull/17761) | Fix bug with enriching ledger entries with multiple credit blocks -| 0.1.3 | 2022-08-26 | [16017](https://github.com/airbytehq/airbyte/pull/16017) | Add credit block id to ledger entries -| 0.1.2 | 2022-04-20 | [11528](https://github.com/airbytehq/airbyte/pull/11528) | Add cost basis to ledger entries, update expiration date, sync only committed entries -| 0.1.1 | 2022-03-03 | [10839](https://github.com/airbytehq/airbyte/pull/10839) | Support ledger entries with numeric properties + schema fixes -| 0.1.0 | 2022-02-01 | | New Source: Orb +| 1.1.1 | 2024-02-07 | [35005](https://github.com/airbytehq/airbyte/pull/35005) | Pass timeframe_start, timeframe_end to events query | +| 1.1.0 | 2023-03-03 | [24567](https://github.com/airbytehq/airbyte/pull/24567) | Add Invoices incremental stream merged from [#24737](https://github.com/airbytehq/airbyte/pull/24737) | +| 1.0.0 | 2023-02-02 | [21951](https://github.com/airbytehq/airbyte/pull/21951) | Add SubscriptionUsage stream, and made `start_date` a required field | +| 0.1.4 | 2022-10-07 | [17761](https://github.com/airbytehq/airbyte/pull/17761) | Fix bug with enriching ledger entries with multiple credit blocks | +| 0.1.3 | 2022-08-26 | [16017](https://github.com/airbytehq/airbyte/pull/16017) | Add credit block id to ledger entries | +| 0.1.2 | 2022-04-20 | [11528](https://github.com/airbytehq/airbyte/pull/11528) | Add cost basis to ledger entries, update expiration date, sync only committed entries | +| 0.1.1 | 2022-03-03 | [10839](https://github.com/airbytehq/airbyte/pull/10839) | Support ledger entries with numeric properties + schema fixes | +| 0.1.0 | 2022-02-01 | | New Source: Orb | | :--- | :--- | :--- | :--- | From 9175956a01f65d814e72a20316db6180ee5863d2 Mon Sep 17 00:00:00 2001 From: Artem Inzhyyants <36314070+artem1205@users.noreply.github.com> Date: Fri, 8 Mar 2024 17:10:45 +0100 Subject: [PATCH 138/172] Source GitHub: update CDK (#35915) Signed-off-by: Artem Inzhyyants --- .../connectors/source-github/metadata.yaml | 2 +- .../connectors/source-github/poetry.lock | 12 ++++++------ .../connectors/source-github/pyproject.toml | 4 ++-- .../unit_tests/integration/test_events.py | 2 +- docs/integrations/sources/github.md | 2 +- 5 files changed, 11 insertions(+), 11 deletions(-) diff --git a/airbyte-integrations/connectors/source-github/metadata.yaml b/airbyte-integrations/connectors/source-github/metadata.yaml index c15d331f0930..aeb83b414d07 100644 --- a/airbyte-integrations/connectors/source-github/metadata.yaml +++ b/airbyte-integrations/connectors/source-github/metadata.yaml @@ -10,7 +10,7 @@ data: connectorSubtype: api connectorType: source definitionId: ef69ef6e-aa7f-4af1-a01d-ef775033524e - dockerImageTag: 1.6.3 + dockerImageTag: 1.6.4 dockerRepository: airbyte/source-github documentationUrl: https://docs.airbyte.com/integrations/sources/github githubIssueLabel: source-github diff --git a/airbyte-integrations/connectors/source-github/poetry.lock 
b/airbyte-integrations/connectors/source-github/poetry.lock index 24d8fe9af20e..b5e7825c76f5 100644 --- a/airbyte-integrations/connectors/source-github/poetry.lock +++ b/airbyte-integrations/connectors/source-github/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "airbyte-cdk" -version = "0.62.1" +version = "0.68.4" description = "A framework for writing Airbyte Connectors." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.62.1.tar.gz", hash = "sha256:3c934dd8b045079a9c807f699ca2012eaa5df755606e3f5b8b16247cbbd7e8c6"}, - {file = "airbyte_cdk-0.62.1-py3-none-any.whl", hash = "sha256:792399a602b7f5c3cd4ed2a5fce5910cfe3676b9b9199b9208f2d5236f5f42d3"}, + {file = "airbyte-cdk-0.68.4.tar.gz", hash = "sha256:da4d923d9dac9f13fbd2e89a0094c58d440dac85552e8084d19cbb0a73efd9d7"}, + {file = "airbyte_cdk-0.68.4-py3-none-any.whl", hash = "sha256:3b6a9b6adf81a1d9c2d40acecfe9016e73197dd95f1e6027423aeee85d3a7ee1"}, ] [package.dependencies] @@ -32,8 +32,8 @@ requests-cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] @@ -1105,4 +1105,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "40cc246c45e6c2d626e016673f3aa60794f3464d82c8ccd0b62a6b66df2b30da" +content-hash = "21dc716e53a3184f45aa9dd040b2fdd142daf95a2847aa6d9d2910c9ff637d5c" diff --git a/airbyte-integrations/connectors/source-github/pyproject.toml b/airbyte-integrations/connectors/source-github/pyproject.toml index 085b9c828c18..86d738db8f06 100644 --- a/airbyte-integrations/connectors/source-github/pyproject.toml +++ b/airbyte-integrations/connectors/source-github/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "1.6.3" +version = "1.6.4" name = 
"source-github" description = "Source implementation for GitHub." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_github" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "^0.62.1" +airbyte-cdk = "^0.68.4" sgqlc = "==16.3" [tool.poetry.scripts] diff --git a/airbyte-integrations/connectors/source-github/unit_tests/integration/test_events.py b/airbyte-integrations/connectors/source-github/unit_tests/integration/test_events.py index a3f98b1a2cd5..6e5eb6ee100f 100644 --- a/airbyte-integrations/connectors/source-github/unit_tests/integration/test_events.py +++ b/airbyte-integrations/connectors/source-github/unit_tests/integration/test_events.py @@ -177,7 +177,7 @@ def test_when_read_incrementally_then_emit_state_message(self): .with_stream_state("events", {"airbytehq/integration-test": {"created_at": "2020-06-09T10:00:00Z"}}) .build(), ) - assert actual_messages.state_messages[0].state.data == {'events': {'airbytehq/integration-test': {'created_at': '2022-06-09T12:47:28Z'}}} + assert actual_messages.state_messages[0].state.stream.stream_state == {'airbytehq/integration-test': {'created_at': '2022-06-09T12:47:28Z'}} def test_read_handles_expected_error_correctly_and_exits_with_complete_status(self): """Ensure read() method does not raise an Exception and log message with error is in output""" diff --git a/docs/integrations/sources/github.md b/docs/integrations/sources/github.md index 03d20e32b24a..a68273aaaf04 100644 --- a/docs/integrations/sources/github.md +++ b/docs/integrations/sources/github.md @@ -67,7 +67,6 @@ Repositories with the wrong name or repositories that do not exist or have the w - The **Start Date** does not apply to the streams below and all data will be synced for these streams: `assignees`, `branches`, `collaborators`, `issue_labels`, `organizations`, `pull_request_commits`, `pull_request_stats`, `repositories`, `tags`, `teams`, `users` 8. **Branch (Optional)** - List of GitHub repository branches to pull commits from, e.g. `airbytehq/airbyte/master`. If no branches are specified for a repository, the default branch will be pulled. (e.g. `airbytehq/airbyte/master airbytehq/airbyte/my-branch`). -9. **Max requests per hour (Optional)** - The GitHub API allows for a maximum of 5,000 requests per hour (15,000 for Github Enterprise). You can specify a lower value to limit your use of the API quota. Refer to GitHub article [Rate limits for the REST API](https://docs.github.com/en/rest/overview/rate-limits-for-the-rest-api). @@ -207,6 +206,7 @@ Your token should have at least the `repo` scope. Depending on which streams you | Version | Date | Pull Request | Subject | |:--------|:-----------|:------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 1.6.4 | 2024-03-08 | [35915](https://github.com/airbytehq/airbyte/pull/35915) | Fix per stream error handler; Make use the latest CDK version | | 1.6.3 | 2024-02-15 | [35271](https://github.com/airbytehq/airbyte/pull/35271) | Update branches schema | | 1.6.2 | 2024-02-12 | [34933](https://github.com/airbytehq/airbyte/pull/34933) | Update Airbyte CDK for integration tests | | 1.6.1 | 2024-02-09 | [35087](https://github.com/airbytehq/airbyte/pull/35087) | Manage dependencies with Poetry. 
| From d7f1a244b646565d714a0670a33d26705f6bb1c5 Mon Sep 17 00:00:00 2001 From: Artem Inzhyyants <36314070+artem1205@users.noreply.github.com> Date: Fri, 8 Mar 2024 17:16:08 +0100 Subject: [PATCH 139/172] Source Facebook Marketing: Fix lookback window (#35913) Signed-off-by: Artem Inzhyyants --- .../source-facebook-marketing/metadata.yaml | 2 +- .../source-facebook-marketing/pyproject.toml | 2 +- .../streams/base_insight_streams.py | 26 ++------ .../unit_tests/test_base_insight_streams.py | 60 +++++++++++++++---- .../sources/facebook-marketing.md | 1 + 5 files changed, 56 insertions(+), 35 deletions(-) diff --git a/airbyte-integrations/connectors/source-facebook-marketing/metadata.yaml b/airbyte-integrations/connectors/source-facebook-marketing/metadata.yaml index 40741317abe4..fa14a5309711 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/metadata.yaml +++ b/airbyte-integrations/connectors/source-facebook-marketing/metadata.yaml @@ -10,7 +10,7 @@ data: connectorSubtype: api connectorType: source definitionId: e7778cfc-e97c-4458-9ecb-b4f2bba8946c - dockerImageTag: 2.0.0 + dockerImageTag: 2.0.1 dockerRepository: airbyte/source-facebook-marketing documentationUrl: https://docs.airbyte.com/integrations/sources/facebook-marketing githubIssueLabel: source-facebook-marketing diff --git a/airbyte-integrations/connectors/source-facebook-marketing/pyproject.toml b/airbyte-integrations/connectors/source-facebook-marketing/pyproject.toml index fb1913198496..15fd8ec79f6f 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/pyproject.toml +++ b/airbyte-integrations/connectors/source-facebook-marketing/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "2.0.0" +version = "2.0.1" name = "source-facebook-marketing" description = "Source implementation for Facebook Marketing." 
authors = [ "Airbyte ",] diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/base_insight_streams.py b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/base_insight_streams.py index af36033599c3..7a4962a30bf2 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/base_insight_streams.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/base_insight_streams.py @@ -192,18 +192,6 @@ def state(self, value: Mapping[str, Any]): self._next_cursor_values = self._get_start_date() - def get_updated_state( - self, - current_stream_state: MutableMapping[str, Any], - latest_record: Mapping[str, Any], - ): - """Update stream state from latest record - - :param current_stream_state: latest state returned - :param latest_record: latest record that we read - """ - return self.state - def _date_intervals(self, account_id: str) -> Iterator[pendulum.Date]: """Get date period to sync""" if self._end_date < self._next_cursor_values[account_id]: @@ -232,7 +220,10 @@ def _generate_async_jobs(self, params: Mapping, account_id: str) -> Iterator[Asy self._next_cursor_values = self._get_start_date() for ts_start in self._date_intervals(account_id): - if ts_start in self._completed_slices.get(account_id, []): + if ( + ts_start in self._completed_slices.get(account_id, []) + and ts_start < self._next_cursor_values.get(account_id, self._start_date) - self.insights_lookback_period + ): continue ts_end = ts_start + pendulum.duration(days=self.time_increment - 1) interval = pendulum.Period(ts_start, ts_end) @@ -309,22 +300,18 @@ def _get_start_date(self) -> Mapping[str, pendulum.Date]: """ today = pendulum.today().date() oldest_date = today - self.INSIGHTS_RETENTION_PERIOD - refresh_date = today - self.insights_lookback_period start_dates_for_account = {} for account_id in self._account_ids: cursor_value = self._cursor_values.get(account_id) if self._cursor_values else None if cursor_value: - start_date = cursor_value + pendulum.duration(days=self.time_increment) + start_date = cursor_value + refresh_date: pendulum.Date = cursor_value - self.insights_lookback_period if start_date > refresh_date: logger.info( f"The cursor value within refresh period ({self.insights_lookback_period}), start sync from {refresh_date} instead." ) start_date = min(start_date, refresh_date) - - if start_date < self._start_date: - logger.warning(f"Ignore provided state and start sync from start_date ({self._start_date}).") - start_date = max(start_date, self._start_date) else: start_date = self._start_date if start_date < oldest_date: @@ -332,7 +319,6 @@ def _get_start_date(self) -> Mapping[str, pendulum.Date]: f"Loading insights older then {self.INSIGHTS_RETENTION_PERIOD} is not possible. Start sync from {oldest_date}." 
) start_dates_for_account[account_id] = max(oldest_date, start_date) - return start_dates_for_account def request_params(self, **kwargs) -> MutableMapping[str, Any]: diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_base_insight_streams.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_base_insight_streams.py index 28890704b449..592a5c1eda68 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_base_insight_streams.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_base_insight_streams.py @@ -7,6 +7,7 @@ import pendulum import pytest from airbyte_cdk.models import SyncMode +from freezegun import freeze_time from pendulum import duration from source_facebook_marketing.streams import AdsInsights from source_facebook_marketing.streams.async_job import AsyncJob, InsightAsyncJob @@ -20,17 +21,17 @@ def api_fixture(mocker): @pytest.fixture(name="old_start_date") -def old_start_date_fixture(): +def old_start_date_fixture() -> pendulum.DateTime: return pendulum.now() - duration(months=37 + 1) @pytest.fixture(name="recent_start_date") -def recent_start_date_fixture(): +def recent_start_date_fixture() -> pendulum.DateTime: return pendulum.now() - duration(days=10) @pytest.fixture(name="start_date") -def start_date_fixture(): +def start_date_fixture() -> pendulum.DateTime: return pendulum.now() - duration(months=12) @@ -332,9 +333,10 @@ def test_stream_slices_with_state(self, api, async_manager_mock, start_date, som async_manager_mock.assert_called_once() args, kwargs = async_manager_mock.call_args generated_jobs = list(kwargs["jobs"]) - assert len(generated_jobs) == (end_date - cursor_value).days - assert generated_jobs[0].interval.start == cursor_value.date() + duration(days=1) - assert generated_jobs[1].interval.start == cursor_value.date() + duration(days=2) + # assert that we sync all periods including insight_lookback_period + assert len(generated_jobs) == (end_date.date() - (cursor_value.date() - stream.insights_lookback_period)).days + 1 + assert generated_jobs[0].interval.start == cursor_value.date() - stream.insights_lookback_period + assert generated_jobs[1].interval.start == cursor_value.date() - stream.insights_lookback_period + duration(days=1) def test_stream_slices_with_state_close_to_now(self, api, async_manager_mock, recent_start_date, some_config): """Stream will use start_date when close to now and start_date close to now""" @@ -361,9 +363,9 @@ def test_stream_slices_with_state_close_to_now(self, api, async_manager_mock, re async_manager_mock.assert_called_once() args, kwargs = async_manager_mock.call_args generated_jobs = list(kwargs["jobs"]) - assert len(generated_jobs) == (end_date - start_date).days + 1 - assert generated_jobs[0].interval.start == start_date.date() - assert generated_jobs[1].interval.start == start_date.date() + duration(days=1) + assert len(generated_jobs) == (end_date.date() - (cursor_value.date() - stream.insights_lookback_period)).days + 1 + assert generated_jobs[0].interval.start == cursor_value.date() - stream.insights_lookback_period + assert generated_jobs[1].interval.start == cursor_value.date() - stream.insights_lookback_period + duration(days=1) @pytest.mark.parametrize("state_format", ["old_format", "new_format"]) def test_stream_slices_with_state_and_slices(self, api, async_manager_mock, start_date, some_config, state_format): @@ -408,9 +410,9 @@ def test_stream_slices_with_state_and_slices(self, api, 
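The `_get_start_date` rework above changes the effective start of an incremental sync to the saved cursor minus `insights_lookback_window`, clamped to Facebook's 37-month insights retention horizon, and `_generate_async_jobs` now re-runs completed slices that fall inside that window; the unit-test updates around this point exercise exactly that rule. A stand-alone sketch of the date computation, assuming `pendulum` (the function name and signature are illustrative, not the stream's real API):

```python
from typing import Optional

import pendulum
from pendulum import duration

INSIGHTS_RETENTION_PERIOD = duration(months=37)


def effective_start_date(
    cursor_value: Optional[pendulum.Date],
    config_start_date: pendulum.Date,
    lookback_days: int,
) -> pendulum.Date:
    """Net effect of the patched _get_start_date for a single account."""
    if cursor_value is not None:
        # Re-sync the lookback window behind the saved cursor so that
        # late-arriving attribution data is picked up.
        start_date = cursor_value - duration(days=lookback_days)
    else:
        start_date = config_start_date
    # Insights older than the retention period cannot be requested.
    oldest_date = pendulum.today().date() - INSIGHTS_RETENTION_PERIOD
    return max(oldest_date, start_date)


# Mirrors the new unit test: with "today" frozen at 2024-03-01, a cursor of
# 2024-02-29 and a 10-day lookback yield 2024-02-19.
print(effective_start_date(pendulum.date(2024, 2, 29), pendulum.date(2024, 1, 1), 10))
```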
async_manager_mock, star async_manager_mock.assert_called_once() args, kwargs = async_manager_mock.call_args generated_jobs = list(kwargs["jobs"]) - assert len(generated_jobs) == (end_date - cursor_value).days - 2, "should be 2 slices short because of state" - assert generated_jobs[0].interval.start == cursor_value.date() + duration(days=2) - assert generated_jobs[1].interval.start == cursor_value.date() + duration(days=4) + assert len(generated_jobs) == (end_date.date() - (cursor_value.date() - stream.insights_lookback_period)).days + 1, "should be 34 slices because we ignore slices which are within insights_lookback_period" + assert generated_jobs[0].interval.start == cursor_value.date() - stream.insights_lookback_period + assert generated_jobs[1].interval.start == cursor_value.date() - stream.insights_lookback_period + duration(days=1) def test_get_json_schema(self, api, some_config): stream = AdsInsights( @@ -493,7 +495,7 @@ def test_level_custom(self, api, some_config): assert stream.level == "adset" - def test_breackdowns_fields_present_in_response_data(self, api, some_config): + def test_breakdowns_fields_present_in_response_data(self, api, some_config): stream = AdsInsights( api=api, account_ids=some_config["account_ids"], @@ -510,3 +512,35 @@ def test_breackdowns_fields_present_in_response_data(self, api, some_config): data = {"id": "0000001", "name": "Pipenpodl Absakopalis"} assert not stream._response_data_is_valid(data) + + @pytest.mark.parametrize( + "config_start_date, saved_cursor_date, expected_start_date, lookback_window", + [ + ("2024-01-01", "2024-02-29", "2024-02-19", 10), + ("2024-01-01", "2024-02-29", "2024-02-01", 28), + ("2018-01-01", "2020-02-29", "2021-02-01", 28), + ], + ids=[ + "with_stream_state in 37 month interval__stream_state_minus_lookback_10_expected", + "with_stream_state in 37 month interval__stream_state_minus_lookback_28_expected", + "with_stream_state NOT in 37 month interval__today_minus_37_month_expected", + ], + ) + @freeze_time("2024-03-01") + def test_start_date_with_lookback_window( + self, api, some_config, config_start_date: str, saved_cursor_date: str, expected_start_date: str, lookback_window: int + ): + start_date = pendulum.parse(config_start_date) + end_date = start_date + duration(days=10) + state = ( + {"unknown_account": {AdsInsights.cursor_field: pendulum.parse(saved_cursor_date).isoformat()}} if saved_cursor_date else None + ) + stream = AdsInsights( + api=api, + account_ids=some_config["account_ids"], + start_date=start_date, + end_date=end_date, + insights_lookback_window=lookback_window, + ) + stream.state = state + assert stream._get_start_date().get("unknown_account").to_date_string() == expected_start_date diff --git a/docs/integrations/sources/facebook-marketing.md b/docs/integrations/sources/facebook-marketing.md index 5ab841a4cdf9..b38e5897c4f1 100644 --- a/docs/integrations/sources/facebook-marketing.md +++ b/docs/integrations/sources/facebook-marketing.md @@ -200,6 +200,7 @@ The Facebook Marketing connector uses the `lookback_window` parameter to repeate | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 2.0.1 | 2024-03-08 | 
[35913](https://github.com/airbytehq/airbyte/pull/35913) | Fix lookback window |
| 2.0.0 | 2024-03-01 | [35746](https://github.com/airbytehq/airbyte/pull/35746) | Update API to `v19.0` |
| 1.4.2 | 2024-02-22 | [35539](https://github.com/airbytehq/airbyte/pull/35539) | Add missing config migration from `include_deleted` field |
| 1.4.1 | 2024-02-21 | [35467](https://github.com/airbytehq/airbyte/pull/35467) | Fix error with incorrect state transforming in the 1.4.0 version |

From 21d1330cca054bb358fba25273a68d24e07320f4 Mon Sep 17 00:00:00 2001
From: Marius Posta
Date: Fri, 8 Mar 2024 10:11:32 -0800
Subject: [PATCH 140/172] .github: remove unused
 connector_dependency_template.md (#35917)

---
 .../connector_dependency_template.md | 70 -------------------
 1 file changed, 70 deletions(-)
 delete mode 100644 .github/comment_templates/connector_dependency_template.md

diff --git a/.github/comment_templates/connector_dependency_template.md b/.github/comment_templates/connector_dependency_template.md
deleted file mode 100644
index 384a166c04f3..000000000000
--- a/.github/comment_templates/connector_dependency_template.md
+++ /dev/null
@@ -1,70 +0,0 @@
-
-## Affected Connector Report
-
-NOTE ⚠️ Changes in this PR affect the following connectors. Make sure to do the following as needed:
-- Run integration tests
-- Bump connector or module version
-- Add changelog
-- Publish the new version
-
-### {source_status_summary} Sources ({num_sources})
-
-| Connector | Version | Changelog | Publish |
-| --- | :---: | :---: | :---: |
-{source_rows}
-
-* See "Actionable Items" below for how to resolve warnings and errors.
-
-### {destination_status_summary} Destinations ({num_destinations})
-
-| Connector | Version | Changelog | Publish |
-| --- | :---: | :---: | :---: |
-{destination_rows}
-
-* See "Actionable Items" below for how to resolve warnings and errors.
-
-### {other_status_summary} Other Modules ({num_others})
-
-{others_rows}
-
-### Actionable Items
-
-(click to expand)
-
-| Category | Status | Actionable Item |
-| --- | :---: | --- |
-| Version | ❌ mismatch | The version of the connector is different from its normal variant. Please bump the version of the connector. |
-| | ⚠ doc not found | The connector does not seem to have a documentation file. This can be normal (e.g. basic connector like `source-jdbc` is not published or documented). Please double-check to make sure that it is not a bug. |
-| Changelog | ⚠ doc not found | The connector does not seem to have a documentation file. This can be normal (e.g. basic connector like `source-jdbc` is not published or documented). Please double-check to make sure that it is not a bug. |
-| | ❌ changelog missing | There is no changelog for the current version of the connector. If you are the author of the current version, please add a changelog. |
-| Publish | ⚠ not in seed | The connector is not in the [cloud](https://connectors.airbyte.com/files/registries/v0/cloud_registry.json) or [oss](https://connectors.airbyte.com/files/registries/v0/oss_registry.json) registry, so its publication status cannot be checked. This can be normal (e.g. some connectors are cloud-specific, and only listed in the cloud seed file). Please double-check to make sure that you have added a metadata.yaml file and the expected registries are enabled. |
-
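The facebook-marketing lookback fix above (PR [35913](https://github.com/airbytehq/airbyte/pull/35913)) is easiest to read through its new parametrized test: `test_start_date_with_lookback_window` pins the behavior down to a single rule, resume from the saved cursor minus the lookback window, but never earlier than the oldest date the Insights API still serves. A minimal standalone sketch of that rule, assuming the 37-month retention horizon implied by the test fixtures; the helper name is illustrative and is not the connector's actual `_get_start_date` method:

```python
import pendulum
from pendulum import duration

# Assumption drawn from the test fixtures: the Insights API retains roughly
# 37 months of data (the old_start_date fixture uses months=37 + 1 to mark
# a date that is already out of range).
INSIGHTS_RETENTION = duration(months=37)


def effective_start_date(saved_cursor: pendulum.DateTime, lookback_days: int) -> pendulum.Date:
    """Resume from the cursor minus the lookback window, clamped to the
    oldest date the API can still serve."""
    oldest_allowed = pendulum.now() - INSIGHTS_RETENTION
    candidate = saved_cursor - duration(days=lookback_days)
    return max(candidate, oldest_allowed).date()


# With "today" frozen at 2024-03-01, as in the test:
# effective_start_date(pendulum.parse("2024-02-29"), 10) -> 2024-02-19
# effective_start_date(pendulum.parse("2020-02-29"), 28) -> 2021-02-01
```

The first call mirrors the "stream_state_minus_lookback" cases, and the second mirrors the "today_minus_37_month" case, where the retention horizon wins over the shifted cursor.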
From 2547af2e8fa62e3a14707d96fde694b03bdc7583 Mon Sep 17 00:00:00 2001 From: Marius Posta Date: Fri, 8 Mar 2024 10:11:48 -0800 Subject: [PATCH 141/172] delete unused airbyte-base-java-image top-level folder (#35918) --- airbyte-base-java-image/Dockerfile | 13 ------------- airbyte-base-java-image/README.md | 24 ------------------------ 2 files changed, 37 deletions(-) delete mode 100644 airbyte-base-java-image/Dockerfile delete mode 100644 airbyte-base-java-image/README.md diff --git a/airbyte-base-java-image/Dockerfile b/airbyte-base-java-image/Dockerfile deleted file mode 100644 index e672d3b71ce0..000000000000 --- a/airbyte-base-java-image/Dockerfile +++ /dev/null @@ -1,13 +0,0 @@ -FROM amazoncorretto:19 - -ARG DOCKER_BUILD_ARCH=amd64 - -WORKDIR /app - -RUN yum install -y tar && yum clean all - -# Add the Datadog Java APM agent -ADD https://dtdg.co/latest-java-tracer dd-java-agent.jar - -# Add the OpenTelemetry Java APM agent -ADD https://github.com/open-telemetry/opentelemetry-java-instrumentation/releases/latest/download/opentelemetry-javaagent.jar opentelemetry-javaagent.jar diff --git a/airbyte-base-java-image/README.md b/airbyte-base-java-image/README.md deleted file mode 100644 index 78bc7ccceb9e..000000000000 --- a/airbyte-base-java-image/README.md +++ /dev/null @@ -1,24 +0,0 @@ -# Base Docker Image for Java - -This Docker image provides the base for any Java-based Airbyte module. It is currently based on the [Amazon Corretto](https://aws.amazon.com/corretto/?filtered-posts.sort-by=item.additionalFields.createdDate&filtered-posts.sort-order=desc) -distribution of [OpenJDK](https://openjdk.org/). - -# Releasing - -To release a new version of this base image, use the following steps: - -1. Log in to [Dockerhub](https://hub.docker.com/) via the Docker CLI (`docker login`). -2. Run `docker buildx create --use` to enable Docker `buildx` if you have not used it previously. -3. Run the following to build and push a new version of this image (replace `` with a new version!) : - ``` - docker buildx build --push \ - --tag airbyte/airbyte-base-java-image: \ - --platform linux/amd64,linux/arm64 . - ``` - To see existing versions, [view the image on Dockerhub](https://hub.docker.com/r/airbyte/airbyte-base-java-image). -4. Update base Docker image tag to the new version in all Dockerfiles that depend on the base image: - ```bash - FROM airbyte/java-datadog-tracer-base: - ``` - -[dockerhub]: https://hub.docker.com/repository/registry-1.docker.io/airbyte/airbyte-base-java-image/general From db0c9add6a854deafe2e74abad4adc032cc9019c Mon Sep 17 00:00:00 2001 From: terencecho Date: Fri, 8 Mar 2024 18:32:16 +0000 Subject: [PATCH 142/172] Bump Airbyte version from 0.51.0 to 0.52.0 --- .bumpversion.cfg | 2 +- gradle.properties | 2 +- run-ab-platform.sh | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 343dcac32e7e..865f5df601bc 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.51.0 +current_version = 0.52.0 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(\-[a-z]+)? diff --git a/gradle.properties b/gradle.properties index 9fc47a66700d..c43254b47004 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,4 +1,4 @@ -VERSION=0.51.0 +VERSION=0.52.0 # NOTE: some of these values are overwritten in CI! 
# NOTE: if you want to override this for your local machine, set overrides in ~/.gradle/gradle.properties diff --git a/run-ab-platform.sh b/run-ab-platform.sh index 172cd5862132..1c447a612ff3 100755 --- a/run-ab-platform.sh +++ b/run-ab-platform.sh @@ -1,6 +1,6 @@ #!/bin/bash -VERSION=0.51.0 +VERSION=0.52.0 # Run away from anything even a little scary set -o nounset # -u exit if a variable is not set set -o errexit # -f exit for any command failure" From 23a2ea5f182057878c1eb6365f99d69d954bfee4 Mon Sep 17 00:00:00 2001 From: Christo Grabowski <108154848+ChristoGrab@users.noreply.github.com> Date: Fri, 8 Mar 2024 14:48:02 -0500 Subject: [PATCH 143/172] Source Google Search Console: Update records/test config (#35927) --- .../acceptance-test-config.yml | 16 ------- .../integration_tests/expected_records.jsonl | 45 ++++++++++++------- .../sources/google-search-console.md | 5 +++ 3 files changed, 34 insertions(+), 32 deletions(-) diff --git a/airbyte-integrations/connectors/source-google-search-console/acceptance-test-config.yml b/airbyte-integrations/connectors/source-google-search-console/acceptance-test-config.yml index aaf7caeb5c8e..d73c05c4f894 100755 --- a/airbyte-integrations/connectors/source-google-search-console/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-google-search-console/acceptance-test-config.yml @@ -28,19 +28,6 @@ acceptance_tests: path: "integration_tests/expected_records.jsonl" exact_order: no timeout_seconds: 3600 - empty_streams: - - name: search_analytics_page_report - bypass_reason: "Fast changing data" - - name: search_analytics_keyword_site_report_by_page - bypass_reason: "Fast changing data" - - name: search_analytics_keyword_site_report_by_site - bypass_reason: "Fast changing data" - - name: search_analytics_keyword_page_report - bypass_reason: "Fast changing data" - - name: search_analytics_site_report_by_page - bypass_reason: "Fast changing data" - - name: search_analytics_site_report_by_site - bypass_reason: "Fast changing data" full_refresh: tests: - config_path: "secrets/config.json" @@ -53,6 +40,3 @@ acceptance_tests: timeout_seconds: 3600 future_state: future_state_path: "integration_tests/abnormal_state.json" - # Incremental read with current config produces multiple empty state messages before emitting first record. 
- # This leads to identical consecutive sync results which fail the test - skip_comprehensive_incremental_tests: true diff --git a/airbyte-integrations/connectors/source-google-search-console/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-google-search-console/integration_tests/expected_records.jsonl index 457a58444b62..03d98bca329a 100644 --- a/airbyte-integrations/connectors/source-google-search-console/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-google-search-console/integration_tests/expected_records.jsonl @@ -1,16 +1,29 @@ -{"stream": "sites", "data": {"siteUrl": "sc-domain:airbyte.io", "permissionLevel": "siteFullUser"}, "emitted_at": 1709211825229} -{"stream": "sitemaps", "data": {"path": "https://airbyte.io/sitemap.xml", "lastSubmitted": "2021-09-10T23:02:22.258Z", "isPending": false, "isSitemapsIndex": false, "type": "sitemap", "lastDownloaded": "2023-03-02T03:42:19.607Z", "warnings": "0", "errors": "0", "contents": [{"type": "web", "submitted": "5165", "indexed": "0"}]}, "emitted_at": 1677799185696} -{"stream": "sitemaps", "data": {"path": "https://airbyte.io/sitemap.xml", "lastSubmitted": "2021-09-10T23:02:22.258Z", "isPending": false, "isSitemapsIndex": false, "type": "sitemap", "lastDownloaded": "2023-03-02T03:42:19.607Z", "warnings": "0", "errors": "0", "contents": [{"type": "web", "submitted": "5165", "indexed": "0"}]}, "emitted_at": 1677799186044} -{"stream": "search_analytics_by_date", "data": {"clicks": 160, "impressions": 6097, "ctr": 0.026242414302115796, "position": 27.335410857798916, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-22"}, "emitted_at": 1709284338937} -{"stream": "search_analytics_by_date", "data": {"clicks": 227, "impressions": 7309, "ctr": 0.031057600218908195, "position": 25.308523737857435, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-23"}, "emitted_at": 1709284338938} -{"stream": "search_analytics_by_country", "data": {"clicks": 37, "impressions": 1246, "ctr": 0.02969502407704655, "position": 31.96548956661316, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-22", "country": "usa"}, "emitted_at": 1709558064452} -{"stream": "search_analytics_by_country", "data": {"clicks": 31, "impressions": 1282, "ctr": 0.02418096723868955, "position": 30.254290171606865, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-23", "country": "usa"}, "emitted_at": 1709558064452} -{"stream": "search_analytics_by_device", "data": {"clicks": 203, "impressions": 6206, "ctr": 0.03271028037383177, "position": 23.797937479858202, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-23", "device": "DESKTOP"}, "emitted_at": 1709558104602} -{"stream": "search_analytics_by_device", "data": {"clicks": 21, "impressions": 1084, "ctr": 0.01937269372693727, "position": 34.21678966789668, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-23", "device": "MOBILE"}, "emitted_at": 1709558104603} -{"stream": "search_analytics_by_page", "data": {"clicks": 8, "impressions": 197, "ctr": 0.04060913705583756, "position": 8.802030456852792, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-21", "page": "https://discuss.airbyte.io/t/kafka-connection-fails/723"}, "emitted_at": 1709558151837} -{"stream": "search_analytics_by_query", "data": {"clicks": 2, "impressions": 2, "ctr": 1, "position": 1, "site_url": "sc-domain:airbyte.io", "search_type": 
"web", "date": "2022-10-21", "query": "airbyte authentication"}, "emitted_at": 1709558202703} -{"stream": "search_analytics_by_query", "data": {"clicks": 2, "impressions": 11, "ctr": 0.18181818181818182, "position": 2.090909090909091, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-21", "query": "airbyte cloud"}, "emitted_at": 1709558202703} -{"stream": "search_analytics_all_fields", "data": {"clicks": 1, "impressions": 1, "ctr": 1, "position": 9, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-21", "country": "aut", "device": "DESKTOP", "page": "https://discuss.airbyte.io/t/cannot-build-docker-images-for-python-destination-connector/1454", "query": "fatal error: ffi.h: no such file or directory"}, "emitted_at": 1709558247944} -{"stream": "search_analytics_all_fields", "data": {"clicks": 1, "impressions": 1, "ctr": 1, "position": 1, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-21", "country": "bel", "device": "DESKTOP", "page": "https://cloud.airbyte.io/login", "query": "airbyte login"}, "emitted_at": 1709558247944} -{"stream": "custom_dimensions", "data": {"clicks": 29, "impressions": 521, "ctr": 0.05566218809980806, "position": 11.186180422264876, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-21", "country": "fra", "device": "DESKTOP"}, "emitted_at": 1709559198005} -{"stream": "custom_dimensions", "data": {"clicks": 27, "impressions": 421, "ctr": 0.06413301662707839, "position": 14.931116389548693, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2022-10-21", "country": "bra", "device": "DESKTOP"}, "emitted_at": 1709559198006} +{"stream": "sites", "data": {"siteUrl": "sc-domain:airbyte.io", "permissionLevel": "siteFullUser"}, "emitted_at": 1709913944973} +{"stream": "sitemaps", "data": {"path": "https://discuss.airbyte.io/sitemap.xml", "lastSubmitted": "2024-02-10T17:31:13.470Z", "isPending": false, "isSitemapsIndex": true, "lastDownloaded": "2024-03-08T04:51:33.425Z", "warnings": "0", "errors": "0", "contents": [{"type": "web", "submitted": "1778", "indexed": "0"}]}, "emitted_at": 1709913945327} +{"stream": "sitemaps", "data": {"path": "https://airbyte.io/sitemap.xml", "lastSubmitted": "2021-09-10T23:02:22.258Z", "isPending": false, "isSitemapsIndex": false, "type": "sitemap", "lastDownloaded": "2024-03-08T02:25:38.869Z", "warnings": "6", "errors": "0", "contents": [{"type": "web", "submitted": "30333", "indexed": "0"}]}, "emitted_at": 1709913945328} +{"stream": "search_analytics_by_date", "data": {"clicks": 120, "impressions": 5267, "ctr": 0.022783368141256883, "position": 35.45490791722043, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-01"}, "emitted_at": 1709913946169} +{"stream": "search_analytics_by_date", "data": {"clicks": 439, "impressions": 10076, "ctr": 0.043568876538308855, "position": 24.655617308455735, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-02"}, "emitted_at": 1709913946170} +{"stream": "search_analytics_by_country", "data": {"clicks": 98, "impressions": 2544, "ctr": 0.03852201257861635, "position": 25.294025157232703, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-03", "country": "usa"}, "emitted_at": 1709913947363} +{"stream": "search_analytics_by_country", "data": {"clicks": 84, "impressions": 2389, "ctr": 0.03516115529510255, "position": 28.137295939723735, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-02", "country": 
"usa"}, "emitted_at": 1709913947364} +{"stream": "search_analytics_by_device", "data": {"clicks": 453, "impressions": 10313, "ctr": 0.043925143023368564, "position": 22.476873848540677, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-03", "device": "DESKTOP"}, "emitted_at": 1709913947994} +{"stream": "search_analytics_by_device", "data": {"clicks": 415, "impressions": 9270, "ctr": 0.0447680690399137, "position": 23.817044228694716, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-02", "device": "DESKTOP"}, "emitted_at": 1709913947996} +{"stream": "search_analytics_by_page", "data": {"clicks": 14, "impressions": 178, "ctr": 0.07865168539325842, "position": 7.162921348314606, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-02", "page": "https://discuss.airbyte.io/t/nil-pointer-error-when-deploying-helm-chart/601"}, "emitted_at": 1709913949344} +{"stream": "search_analytics_by_page", "data": {"clicks": 14, "impressions": 59, "ctr": 0.23728813559322035, "position": 7.5423728813559325, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-03", "page": "https://discuss.airbyte.io/t/using-a-private-git-repo-for-transformations-the-selection-criterion-does-not-match-any-nodes/4170"}, "emitted_at": 1709913949345} +{"stream": "search_analytics_by_query", "data": {"clicks": 5, "impressions": 6, "ctr": 0.8333333333333334, "position": 1, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-03", "query": "internal server error: cannot invoke \"io.airbyte.api.model.generated.airbytecatalog.getstreams()\" because \"discovered\" is null"}, "emitted_at": 1709913950680} +{"stream": "search_analytics_by_query", "data": {"clicks": 3, "impressions": 4, "ctr": 0.75, "position": 2, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-02", "query": "the selection criterion does not match any nodes"}, "emitted_at": 1709913950680} +{"stream": "search_analytics_all_fields", "data": {"clicks": 2, "impressions": 3, "ctr": 0.6666666666666666, "position": 2, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-02", "country": "gbr", "device": "DESKTOP", "page": "https://discuss.airbyte.io/t/using-a-private-git-repo-for-transformations-the-selection-criterion-does-not-match-any-nodes/4170", "query": "the selection criterion does not match any nodes"}, "emitted_at": 1709913953146} +{"stream": "search_analytics_all_fields", "data": {"clicks": 2, "impressions": 2, "ctr": 1, "position": 2, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-02", "country": "usa", "device": "DESKTOP", "page": "https://discuss.airbyte.io/t/integrating-keycloak-iam-with-airbyte/2826", "query": "airbyte keycloak"}, "emitted_at": 1709913953146} +{"stream": "custom_dimensions", "data": {"clicks": 97, "impressions": 2392, "ctr": 0.040551839464882944, "position": 24.149247491638796, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-03", "country": "usa", "device": "DESKTOP"}, "emitted_at": 1709913970183} +{"stream": "custom_dimensions", "data": {"clicks": 81, "impressions": 2220, "ctr": 0.03648648648648649, "position": 27.025675675675675, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-02", "country": "usa", "device": "DESKTOP"}, "emitted_at": 1709913970184} +{"stream": "search_analytics_keyword_page_report", "data": {"clicks": 0, "impressions": 1, "ctr": 0, "position": 6, "site_url": "sc-domain:airbyte.io", 
"search_type": "web", "date": "2024-01-08", "country": "usa", "device": "DESKTOP", "query": "fatal: not a dbt project (or any of the parent directories). missing dbt_project.yml file", "page": "https://discuss.airbyte.io/t/how-to-set-workspace-folder-job-id-in-entrypoint-arguments-for-custom-dbt-transformation/2805"}, "emitted_at": 1709913956708} +{"stream": "search_analytics_keyword_page_report", "data": {"clicks": 0, "impressions": 1, "ctr": 0, "position": 4, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-09", "country": "usa", "device": "DESKTOP", "query": "could not find a version that satisfies the requirement comm>=0.1.3", "page": "https://discuss.airbyte.io/t/error-could-not-find-a-version-that-satisfies-the-requirement-airbyte-cdk-0-1-56/1397"}, "emitted_at": 1709913956709} +{"stream": "search_analytics_page_report", "data": {"clicks": 2, "impressions": 4, "ctr": 0.5, "position": 2.5, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-02", "country": "gbr", "device": "DESKTOP", "page": "https://discuss.airbyte.io/t/using-a-private-git-repo-for-transformations-the-selection-criterion-does-not-match-any-nodes/4170"}, "emitted_at": 1709913968085} +{"stream": "search_analytics_page_report", "data": {"clicks": 2, "impressions": 3, "ctr": 0.6666666666666666, "position": 1, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-03", "country": "deu", "device": "DESKTOP", "page": "https://discuss.airbyte.io/t/mixpanel-connector-issue-follow-up-on-previous-case/2814"}, "emitted_at": 1709913968086} +{"stream": "search_analytics_keyword_site_report_by_page", "data": {"clicks": 0, "impressions": 1, "ctr": 0, "position": 6, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-08", "country": "usa", "device": "DESKTOP", "query": "fatal: not a dbt project (or any of the parent directories). missing dbt_project.yml file"}, "emitted_at": 1709913961303} +{"stream": "search_analytics_keyword_site_report_by_page", "data": {"clicks": 0, "impressions": 1, "ctr": 0, "position": 4, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-09", "country": "usa", "device": "DESKTOP", "query": "could not find a version that satisfies the requirement comm>=0.1.3"}, "emitted_at": 1709913961305} +{"stream": "search_analytics_keyword_site_report_by_site", "data": {"clicks": 0, "impressions": 1, "ctr": 0, "position": 5, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-10", "country": "usa", "device": "DESKTOP", "query": "fatal: not a dbt project (or any of the parent directories). 
missing dbt_project.yml file"}, "emitted_at": 1709913966177} +{"stream": "search_analytics_keyword_site_report_by_site", "data": {"clicks": 0, "impressions": 1, "ctr": 0, "position": 5, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-11", "country": "usa", "device": "DESKTOP", "query": "dbt_project.yml not found"}, "emitted_at": 1709913966179} +{"stream": "search_analytics_site_report_by_page", "data": {"clicks": 105, "impressions": 2905, "ctr": 0.03614457831325301, "position": 21.6447504302926, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-03", "country": "usa", "device": "DESKTOP"}, "emitted_at": 1709913968684} +{"stream": "search_analytics_site_report_by_page", "data": {"clicks": 87, "impressions": 2598, "ctr": 0.03348729792147806, "position": 24.50269438029253, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-02", "country": "usa", "device": "DESKTOP"}, "emitted_at": 1709913968685} +{"stream": "search_analytics_site_report_by_site", "data": {"clicks": 97, "impressions": 2392, "ctr": 0.040551839464882944, "position": 24.149247491638796, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-03", "country": "usa", "device": "DESKTOP"}, "emitted_at": 1709913969703} +{"stream": "search_analytics_site_report_by_site", "data": {"clicks": 81, "impressions": 2220, "ctr": 0.03648648648648649, "position": 27.025675675675675, "site_url": "sc-domain:airbyte.io", "search_type": "web", "date": "2024-01-02", "country": "usa", "device": "DESKTOP"}, "emitted_at": 1709913969704} diff --git a/docs/integrations/sources/google-search-console.md b/docs/integrations/sources/google-search-console.md index 6b2c2549e6c6..9476122d7ef2 100644 --- a/docs/integrations/sources/google-search-console.md +++ b/docs/integrations/sources/google-search-console.md @@ -190,8 +190,13 @@ Expand to see details about Google Search Console connector limitations and trou ### Connector limitations #### Rate limiting + This connector attempts to back off gracefully when it hits Reports API's rate limits. To find more information about limits, see [Usage Limits](https://developers.google.com/webmaster-tools/limits) documentation. +#### Data retention + +Google Search Console only retains data for websites from the last 16 months. Any data prior to this cutoff point will not be accessible. [Please see this article for more information](https://seotesting.com/google-search-console/how-long-does-gsc-keep-my-data/#:~:text=Google%20Search%20Console%20holds%20relevant,October%2C%202022%2C%20until%20today.). + ### Troubleshooting * Check out common troubleshooting issues for the Google Search Console source connector on our [Airbyte Forum](https://github.com/airbytehq/airbyte/discussions). 
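The data-retention note added to `google-search-console.md` above implies a hard clamp on any configured start date: requesting data older than the retention window cannot return records. A minimal sketch of that clamp, assuming the 16-month window stated in the docs; the helper is hypothetical and not part of the connector's code:

```python
import pendulum

# Assumption from the docs note above: Search Console serves roughly the
# last 16 months of data, so anything older is not retrievable.
RETENTION = pendulum.duration(months=16)


def usable_start_date(config_start_date: str) -> pendulum.Date:
    """Clamp a configured start date to the oldest date GSC can still serve."""
    requested = pendulum.parse(config_start_date).date()
    oldest_available = (pendulum.now() - RETENTION).date()
    return max(requested, oldest_available)


# usable_start_date("2020-01-01") returns a date about 16 months back from
# today; records before that cutoff will never appear in a sync.
```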
From 003b6369f9f71bfb7a72a1d6201427f63211e423 Mon Sep 17 00:00:00 2001 From: Marius Posta Date: Fri, 8 Mar 2024 12:09:15 -0800 Subject: [PATCH 144/172] .github: clean up CODEOWNERS (#35919) --- .github/CODEOWNERS | 75 +++++++++++++++++++--------------------------- 1 file changed, 30 insertions(+), 45 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index b3b9368f31d4..7367643197a9 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -6,63 +6,48 @@ /airbyte-integrations/connectors/destination-chroma @airbytehq/ai-language-models /airbyte-cdk/python/airbyte_cdk/destinations/vector_db_based @airbytehq/ai-language-models -# CDK and Connector Acceptance Tests +# CI/CD +/.github/ @airbytehq/connector-extensibility +/airbyte-ci/ @airbytehq/connector-extensibility + +# Python CDK and Connector Acceptance Tests /airbyte-cdk/python @airbytehq/connector-extensibility /airbyte-integrations/connector-templates/ @airbytehq/connector-extensibility /airbyte-integrations/bases/connector-acceptance-test/ @airbytehq/connector-extensibility @lazebnyi @oustynova +# Build customization file change +/airbyte-integrations/connectors/**/build_customization.py @airbytehq/connector-extensibility + # Protocol related items /docs/understanding-airbyte/airbyte-protocol.md @airbytehq/protocol-reviewers -# Normalization -/airbyte-integrations/bases/base-normalization/ @airbytehq/destinations - -# Java-based connectors -/airbyte-integrations/bases/base-java/ @airbytehq/jdbc-connectors - -# Java-based source connectors -/airbyte-integrations/bases/debezium-v1-4-2/ @airbytehq/dbsources -/airbyte-integrations/bases/debezium-v1-9-6/ @airbytehq/dbsources -/airbyte-integrations/connectors/source-jdbc/ @airbytehq/dbsources -/airbyte-integrations/connectors/source-alloydb/ @airbytehq/dbsources -/airbyte-integrations/connectors/source-bigquery/ @airbytehq/dbsources -/airbyte-integrations/connectors/source-clickhouse/ @airbytehq/dbsources -/airbyte-integrations/connectors/source-cockroachdb/ @airbytehq/dbsources -/airbyte-integrations/connectors/source-db2/ @airbytehq/dbsources +# Java CDK +/airbyte-cdk/java/airbyte-cdk @airbytehq/dbsources @airbytehq/destinations +/airbyte-cdk/java/airbyte-cdk/*-sources/ @airbytehq/dbsources +/airbyte-cdk/java/airbyte-cdk/*-destinations/ @airbytehq/destinations +/airbyte-cdk/java/airbyte-cdk/typing-deduping/ @airbytehq/destinations + +# Java connectors catch-all +/buildSrc/ @airbytehq/dbsources @airbytehq/destinations +/airbyte-integrations/connectors/source-*/**/*.java @airbytehq/dbsources +/airbyte-integrations/connectors/source-*/**/*.kt @airbytehq/dbsources +/airbyte-integrations/connectors/source-*/**/*.gradle @airbytehq/dbsources +/airbyte-integrations/connectors-performance/source-harness/ @airbytehq/dbsources +/airbyte-integrations/connectors/destination-*/**/*.java @airbytehq/destinations +/airbyte-integrations/connectors/destination-*/**/*.kt @airbytehq/destinations +/airbyte-integrations/connectors/destination-*/**/*.gradle @airbytehq/destinations +/airbyte-integrations/connectors-performance/destination-harness/ @airbytehq/dbsources + +# Java-based certified or incubating source connectors +/airbyte-integrations/connectors/source-mongodb-v2/ @airbytehq/dbsources /airbyte-integrations/connectors/source-mssql/ @airbytehq/dbsources /airbyte-integrations/connectors/source-mysql/ @airbytehq/dbsources -/airbyte-integrations/connectors/source-oracle/ @airbytehq/dbsources /airbyte-integrations/connectors/source-postgres/ @airbytehq/dbsources 
-/airbyte-integrations/connectors/source-redshift/ @airbytehq/dbsources -/airbyte-integrations/connectors/source-snowflake/ @airbytehq/dbsources -/airbyte-integrations/connectors/source-tidb/ @airbytehq/dbsources -# Java-based destination connectors -airbyte-cdk/java/airbyte-cdk/db-destinations/ @airbytehq/destinations -airbyte-cdk/java/airbyte-cdk/s3-destinations/ @airbytehq/destinations -airbyte-cdk/java/airbyte-cdk/typing-deduping/ @airbytehq/destinations -/airbyte-integrations/bases/standard-destination-test/ @airbytehq/destinations -/airbyte-integrations/bases/base-java-s3/ @airbytehq/destinations -/airbyte-integrations/bases/bases-destination-jdbc/ @airbytehq/destinations +# Java-based certified or incubating destination connectors /airbyte-integrations/connectors/destination-bigquery/ @airbytehq/destinations -/airbyte-integrations/connectors/destination-bigquery-denormalized/ @airbytehq/destinations -/airbyte-integrations/connectors/destination-azure-blob-storage/ @airbytehq/destinations -/airbyte-integrations/connectors/destination-clickhouse/ @airbytehq/destinations -/airbyte-integrations/connectors/destination-databricks/ @airbytehq/destinations -/airbyte-integrations/connectors/destination-gcs/ @airbytehq/destinations -/airbyte-integrations/connectors/destination-mariadb-columnstore/ @airbytehq/destinations -/airbyte-integrations/connectors/destination-mysql/ @airbytehq/destinations -/airbyte-integrations/connectors/destination-mssql/ @airbytehq/destinations -/airbyte-integrations/connectors/destination-oracle/ @airbytehq/destinations /airbyte-integrations/connectors/destination-postgres/ @airbytehq/destinations -/airbyte-integrations/connectors/destination-redshift/ @airbytehq/destinations -/airbyte-integrations/connectors/destination-rockset/ @airbytehq/destinations +/airbyte-integrations/connectors/destination-postgres-strict-encrypt/ @airbytehq/destinations /airbyte-integrations/connectors/destination-s3/ @airbytehq/destinations /airbyte-integrations/connectors/destination-snowflake/ @airbytehq/destinations -/airbyte-integrations/connectors/destination-tidb/ @airbytehq/destinations - -# Build customization file change -/airbyte-integrations/connectors/**/build_customization.py @airbytehq/connector-extensibility - -# airbyte-ci -/airbyte-ci @airbytehq/connector-extensibility +/airbyte-integrations/connectors/destination-redshift/ @airbytehq/destinations From 23ac2cda74d47df69f1ea365321af06ec2547b31 Mon Sep 17 00:00:00 2001 From: Marius Posta Date: Fri, 8 Mar 2024 12:09:43 -0800 Subject: [PATCH 145/172] .github: delete pull_request_template.md (#35921) --- .github/pull_request_template.md | 109 ------------------------------- 1 file changed, 109 deletions(-) delete mode 100644 .github/pull_request_template.md diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md deleted file mode 100644 index 85f751847748..000000000000 --- a/.github/pull_request_template.md +++ /dev/null @@ -1,109 +0,0 @@ - - -## What -*Describe what the change is solving* -*It helps to add screenshots if it affects the frontend.* - -## How -*Describe the solution* - -## Recommended reading order -1. `x.java` -2. `y.python` - -## 🚨 User Impact 🚨 -*Are there any breaking changes? What is the end result perceived by the user?* - -*For connector PRs, use this section to explain which type of semantic versioning bump occurs as a result of the changes. 
Refer to our [Semantic Versioning for Connectors](https://docs.airbyte.com/contributing-to-airbyte/#semantic-versioning-for-connectors) guidelines for more information. **Breaking changes to connectors must be documented by an Airbyte engineer (PR author, or reviewer for community PRs) by using the [Breaking Change Release Playbook](https://docs.google.com/document/d/1VYQggHbL_PN0dDDu7rCyzBLGRtX-R3cpwXaY8QxEgzw/edit).***
-
-*If there are breaking changes, please merge this PR with the 🚨🚨 emoji so changelog authors can further highlight this if needed.*
-
-## Pre-merge Actions
-
-*Expand the relevant checklist and delete the others.*
-
-New Connector
-
-### Community member or Airbyter
-
-- **Community member?** Grant edit access to maintainers ([instructions](https://docs.github.com/en/github/collaborating-with-pull-requests/working-with-forks/allowing-changes-to-a-pull-request-branch-created-from-a-fork#enabling-repository-maintainer-permissions-on-existing-pull-requests))
-- Unit & integration tests added and passing. Community members, please provide proof of success locally, e.g. screenshot or copy-paste unit, integration, and acceptance test output. To run acceptance tests for a Python connector, follow instructions in the README. For Java connectors run `./gradlew :airbyte-integrations:connectors::integrationTest`.
-- Connector version is set to `0.0.1`
-  - `Dockerfile` has version `0.0.1`
-- Documentation updated
-  - Connector's `README.md`
-  - Connector's `bootstrap.md`. See [description and examples](https://docs.google.com/document/d/1ypdgmwmEHWv-TrO4_YOQ7pAJGVrMp5BOkEVh831N260/edit?usp=sharing)
-  - `docs/integrations//.md` including changelog with an entry for the initial version. See changelog [example](https://docs.airbyte.io/integrations/sources/stripe#changelog)
-  - `docs/integrations/README.md`
-
-### Airbyter
-
-If this is a community PR, the Airbyte engineer reviewing this PR is responsible for the below items.
-
-- Create a non-forked branch based on this PR and test the below items on it
-- Build is successful
-- If new credentials are required for use in CI, add them to GSM. [Instructions](https://docs.airbyte.io/connector-development#using-credentials-in-ci).
-
-Updating a connector
-
-### Community member or Airbyter
-
-- Grant edit access to maintainers ([instructions](https://docs.github.com/en/github/collaborating-with-pull-requests/working-with-forks/allowing-changes-to-a-pull-request-branch-created-from-a-fork#enabling-repository-maintainer-permissions-on-existing-pull-requests))
-- Unit & integration tests added
-
-### Airbyter
-
-If this is a community PR, the Airbyte engineer reviewing this PR is responsible for the below items.
-
-- Create a non-forked branch based on this PR and test the below items on it
-- Build is successful
-- If new credentials are required for use in CI, add them to GSM. [Instructions](https://docs.airbyte.io/connector-development#using-credentials-in-ci).
-
-Connector Generator
-
-- Issue acceptance criteria met
-- PR name follows [PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook)
-- If adding a new generator, add it to the [list of scaffold modules being tested](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connector-templates/generator/build.gradle#L41)
-- The generator test modules (all connectors with `-scaffold` in their name) have been updated with the latest scaffold by running `./gradlew :airbyte-integrations:connector-templates:generator:generateScaffolds` then checking in your changes
-- Documentation which references the generator is updated as needed
-
-Updating the Python CDK
-
-### Airbyter
-
-Before merging:
-- Pull Request description explains what problem it is solving
-- Code change is unit tested
-- Build and mypy check pass
-- Smoke test the change on at least one affected connector
-  - On GitHub: Run [this workflow](https://github.com/airbytehq/airbyte/actions/workflows/connectors_tests.yml), passing `--use-local-cdk --name=source-` as options
-  - Locally: `airbyte-ci connectors --use-local-cdk --name=source- test`
-- PR is reviewed and approved
-
-After merging:
-- [Publish the CDK](https://github.com/airbytehq/airbyte/actions/workflows/publish-cdk-command-manually.yml)
-  - The CDK does not follow proper semantic versioning. Choose minor if the change has significant user impact or is a breaking change. Choose patch otherwise.
-  - Write a thoughtful changelog message so we know what was updated.
-- Merge the platform PR that was auto-created for updating the Connector Builder's CDK version
-  - This step is optional if the change does not affect the connector builder or declarative connectors.
-
From acbdc2d6e1fc169bfe7df347f80fcd616626d3f1 Mon Sep 17 00:00:00 2001 From: Ella Rohm-Ensing Date: Fri, 8 Mar 2024 13:58:26 -0800 Subject: [PATCH 146/172] Introduce FinalStateCursor to emit state messages at the end of full refresh syncs (#35905) Co-authored-by: brianjlai --- .../sources/file_based/file_based_source.py | 6 ++-- .../file_based/stream/concurrent/adapters.py | 4 +-- .../stream/concurrent/cursor/__init__.py | 4 +-- ...or.py => file_based_final_state_cursor.py} | 27 +++++++++++++---- .../sources/streams/concurrent/adapters.py | 4 +-- .../sources/streams/concurrent/cursor.py | 30 +++++++++++++++++-- .../streams/concurrent/default_stream.py | 10 +++++-- .../test_concurrent_source_adapter.py | 4 +-- .../stream/concurrent/test_adapters.py | 6 ++-- .../scenarios/stream_facade_builder.py | 4 +-- ...hread_based_concurrent_stream_scenarios.py | 30 ++++++++++--------- ..._based_concurrent_stream_source_builder.py | 9 +++--- .../streams/concurrent/test_default_stream.py | 14 +++++---- .../sources/streams/test_stream_read.py | 5 ++-- .../sources/test_concurrent_source.py | 12 ++++---- .../unit_tests/sources/test_source_read.py | 5 ++-- 16 files changed, 113 insertions(+), 61 deletions(-) rename airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/cursor/{file_based_noop_cursor.py => file_based_final_state_cursor.py} (52%) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_based_source.py b/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_based_source.py index 8bd5cfe9565e..c3ef77cea94f 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_based_source.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_based_source.py @@ -36,7 +36,7 @@ from airbyte_cdk.sources.file_based.stream.concurrent.cursor import ( AbstractConcurrentFileBasedCursor, FileBasedConcurrentCursor, - FileBasedNoopCursor, + FileBasedFinalStateCursor, ) from airbyte_cdk.sources.file_based.stream.cursor import AbstractFileBasedCursor from airbyte_cdk.sources.message.repository import InMemoryMessageRepository, MessageRepository @@ -170,7 +170,9 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: sync_mode = self._get_sync_mode_from_catalog(stream_config.name) if sync_mode == SyncMode.full_refresh and hasattr(self, "_concurrency_level") and self._concurrency_level is not None: - cursor = FileBasedNoopCursor(stream_config) + cursor = FileBasedFinalStateCursor( + stream_config=stream_config, stream_namespace=None, message_repository=self.message_repository + ) stream = FileBasedStreamFacade.create_from_stream( self._make_default_stream(stream_config, cursor), self, self.logger, stream_state, cursor ) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/adapters.py b/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/adapters.py index 4fc1a365b424..e92b78df9c46 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/adapters.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/adapters.py @@ -18,7 +18,7 @@ from airbyte_cdk.sources.file_based.file_types.file_type_parser import FileTypeParser from airbyte_cdk.sources.file_based.remote_file import RemoteFile from airbyte_cdk.sources.file_based.stream import AbstractFileBasedStream -from airbyte_cdk.sources.file_based.stream.concurrent.cursor import FileBasedNoopCursor +from airbyte_cdk.sources.file_based.stream.concurrent.cursor import FileBasedFinalStateCursor from airbyte_cdk.sources.file_based.stream.cursor 
import AbstractFileBasedCursor from airbyte_cdk.sources.file_based.types import StreamSlice from airbyte_cdk.sources.message import MessageRepository @@ -71,7 +71,7 @@ def create_from_stream( partition_generator=FileBasedStreamPartitionGenerator( stream, message_repository, - SyncMode.full_refresh if isinstance(cursor, FileBasedNoopCursor) else SyncMode.incremental, + SyncMode.full_refresh if isinstance(cursor, FileBasedFinalStateCursor) else SyncMode.incremental, [cursor_field] if cursor_field is not None else None, state, cursor, diff --git a/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/cursor/__init__.py b/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/cursor/__init__.py index 6ab66bb39888..590f37bb6d63 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/cursor/__init__.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/cursor/__init__.py @@ -1,5 +1,5 @@ from .abstract_concurrent_file_based_cursor import AbstractConcurrentFileBasedCursor from .file_based_concurrent_cursor import FileBasedConcurrentCursor -from .file_based_noop_cursor import FileBasedNoopCursor +from .file_based_final_state_cursor import FileBasedFinalStateCursor -__all__ = ["AbstractConcurrentFileBasedCursor", "FileBasedConcurrentCursor", "FileBasedNoopCursor"] +__all__ = ["AbstractConcurrentFileBasedCursor", "FileBasedConcurrentCursor", "FileBasedFinalStateCursor"] diff --git a/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/cursor/file_based_noop_cursor.py b/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/cursor/file_based_final_state_cursor.py similarity index 52% rename from airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/cursor/file_based_noop_cursor.py rename to airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/cursor/file_based_final_state_cursor.py index 2aa5a204d503..ca6f43aec2e4 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/cursor/file_based_noop_cursor.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/file_based/stream/concurrent/cursor/file_based_final_state_cursor.py @@ -4,12 +4,15 @@ import logging from datetime import datetime -from typing import TYPE_CHECKING, Any, Iterable, List, MutableMapping +from typing import TYPE_CHECKING, Any, Iterable, List, MutableMapping, Optional +from airbyte_cdk.sources.connector_state_manager import ConnectorStateManager from airbyte_cdk.sources.file_based.config.file_based_stream_config import FileBasedStreamConfig from airbyte_cdk.sources.file_based.remote_file import RemoteFile from airbyte_cdk.sources.file_based.stream.concurrent.cursor.abstract_concurrent_file_based_cursor import AbstractConcurrentFileBasedCursor from airbyte_cdk.sources.file_based.types import StreamState +from airbyte_cdk.sources.message import MessageRepository +from airbyte_cdk.sources.streams import FULL_REFRESH_SENTINEL_STATE_KEY from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition from airbyte_cdk.sources.streams.concurrent.partitions.record import Record @@ -17,13 +20,23 @@ from airbyte_cdk.sources.file_based.stream.concurrent.adapters import FileBasedStreamPartition -class FileBasedNoopCursor(AbstractConcurrentFileBasedCursor): - def __init__(self, stream_config: FileBasedStreamConfig, **kwargs: Any): - pass +class FileBasedFinalStateCursor(AbstractConcurrentFileBasedCursor): + """Cursor that is used to guarantee at least one state message is emitted for a 
concurrent file-based stream.""" + + def __init__( + self, stream_config: FileBasedStreamConfig, message_repository: MessageRepository, stream_namespace: Optional[str], **kwargs: Any + ): + self._stream_name = stream_config.name + self._stream_namespace = stream_namespace + self._message_repository = message_repository + # Normally the connector state manager operates at the source-level. However, we only need it to write the sentinel + # state message rather than manage overall source state. This is also only temporary as we move to the resumable + # full refresh world where every stream uses a FileBasedConcurrentCursor with incremental state. + self._connector_state_manager = ConnectorStateManager(stream_instance_map={}) @property def state(self) -> MutableMapping[str, Any]: - return {} + return {FULL_REFRESH_SENTINEL_STATE_KEY: True} def observe(self, record: Record) -> None: pass @@ -53,4 +66,6 @@ def emit_state_message(self) -> None: pass def ensure_at_least_one_state_emitted(self) -> None: - pass + self._connector_state_manager.update_state_for_stream(self._stream_name, self._stream_namespace, self.state) + state_message = self._connector_state_manager.create_state_message(self._stream_name, self._stream_namespace) + self._message_repository.emit_message(state_message) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/adapters.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/adapters.py index 8b762e63a7b1..a6556caf577e 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/adapters.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/adapters.py @@ -21,7 +21,7 @@ StreamAvailable, StreamUnavailable, ) -from airbyte_cdk.sources.streams.concurrent.cursor import Cursor, NoopCursor +from airbyte_cdk.sources.streams.concurrent.cursor import Cursor, FinalStateCursor from airbyte_cdk.sources.streams.concurrent.default_stream import DefaultStream from airbyte_cdk.sources.streams.concurrent.exceptions import ExceptionWithDisplayMessage from airbyte_cdk.sources.streams.concurrent.helpers import get_cursor_field_from_stream, get_primary_key_from_stream @@ -77,7 +77,7 @@ def create_from_stream( partition_generator=StreamPartitionGenerator( stream, message_repository, - SyncMode.full_refresh if isinstance(cursor, NoopCursor) else SyncMode.incremental, + SyncMode.full_refresh if isinstance(cursor, FinalStateCursor) else SyncMode.incremental, [cursor_field] if cursor_field is not None else None, state, cursor, diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/cursor.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/cursor.py index d581e66a33d8..2a7dcd65c889 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/cursor.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/cursor.py @@ -8,6 +8,7 @@ from airbyte_cdk.sources.connector_state_manager import ConnectorStateManager from airbyte_cdk.sources.message import MessageRepository +from airbyte_cdk.sources.streams import FULL_REFRESH_SENTINEL_STATE_KEY from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition from airbyte_cdk.sources.streams.concurrent.partitions.record import Record from airbyte_cdk.sources.streams.concurrent.state_converters.abstract_stream_state_converter import AbstractStreamStateConverter @@ -65,10 +66,27 @@ def ensure_at_least_one_state_emitted(self) -> None: raise NotImplementedError() -class NoopCursor(Cursor): +class FinalStateCursor(Cursor): + """Cursor that is used to 
guarantee at least one state message is emitted for a concurrent stream.""" + + def __init__( + self, + stream_name: str, + stream_namespace: Optional[str], + message_repository: MessageRepository, + ) -> None: + self._stream_name = stream_name + self._stream_namespace = stream_namespace + self._message_repository = message_repository + # Normally the connector state manager operates at the source-level. However, we only need it to write the sentinel + # state message rather than manage overall source state. This is also only temporary as we move to the resumable + # full refresh world where every stream uses a FileBasedConcurrentCursor with incremental state. + self._connector_state_manager = ConnectorStateManager(stream_instance_map={}) + self._has_closed_at_least_one_slice = False + @property def state(self) -> MutableMapping[str, Any]: - return {} + return {FULL_REFRESH_SENTINEL_STATE_KEY: True} def observe(self, record: Record) -> None: pass @@ -77,7 +95,13 @@ def close_partition(self, partition: Partition) -> None: pass def ensure_at_least_one_state_emitted(self) -> None: - pass + """ + Used primarily for full refresh syncs that do not have a valid cursor value to emit at the end of a sync + """ + + self._connector_state_manager.update_state_for_stream(self._stream_name, self._stream_namespace, self.state) + state_message = self._connector_state_manager.create_state_message(self._stream_name, self._stream_namespace) + self._message_repository.emit_message(state_message) class ConcurrentCursor(Cursor): diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/default_stream.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/default_stream.py index 3e839cb3959e..6cf4a694118e 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/default_stream.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/concurrent/default_stream.py @@ -9,7 +9,7 @@ from airbyte_cdk.models import AirbyteStream, SyncMode from airbyte_cdk.sources.streams.concurrent.abstract_stream import AbstractStream from airbyte_cdk.sources.streams.concurrent.availability_strategy import AbstractAvailabilityStrategy, StreamAvailability -from airbyte_cdk.sources.streams.concurrent.cursor import Cursor, NoopCursor +from airbyte_cdk.sources.streams.concurrent.cursor import Cursor from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition from airbyte_cdk.sources.streams.concurrent.partitions.partition_generator import PartitionGenerator @@ -24,7 +24,7 @@ def __init__( primary_key: List[str], cursor_field: Optional[str], logger: Logger, - cursor: Optional[Cursor], + cursor: Cursor, namespace: Optional[str] = None, ) -> None: self._stream_partition_generator = partition_generator @@ -34,7 +34,7 @@ def __init__( self._primary_key = primary_key self._cursor_field = cursor_field self._logger = logger - self._cursor = cursor or NoopCursor() + self._cursor = cursor self._namespace = namespace def generate_partitions(self) -> Iterable[Partition]: @@ -44,6 +44,10 @@ def generate_partitions(self) -> Iterable[Partition]: def name(self) -> str: return self._name + @property + def namespace(self) -> Optional[str]: + return self._namespace + def check_availability(self) -> StreamAvailability: return self._availability_strategy.check_availability(self._logger) diff --git a/airbyte-cdk/python/unit_tests/sources/concurrent_source/test_concurrent_source_adapter.py b/airbyte-cdk/python/unit_tests/sources/concurrent_source/test_concurrent_source_adapter.py index 
80cc7c4a9d9e..141355ee9057 100644 --- a/airbyte-cdk/python/unit_tests/sources/concurrent_source/test_concurrent_source_adapter.py +++ b/airbyte-cdk/python/unit_tests/sources/concurrent_source/test_concurrent_source_adapter.py @@ -20,7 +20,7 @@ from airbyte_cdk.sources.message import InMemoryMessageRepository from airbyte_cdk.sources.streams import Stream from airbyte_cdk.sources.streams.concurrent.adapters import StreamFacade -from airbyte_cdk.sources.streams.concurrent.cursor import NoopCursor +from airbyte_cdk.sources.streams.concurrent.cursor import FinalStateCursor class _MockSource(ConcurrentSourceAdapter): @@ -36,7 +36,7 @@ def check_connection(self, logger: logging.Logger, config: Mapping[str, Any]) -> def streams(self, config: Mapping[str, Any]) -> List[Stream]: return [ - StreamFacade.create_from_stream(s, self, self._logger, None, NoopCursor()) if is_concurrent else s + StreamFacade.create_from_stream(s, self, self._logger, None, FinalStateCursor(stream_name=s.name, stream_namespace=s.namespace, message_repository=InMemoryMessageRepository())) if is_concurrent else s for s, is_concurrent in self._streams_to_is_concurrent.items() ] diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/stream/concurrent/test_adapters.py b/airbyte-cdk/python/unit_tests/sources/file_based/stream/concurrent/test_adapters.py index 01ac6bcb78d3..7ff30fb59cc6 100644 --- a/airbyte-cdk/python/unit_tests/sources/file_based/stream/concurrent/test_adapters.py +++ b/airbyte-cdk/python/unit_tests/sources/file_based/stream/concurrent/test_adapters.py @@ -23,7 +23,7 @@ FileBasedStreamPartition, FileBasedStreamPartitionGenerator, ) -from airbyte_cdk.sources.file_based.stream.concurrent.cursor import FileBasedNoopCursor +from airbyte_cdk.sources.file_based.stream.concurrent.cursor import FileBasedFinalStateCursor from airbyte_cdk.sources.message import InMemoryMessageRepository from airbyte_cdk.sources.streams.concurrent.cursor import Cursor from airbyte_cdk.sources.streams.concurrent.exceptions import ExceptionWithDisplayMessage @@ -36,7 +36,7 @@ _ANY_STATE = {"state_key": "state_value"} _ANY_CURSOR_FIELD = ["a", "cursor", "key"] _STREAM_NAME = "stream" -_ANY_CURSOR = Mock(spec=FileBasedNoopCursor) +_ANY_CURSOR = Mock(spec=FileBasedFinalStateCursor) @pytest.mark.parametrize( @@ -165,7 +165,7 @@ def setUp(self): supported_sync_modes=[SyncMode.full_refresh], ) self._legacy_stream = DefaultFileBasedStream( - cursor=FileBasedNoopCursor(MagicMock()), + cursor=FileBasedFinalStateCursor(stream_config=MagicMock(), stream_namespace=None, message_repository=Mock()), config=FileBasedStreamConfig(name="stream", format=CsvFormat()), catalog_schema={}, stream_reader=MagicMock(), diff --git a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/stream_facade_builder.py b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/stream_facade_builder.py index 10c93aebb334..290bd26b9384 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/stream_facade_builder.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/stream_facade_builder.py @@ -21,7 +21,7 @@ from airbyte_cdk.sources.source import TState from airbyte_cdk.sources.streams import Stream from airbyte_cdk.sources.streams.concurrent.adapters import StreamFacade -from airbyte_cdk.sources.streams.concurrent.cursor import ConcurrentCursor, CursorField, NoopCursor +from airbyte_cdk.sources.streams.concurrent.cursor import ConcurrentCursor, CursorField, FinalStateCursor from 
airbyte_cdk.sources.streams.concurrent.state_converters.datetime_stream_state_converter import EpochValueConcurrentStreamStateConverter from airbyte_protocol.models import ConfiguredAirbyteStream from unit_tests.sources.file_based.scenarios.scenario_builder import SourceBuilder @@ -83,7 +83,7 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: None, ) if self._cursor_field - else NoopCursor(), + else FinalStateCursor(stream_name=stream.name, stream_namespace=stream.namespace, message_repository=self.message_repository), ) for stream, state in zip(self._streams, stream_states) ] diff --git a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/thread_based_concurrent_stream_scenarios.py b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/thread_based_concurrent_stream_scenarios.py index e1eb81445d4a..846e7af194ce 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/thread_based_concurrent_stream_scenarios.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/thread_based_concurrent_stream_scenarios.py @@ -4,7 +4,7 @@ import logging from airbyte_cdk.sources.message import InMemoryMessageRepository -from airbyte_cdk.sources.streams.concurrent.cursor import NoopCursor +from airbyte_cdk.sources.streams.concurrent.cursor import FinalStateCursor from airbyte_cdk.sources.streams.concurrent.default_stream import DefaultStream from airbyte_cdk.sources.streams.concurrent.partitions.record import Record from unit_tests.sources.file_based.scenarios.scenario_builder import TestScenarioBuilder @@ -15,6 +15,8 @@ InMemoryPartitionGenerator, ) +_message_repository = InMemoryMessageRepository() + _id_only_stream = DefaultStream( partition_generator=InMemoryPartitionGenerator( [InMemoryPartition("partition1", "stream1", None, [Record({"id": "1"}, "stream1"), Record({"id": "2"}, "stream1")])] @@ -30,7 +32,7 @@ primary_key=[], cursor_field=None, logger=logging.getLogger("test_logger"), - cursor=NoopCursor(), + cursor=FinalStateCursor(stream_name="stream1", stream_namespace=None, message_repository=_message_repository), ) _id_only_stream_with_slice_logger = DefaultStream( @@ -48,7 +50,7 @@ primary_key=[], cursor_field=None, logger=logging.getLogger("test_logger"), - cursor=NoopCursor(), + cursor=FinalStateCursor(stream_name="stream1", stream_namespace=None, message_repository=_message_repository), ) _id_only_stream_with_primary_key = DefaultStream( @@ -66,7 +68,7 @@ primary_key=["id"], cursor_field=None, logger=logging.getLogger("test_logger"), - cursor=NoopCursor(), + cursor=FinalStateCursor(stream_name="stream1", stream_namespace=None, message_repository=_message_repository), ) _id_only_stream_multiple_partitions = DefaultStream( @@ -87,7 +89,7 @@ primary_key=[], cursor_field=None, logger=logging.getLogger("test_logger"), - cursor=NoopCursor(), + cursor=FinalStateCursor(stream_name="stream1", stream_namespace=None, message_repository=_message_repository), ) _id_only_stream_multiple_partitions_concurrency_level_two = DefaultStream( @@ -108,7 +110,7 @@ primary_key=[], cursor_field=None, logger=logging.getLogger("test_logger"), - cursor=NoopCursor(), + cursor=FinalStateCursor(stream_name="stream1", stream_namespace=None, message_repository=_message_repository), ) _stream_raising_exception = DefaultStream( @@ -126,7 +128,7 @@ primary_key=[], cursor_field=None, logger=logging.getLogger("test_logger"), - cursor=NoopCursor(), + cursor=FinalStateCursor(stream_name="stream1", stream_namespace=None, 
message_repository=_message_repository), ) test_concurrent_cdk_single_stream = ( @@ -140,7 +142,7 @@ _id_only_stream, ] ) - .set_message_repository(InMemoryMessageRepository()) + .set_message_repository(_message_repository) ) .set_expected_records( [ @@ -193,7 +195,7 @@ _id_only_stream_with_primary_key, ] ) - .set_message_repository(InMemoryMessageRepository()) + .set_message_repository(_message_repository) ) .set_expected_records( [ @@ -253,11 +255,11 @@ primary_key=[], cursor_field=None, logger=logging.getLogger("test_logger"), - cursor=NoopCursor(), + cursor=FinalStateCursor(stream_name="stream1", stream_namespace=None, message_repository=_message_repository), ), ] ) - .set_message_repository(InMemoryMessageRepository()) + .set_message_repository(_message_repository) ) .set_expected_records( [ @@ -308,7 +310,7 @@ _stream_raising_exception, ] ) - .set_message_repository(InMemoryMessageRepository()) + .set_message_repository(_message_repository) ) .set_expected_records( [ @@ -346,7 +348,7 @@ _id_only_stream_multiple_partitions, ] ) - .set_message_repository(InMemoryMessageRepository()) + .set_message_repository(_message_repository) ) .set_expected_records( [ @@ -386,7 +388,7 @@ _id_only_stream_multiple_partitions_concurrency_level_two, ] ) - .set_message_repository(InMemoryMessageRepository()) + .set_message_repository(_message_repository) ) .set_expected_records( [ diff --git a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/thread_based_concurrent_stream_source_builder.py b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/thread_based_concurrent_stream_source_builder.py index 87a65ea6efd8..43c198916a67 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/thread_based_concurrent_stream_source_builder.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/thread_based_concurrent_stream_source_builder.py @@ -8,11 +8,11 @@ from airbyte_cdk.models import ConfiguredAirbyteCatalog, ConnectorSpecification, DestinationSyncMode, SyncMode from airbyte_cdk.sources.concurrent_source.concurrent_source import ConcurrentSource from airbyte_cdk.sources.concurrent_source.concurrent_source_adapter import ConcurrentSourceAdapter -from airbyte_cdk.sources.message import MessageRepository +from airbyte_cdk.sources.message import InMemoryMessageRepository, MessageRepository from airbyte_cdk.sources.streams import Stream from airbyte_cdk.sources.streams.concurrent.adapters import StreamFacade from airbyte_cdk.sources.streams.concurrent.availability_strategy import AbstractAvailabilityStrategy, StreamAvailability, StreamAvailable -from airbyte_cdk.sources.streams.concurrent.cursor import NoopCursor +from airbyte_cdk.sources.streams.concurrent.cursor import FinalStateCursor from airbyte_cdk.sources.streams.concurrent.default_stream import DefaultStream from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition from airbyte_cdk.sources.streams.concurrent.partitions.partition_generator import PartitionGenerator @@ -42,13 +42,14 @@ def __init__(self, streams: List[DefaultStream], message_repository: Optional[Me concurrent_source = ConcurrentSource.create(1, 1, streams[0]._logger, NeverLogSliceLogger(), message_repository) super().__init__(concurrent_source) self._streams = streams + self._message_repository = message_repository def check_connection(self, logger: logging.Logger, config: Mapping[str, Any]) -> Tuple[bool, Optional[Any]]: # Check is not verified because it is up to the source to implement this 
method return True, None def streams(self, config: Mapping[str, Any]) -> List[Stream]: - return [StreamFacade(s, LegacyStream(), NoopCursor(), NeverLogSliceLogger(), s._logger) for s in self._streams] + return [StreamFacade(s, LegacyStream(), FinalStateCursor(stream_name=s.name, stream_namespace=s.namespace, message_repository=self.message_repository), NeverLogSliceLogger(), s._logger) for s in self._streams] def spec(self, *args: Any, **kwargs: Any) -> ConnectorSpecification: return ConnectorSpecification(connectionSpecification={}) @@ -57,7 +58,7 @@ def read_catalog(self, catalog_path: str) -> ConfiguredAirbyteCatalog: return ConfiguredAirbyteCatalog( streams=[ ConfiguredAirbyteStream( - stream=StreamFacade(s, LegacyStream(), NoopCursor(), NeverLogSliceLogger(), s._logger).as_airbyte_stream(), + stream=StreamFacade(s, LegacyStream(), FinalStateCursor(stream_name=s.name, stream_namespace=s.namespace, message_repository=InMemoryMessageRepository()), NeverLogSliceLogger(), s._logger).as_airbyte_stream(), sync_mode=SyncMode.full_refresh, destination_sync_mode=DestinationSyncMode.overwrite, ) diff --git a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_default_stream.py b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_default_stream.py index fb40368d98b3..7b238e5d5e27 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_default_stream.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/test_default_stream.py @@ -5,8 +5,9 @@ from unittest.mock import Mock from airbyte_cdk.models import AirbyteStream, SyncMode +from airbyte_cdk.sources.message import InMemoryMessageRepository from airbyte_cdk.sources.streams.concurrent.availability_strategy import STREAM_AVAILABLE -from airbyte_cdk.sources.streams.concurrent.cursor import Cursor, NoopCursor +from airbyte_cdk.sources.streams.concurrent.cursor import Cursor, FinalStateCursor from airbyte_cdk.sources.streams.concurrent.default_stream import DefaultStream @@ -20,6 +21,7 @@ def setUp(self): self._cursor_field = None self._logger = Mock() self._cursor = Mock(spec=Cursor) + self._message_repository = InMemoryMessageRepository() self._stream = DefaultStream( self._partition_generator, self._name, @@ -28,7 +30,7 @@ def setUp(self): self._primary_key, self._cursor_field, self._logger, - NoopCursor(), + FinalStateCursor(stream_name=self._name, stream_namespace=None, message_repository=self._message_repository), ) def test_get_json_schema(self): @@ -89,7 +91,7 @@ def test_as_airbyte_stream_with_primary_key(self): ["id"], self._cursor_field, self._logger, - NoopCursor(), + FinalStateCursor(stream_name=self._name, stream_namespace=None, message_repository=self._message_repository), ) expected_airbyte_stream = AirbyteStream( @@ -121,7 +123,7 @@ def test_as_airbyte_stream_with_composite_primary_key(self): ["id_a", "id_b"], self._cursor_field, self._logger, - NoopCursor(), + FinalStateCursor(stream_name=self._name, stream_namespace=None, message_repository=self._message_repository), ) expected_airbyte_stream = AirbyteStream( @@ -153,7 +155,7 @@ def test_as_airbyte_stream_with_a_cursor(self): self._primary_key, "date", self._logger, - NoopCursor(), + FinalStateCursor(stream_name=self._name, stream_namespace=None, message_repository=self._message_repository), ) expected_airbyte_stream = AirbyteStream( @@ -178,7 +180,7 @@ def test_as_airbyte_stream_with_namespace(self): self._primary_key, self._cursor_field, self._logger, - NoopCursor(), + FinalStateCursor(stream_name=self._name, 
stream_namespace=None, message_repository=self._message_repository), namespace="test", ) expected_airbyte_stream = AirbyteStream( diff --git a/airbyte-cdk/python/unit_tests/sources/streams/test_stream_read.py b/airbyte-cdk/python/unit_tests/sources/streams/test_stream_read.py index 6f12585ca2b6..155bd1581535 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/test_stream_read.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/test_stream_read.py @@ -26,7 +26,7 @@ from airbyte_cdk.sources.message import InMemoryMessageRepository, MessageRepository from airbyte_cdk.sources.streams import Stream from airbyte_cdk.sources.streams.concurrent.adapters import StreamFacade -from airbyte_cdk.sources.streams.concurrent.cursor import Cursor, NoopCursor +from airbyte_cdk.sources.streams.concurrent.cursor import Cursor, FinalStateCursor from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition from airbyte_cdk.sources.streams.concurrent.partitions.record import Record from airbyte_cdk.sources.streams.core import StreamData @@ -105,8 +105,9 @@ def _stream(slice_to_partition_mapping, slice_logger, logger, message_repository return _MockStream(slice_to_partition_mapping) -def _concurrent_stream(slice_to_partition_mapping, slice_logger, logger, message_repository, cursor: Cursor = NoopCursor()): +def _concurrent_stream(slice_to_partition_mapping, slice_logger, logger, message_repository, cursor: Optional[Cursor] = None): stream = _stream(slice_to_partition_mapping, slice_logger, logger, message_repository) + cursor = cursor or FinalStateCursor(stream_name=stream.name, stream_namespace=stream.namespace, message_repository=message_repository) source = Mock() source._slice_logger = slice_logger source.message_repository = message_repository diff --git a/airbyte-cdk/python/unit_tests/sources/test_concurrent_source.py b/airbyte-cdk/python/unit_tests/sources/test_concurrent_source.py index ebd082a2b152..9ec0a293cdf6 100644 --- a/airbyte-cdk/python/unit_tests/sources/test_concurrent_source.py +++ b/airbyte-cdk/python/unit_tests/sources/test_concurrent_source.py @@ -12,7 +12,7 @@ from airbyte_cdk.sources.message import InMemoryMessageRepository, MessageRepository from airbyte_cdk.sources.streams.concurrent.abstract_stream import AbstractStream from airbyte_cdk.sources.streams.concurrent.availability_strategy import StreamAvailability, StreamAvailable, StreamUnavailable -from airbyte_cdk.sources.streams.concurrent.cursor import Cursor, NoopCursor +from airbyte_cdk.sources.streams.concurrent.cursor import Cursor, FinalStateCursor from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition from airbyte_cdk.sources.streams.concurrent.partitions.record import Record from airbyte_protocol.models import AirbyteStream @@ -42,10 +42,11 @@ def __init__( class _MockStream(AbstractStream): - def __init__(self, name: str, available: bool = True, json_schema: Dict[str, Any] = {}): + def __init__(self, name: str, message_repository: MessageRepository, available: bool = True, json_schema: Dict[str, Any] = {}): self._name = name self._available = available self._json_schema = json_schema + self._message_repository = message_repository def generate_partitions(self) -> Iterable[Partition]: yield _MockPartition(self._name) @@ -75,7 +76,7 @@ def log_stream_sync_configuration(self) -> None: @property def cursor(self) -> Cursor: - return NoopCursor() + return FinalStateCursor(stream_name=self._name, stream_namespace=None, message_repository=self._message_repository) class 
_MockPartition(Partition): @@ -103,8 +104,9 @@ def __hash__(self) -> int: def test_concurrent_source_reading_from_no_streams(): - stream = _MockStream("my_stream", False, {}) - source = _MockSource() + message_repository = InMemoryMessageRepository() + stream = _MockStream("my_stream", message_repository, False, {}) + source = _MockSource(message_repository=message_repository) messages = [] for m in source.read([stream]): messages.append(m) diff --git a/airbyte-cdk/python/unit_tests/sources/test_source_read.py b/airbyte-cdk/python/unit_tests/sources/test_source_read.py index dd08c4d18dac..8aaeed6b777e 100644 --- a/airbyte-cdk/python/unit_tests/sources/test_source_read.py +++ b/airbyte-cdk/python/unit_tests/sources/test_source_read.py @@ -27,7 +27,7 @@ from airbyte_cdk.sources.message import InMemoryMessageRepository from airbyte_cdk.sources.streams import Stream from airbyte_cdk.sources.streams.concurrent.adapters import StreamFacade -from airbyte_cdk.sources.streams.concurrent.cursor import NoopCursor +from airbyte_cdk.sources.streams.concurrent.cursor import FinalStateCursor from airbyte_cdk.sources.streams.core import StreamData from airbyte_cdk.utils import AirbyteTracedException from unit_tests.sources.streams.concurrent.scenarios.thread_based_concurrent_stream_source_builder import NeverLogSliceLogger @@ -409,9 +409,8 @@ def _init_sources(stream_slice_to_partitions, state, logger): def _init_source(stream_slice_to_partitions, state, logger, source): - cursor = NoopCursor() streams = [ - StreamFacade.create_from_stream(_MockStream(stream_slices, f"stream{i}"), source, logger, state, cursor) + StreamFacade.create_from_stream(_MockStream(stream_slices, f"stream{i}"), source, logger, state, FinalStateCursor(stream_name=f"stream{i}", stream_namespace=None, message_repository=InMemoryMessageRepository())) for i, stream_slices in enumerate(stream_slice_to_partitions) ] source.set_streams(streams) From a4dca3b45b7459076966beb0620ea547b70590c4 Mon Sep 17 00:00:00 2001 From: Ella Rohm-Ensing Date: Fri, 8 Mar 2024 14:21:46 -0800 Subject: [PATCH 147/172] CDK: assert >0 state messages per read (fix tests) (#35906) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What * After https://github.com/airbytehq/airbyte/pull/35905, we should be emitting a state message with every successful sync. However, there are a few tests that were too lenient and didn't actually represent _successful_ syncs. This PR fixes those cases and adds validation that we emit at least one state message per successful sync. ## How * Add an assertion that we get at least 1 state message for a successful sync * Fix some tests that previously "output 0 expected records" but actually errored silently - do not run them as read tests * Fix a test that failed silently due to lack of support for multi-format * Add a new test for syncs that output 0 records successfully ## 🚨 User Impact 🚨 None - test changes ## Pre-merge Actions *Expand the relevant checklist and delete the others.*
Updating the Python CDK ### Airbyter Before merging: - Pull Request description explains what problem it is solving - Code change is unit tested - Build and mypy checks pass - Smoke test the change on at least one affected connector - On Github: Run [this workflow](https://github.com/airbytehq/airbyte/actions/workflows/connectors_tests.yml), passing `--use-local-cdk --name=source-` as options - Locally: `airbyte-ci connectors --use-local-cdk --name=source- test` - PR is reviewed and approved After merging: - [Publish the CDK](https://github.com/airbytehq/airbyte/actions/workflows/publish-cdk-command-manually.yml) - The CDK does not follow proper semantic versioning. Choose minor if the change has significant user impact or is a breaking change. Choose patch otherwise. - Write a thoughtful changelog message so we know what was updated. - Merge the platform PR that was auto-created for updating the Connector Builder's CDK version - This step is optional if the change does not affect the connector builder or declarative connectors.
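For illustration, the validation described in the "How" section above boils down to a check like the following minimal sketch (the helper name `assert_sync_emitted_state` is hypothetical; the real assertion is added to `_verify_read_output` in the diff below):

```python
from typing import List

from airbyte_protocol.models import AirbyteMessage


def assert_sync_emitted_state(messages: List[AirbyteMessage]) -> None:
    # A successful sync is now expected to surface at least one STATE message.
    states = [m for m in messages if m.state]
    assert len(states) > 0, "No state messages emitted. Successful syncs should emit at least one stream state."
```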
--- .../file_based/scenarios/csv_scenarios.py | 165 ++++++++++++++---- .../file_based/test_file_based_scenarios.py | 19 +- .../sources/file_based/test_scenarios.py | 3 + 3 files changed, 144 insertions(+), 43 deletions(-) diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/csv_scenarios.py b/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/csv_scenarios.py index bba3977db2fd..6b68de2d4cb3 100644 --- a/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/csv_scenarios.py +++ b/airbyte-cdk/python/unit_tests/sources/file_based/scenarios/csv_scenarios.py @@ -467,30 +467,24 @@ ) ).build() -multi_format_analytics_scenario: TestScenario[InMemoryFilesSource] = ( +csv_analytics_scenario: TestScenario[InMemoryFilesSource] = ( TestScenarioBuilder[InMemoryFilesSource]() - .set_name("multi_format_analytics") + .set_name("csv_analytics") .set_config( { "streams": [ { "name": "stream1", "format": {"filetype": "csv"}, - "globs": ["file1.csv"], + "globs": ["a.csv"], "validation_policy": "Emit Record", }, { "name": "stream2", "format": {"filetype": "csv"}, - "globs": ["file2.csv"], - "validation_policy": "Emit Record", - }, - { - "name": "stream3", - "format": {"filetype": "jsonl"}, - "globs": ["file3.jsonl"], + "globs": ["b.csv"], "validation_policy": "Emit Record", - }, + } ] } ) @@ -498,17 +492,21 @@ FileBasedSourceBuilder() .set_files( { - "file1.csv": { - "contents": [], + "a.csv": { + "contents": [ + ("col1", "col2"), + ("val11a", "val12a"), + ("val21a", "val22a"), + ], "last_modified": "2023-06-05T03:54:07.000Z", }, - "file2.csv": { - "contents": [], - "last_modified": "2023-06-06T03:54:07.000Z", - }, - "file3.jsonl": { - "contents": [], - "last_modified": "2023-06-07T03:54:07.000Z", + "b.csv": { + "contents": [ + ("col1", "col2", "col3"), + ("val11b", "val12b", "val13b"), + ("val21b", "val22b", "val23b"), + ], + "last_modified": "2023-06-05T03:54:07.000Z", }, } ) @@ -521,7 +519,12 @@ "default_cursor_field": ["_ab_source_file_last_modified"], "json_schema": { "type": "object", - "properties": {}, + "properties": { + "col1": {"type": ["null", "string"]}, + "col2": {"type": ["null", "string"]}, + "_ab_source_file_last_modified": {"type": "string"}, + "_ab_source_file_url": {"type": "string"}, + }, }, "name": "stream1", "source_defined_cursor": True, @@ -531,30 +534,64 @@ "default_cursor_field": ["_ab_source_file_last_modified"], "json_schema": { "type": "object", - "properties": {}, + "properties": { + "col1": {"type": ["null", "string"]}, + "col2": {"type": ["null", "string"]}, + "col3": {"type": ["null", "string"]}, + "_ab_source_file_last_modified": {"type": "string"}, + "_ab_source_file_url": {"type": "string"}, + }, }, "name": "stream2", "source_defined_cursor": True, "supported_sync_modes": ["full_refresh", "incremental"], - }, - { - "default_cursor_field": ["_ab_source_file_last_modified"], - "json_schema": { - "type": "object", - "properties": {}, - }, - "name": "stream3", - "source_defined_cursor": True, - "supported_sync_modes": ["full_refresh", "incremental"], - }, + } ] } ) - .set_expected_records([]) + .set_expected_records([ + { + "data": { + "col1": "val11a", + "col2": "val12a", + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z", + "_ab_source_file_url": "a.csv", + }, + "stream": "stream1", + }, + { + "data": { + "col1": "val21a", + "col2": "val22a", + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z", + "_ab_source_file_url": "a.csv", + }, + "stream": "stream1", + }, + { + "data": { + "col1": "val11b", + "col2": "val12b", + 
"col3": "val13b", + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z", + "_ab_source_file_url": "b.csv", + }, + "stream": "stream2", + }, + { + "data": { + "col1": "val21b", + "col2": "val22b", + "col3": "val23b", + "_ab_source_file_last_modified": "2023-06-05T03:54:07.000000Z", + "_ab_source_file_url": "b.csv", + }, + "stream": "stream2", + }, + ]) .set_expected_analytics( [ AirbyteAnalyticsTraceMessage(type="file-cdk-csv-stream-count", value="2"), - AirbyteAnalyticsTraceMessage(type="file-cdk-jsonl-stream-count", value="1"), ] ) ).build() @@ -1450,7 +1487,6 @@ } ) .set_expected_discover_error(AirbyteTracedException, FileBasedSourceError.SCHEMA_INFERENCE_ERROR.value) - .set_expected_records([]) ).build() schemaless_csv_scenario: TestScenario[InMemoryFilesSource] = ( @@ -3009,6 +3045,61 @@ ] } ) - .set_expected_records([]) .set_expected_discover_error(AirbyteTracedException, FileBasedSourceError.SCHEMA_INFERENCE_ERROR.value) ).build() + +csv_no_records_scenario: TestScenario[InMemoryFilesSource] = ( + TestScenarioBuilder[InMemoryFilesSource]() + .set_name("csv_empty_no_records") + .set_config( + { + "streams": [ + { + "name": "stream1", + "globs": ["*"], + "validation_policy": "Emit Record", + "input_schema": '{"col1": "boolean", "col2": "string"}', + "format": { + "filetype": "csv", + "null_values": ["null"], + }, + } + ], + "start_date": "2023-06-04T03:54:07.000000Z", + } + ) + .set_source_builder( + FileBasedSourceBuilder() + .set_files( + { + "a.csv": { + "contents": [("col1", "col2")], # column headers, but no data rows + "last_modified": "2023-06-05T03:54:07.000Z", + } + } + ) + .set_file_type("csv") + ) + .set_expected_catalog( + { + "streams": [ + { + "default_cursor_field": ["_ab_source_file_last_modified"], + "json_schema": { + "type": "object", + "properties": { + "col1": {"type": "boolean"}, + "col2": {"type": "string"}, + "_ab_source_file_last_modified": {"type": "string"}, + "_ab_source_file_url": {"type": "string"}, + }, + }, + "name": "stream1", + "source_defined_cursor": True, + "supported_sync_modes": ["full_refresh", "incremental"], + } + ] + } + ) + .set_expected_records([]) +).build() diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/test_file_based_scenarios.py b/airbyte-cdk/python/unit_tests/sources/file_based/test_file_based_scenarios.py index 5a7a7b72ff9b..6969dfd0f39b 100644 --- a/airbyte-cdk/python/unit_tests/sources/file_based/test_file_based_scenarios.py +++ b/airbyte-cdk/python/unit_tests/sources/file_based/test_file_based_scenarios.py @@ -50,6 +50,7 @@ single_csv_no_input_state_scenario_concurrent, ) from unit_tests.sources.file_based.scenarios.csv_scenarios import ( + csv_analytics_scenario, csv_autogenerate_column_names_scenario, csv_custom_bool_values_scenario, csv_custom_delimiter_in_double_quotes_scenario, @@ -61,6 +62,7 @@ csv_multi_stream_scenario, csv_newline_in_values_not_quoted_scenario, csv_newline_in_values_quoted_value_scenario, + csv_no_records_scenario, csv_single_stream_scenario, csv_skip_after_header_scenario, csv_skip_before_and_after_header_scenario, @@ -75,7 +77,6 @@ invalid_csv_scenario, multi_csv_scenario, multi_csv_stream_n_file_exceeds_limit_for_inference, - multi_format_analytics_scenario, multi_stream_custom_format, schemaless_csv_multi_stream_scenario, schemaless_csv_scenario, @@ -152,7 +153,13 @@ ) from unit_tests.sources.file_based.test_scenarios import verify_check, verify_discover, verify_read, verify_spec -discover_scenarios = [ +discover_failure_scenarios = [ + earlier_csv_scenario, + 
empty_schema_inference_scenario, +] + +discover_success_scenarios = [ + csv_no_records_scenario, csv_multi_stream_scenario, csv_single_stream_scenario, invalid_csv_scenario, @@ -176,9 +183,7 @@ single_csv_file_is_skipped_if_same_modified_at_as_in_history, single_csv_file_is_synced_if_modified_at_is_more_recent_than_in_history, csv_custom_format_scenario, - earlier_csv_scenario, multi_stream_custom_format, - empty_schema_inference_scenario, single_parquet_scenario, multi_parquet_scenario, parquet_various_types_scenario, @@ -260,12 +265,14 @@ single_csv_no_input_state_scenario_concurrent, ] -read_scenarios = discover_scenarios + [ +discover_scenarios = discover_failure_scenarios + discover_success_scenarios + +read_scenarios = discover_success_scenarios + [ emit_record_scenario_multi_stream, emit_record_scenario_single_stream, skip_record_scenario_multi_stream, skip_record_scenario_single_stream, - multi_format_analytics_scenario, + csv_analytics_scenario, wait_for_rediscovery_scenario_multi_stream, wait_for_rediscovery_scenario_single_stream, ] diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/test_scenarios.py b/airbyte-cdk/python/unit_tests/sources/file_based/test_scenarios.py index 5785f13a65ef..84e94d766227 100644 --- a/airbyte-cdk/python/unit_tests/sources/file_based/test_scenarios.py +++ b/airbyte-cdk/python/unit_tests/sources/file_based/test_scenarios.py @@ -105,6 +105,7 @@ def _verify_read_output(output: EntrypointOutput, scenario: TestScenario[Abstrac expected_states = list(filter(lambda e: "data" not in e, expected_records)) states = list(filter(lambda r: r.state, records)) + assert len(states) > 0, "No state messages emitted. Successful syncs should emit at least one stream state." if hasattr(scenario.source, "cursor_cls") and issubclass(scenario.source.cursor_cls, AbstractConcurrentFileBasedCursor): # Only check the last state emitted because we don't know the order the others will be in. @@ -127,6 +128,8 @@ def _verify_read_output(output: EntrypointOutput, scenario: TestScenario[Abstrac def _verify_analytics(analytics: List[AirbyteMessage], expected_analytics: Optional[List[AirbyteAnalyticsTraceMessage]]) -> None: if expected_analytics: + assert len(analytics) == len(expected_analytics), \ + f"Number of actual analytics messages ({len(analytics)}) did not match expected ({len(expected_analytics)})" for actual, expected in zip(analytics, expected_analytics): actual_type, actual_value = actual.trace.analytics.type, actual.trace.analytics.value expected_type = expected.type From e66ec11534d69f714a499b4c0af3aaebd1bbff17 Mon Sep 17 00:00:00 2001 From: Davin Chia Date: Fri, 8 Mar 2024 15:07:19 -0800 Subject: [PATCH 148/172] Better Heartbeating External Documentation. (#35932) In, - airbytehq/airbyte-platform-internal@96baf5b - Better Destination Heartbeat Error Messages airbyte-platform-internal#11595 we improve our heartbeat error messages and point users to this external document. Here, we improve external documentation to help users understand what is happening and what they can do. 
--- docs/understanding-airbyte/heartbeats.md | 45 ++++++++++++++++++------ 1 file changed, 34 insertions(+), 11 deletions(-) diff --git a/docs/understanding-airbyte/heartbeats.md b/docs/understanding-airbyte/heartbeats.md index 88ce6f86d46d..4b578b2cd700 100644 --- a/docs/understanding-airbyte/heartbeats.md +++ b/docs/understanding-airbyte/heartbeats.md @@ -1,34 +1,57 @@ # Heartbeats -During a data synchronization, many things can go wrong and sometimes the fix is just to restart the synchronization. -Airbyte aims to make this restart as automated as possible and uses heartbeating mechanism in order to do that. -This performed on 2 differents component: the source and the destination. They have different logics which will be -explained bellow. +Many transient issues can occur when moving data, especially for long jobs. Often the fix is a simple restart. -## Source +Airbyte aims to make restarts as automated as possible and uses a heartbeating mechanism to do so. This is performed on 2 different components: the source and the destination. -### Heartbeating logic +Heartbeat errors are expected to be transient and should automatically resolve. If they do not, it is likely a sign of a more serious issue. + +## Known Causes + +Possible reasons for this issue: +1. Certain API sources take an unknown amount of time to generate asynchronous responses (e.g., Salesforce, Facebook, Amplitude). No workaround currently exists. +2. Certain API sources can be rate-limited for a time period longer than their configured threshold. Although Airbyte tries its best to handle this on a per-connector basis, rate limits are not always predictable. +3. Database sources can be slow to respond to a query. This can be due to a variety of reasons, including the size of the database, the complexity of the query, and the number of other queries being made to the database at the same time. + 1. The most common reason we see is using an un-indexed column as a cursor column in an incremental sync, or a dramatically under-provisioned database. +4. Destinations can be slow to respond to write requests. + 1. The most common reason we see here is destination resource availability vis-a-vis data volumes. + +In general, +* **Database Source and Destination errors are extremely rare**. Any issues are likely to be indicative of actual problems and need to be investigated. +* **API Source errors are uncommon but not unexpected**. This is especially true if an API source generates asynchronous responses or has rate limits. + +## Airbyte Cloud +Airbyte Cloud has heartbeat monitoring and alerting identical to Airbyte Open Source. + +If these issues show up on Airbyte Cloud, +1. Please read [Known Causes](#known-causes). In many cases, the issue is with the source, the destination, or the connection setup, and not with Airbyte. +2. Reach out to Airbyte Support for help. + +## Technical Details + +### Source +#### Heartbeating logic The platform considers both `RECORD` and `STATE` messages emitted by the source as source heartbeats. The Airbyte platform has a process which monitors when the last beat was sent and, if a threshold is reached, fails the synchronization attempt. The failure is attributed to the source, with a message saying `The source is unresponsive`. Internally, the error has a heartbeat timeout type, which is not displayed in the UI.
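To make the logic above concrete, here is a simplified, hypothetical sketch of the source heartbeat check (all names are illustrative, not the platform's actual implementation; the threshold constant mirrors the default documented in the Configuration section below):

```python
import time

HEARTBEAT_MAX_SECONDS_BETWEEN_MESSAGES = 3 * 60 * 60  # documented default: 3 hours


def source_heartbeat_expired(last_beat_at: float, now: float) -> bool:
    # Both RECORD and STATE messages from the source count as beats.
    return (now - last_beat_at) > HEARTBEAT_MAX_SECONDS_BETWEEN_MESSAGES


# Crossing the threshold is what fails the attempt with `The source is unresponsive`.
assert not source_heartbeat_expired(last_beat_at=time.monotonic(), now=time.monotonic())
```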
-### Configuration +#### Configuration The heartbeat can be configured using the file flags.yaml through 2 entries: -* `heartbeat-max-seconds-between-messages`: this configures the maximum time allowed between 2 messages. +* `heartbeat-max-seconds-between-messages`: this configures the maximum time allowed between 2 messages. The default is 3 hours. * `heartbeat.failSync`: Setting this to true will make syncs fail if a missed heartbeat is detected. If false, no sync will fail because of a missed heartbeat. The default value is true. -## Destination +### Destination -### Heartbeating logic +#### Heartbeating logic Adding a heartbeat to the destination, similar to the one at the source, is not straightforward since there isn't a constant stream of messages from the destination to the platform. Instead, we have implemented something that is more akin to a timeout. The platform monitors whether there has been a call to the destination that has taken more than a specified amount of time. If such a delay occurs, the platform considers the destination to have timed out. -### Configuration +#### Configuration The timeout can be configured using the file `flags.yaml` through 2 entries: * `destination-timeout-max-seconds`: If the platform detects a call to the destination exceeding the duration specified in this entry, it will consider the destination to have timed out. The default timeout value is 24 hours. * `destination-timeout.failSync`: If enabled (true by default), a detected destination timeout will cause the platform to fail the sync. If not, the platform will log a message and allow the sync to continue. When the platform fails a sync due to a destination timeout, the UI will display the message: `The destination is unresponsive`. From c8bec40cc3a64aa00acf7c6d07f65962218b97c2 Mon Sep 17 00:00:00 2001 From: Davin Chia Date: Fri, 8 Mar 2024 15:34:27 -0800 Subject: [PATCH 149/172] Improve Heartbeating External Docs. (#35934) Better explanation for: General philosophy. What a heartbeat error means. --- docs/understanding-airbyte/heartbeats.md | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/docs/understanding-airbyte/heartbeats.md b/docs/understanding-airbyte/heartbeats.md index 4b578b2cd700..ce2f3499f5a5 100644 --- a/docs/understanding-airbyte/heartbeats.md +++ b/docs/understanding-airbyte/heartbeats.md @@ -1,14 +1,18 @@ # Heartbeats -Many transient issues can occur when moving data, especially for long jobs. Often the fix is a simple restart. +Many transient issues can occur when moving data. One class of issues is an unresponsive Source or Destination. In this case, the fix is often a simple restart. -Airbyte aims to make restarts as automated as possible and uses a heartbeating mechanism to do so. This is performed on 2 different components: the source and the destination. +Airbyte aims to make restarts as automated as possible and uses a heartbeating mechanism to do so. -Heartbeat errors are expected to be transient and should automatically resolve. If they do not, it is likely a sign of a more serious issue. +Airbyte monitors for responses from the Sources and Destinations, interpreting these as 'heartbeats'. If the Source or Destination does not heartbeat within +a certain time frame, Airbyte triggers a heartbeat error and automatically restarts the job. + +Heartbeats are a final catch-all mechanism. Errors are expected to be transient and should automatically resolve. If they do not, it is likely a sign of a more serious issue.
+In these cases, Airbyte takes a more conservative approach. Airbyte restarts the job to avoid a seemingly endless job, and highlights to users the existence of a potential issue. -## Known Causes +## Known Heartbeat Error Causes -Possible reasons for this issue: +Possible reasons for a heartbeat error: 1. Certain API sources take an unknown amount of time to generate asynchronous responses (e.g., Salesforce, Facebook, Amplitude). No workaround currently exists. 2. Certain API sources can be rate-limited for a time period longer than their configured threshold. Although Airbyte tries its best to handle this on a per-connector basis, rate limits are not always predictable. 3. Database sources can be slow to respond to a query. This can be due to a variety of reasons, including the size of the database, the complexity of the query, and the number of other queries being made to the database at the same time. From 2ac524838750ca1bee590b7083f765159aad46b9 Mon Sep 17 00:00:00 2001 From: Ella Rohm-Ensing Date: Fri, 8 Mar 2024 16:08:59 -0800 Subject: [PATCH 150/172] Emit record counts in state messages for concurrent streams (#35907) Co-authored-by: brianjlai Co-authored-by: Brian Lai <51336873+brianjlai@users.noreply.github.com> --- airbyte-cdk/python/airbyte_cdk/entrypoint.py | 34 ++++- .../sources/connector_state_manager.py | 1 - .../python/airbyte_cdk/utils/message_utils.py | 17 +++ .../sources/file_based/test_scenarios.py | 34 ++++- .../test_mock_server_abstract_source.py | 11 ++ ...hread_based_concurrent_stream_scenarios.py | 2 +- .../python/unit_tests/test_entrypoint.py | 130 ++++++++++++++++-- .../unit_tests/utils/test_message_utils.py | 91 ++++++++++++ 8 files changed, 297 insertions(+), 23 deletions(-) create mode 100644 airbyte-cdk/python/airbyte_cdk/utils/message_utils.py create mode 100644 airbyte-cdk/python/unit_tests/utils/test_message_utils.py diff --git a/airbyte-cdk/python/airbyte_cdk/entrypoint.py b/airbyte-cdk/python/airbyte_cdk/entrypoint.py index 3852cb7e9890..423bda952f53 100644 --- a/airbyte-cdk/python/airbyte_cdk/entrypoint.py +++ b/airbyte-cdk/python/airbyte_cdk/entrypoint.py @@ -10,23 +10,24 @@ import socket import sys import tempfile +from collections import defaultdict from functools import wraps -from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Union +from typing import Any, DefaultDict, Iterable, List, Mapping, MutableMapping, Optional, Union from urllib.parse import urlparse import requests from airbyte_cdk.connector import TConfig from airbyte_cdk.exception_handler import init_uncaught_exception_handler from airbyte_cdk.logger import init_logger -from airbyte_cdk.models import AirbyteMessage, Status, Type -from airbyte_cdk.models.airbyte_protocol import ConnectorSpecification # type: ignore [attr-defined] +from airbyte_cdk.models import AirbyteMessage, FailureType, Status, Type +from airbyte_cdk.models.airbyte_protocol import AirbyteStateStats, ConnectorSpecification # type: ignore [attr-defined] from airbyte_cdk.sources import Source +from airbyte_cdk.sources.connector_state_manager import HashableStreamDescriptor from airbyte_cdk.sources.utils.schema_helpers import check_config_against_spec_or_exit, split_config -from airbyte_cdk.utils import is_cloud_environment +from airbyte_cdk.utils import is_cloud_environment, message_utils from airbyte_cdk.utils.airbyte_secrets_utils import get_secrets, update_secrets from airbyte_cdk.utils.constants import ENV_REQUEST_CACHE_PATH from airbyte_cdk.utils.traced_exception import AirbyteTracedException -from
airbyte_protocol.models import FailureType from requests import PreparedRequest, Response, Session logger = init_logger("airbyte") @@ -160,8 +161,27 @@ def read( if self.source.check_config_against_spec: self.validate_connection(source_spec, config) - yield from self.source.read(self.logger, config, catalog, state) - yield from self._emit_queued_messages(self.source) + stream_message_counter: DefaultDict[HashableStreamDescriptor, int] = defaultdict(int) + for message in self.source.read(self.logger, config, catalog, state): + yield self.handle_record_counts(message, stream_message_counter) + for message in self._emit_queued_messages(self.source): + yield self.handle_record_counts(message, stream_message_counter) + + @staticmethod + def handle_record_counts(message: AirbyteMessage, stream_message_count: DefaultDict[HashableStreamDescriptor, int]) -> AirbyteMessage: + if message.type == Type.RECORD: + stream_message_count[message_utils.get_stream_descriptor(message)] += 1 + + elif message.type == Type.STATE: + stream_descriptor = message_utils.get_stream_descriptor(message) + + # Set record count from the counter onto the state message + message.state.sourceStats = message.state.sourceStats or AirbyteStateStats() + message.state.sourceStats.recordCount = stream_message_count.get(stream_descriptor, 0) + + # Reset the counter + stream_message_count[stream_descriptor] = 0 + return message @staticmethod def validate_connection(source_spec: ConnectorSpecification, config: TConfig) -> None: diff --git a/airbyte-cdk/python/airbyte_cdk/sources/connector_state_manager.py b/airbyte-cdk/python/airbyte_cdk/sources/connector_state_manager.py index 9a85529d29d3..b53372aee4c9 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/connector_state_manager.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/connector_state_manager.py @@ -82,7 +82,6 @@ def create_state_message(self, stream_name: str, namespace: Optional[str]) -> Ai Generates an AirbyteMessage using the current per-stream state of a specified stream in either the per-stream or legacy format :param stream_name: The name of the stream for the message that is being created :param namespace: The namespace of the stream for the message that is being created - :param send_per_stream_state: Decides which state format the message should be generated as :return: The Airbyte state message to be emitted by the connector during a sync """ hashable_descriptor = HashableStreamDescriptor(name=stream_name, namespace=namespace) diff --git a/airbyte-cdk/python/airbyte_cdk/utils/message_utils.py b/airbyte-cdk/python/airbyte_cdk/utils/message_utils.py new file mode 100644 index 000000000000..410ce809ec8d --- /dev/null +++ b/airbyte-cdk/python/airbyte_cdk/utils/message_utils.py @@ -0,0 +1,17 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. 
+ +from airbyte_cdk.sources.connector_state_manager import HashableStreamDescriptor +from airbyte_protocol.models import AirbyteMessage, Type + + +def get_stream_descriptor(message: AirbyteMessage) -> HashableStreamDescriptor: + if message.type == Type.RECORD: + return HashableStreamDescriptor(name=message.record.stream, namespace=message.record.namespace) + elif message.type == Type.STATE: + if not message.state.stream or not message.state.stream.stream_descriptor: + raise ValueError("State message was not in per-stream state format, which is required for record counts.") + return HashableStreamDescriptor( + name=message.state.stream.stream_descriptor.name, namespace=message.state.stream.stream_descriptor.namespace + ) + else: + raise NotImplementedError(f"get_stream_descriptor is not implemented for message type '{message.type}'.") diff --git a/airbyte-cdk/python/unit_tests/sources/file_based/test_scenarios.py b/airbyte-cdk/python/unit_tests/sources/file_based/test_scenarios.py index 84e94d766227..6655af928da6 100644 --- a/airbyte-cdk/python/unit_tests/sources/file_based/test_scenarios.py +++ b/airbyte-cdk/python/unit_tests/sources/file_based/test_scenarios.py @@ -16,6 +16,7 @@ from airbyte_cdk.sources.file_based.stream.concurrent.cursor import AbstractConcurrentFileBasedCursor from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput from airbyte_cdk.test.entrypoint_wrapper import read as entrypoint_read +from airbyte_cdk.utils import message_utils from airbyte_cdk.utils.traced_exception import AirbyteTracedException from airbyte_protocol.models import AirbyteLogMessage, AirbyteMessage, ConfiguredAirbyteCatalog from unit_tests.sources.file_based.scenarios.scenario_builder import TestScenario @@ -71,7 +72,7 @@ def assert_exception(expected_exception: type[BaseException], output: Entrypoint def _verify_read_output(output: EntrypointOutput, scenario: TestScenario[AbstractSource]) -> None: - records, log_messages = output.records_and_state_messages, output.logs + records_and_state_messages, log_messages = output.records_and_state_messages, output.logs logs = [message.log for message in log_messages if message.log.level.value in scenario.log_levels] if scenario.expected_records is None: return @@ -85,7 +86,7 @@ def _verify_read_output(output: EntrypointOutput, scenario: TestScenario[Abstrac ), ) sorted_records = sorted( - filter(lambda r: r.record, records), + filter(lambda r: r.record, records_and_state_messages), key=lambda record: ",".join( f"{k}={v}" for k, v in sorted(record.record.data.items(), key=lambda items: (items[0], items[1])) if k != "emitted_at" ), @@ -104,8 +105,9 @@ def _verify_read_output(output: EntrypointOutput, scenario: TestScenario[Abstrac assert actual.record.stream == expected["stream"] expected_states = list(filter(lambda e: "data" not in e, expected_records)) - states = list(filter(lambda r: r.state, records)) + states = list(filter(lambda r: r.state, records_and_state_messages)) assert len(states) > 0, "No state messages emitted. Successful syncs should emit at least one stream state." + _verify_state_record_counts(sorted_records, states) if hasattr(scenario.source, "cursor_cls") and issubclass(scenario.source.cursor_cls, AbstractConcurrentFileBasedCursor): # Only check the last state emitted because we don't know the order the others will be in. 
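As context for the verification added in the next hunk: the entrypoint now tallies RECORD messages per stream and stamps each STATE message's `sourceStats.recordCount` with the tally accumulated since the previous state for that stream, then resets the counter. A minimal, hypothetical sketch of that bookkeeping (the function name is illustrative; `get_stream_descriptor` is the new helper introduced above):

```python
from collections import defaultdict
from typing import List, Tuple

from airbyte_cdk.sources.connector_state_manager import HashableStreamDescriptor
from airbyte_cdk.utils.message_utils import get_stream_descriptor
from airbyte_protocol.models import AirbyteMessage, Type


def state_record_counts(messages: List[AirbyteMessage]) -> List[Tuple[HashableStreamDescriptor, int]]:
    tally: defaultdict = defaultdict(int)
    counts = []
    for message in messages:
        if message.type == Type.RECORD:
            tally[get_stream_descriptor(message)] += 1  # one more record seen for this stream
        elif message.type == Type.STATE:
            descriptor = get_stream_descriptor(message)
            counts.append((descriptor, tally[descriptor]))  # what sourceStats.recordCount should hold
            tally[descriptor] = 0  # the counter resets after each state message
    return counts
```

Summing these per-stream counts across all state messages should therefore match the total records actually read, which is exactly what the test below asserts.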
@@ -126,9 +128,33 @@ def _verify_read_output(output: EntrypointOutput, scenario: TestScenario[Abstrac _verify_analytics(analytics, scenario.expected_analytics) +def _verify_state_record_counts(records: List[AirbyteMessage], states: List[AirbyteMessage]) -> None: + actual_record_counts = {} + for record in records: + stream_descriptor = message_utils.get_stream_descriptor(record) + actual_record_counts[stream_descriptor] = actual_record_counts.get(stream_descriptor, 0) + 1 + + state_record_count_sums = {} + for state_message in states: + stream_descriptor = message_utils.get_stream_descriptor(state_message) + state_record_count_sums[stream_descriptor] = ( + state_record_count_sums.get(stream_descriptor, 0) + + state_message.state.sourceStats.recordCount + ) + + for stream, actual_count in actual_record_counts.items(): + assert state_record_count_sums.get(stream) == actual_count + + # We can have extra keys in state_record_count_sums if we processed a stream and reported 0 records + extra_keys = state_record_count_sums.keys() - actual_record_counts.keys() + for stream in extra_keys: + assert state_record_count_sums[stream] == 0 + + def _verify_analytics(analytics: List[AirbyteMessage], expected_analytics: Optional[List[AirbyteAnalyticsTraceMessage]]) -> None: if expected_analytics: - assert len(analytics) == len(expected_analytics), \ + assert len(analytics) == len( + expected_analytics), \ f"Number of actual analytics messages ({len(analytics)}) did not match expected ({len(expected_analytics)})" for actual, expected in zip(analytics, expected_analytics): actual_type, actual_value = actual.trace.analytics.type, actual.trace.analytics.value diff --git a/airbyte-cdk/python/unit_tests/sources/mock_server_tests/test_mock_server_abstract_source.py b/airbyte-cdk/python/unit_tests/sources/mock_server_tests/test_mock_server_abstract_source.py index 4698f7ba8dad..0c04f57b9c15 100644 --- a/airbyte-cdk/python/unit_tests/sources/mock_server_tests/test_mock_server_abstract_source.py +++ b/airbyte-cdk/python/unit_tests/sources/mock_server_tests/test_mock_server_abstract_source.py @@ -205,6 +205,7 @@ def test_full_refresh_sync(self, http_mocker): validate_message_order([Type.RECORD, Type.RECORD, Type.STATE], actual_messages.records_and_state_messages) assert actual_messages.state_messages[0].state.stream.stream_descriptor.name == "users" assert actual_messages.state_messages[0].state.stream.stream_state == {"__ab_full_refresh_state_message": True} + assert actual_messages.state_messages[0].state.sourceStats.recordCount == 2 @HttpMocker() def test_full_refresh_with_slices(self, http_mocker): @@ -232,6 +233,7 @@ def test_full_refresh_with_slices(self, http_mocker): validate_message_order([Type.RECORD, Type.RECORD, Type.RECORD, Type.RECORD, Type.STATE], actual_messages.records_and_state_messages) assert actual_messages.state_messages[0].state.stream.stream_descriptor.name == "dividers" assert actual_messages.state_messages[0].state.stream.stream_state == {"__ab_full_refresh_state_message": True} + assert actual_messages.state_messages[0].state.sourceStats.recordCount == 4 @freezegun.freeze_time(_NOW) @@ -264,8 +266,10 @@ def test_incremental_sync(self, http_mocker): validate_message_order([Type.RECORD, Type.RECORD, Type.RECORD, Type.STATE, Type.RECORD, Type.RECORD, Type.STATE], actual_messages.records_and_state_messages) assert actual_messages.state_messages[0].state.stream.stream_descriptor.name == "planets" assert actual_messages.state_messages[0].state.stream.stream_state == {"created_at": 
last_record_date_0} + assert actual_messages.state_messages[0].state.sourceStats.recordCount == 3 assert actual_messages.state_messages[1].state.stream.stream_descriptor.name == "planets" assert actual_messages.state_messages[1].state.stream.stream_state == {"created_at": last_record_date_1} + assert actual_messages.state_messages[1].state.sourceStats.recordCount == 2 @HttpMocker() def test_incremental_running_as_full_refresh(self, http_mocker): @@ -295,6 +299,7 @@ def test_incremental_running_as_full_refresh(self, http_mocker): validate_message_order([Type.RECORD, Type.RECORD, Type.RECORD, Type.RECORD, Type.RECORD, Type.STATE], actual_messages.records_and_state_messages) assert actual_messages.state_messages[0].state.stream.stream_descriptor.name == "planets" assert actual_messages.state_messages[0].state.stream.stream_state == {"created_at": last_record_date_1} + assert actual_messages.state_messages[0].state.sourceStats.recordCount == 5 @HttpMocker() def test_legacy_incremental_sync(self, http_mocker): @@ -324,8 +329,10 @@ def test_legacy_incremental_sync(self, http_mocker): validate_message_order([Type.RECORD, Type.RECORD, Type.RECORD, Type.STATE, Type.RECORD, Type.RECORD, Type.STATE], actual_messages.records_and_state_messages) assert actual_messages.state_messages[0].state.stream.stream_descriptor.name == "legacies" assert actual_messages.state_messages[0].state.stream.stream_state == {"created_at": last_record_date_0} + assert actual_messages.state_messages[0].state.sourceStats.recordCount == 3 assert actual_messages.state_messages[1].state.stream.stream_descriptor.name == "legacies" assert actual_messages.state_messages[1].state.stream.stream_state == {"created_at": last_record_date_1} + assert actual_messages.state_messages[1].state.sourceStats.recordCount == 2 @freezegun.freeze_time(_NOW) @@ -395,12 +402,16 @@ def test_incremental_and_full_refresh_streams(self, http_mocker): ], actual_messages.records_and_state_messages) assert actual_messages.state_messages[0].state.stream.stream_descriptor.name == "users" assert actual_messages.state_messages[0].state.stream.stream_state == {"__ab_full_refresh_state_message": True} + assert actual_messages.state_messages[0].state.sourceStats.recordCount == 2 assert actual_messages.state_messages[1].state.stream.stream_descriptor.name == "planets" assert actual_messages.state_messages[1].state.stream.stream_state == {"created_at": last_record_date_0} + assert actual_messages.state_messages[1].state.sourceStats.recordCount == 3 assert actual_messages.state_messages[2].state.stream.stream_descriptor.name == "planets" assert actual_messages.state_messages[2].state.stream.stream_state == {"created_at": last_record_date_1} + assert actual_messages.state_messages[2].state.sourceStats.recordCount == 2 assert actual_messages.state_messages[3].state.stream.stream_descriptor.name == "dividers" assert actual_messages.state_messages[3].state.stream.stream_state == {"__ab_full_refresh_state_message": True} + assert actual_messages.state_messages[3].state.sourceStats.recordCount == 4 def emits_successful_sync_status_messages(status_messages: List[AirbyteStreamStatus]) -> bool: diff --git a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/thread_based_concurrent_stream_scenarios.py b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/thread_based_concurrent_stream_scenarios.py index 846e7af194ce..3164f9ab565d 100644 --- 
a/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/thread_based_concurrent_stream_scenarios.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/concurrent/scenarios/thread_based_concurrent_stream_scenarios.py @@ -255,7 +255,7 @@ primary_key=[], cursor_field=None, logger=logging.getLogger("test_logger"), - cursor=FinalStateCursor(stream_name="stream1", stream_namespace=None, message_repository=_message_repository), + cursor=FinalStateCursor(stream_name="stream2", stream_namespace=None, message_repository=_message_repository), ), ] ) diff --git a/airbyte-cdk/python/unit_tests/test_entrypoint.py b/airbyte-cdk/python/unit_tests/test_entrypoint.py index 7451a320d404..5384c1639e62 100644 --- a/airbyte-cdk/python/unit_tests/test_entrypoint.py +++ b/airbyte-cdk/python/unit_tests/test_entrypoint.py @@ -4,6 +4,7 @@ import os from argparse import Namespace +from collections import defaultdict from copy import deepcopy from typing import Any, List, Mapping, MutableMapping, Union from unittest import mock @@ -20,14 +21,25 @@ AirbyteControlMessage, AirbyteMessage, AirbyteRecordMessage, + AirbyteStateBlob, + AirbyteStateMessage, + AirbyteStateType, AirbyteStream, + AirbyteStreamState, + AirbyteStreamStatus, + AirbyteStreamStatusTraceMessage, + AirbyteTraceMessage, ConnectorSpecification, OrchestratorType, Status, + StreamDescriptor, SyncMode, + TraceType, Type, ) +from airbyte_cdk.models.airbyte_protocol import AirbyteStateStats from airbyte_cdk.sources import Source +from airbyte_cdk.sources.connector_state_manager import HashableStreamDescriptor from airbyte_cdk.utils import AirbyteTracedException @@ -94,14 +106,14 @@ def test_airbyte_entrypoint_init(mocker): ("check", {"config": "config_path"}, {"command": "check", "config": "config_path", "debug": False}), ("discover", {"config": "config_path", "debug": ""}, {"command": "discover", "config": "config_path", "debug": True}), ( - "read", - {"config": "config_path", "catalog": "catalog_path", "state": "None"}, - {"command": "read", "config": "config_path", "catalog": "catalog_path", "state": "None", "debug": False}, + "read", + {"config": "config_path", "catalog": "catalog_path", "state": "None"}, + {"command": "read", "config": "config_path", "catalog": "catalog_path", "state": "None", "debug": False}, ), ( - "read", - {"config": "config_path", "catalog": "catalog_path", "state": "state_path", "debug": ""}, - {"command": "read", "config": "config_path", "catalog": "catalog_path", "state": "state_path", "debug": True}, + "read", + {"config": "config_path", "catalog": "catalog_path", "state": "state_path", "debug": ""}, + {"command": "read", "config": "config_path", "catalog": "catalog_path", "state": "state_path", "debug": True}, ), ], ) @@ -169,9 +181,9 @@ def config_mock(mocker, request): ({"username": "fake"}, {"type": "object", "properties": {"user": {"type": "string"}}}, True), ({"username": "fake"}, {"type": "object", "properties": {"user": {"type": "string", "airbyte_secret": True}}}, True), ( - {"username": "fake", "_limit": 22}, - {"type": "object", "properties": {"username": {"type": "string"}}, "additionalProperties": False}, - True, + {"username": "fake", "_limit": 22}, + {"type": "object", "properties": {"username": {"type": "string"}}, "additionalProperties": False}, + True, ), ], indirect=["config_mock"], @@ -248,7 +260,7 @@ def test_run_read(entrypoint: AirbyteEntrypoint, mocker, spec_mock, config_mock) def test_given_message_emitted_during_config_when_read_then_emit_message_before_next_steps( - entrypoint: 
AirbyteEntrypoint, mocker, spec_mock, config_mock + entrypoint: AirbyteEntrypoint, mocker, spec_mock, config_mock ): parsed_args = Namespace(command="read", config="config_path", state="statepath", catalog="catalogpath") mocker.patch.object(MockSource, "read_catalog", side_effect=ValueError) @@ -309,3 +321,101 @@ def test_filter_internal_requests(deployment_mode, url, expected_error): else: actual_response = session.send(request=prepared_request) assert isinstance(actual_response, requests.Response) + + +@pytest.mark.parametrize( + "incoming_message, stream_message_count, expected_message, expected_records_by_stream", + [ + pytest.param( + AirbyteMessage(type=Type.RECORD, record=AirbyteRecordMessage(stream="customers", data={"id": "12345"}, emitted_at=1)), + {HashableStreamDescriptor(name="customers"): 100}, + AirbyteMessage(type=Type.RECORD, record=AirbyteRecordMessage(stream="customers", data={"id": "12345"}, emitted_at=1)), + {HashableStreamDescriptor(name="customers"): 101}, + id="test_handle_record_message", + ), + pytest.param( + AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(type=AirbyteStateType.STREAM, stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="customers"), stream_state=AirbyteStateBlob(updated_at="2024-02-02")))), + {HashableStreamDescriptor(name="customers"): 100}, + AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(type=AirbyteStateType.STREAM, stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="customers"), stream_state=AirbyteStateBlob(updated_at="2024-02-02")), + sourceStats=AirbyteStateStats(recordCount=100.0))), + {HashableStreamDescriptor(name="customers"): 0}, + id="test_handle_state_message", + ), + pytest.param( + AirbyteMessage(type=Type.RECORD, record=AirbyteRecordMessage(stream="customers", data={"id": "12345"}, emitted_at=1)), + defaultdict(int), + AirbyteMessage(type=Type.RECORD, record=AirbyteRecordMessage(stream="customers", data={"id": "12345"}, emitted_at=1)), + {HashableStreamDescriptor(name="customers"): 1}, + id="test_handle_first_record_message", + ), + pytest.param( + AirbyteMessage(type=Type.TRACE, trace=AirbyteTraceMessage(type=TraceType.STREAM_STATUS, + stream_status=AirbyteStreamStatusTraceMessage( + stream_descriptor=StreamDescriptor(name="customers"), + status=AirbyteStreamStatus.COMPLETE), emitted_at=1)), + {HashableStreamDescriptor(name="customers"): 5}, + AirbyteMessage(type=Type.TRACE, trace=AirbyteTraceMessage(type=TraceType.STREAM_STATUS, + stream_status=AirbyteStreamStatusTraceMessage( + stream_descriptor=StreamDescriptor(name="customers"), + status=AirbyteStreamStatus.COMPLETE), emitted_at=1)), + {HashableStreamDescriptor(name="customers"): 5}, + id="test_handle_other_message_type", + ), + pytest.param( + AirbyteMessage(type=Type.RECORD, record=AirbyteRecordMessage(stream="others", data={"id": "12345"}, emitted_at=1)), + {HashableStreamDescriptor(name="customers"): 100, HashableStreamDescriptor(name="others"): 27}, + AirbyteMessage(type=Type.RECORD, record=AirbyteRecordMessage(stream="others", data={"id": "12345"}, emitted_at=1)), + {HashableStreamDescriptor(name="customers"): 100, HashableStreamDescriptor(name="others"): 28}, + id="test_handle_record_message_for_other_stream", + ), + pytest.param( + AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(type=AirbyteStateType.STREAM, stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="others"), stream_state=AirbyteStateBlob(updated_at="2024-02-02")))), + {HashableStreamDescriptor(name="customers"): 100, 
HashableStreamDescriptor(name="others"): 27}, + AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(type=AirbyteStateType.STREAM, stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="others"), stream_state=AirbyteStateBlob(updated_at="2024-02-02")), + sourceStats=AirbyteStateStats(recordCount=27.0))), + {HashableStreamDescriptor(name="customers"): 100, HashableStreamDescriptor(name="others"): 0}, + id="test_handle_state_message_for_other_stream", + ), + pytest.param( + AirbyteMessage(type=Type.RECORD, + record=AirbyteRecordMessage(stream="customers", namespace="public", data={"id": "12345"}, emitted_at=1)), + {HashableStreamDescriptor(name="customers", namespace="public"): 100}, + AirbyteMessage(type=Type.RECORD, + record=AirbyteRecordMessage(stream="customers", namespace="public", data={"id": "12345"}, emitted_at=1)), + {HashableStreamDescriptor(name="customers", namespace="public"): 101}, + id="test_handle_record_message_with_descriptor", + ), + pytest.param( + AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(type=AirbyteStateType.STREAM, stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="customers", namespace="public"), + stream_state=AirbyteStateBlob(updated_at="2024-02-02")))), + {HashableStreamDescriptor(name="customers", namespace="public"): 100}, + AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(type=AirbyteStateType.STREAM, stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="customers", namespace="public"), + stream_state=AirbyteStateBlob(updated_at="2024-02-02")), sourceStats=AirbyteStateStats(recordCount=100.0))), + {HashableStreamDescriptor(name="customers", namespace="public"): 0}, + id="test_handle_state_message_with_descriptor", + ), + pytest.param( + AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(type=AirbyteStateType.STREAM, stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="others", namespace="public"), + stream_state=AirbyteStateBlob(updated_at="2024-02-02")))), + {HashableStreamDescriptor(name="customers", namespace="public"): 100}, + AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(type=AirbyteStateType.STREAM, stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="others", namespace="public"), + stream_state=AirbyteStateBlob(updated_at="2024-02-02")), sourceStats=AirbyteStateStats(recordCount=0.0))), + {HashableStreamDescriptor(name="customers", namespace="public"): 100, + HashableStreamDescriptor(name="others", namespace="public"): 0}, + id="test_handle_state_message_no_records", + ), + ] +) +def test_handle_record_counts(incoming_message, stream_message_count, expected_message, expected_records_by_stream): + entrypoint = AirbyteEntrypoint(source=MockSource()) + actual_record = entrypoint.handle_record_counts(message=incoming_message, stream_message_count=stream_message_count) + assert actual_record == expected_message + assert stream_message_count == expected_records_by_stream diff --git a/airbyte-cdk/python/unit_tests/utils/test_message_utils.py b/airbyte-cdk/python/unit_tests/utils/test_message_utils.py new file mode 100644 index 000000000000..496360ea46f3 --- /dev/null +++ b/airbyte-cdk/python/unit_tests/utils/test_message_utils.py @@ -0,0 +1,91 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. 
+ +import pytest +from airbyte_cdk.sources.connector_state_manager import HashableStreamDescriptor +from airbyte_cdk.utils.message_utils import get_stream_descriptor +from airbyte_protocol.models import ( + AirbyteControlConnectorConfigMessage, + AirbyteControlMessage, + AirbyteMessage, + AirbyteRecordMessage, + AirbyteStateBlob, + AirbyteStateMessage, + AirbyteStateStats, + AirbyteStateType, + AirbyteStreamState, + OrchestratorType, + StreamDescriptor, + Type, +) + + +def test_get_record_message_stream_descriptor(): + message = AirbyteMessage( + type=Type.RECORD, + record=AirbyteRecordMessage( + stream="test_stream", + namespace="test_namespace", + data={"id": "12345"}, + emitted_at=1, + ), + ) + expected_descriptor = HashableStreamDescriptor(name="test_stream", namespace="test_namespace") + assert get_stream_descriptor(message) == expected_descriptor + + +def test_get_record_message_stream_descriptor_no_namespace(): + message = AirbyteMessage( + type=Type.RECORD, + record=AirbyteRecordMessage( + stream="test_stream", data={"id": "12345"}, emitted_at=1 + ), + ) + expected_descriptor = HashableStreamDescriptor(name="test_stream", namespace=None) + assert get_stream_descriptor(message) == expected_descriptor + + +def test_get_state_message_stream_descriptor(): + message = AirbyteMessage( + type=Type.STATE, + state=AirbyteStateMessage( + type=AirbyteStateType.STREAM, + stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor( + name="test_stream", namespace="test_namespace" + ), + stream_state=AirbyteStateBlob(updated_at="2024-02-02"), + ), + sourceStats=AirbyteStateStats(recordCount=27.0), + ), + ) + expected_descriptor = HashableStreamDescriptor(name="test_stream", namespace="test_namespace") + assert get_stream_descriptor(message) == expected_descriptor + + +def test_get_state_message_stream_descriptor_no_namespace(): + message = AirbyteMessage( + type=Type.STATE, + state=AirbyteStateMessage( + type=AirbyteStateType.STREAM, + stream=AirbyteStreamState( + stream_descriptor=StreamDescriptor(name="test_stream"), + stream_state=AirbyteStateBlob(updated_at="2024-02-02"), + ), + sourceStats=AirbyteStateStats(recordCount=27.0), + ), + ) + expected_descriptor = HashableStreamDescriptor(name="test_stream", namespace=None) + assert get_stream_descriptor(message) == expected_descriptor + + +def test_get_other_message_stream_descriptor_fails(): + message = AirbyteMessage( + type=Type.CONTROL, + control=AirbyteControlMessage( + type=OrchestratorType.CONNECTOR_CONFIG, + emitted_at=10, + connectorConfig=AirbyteControlConnectorConfigMessage(config={"any config": "a config value"}), + ), + ) + with pytest.raises(NotImplementedError): + get_stream_descriptor(message) From 0f2685be4f2ce2705a384a1091b77743638001eb Mon Sep 17 00:00:00 2001 From: brianjlai Date: Sat, 9 Mar 2024 00:29:49 +0000 Subject: [PATCH 151/172] =?UTF-8?q?=F0=9F=A4=96=20Bump=20minor=20version?= =?UTF-8?q?=20of=20Python=20CDK?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- airbyte-cdk/python/.bumpversion.cfg | 2 +- airbyte-cdk/python/CHANGELOG.md | 3 +++ airbyte-cdk/python/Dockerfile | 4 ++-- airbyte-cdk/python/setup.py | 2 +- 4 files changed, 7 insertions(+), 4 deletions(-) diff --git a/airbyte-cdk/python/.bumpversion.cfg b/airbyte-cdk/python/.bumpversion.cfg index e23d05b9a01f..7c7b62120931 100644 --- a/airbyte-cdk/python/.bumpversion.cfg +++ b/airbyte-cdk/python/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.68.4 +current_version = 0.69.0 commit = False 
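The new `unit_tests/utils/test_message_utils.py` suite above encodes a narrow contract for `get_stream_descriptor`: record and state messages yield a `HashableStreamDescriptor` (with `namespace=None` when absent), and every other message type raises `NotImplementedError`. A sketch of a conforming implementation, inferred from those tests rather than taken from the CDK source, could look like:

```python
# Sketch inferred from the tests in test_message_utils.py above;
# not necessarily the actual airbyte_cdk.utils.message_utils source.
from airbyte_cdk.sources.connector_state_manager import HashableStreamDescriptor
from airbyte_protocol.models import AirbyteMessage, Type


def get_stream_descriptor(message: AirbyteMessage) -> HashableStreamDescriptor:
    if message.type == Type.RECORD:
        # Records carry the stream name and (optional) namespace directly.
        return HashableStreamDescriptor(
            name=message.record.stream, namespace=message.record.namespace
        )
    if message.type == Type.STATE:
        # Per-stream state messages embed a StreamDescriptor.
        descriptor = message.state.stream.stream_descriptor
        return HashableStreamDescriptor(name=descriptor.name, namespace=descriptor.namespace)
    raise NotImplementedError(f"Stream descriptor is not available for message type {message.type}")
```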
[bumpversion:file:setup.py] diff --git a/airbyte-cdk/python/CHANGELOG.md b/airbyte-cdk/python/CHANGELOG.md index 3e0ad06f11c5..057a14e08e3b 100644 --- a/airbyte-cdk/python/CHANGELOG.md +++ b/airbyte-cdk/python/CHANGELOG.md @@ -1,5 +1,8 @@ # Changelog +## 0.69.0 +Include recordCount in stream state messages and final state message for full refresh syncs + ## 0.68.4 low-code: update cartesian stream slice to emit typed StreamSlice diff --git a/airbyte-cdk/python/Dockerfile b/airbyte-cdk/python/Dockerfile index fdf812003904..dc0a81963486 100644 --- a/airbyte-cdk/python/Dockerfile +++ b/airbyte-cdk/python/Dockerfile @@ -10,7 +10,7 @@ RUN apk --no-cache upgrade \ && apk --no-cache add tzdata build-base # install airbyte-cdk -RUN pip install --prefix=/install airbyte-cdk==0.68.4 +RUN pip install --prefix=/install airbyte-cdk==0.69.0 # build a clean environment FROM base @@ -32,5 +32,5 @@ ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] # needs to be the same as CDK -LABEL io.airbyte.version=0.68.4 +LABEL io.airbyte.version=0.69.0 LABEL io.airbyte.name=airbyte/source-declarative-manifest diff --git a/airbyte-cdk/python/setup.py b/airbyte-cdk/python/setup.py index af776b923b92..90400d67ef96 100644 --- a/airbyte-cdk/python/setup.py +++ b/airbyte-cdk/python/setup.py @@ -36,7 +36,7 @@ name="airbyte-cdk", # The version of the airbyte-cdk package is used at runtime to validate manifests. That validation must be # updated if our semver format changes such as using release candidate versions. - version="0.68.4", + version="0.69.0", description="A framework for writing Airbyte Connectors.", long_description=README, long_description_content_type="text/markdown", From d6454f8e74750a841f5455dd061e538054125c46 Mon Sep 17 00:00:00 2001 From: Bindi Pankhudi Date: Sun, 10 Mar 2024 18:09:56 -0700 Subject: [PATCH 152/172] added integration instructions to getting started (#35861) Co-authored-by: bindipankhudi --- docs/using-airbyte/pyairbyte/getting-started.mdx | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/docs/using-airbyte/pyairbyte/getting-started.mdx b/docs/using-airbyte/pyairbyte/getting-started.mdx index d8ece1301b4a..9289bc7ec92f 100644 --- a/docs/using-airbyte/pyairbyte/getting-started.mdx +++ b/docs/using-airbyte/pyairbyte/getting-started.mdx @@ -48,6 +48,7 @@ for name, records in result.streams.items(): * [Shopify](https://github.com/airbytehq/quickstarts/blob/main/pyairbyte_notebooks/PyAirbyte_Shopify_Demo.ipynb) * [GitHub](https://github.com/airbytehq/quickstarts/blob/main/pyairbyte_notebooks/PyAirbyte_Github_Incremental_Demo.ipynb) * [Postgres (cache)](https://github.com/airbytehq/quickstarts/blob/main/pyairbyte_notebooks/PyAirbyte_Postgres_Custom_Cache_Demo.ipynb) +* [RAG With Langchain](https://github.com/airbytehq/quickstarts/blob/main/pyairbyte_notebooks/PyAirbyte_Document_Creation_RAG_with_Langchain_Demo.ipynb) ## API Reference @@ -70,3 +71,11 @@ PyAirbyte is a python library that can be run in any context that supports Pytho The following connectors are available: + +## LangChain integration + +For those interested in using PyAirbyte to drive your LLM use cases, we provide two ways to integrate with LangChain: + +* **LangChain native integration**: This approach requires you to utilize the `langchain-airbyte` integration package. 
Refer to [LangChain Docs](https://python.langchain.com/docs/integrations/document_loaders/airbyte) or watch this [YouTube video](https://www.youtube.com/watch?v=zQU_1sCLSMU) to get started. + +* **PyAirbyte-centric integration**: You can also directly use PyAirbyte to create documents. With this approach, you do not need to import `langchain-airbyte`. Refer to [PyAirbyte Document Creation Demo](https://github.com/airbytehq/quickstarts/blob/main/pyairbyte_notebooks/PyAirbyte_Document_Creation_RAG_with_Langchain_Demo.ipynb) to get started. From 0bd29ba9f8731dd6ae1f18e1986fb6686d606088 Mon Sep 17 00:00:00 2001 From: Augustin Date: Mon, 11 Mar 2024 09:50:33 +0100 Subject: [PATCH 153/172] live-tests: pass connection id (#35922) --- .github/workflows/airbyte-ci-tests.yml | 1 - .../regression_tests => }/.gitignore | 0 airbyte-ci/connectors/live-tests/README.md | 55 +- airbyte-ci/connectors/live-tests/poetry.lock | 1062 ++++++++++++++++- .../connectors/live-tests/pyproject.toml | 12 +- .../live-tests/src/live_tests/__init__.py | 1 + .../commons/backends/base_backend.py | 2 +- .../commons/backends/file_backend.py | 2 +- .../commons/connection_objects_retrieval.py | 125 ++ .../live_tests/commons/connector_runner.py | 1 + .../src/live_tests/commons/models.py | 22 +- .../src/live_tests/commons/utils.py | 24 +- .../live-tests/src/live_tests/debug/cli.py | 67 +- .../live_tests/regression_tests/conftest.py | 145 ++- .../live_tests/regression_tests/pytest.ini | 1 + .../regression_tests/test_expected_records.py | 8 +- .../tests/backends/test_file_backend.py | 5 +- .../pipelines/airbyte_ci/test/__init__.py | 3 +- 18 files changed, 1438 insertions(+), 98 deletions(-) rename airbyte-ci/connectors/live-tests/{src/live_tests/regression_tests => }/.gitignore (100%) create mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/commons/connection_objects_retrieval.py diff --git a/.github/workflows/airbyte-ci-tests.yml b/.github/workflows/airbyte-ci-tests.yml index a3b2461b07fe..37240d132791 100644 --- a/.github/workflows/airbyte-ci-tests.yml +++ b/.github/workflows/airbyte-ci-tests.yml @@ -38,7 +38,6 @@ jobs: - airbyte-ci/connectors/connector_ops/** - airbyte-ci/connectors/connectors_qa/** - airbyte-ci/connectors/ci_credentials/** - - airbyte-ci/connectors/live-tests/** - airbyte-ci/connectors/metadata_service/lib/** - airbyte-ci/connectors/metadata_service/orchestrator/** - airbyte-integrations/bases/connector-acceptance-test/** diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/.gitignore b/airbyte-ci/connectors/live-tests/.gitignore similarity index 100% rename from airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/.gitignore rename to airbyte-ci/connectors/live-tests/.gitignore diff --git a/airbyte-ci/connectors/live-tests/README.md b/airbyte-ci/connectors/live-tests/README.md index 0723dd868f39..f057330dd5c0 100644 --- a/airbyte-ci/connectors/live-tests/README.md +++ b/airbyte-ci/connectors/live-tests/README.md @@ -21,22 +21,23 @@ pipx install . --force --editable ### `debug` ``` -Usage: live-tests debug [OPTIONS] COMMAND +Usage: live-tests debug [OPTIONS] {check|discover|read|read-with-state|spec} Run a specific command on one or multiple connectors and persists the outputs to local storage. Options: + --connection-id TEXT + --config-path FILE + --catalog-path FILE + --state-path FILE -c, --connector-image TEXT Docker image name of the connector to debug - (e.g. `source-faker:latest`, `source- - faker:dev`) [required] + (e.g. 
`airbyte/source-faker:latest`, + `airbyte/source-faker:dev`) [required] -o, --output-directory DIRECTORY Directory in which connector output and test - results should be stored. Defaults to the current directory. - --config-path FILE Path to the connector config. - --catalog-path FILE Path to the connector catalog. - --state-path FILE Path to the connector state. + results should be stored. Defaults to the + current directory. -hc, --http-cache Use the HTTP cache for the connector. --help Show this message and exit. ``` @@ -53,6 +54,7 @@ It will write artifacts to an output directory: * `stdout.log`: The collected standard output following the command execution * `stderr.log`: The collected standard error following the command execution * `http_dump.txt`: An `mitmproxy` HTTP stream log. Can be consumed with `mitmweb` (version `9.0.1`) for debugging. +* `airbyte_messages`: A directory containing `.jsonl` files for each message type (logs, records, traces, controls, states, etc.) produced by the connector. #### Example Let's run `debug` to check the output of `read` on two different versions of the same connector: @@ -91,6 +93,15 @@ live_tests_debug_reports ``` +You can also run the `debug` command on a live connection by passing the `--connection-id` option: + +```bash +live-tests debug read \ +--connector-image=airbyte/source-pokeapi:dev \ +--connector-image=airbyte/source-pokeapi:latest \ +--connection-id=<connection-id> +``` + ##### Consuming `http_dump.mitm` You can install [`mitmproxy`](https://mitmproxy.org/): ```bash @@ -103,17 +114,39 @@ mitmweb --rfile=http_dump.mitm ``` ## Regression tests -We created a regression test suite to run tests to compare outputs of connector commands on different versions of the same connector. +We created a regression test suite to run tests to compare the outputs of connector commands on different versions of the same connector. + You can run the existing test suites with the following command: +#### With local connection objects (`config.json`, `catalog.json`, `state.json`) ```bash -cd src/live_tests/regression_tests -poetry run pytest --connector-image=airbyte/source-pokeapi --config-path=<path-to-config> --catalog-path=<path-to-catalog> +poetry run pytest src/live_tests/regression_tests \ +--connector-image=airbyte/source-faker \ + --config-path=<path-to-config> \ + --catalog-path=<path-to-catalog> \ + --target-version=dev \ + --control-version=latest ``` +#### Using a live connection +The live connection objects will be fetched. + +```bash + poetry run pytest src/live_tests/regression_tests \ + --connector-image=airbyte/source-faker \ + --connection-id=<connection-id> \ + --target-version=dev \ + --control-version=latest + ``` + +You can also pass local connection object paths to override the live connection objects with `--config-path`, `--state-path`, or `--catalog-path`. + ## Changelog +### 0.3.0 +Pass connection id to the regression test suite. + ### 0.2.0 Declare the regression test suite. diff --git a/airbyte-ci/connectors/live-tests/poetry.lock b/airbyte-ci/connectors/live-tests/poetry.lock index 9200049d8e31..b1844e7dcfa3 100644 --- a/airbyte-ci/connectors/live-tests/poetry.lock +++ b/airbyte-ci/connectors/live-tests/poetry.lock @@ -1,5 +1,115 @@ # This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
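The `airbyte_messages` directory described in the README above contains newline-delimited JSON. A hypothetical snippet for tallying records per stream from those files is shown below; the output directory name, the `.jsonl` file layout, and the assumption that each line is a serialized `AirbyteMessage` are inferred, not guaranteed by the tool:

```python
# Hypothetical helper for inspecting the debug artifacts described above.
# Assumes each .jsonl line under airbyte_messages/ is a serialized AirbyteMessage;
# actual file names and layout may differ between versions of live-tests.
import json
from collections import Counter
from pathlib import Path

record_counts: Counter = Counter()
for jsonl_file in Path("live_tests_debug_reports").rglob("airbyte_messages/*.jsonl"):
    for line in jsonl_file.read_text().splitlines():
        message = json.loads(line)
        if message.get("type") == "RECORD":
            # Count records per stream name across both connector versions' outputs.
            record_counts[message["record"]["stream"]] += 1

print(record_counts.most_common())
```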
+[[package]] +name = "aiohttp" +version = "3.9.3" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:939677b61f9d72a4fa2a042a5eee2a99a24001a67c13da113b2e30396567db54"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f5cd333fcf7590a18334c90f8c9147c837a6ec8a178e88d90a9b96ea03194cc"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82e6aa28dd46374f72093eda8bcd142f7771ee1eb9d1e223ff0fa7177a96b4a5"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f56455b0c2c7cc3b0c584815264461d07b177f903a04481dfc33e08a89f0c26b"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bca77a198bb6e69795ef2f09a5f4c12758487f83f33d63acde5f0d4919815768"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e083c285857b78ee21a96ba1eb1b5339733c3563f72980728ca2b08b53826ca5"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab40e6251c3873d86ea9b30a1ac6d7478c09277b32e14745d0d3c6e76e3c7e29"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df822ee7feaaeffb99c1a9e5e608800bd8eda6e5f18f5cfb0dc7eeb2eaa6bbec"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:acef0899fea7492145d2bbaaaec7b345c87753168589cc7faf0afec9afe9b747"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cd73265a9e5ea618014802ab01babf1940cecb90c9762d8b9e7d2cc1e1969ec6"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a78ed8a53a1221393d9637c01870248a6f4ea5b214a59a92a36f18151739452c"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:6b0e029353361f1746bac2e4cc19b32f972ec03f0f943b390c4ab3371840aabf"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7cf5c9458e1e90e3c390c2639f1017a0379a99a94fdfad3a1fd966a2874bba52"}, + {file = "aiohttp-3.9.3-cp310-cp310-win32.whl", hash = "sha256:3e59c23c52765951b69ec45ddbbc9403a8761ee6f57253250c6e1536cacc758b"}, + {file = "aiohttp-3.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:055ce4f74b82551678291473f66dc9fb9048a50d8324278751926ff0ae7715e5"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6b88f9386ff1ad91ace19d2a1c0225896e28815ee09fc6a8932fded8cda97c3d"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c46956ed82961e31557b6857a5ca153c67e5476972e5f7190015018760938da2"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07b837ef0d2f252f96009e9b8435ec1fef68ef8b1461933253d318748ec1acdc"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad46e6f620574b3b4801c68255492e0159d1712271cc99d8bdf35f2043ec266"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ed3e046ea7b14938112ccd53d91c1539af3e6679b222f9469981e3dac7ba1ce"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:039df344b45ae0b34ac885ab5b53940b174530d4dd8a14ed8b0e2155b9dddccb"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7943c414d3a8d9235f5f15c22ace69787c140c80b718dcd57caaade95f7cd93b"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84871a243359bb42c12728f04d181a389718710129b36b6aad0fc4655a7647d4"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5eafe2c065df5401ba06821b9a054d9cb2848867f3c59801b5d07a0be3a380ae"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9d3c9b50f19704552f23b4eaea1fc082fdd82c63429a6506446cbd8737823da3"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:f033d80bc6283092613882dfe40419c6a6a1527e04fc69350e87a9df02bbc283"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:2c895a656dd7e061b2fd6bb77d971cc38f2afc277229ce7dd3552de8313a483e"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1f5a71d25cd8106eab05f8704cd9167b6e5187bcdf8f090a66c6d88b634802b4"}, + {file = "aiohttp-3.9.3-cp311-cp311-win32.whl", hash = "sha256:50fca156d718f8ced687a373f9e140c1bb765ca16e3d6f4fe116e3df7c05b2c5"}, + {file = "aiohttp-3.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:5fe9ce6c09668063b8447f85d43b8d1c4e5d3d7e92c63173e6180b2ac5d46dd8"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:38a19bc3b686ad55804ae931012f78f7a534cce165d089a2059f658f6c91fa60"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:770d015888c2a598b377bd2f663adfd947d78c0124cfe7b959e1ef39f5b13869"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee43080e75fc92bf36219926c8e6de497f9b247301bbf88c5c7593d931426679"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52df73f14ed99cee84865b95a3d9e044f226320a87af208f068ecc33e0c35b96"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc9b311743a78043b26ffaeeb9715dc360335e5517832f5a8e339f8a43581e4d"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b955ed993491f1a5da7f92e98d5dad3c1e14dc175f74517c4e610b1f2456fb11"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:504b6981675ace64c28bf4a05a508af5cde526e36492c98916127f5a02354d53"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6fe5571784af92b6bc2fda8d1925cccdf24642d49546d3144948a6a1ed58ca5"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ba39e9c8627edc56544c8628cc180d88605df3892beeb2b94c9bc857774848ca"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e5e46b578c0e9db71d04c4b506a2121c0cb371dd89af17a0586ff6769d4c58c1"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:938a9653e1e0c592053f815f7028e41a3062e902095e5a7dc84617c87267ebd5"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:c3452ea726c76e92f3b9fae4b34a151981a9ec0a4847a627c43d71a15ac32aa6"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ff30218887e62209942f91ac1be902cc80cddb86bf00fbc6783b7a43b2bea26f"}, + {file = "aiohttp-3.9.3-cp312-cp312-win32.whl", hash = "sha256:38f307b41e0bea3294a9a2a87833191e4bcf89bb0365e83a8be3a58b31fb7f38"}, + {file = "aiohttp-3.9.3-cp312-cp312-win_amd64.whl", hash = 
"sha256:b791a3143681a520c0a17e26ae7465f1b6f99461a28019d1a2f425236e6eedb5"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0ed621426d961df79aa3b963ac7af0d40392956ffa9be022024cd16297b30c8c"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7f46acd6a194287b7e41e87957bfe2ad1ad88318d447caf5b090012f2c5bb528"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:feeb18a801aacb098220e2c3eea59a512362eb408d4afd0c242044c33ad6d542"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f734e38fd8666f53da904c52a23ce517f1b07722118d750405af7e4123933511"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b40670ec7e2156d8e57f70aec34a7216407848dfe6c693ef131ddf6e76feb672"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fdd215b7b7fd4a53994f238d0f46b7ba4ac4c0adb12452beee724ddd0743ae5d"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:017a21b0df49039c8f46ca0971b3a7fdc1f56741ab1240cb90ca408049766168"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e99abf0bba688259a496f966211c49a514e65afa9b3073a1fcee08856e04425b"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:648056db9a9fa565d3fa851880f99f45e3f9a771dd3ff3bb0c048ea83fb28194"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8aacb477dc26797ee089721536a292a664846489c49d3ef9725f992449eda5a8"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:522a11c934ea660ff8953eda090dcd2154d367dec1ae3c540aff9f8a5c109ab4"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:5bce0dc147ca85caa5d33debc4f4d65e8e8b5c97c7f9f660f215fa74fc49a321"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b4af9f25b49a7be47c0972139e59ec0e8285c371049df1a63b6ca81fdd216a2"}, + {file = "aiohttp-3.9.3-cp38-cp38-win32.whl", hash = "sha256:298abd678033b8571995650ccee753d9458dfa0377be4dba91e4491da3f2be63"}, + {file = "aiohttp-3.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:69361bfdca5468c0488d7017b9b1e5ce769d40b46a9f4a2eed26b78619e9396c"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0fa43c32d1643f518491d9d3a730f85f5bbaedcbd7fbcae27435bb8b7a061b29"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:835a55b7ca49468aaaac0b217092dfdff370e6c215c9224c52f30daaa735c1c1"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06a9b2c8837d9a94fae16c6223acc14b4dfdff216ab9b7202e07a9a09541168f"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abf151955990d23f84205286938796c55ff11bbfb4ccfada8c9c83ae6b3c89a3"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59c26c95975f26e662ca78fdf543d4eeaef70e533a672b4113dd888bd2423caa"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f95511dd5d0e05fd9728bac4096319f80615aaef4acbecb35a990afebe953b0e"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:595f105710293e76b9dc09f52e0dd896bd064a79346234b521f6b968ffdd8e58"}, + {file = 
"aiohttp-3.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7c8b816c2b5af5c8a436df44ca08258fc1a13b449393a91484225fcb7545533"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f1088fa100bf46e7b398ffd9904f4808a0612e1d966b4aa43baa535d1b6341eb"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f59dfe57bb1ec82ac0698ebfcdb7bcd0e99c255bd637ff613760d5f33e7c81b3"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:361a1026c9dd4aba0109e4040e2aecf9884f5cfe1b1b1bd3d09419c205e2e53d"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:363afe77cfcbe3a36353d8ea133e904b108feea505aa4792dad6585a8192c55a"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e2c45c208c62e955e8256949eb225bd8b66a4c9b6865729a786f2aa79b72e9d"}, + {file = "aiohttp-3.9.3-cp39-cp39-win32.whl", hash = "sha256:f7217af2e14da0856e082e96ff637f14ae45c10a5714b63c77f26d8884cf1051"}, + {file = "aiohttp-3.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:27468897f628c627230dba07ec65dc8d0db566923c48f29e084ce382119802bc"}, + {file = "aiohttp-3.9.3.tar.gz", hash = "sha256:90842933e5d1ff760fae6caca4b2b3edba53ba8f4b71e95dacf2818a2aca06f7"}, +] + +[package.dependencies] +aiosignal = ">=1.1.2" +async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns", "brotlicffi"] + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + [[package]] name = "airbyte-protocol-models" version = "0.7.0" @@ -36,6 +146,28 @@ doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphin test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] trio = ["trio (>=0.23)"] +[[package]] +name = "asn1crypto" +version = "1.5.1" +description = "Fast ASN.1 parser and serializer with definitions for private keys, public keys, certificates, CRL, OCSP, CMS, PKCS#3, PKCS#7, PKCS#8, PKCS#12, PKCS#5, X.509 and TSP" +optional = false +python-versions = "*" +files = [ + {file = "asn1crypto-1.5.1-py2.py3-none-any.whl", hash = "sha256:db4e40728b728508912cbb3d44f19ce188f218e9eba635821bb4b68564f8fd67"}, + {file = "asn1crypto-1.5.1.tar.gz", hash = "sha256:13ae38502be632115abf8a24cbe5f4da52e3b5231990aff31123c805306ccb9c"}, +] + +[[package]] +name = "async-timeout" +version = "4.0.3" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + [[package]] name = "asyncclick" version = "8.1.7.1" @@ -146,6 +278,70 @@ files = [ {file = "certifi-2024.2.2.tar.gz", hash = 
"sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] +[[package]] +name = "cffi" +version = "1.16.0" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = 
"cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +] + +[package.dependencies] +pycparser = "*" + [[package]] name = "charset-normalizer" version = "3.3.2" @@ -245,6 +441,44 @@ files = [ {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "cloud-sql-python-connector" +version = "1.7.0" +description = "The Cloud SQL Python Connector is a library that can be used alongside a database driver to allow users with sufficient permissions to connect to a Cloud SQL database without having to manually allowlist IPs or manage SSL certificates." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "cloud-sql-python-connector-1.7.0.tar.gz", hash = "sha256:2eca34feba117ba6ab35872e51e86c2dbd3aea7f56edf626e7c64739233ed803"}, + {file = "cloud_sql_python_connector-1.7.0-py2.py3-none-any.whl", hash = "sha256:db74750365c33216dd3a7c62e6bed9a69bece75561a7ed518090a44434b673d6"}, +] + +[package.dependencies] +aiohttp = "*" +cryptography = ">=42.0.0" +google-auth = "*" +pg8000 = {version = ">=1.30.4", optional = true, markers = "extra == \"pg8000\""} +Requests = "*" + +[package.extras] +asyncpg = ["asyncpg (>=0.29.0)"] +pg8000 = ["pg8000 (>=1.30.4)"] +pymysql = ["PyMySQL (>=1.1.0)"] +pytds = ["python-tds (>=1.15.0)"] + [[package]] name = "colorama" version = "0.4.6" @@ -256,6 +490,85 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "connection-retriever" +version = "0.1.0" +description = "A tool to retrieve connection information from our Airbyte Cloud config api database" +optional = false +python-versions = "^3.10" +files = [] +develop = false + +[package.dependencies] +click = "^8.1.7" +cloud-sql-python-connector = {version = "^1.7.0", extras = ["pg8000"]} +google-cloud-iam = "^2.14.3" +google-cloud-logging = "^3.9.0" +google-cloud-secret-manager = "^2.18.3" +python-dotenv = "^1.0.1" +sqlalchemy = "^2.0.28" + +[package.source] +type = "git" +url = "https://github.com/airbytehq/airbyte-platform-internal.git" +reference = "augustin/03-06-create_connection-retriever_tool" +resolved_reference = "32315e491594c2e55829166e7b36d7ce1118be1c" +subdirectory = "tools/connection-retriever" + +[[package]] +name = "cryptography" +version = "42.0.5" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, + {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, + {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, + {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, + {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, + {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = 
"sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, + {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + [[package]] name = "dagger-io" version = "0.9.6" @@ -313,6 +626,271 @@ files = [ [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "frozenlist" +version = "1.4.1" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.8" +files = [ + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, + {file = 
"frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, + {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, + {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, + {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, + {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, + {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, + {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash 
= "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, + {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, + {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, + {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, + {file = 
"frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, + {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, + {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, +] + +[[package]] +name = "google-api-core" +version = "2.17.1" +description = "Google API client core library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-api-core-2.17.1.tar.gz", hash = "sha256:9df18a1f87ee0df0bc4eea2770ebc4228392d8cc4066655b320e2cfccb15db95"}, + {file = "google_api_core-2.17.1-py3-none-any.whl", hash = "sha256:610c5b90092c360736baccf17bd3efbcb30dd380e7a6dc28a71059edb8bd0d8e"}, +] + +[package.dependencies] +google-auth = ">=2.14.1,<3.0.dev0" +googleapis-common-protos = ">=1.56.2,<2.0.dev0" +grpcio = [ + {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, + {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, +] +grpcio-status = [ + {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, + {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, +] +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" +requests = ">=2.18.0,<3.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] +grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] +grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] + +[[package]] +name = "google-auth" +version = "2.28.1" +description = "Google Authentication Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-auth-2.28.1.tar.gz", hash = "sha256:34fc3046c257cedcf1622fc4b31fc2be7923d9b4d44973d481125ecc50d83885"}, + {file = "google_auth-2.28.1-py2.py3-none-any.whl", hash = "sha256:25141e2d7a14bfcba945f5e9827f98092716e99482562f15306e5b026e21aa72"}, +] + +[package.dependencies] +cachetools = ">=2.0.0,<6.0" +pyasn1-modules = ">=0.2.1" +rsa = ">=3.1.4,<5" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] +enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] +pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] +requests = ["requests (>=2.20.0,<3.0.0.dev0)"] + +[[package]] +name = "google-cloud-appengine-logging" +version = "1.4.3" +description = "Google Cloud Appengine Logging API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-cloud-appengine-logging-1.4.3.tar.gz", hash = "sha256:fb504e6199fe8de85baa9d31cecf6776877851fe58867de603317ec7cc739987"}, + {file = "google_cloud_appengine_logging-1.4.3-py2.py3-none-any.whl", hash = "sha256:8e30af51d853f219caf29e8b8b342b9ce8214b29f334dafae38d39aaaff7d372"}, +] + +[package.dependencies] +google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} +google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev" +proto-plus = ">=1.22.3,<2.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || 
>4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" + +[[package]] +name = "google-cloud-audit-log" +version = "0.2.5" +description = "Google Cloud Audit Protos" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-cloud-audit-log-0.2.5.tar.gz", hash = "sha256:86e2faba3383adc8fd04a5bd7fd4f960b3e4aedaa7ed950f2f891ce16902eb6b"}, + {file = "google_cloud_audit_log-0.2.5-py2.py3-none-any.whl", hash = "sha256:18b94d4579002a450b7902cd2e8b8fdcb1ea2dd4df3b41f8f82be6d9f7fcd746"}, +] + +[package.dependencies] +googleapis-common-protos = ">=1.56.2,<2.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" + +[[package]] +name = "google-cloud-core" +version = "2.4.1" +description = "Google Cloud API client core library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-cloud-core-2.4.1.tar.gz", hash = "sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073"}, + {file = "google_cloud_core-2.4.1-py2.py3-none-any.whl", hash = "sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61"}, +] + +[package.dependencies] +google-api-core = ">=1.31.6,<2.0.dev0 || >2.3.0,<3.0.0dev" +google-auth = ">=1.25.0,<3.0dev" + +[package.extras] +grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"] + +[[package]] +name = "google-cloud-iam" +version = "2.14.3" +description = "Google Cloud Iam API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-cloud-iam-2.14.3.tar.gz", hash = "sha256:c82e993f8a9219c5ba1fce139c34aed6f019dd5f9b45ce956d5430583d2af26e"}, + {file = "google_cloud_iam-2.14.3-py2.py3-none-any.whl", hash = "sha256:61b8555fd14240b050611d7fe9833f276202a306e4003e01fc7fb7d70d23e6c4"}, +] + +[package.dependencies] +google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} +google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev" +proto-plus = ">=1.22.3,<2.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" + +[[package]] +name = "google-cloud-logging" +version = "3.9.0" +description = "Stackdriver Logging API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-cloud-logging-3.9.0.tar.gz", hash = "sha256:4decb1b0bed4a0e3c0e58a376646e6002d6be7cad039e3466822e8665072ea33"}, + {file = "google_cloud_logging-3.9.0-py2.py3-none-any.whl", hash = "sha256:094a2db068ff7f38c9e0c1017673fa49c0768fbae02769e03e06baa30f138b87"}, +] + +[package.dependencies] +google-api-core = {version = ">=1.33.2,<2.0.dev0 || >=2.8.dev0,<3.0.0dev", extras = ["grpc"]} +google-cloud-appengine-logging = ">=0.1.0,<2.0.0dev" +google-cloud-audit-log = ">=0.1.0,<1.0.0dev" +google-cloud-core = ">=2.0.0,<3.0.0dev" +grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" +proto-plus = [ + {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""}, + {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, +] +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" + +[[package]] +name = "google-cloud-secret-manager" +version = "2.18.3" +description = "Google Cloud Secret Manager API client library" 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "google-cloud-secret-manager-2.18.3.tar.gz", hash = "sha256:1db2f409324536e34f985081d389e3974ca3a3668df7845cad0be03ab8c0fa7d"}, + {file = "google_cloud_secret_manager-2.18.3-py2.py3-none-any.whl", hash = "sha256:4d4af82bddd9099ebdbe79e0c6b68f6c6cabea8323a3c1275bcead8f56310fb7"}, +] + +[package.dependencies] +google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} +google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev" +grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" +proto-plus = ">=1.22.3,<2.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" + +[[package]] +name = "googleapis-common-protos" +version = "1.62.0" +description = "Common protobufs used in Google APIs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "googleapis-common-protos-1.62.0.tar.gz", hash = "sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"}, + {file = "googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"}, +] + +[package.dependencies] +grpcio = {version = ">=1.44.0,<2.0.0.dev0", optional = true, markers = "extra == \"grpc\""} +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] + [[package]] name = "gql" version = "3.5.0" @@ -352,6 +930,175 @@ files = [ {file = "graphql_core-3.2.3-py3-none-any.whl", hash = "sha256:5766780452bd5ec8ba133f8bf287dc92713e3868ddd83aee4faab9fc3e303dc3"}, ] +[[package]] +name = "greenlet" +version = "3.0.3" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, + {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, + {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = 
"sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, + {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, + {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, + {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, + {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, + {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, + {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, + {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, + {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, + {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, + {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, + {file = 
"greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, + {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, + {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, + {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + +[[package]] +name = "grpc-google-iam-v1" +version = "0.13.0" +description = "IAM API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "grpc-google-iam-v1-0.13.0.tar.gz", hash = "sha256:fad318608b9e093258fbf12529180f400d1c44453698a33509cc6ecf005b294e"}, + {file = "grpc_google_iam_v1-0.13.0-py2.py3-none-any.whl", hash = "sha256:53902e2af7de8df8c1bd91373d9be55b0743ec267a7428ea638db3775becae89"}, +] + +[package.dependencies] +googleapis-common-protos = {version = ">=1.56.0,<2.0.0dev", extras = ["grpc"]} +grpcio = ">=1.44.0,<2.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" + +[[package]] +name = "grpcio" +version = "1.62.0" +description = "HTTP/2-based RPC framework" +optional = false +python-versions = ">=3.7" +files = [ + {file = "grpcio-1.62.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:136ffd79791b1eddda8d827b607a6285474ff8a1a5735c4947b58c481e5e4271"}, + {file = "grpcio-1.62.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:d6a56ba703be6b6267bf19423d888600c3f574ac7c2cc5e6220af90662a4d6b0"}, + {file = "grpcio-1.62.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:4cd356211579043fce9f52acc861e519316fff93980a212c8109cca8f47366b6"}, + {file = "grpcio-1.62.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e803e9b58d8f9b4ff0ea991611a8d51b31c68d2e24572cd1fe85e99e8cc1b4f8"}, + {file = "grpcio-1.62.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4c04fe33039b35b97c02d2901a164bbbb2f21fb9c4e2a45a959f0b044c3512c"}, + {file = "grpcio-1.62.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:95370c71b8c9062f9ea033a0867c4c73d6f0ff35113ebd2618171ec1f1e903e0"}, + {file = "grpcio-1.62.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c912688acc05e4ff012c8891803659d6a8a8b5106f0f66e0aed3fb7e77898fa6"}, + {file = "grpcio-1.62.0-cp310-cp310-win32.whl", hash = "sha256:821a44bd63d0f04e33cf4ddf33c14cae176346486b0df08b41a6132b976de5fc"}, + {file = "grpcio-1.62.0-cp310-cp310-win_amd64.whl", hash = "sha256:81531632f93fece32b2762247c4c169021177e58e725494f9a746ca62c83acaa"}, + {file = "grpcio-1.62.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:3fa15850a6aba230eed06b236287c50d65a98f05054a0f01ccedf8e1cc89d57f"}, + {file = "grpcio-1.62.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:36df33080cd7897623feff57831eb83c98b84640b016ce443305977fac7566fb"}, + {file = "grpcio-1.62.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:7a195531828b46ea9c4623c47e1dc45650fc7206f8a71825898dd4c9004b0928"}, + {file = "grpcio-1.62.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab140a3542bbcea37162bdfc12ce0d47a3cda3f2d91b752a124cc9fe6776a9e2"}, + {file = "grpcio-1.62.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7f9d6c3223914abb51ac564dc9c3782d23ca445d2864321b9059d62d47144021"}, + {file = "grpcio-1.62.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:fbe0c20ce9a1cff75cfb828b21f08d0a1ca527b67f2443174af6626798a754a4"}, + {file = "grpcio-1.62.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38f69de9c28c1e7a8fd24e4af4264726637b72f27c2099eaea6e513e7142b47e"}, + {file = "grpcio-1.62.0-cp311-cp311-win32.whl", hash = "sha256:ce1aafdf8d3f58cb67664f42a617af0e34555fe955450d42c19e4a6ad41c84bd"}, + {file = "grpcio-1.62.0-cp311-cp311-win_amd64.whl", hash = "sha256:eef1d16ac26c5325e7d39f5452ea98d6988c700c427c52cbc7ce3201e6d93334"}, + {file = "grpcio-1.62.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:8aab8f90b2a41208c0a071ec39a6e5dbba16fd827455aaa070fec241624ccef8"}, + {file = "grpcio-1.62.0-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:62aa1659d8b6aad7329ede5d5b077e3d71bf488d85795db517118c390358d5f6"}, + {file = "grpcio-1.62.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:0d7ae7fc7dbbf2d78d6323641ded767d9ec6d121aaf931ec4a5c50797b886532"}, + {file = "grpcio-1.62.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f359d635ee9428f0294bea062bb60c478a8ddc44b0b6f8e1f42997e5dc12e2ee"}, + {file = "grpcio-1.62.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d48e5b1f8f4204889f1acf30bb57c30378e17c8d20df5acbe8029e985f735c"}, + {file = "grpcio-1.62.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:662d3df5314ecde3184cf87ddd2c3a66095b3acbb2d57a8cada571747af03873"}, + {file = "grpcio-1.62.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:92cdb616be44c8ac23a57cce0243af0137a10aa82234f23cd46e69e115071388"}, + {file = "grpcio-1.62.0-cp312-cp312-win32.whl", hash = "sha256:0b9179478b09ee22f4a36b40ca87ad43376acdccc816ce7c2193a9061bf35701"}, + {file = "grpcio-1.62.0-cp312-cp312-win_amd64.whl", hash = "sha256:614c3ed234208e76991992342bab725f379cc81c7dd5035ee1de2f7e3f7a9842"}, + {file = "grpcio-1.62.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:7e1f51e2a460b7394670fdb615e26d31d3260015154ea4f1501a45047abe06c9"}, + {file = "grpcio-1.62.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:bcff647e7fe25495e7719f779cc219bbb90b9e79fbd1ce5bda6aae2567f469f2"}, + {file = "grpcio-1.62.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:56ca7ba0b51ed0de1646f1735154143dcbdf9ec2dbe8cc6645def299bb527ca1"}, + {file = "grpcio-1.62.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e84bfb2a734e4a234b116be208d6f0214e68dcf7804306f97962f93c22a1839"}, + {file = "grpcio-1.62.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c1488b31a521fbba50ae86423f5306668d6f3a46d124f7819c603979fc538c4"}, + {file = "grpcio-1.62.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:98d8f4eb91f1ce0735bf0b67c3b2a4fea68b52b2fd13dc4318583181f9219b4b"}, + {file = "grpcio-1.62.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b3d3d755cfa331d6090e13aac276d4a3fb828bf935449dc16c3d554bf366136b"}, + {file = "grpcio-1.62.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a33f2bfd8a58a02aab93f94f6c61279be0f48f99fcca20ebaee67576cd57307b"}, + {file = "grpcio-1.62.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:5e709f7c8028ce0443bddc290fb9c967c1e0e9159ef7a030e8c21cac1feabd35"}, + {file = "grpcio-1.62.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:2f3d9a4d0abb57e5f49ed5039d3ed375826c2635751ab89dcc25932ff683bbb6"}, + {file = "grpcio-1.62.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = 
"sha256:62ccb92f594d3d9fcd00064b149a0187c246b11e46ff1b7935191f169227f04c"}, + {file = "grpcio-1.62.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:921148f57c2e4b076af59a815467d399b7447f6e0ee10ef6d2601eb1e9c7f402"}, + {file = "grpcio-1.62.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f897b16190b46bc4d4aaf0a32a4b819d559a37a756d7c6b571e9562c360eed72"}, + {file = "grpcio-1.62.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1bc8449084fe395575ed24809752e1dc4592bb70900a03ca42bf236ed5bf008f"}, + {file = "grpcio-1.62.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81d444e5e182be4c7856cd33a610154fe9ea1726bd071d07e7ba13fafd202e38"}, + {file = "grpcio-1.62.0-cp38-cp38-win32.whl", hash = "sha256:88f41f33da3840b4a9bbec68079096d4caf629e2c6ed3a72112159d570d98ebe"}, + {file = "grpcio-1.62.0-cp38-cp38-win_amd64.whl", hash = "sha256:fc2836cb829895ee190813446dce63df67e6ed7b9bf76060262c55fcd097d270"}, + {file = "grpcio-1.62.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:fcc98cff4084467839d0a20d16abc2a76005f3d1b38062464d088c07f500d170"}, + {file = "grpcio-1.62.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:0d3dee701e48ee76b7d6fbbba18ba8bc142e5b231ef7d3d97065204702224e0e"}, + {file = "grpcio-1.62.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:b7a6be562dd18e5d5bec146ae9537f20ae1253beb971c0164f1e8a2f5a27e829"}, + {file = "grpcio-1.62.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29cb592c4ce64a023712875368bcae13938c7f03e99f080407e20ffe0a9aa33b"}, + {file = "grpcio-1.62.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1eda79574aec8ec4d00768dcb07daba60ed08ef32583b62b90bbf274b3c279f7"}, + {file = "grpcio-1.62.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7eea57444a354ee217fda23f4b479a4cdfea35fb918ca0d8a0e73c271e52c09c"}, + {file = "grpcio-1.62.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0e97f37a3b7c89f9125b92d22e9c8323f4e76e7993ba7049b9f4ccbe8bae958a"}, + {file = "grpcio-1.62.0-cp39-cp39-win32.whl", hash = "sha256:39cd45bd82a2e510e591ca2ddbe22352e8413378852ae814549c162cf3992a93"}, + {file = "grpcio-1.62.0-cp39-cp39-win_amd64.whl", hash = "sha256:b71c65427bf0ec6a8b48c68c17356cb9fbfc96b1130d20a07cb462f4e4dcdcd5"}, + {file = "grpcio-1.62.0.tar.gz", hash = "sha256:748496af9238ac78dcd98cce65421f1adce28c3979393e3609683fcd7f3880d7"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.62.0)"] + +[[package]] +name = "grpcio-status" +version = "1.62.0" +description = "Status proto mapping for gRPC" +optional = false +python-versions = ">=3.6" +files = [ + {file = "grpcio-status-1.62.0.tar.gz", hash = "sha256:0d693e9c09880daeaac060d0c3dba1ae470a43c99e5d20dfeafd62cf7e08a85d"}, + {file = "grpcio_status-1.62.0-py3-none-any.whl", hash = "sha256:3baac03fcd737310e67758c4082a188107f771d32855bce203331cd4c9aa687a"}, +] + +[package.dependencies] +googleapis-common-protos = ">=1.5.5" +grpcio = ">=1.62.0" +protobuf = ">=4.21.6" + [[package]] name = "h11" version = "0.14.0" @@ -622,6 +1369,51 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = 
"numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = 
"numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + [[package]] name = "packaging" version = "23.2" @@ -633,6 +1425,36 @@ files = [ {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, ] +[[package]] +name = "pandas-stubs" +version = "2.2.0.240218" +description = "Type annotations for pandas" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas_stubs-2.2.0.240218-py3-none-any.whl", hash = "sha256:e97478320add9b958391b15a56c5f1bf29da656d5b747d28bbe708454b3a1fe6"}, + {file = "pandas_stubs-2.2.0.240218.tar.gz", hash = "sha256:63138c12eec715d66d48611bdd922f31cd7c78bcadd19384c3bd61fd3720a11a"}, +] + +[package.dependencies] +numpy = {version = ">=1.26.0", markers = "python_version < \"3.13\""} +types-pytz = ">=2022.1.1" + +[[package]] +name = "pg8000" +version = "1.30.5" +description = "PostgreSQL interface library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pg8000-1.30.5-py3-none-any.whl", hash = "sha256:1abf18da652b0ad8e9cbfe57ed841c350b5330c33d8151303555db1fe5ce57f8"}, + {file = "pg8000-1.30.5.tar.gz", hash = "sha256:072f7ad00cd723695cb2e9fc02c1dfb84c781455e97b8de6f4c4281eea08078c"}, +] + +[package.dependencies] +python-dateutil = ">=2.8.2" +scramp = ">=1.4.4" + [[package]] name = "platformdirs" version = "4.2.0" @@ -663,6 +1485,79 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "proto-plus" +version = "1.23.0" +description = "Beautiful, Pythonic protocol buffers." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "proto-plus-1.23.0.tar.gz", hash = "sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2"}, + {file = "proto_plus-1.23.0-py3-none-any.whl", hash = "sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c"}, +] + +[package.dependencies] +protobuf = ">=3.19.0,<5.0.0dev" + +[package.extras] +testing = ["google-api-core[grpc] (>=1.31.5)"] + +[[package]] +name = "protobuf" +version = "4.25.3" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, + {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, + {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, + {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, + {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, + {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, + {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, + {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, + {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, +] + +[[package]] +name = "pyasn1" +version = "0.5.1" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"}, + {file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"}, +] + +[[package]] +name = "pyasn1-modules" +version = "0.3.0" +description = "A collection of ASN.1-based protocols modules" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, + {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, +] + +[package.dependencies] +pyasn1 = ">=0.4.6,<0.6.0" + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] + [[package]] 
name = "pydantic" version = "1.10.14" @@ -787,6 +1682,34 @@ pytest = ">=7.0.0,<9" docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + [[package]] name = "pywin32" version = "306" @@ -849,6 +1772,20 @@ pygments = ">=2.13.0,<3.0.0" [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] +[[package]] +name = "rsa" +version = "4.9" +description = "Pure-Python RSA implementation" +optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + [[package]] name = "ruff" version = "0.3.1" @@ -875,6 +1812,31 @@ files = [ {file = "ruff-0.3.1.tar.gz", hash = "sha256:d30db97141fc2134299e6e983a6727922c9e03c031ae4883a6d69461de722ae7"}, ] +[[package]] +name = "scramp" +version = "1.4.4" +description = "An implementation of the SCRAM protocol." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "scramp-1.4.4-py3-none-any.whl", hash = "sha256:b142312df7c2977241d951318b7ee923d6b7a4f75ba0f05b621ece1ed616faa3"}, + {file = "scramp-1.4.4.tar.gz", hash = "sha256:b7022a140040f33cf863ab2657917ed05287a807b917950489b89b9f685d59bc"}, +] + +[package.dependencies] +asn1crypto = ">=1.5.1" + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + [[package]] name = "sniffio" version = "1.3.1" @@ -886,6 +1848,93 @@ files = [ {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] +[[package]] +name = "sqlalchemy" +version = "2.0.28" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-2.0.28-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0b148ab0438f72ad21cb004ce3bdaafd28465c4276af66df3b9ecd2037bf252"}, + {file = "SQLAlchemy-2.0.28-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bbda76961eb8f27e6ad3c84d1dc56d5bc61ba8f02bd20fcf3450bd421c2fcc9c"}, + {file = "SQLAlchemy-2.0.28-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feea693c452d85ea0015ebe3bb9cd15b6f49acc1a31c28b3c50f4db0f8fb1e71"}, + {file = "SQLAlchemy-2.0.28-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5da98815f82dce0cb31fd1e873a0cb30934971d15b74e0d78cf21f9e1b05953f"}, + {file = "SQLAlchemy-2.0.28-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4a5adf383c73f2d49ad15ff363a8748319ff84c371eed59ffd0127355d6ea1da"}, + {file = "SQLAlchemy-2.0.28-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56856b871146bfead25fbcaed098269d90b744eea5cb32a952df00d542cdd368"}, + {file = "SQLAlchemy-2.0.28-cp310-cp310-win32.whl", hash = "sha256:943aa74a11f5806ab68278284a4ddd282d3fb348a0e96db9b42cb81bf731acdc"}, + {file = "SQLAlchemy-2.0.28-cp310-cp310-win_amd64.whl", hash = "sha256:c6c4da4843e0dabde41b8f2e8147438330924114f541949e6318358a56d1875a"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46a3d4e7a472bfff2d28db838669fc437964e8af8df8ee1e4548e92710929adc"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3dd67b5d69794cfe82862c002512683b3db038b99002171f624712fa71aeaa"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61e2e41656a673b777e2f0cbbe545323dbe0d32312f590b1bc09da1de6c2a02"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0315d9125a38026227f559488fe7f7cee1bd2fbc19f9fd637739dc50bb6380b2"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:af8ce2d31679006e7b747d30a89cd3ac1ec304c3d4c20973f0f4ad58e2d1c4c9"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:81ba314a08c7ab701e621b7ad079c0c933c58cdef88593c59b90b996e8b58fa5"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-win32.whl", hash = "sha256:1ee8bd6d68578e517943f5ebff3afbd93fc65f7ef8f23becab9fa8fb315afb1d"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-win_amd64.whl", hash = 
"sha256:ad7acbe95bac70e4e687a4dc9ae3f7a2f467aa6597049eeb6d4a662ecd990bb6"}, + {file = "SQLAlchemy-2.0.28-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d3499008ddec83127ab286c6f6ec82a34f39c9817f020f75eca96155f9765097"}, + {file = "SQLAlchemy-2.0.28-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9b66fcd38659cab5d29e8de5409cdf91e9986817703e1078b2fdaad731ea66f5"}, + {file = "SQLAlchemy-2.0.28-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bea30da1e76cb1acc5b72e204a920a3a7678d9d52f688f087dc08e54e2754c67"}, + {file = "SQLAlchemy-2.0.28-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:124202b4e0edea7f08a4db8c81cc7859012f90a0d14ba2bf07c099aff6e96462"}, + {file = "SQLAlchemy-2.0.28-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e23b88c69497a6322b5796c0781400692eca1ae5532821b39ce81a48c395aae9"}, + {file = "SQLAlchemy-2.0.28-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b6303bfd78fb3221847723104d152e5972c22367ff66edf09120fcde5ddc2e2"}, + {file = "SQLAlchemy-2.0.28-cp312-cp312-win32.whl", hash = "sha256:a921002be69ac3ab2cf0c3017c4e6a3377f800f1fca7f254c13b5f1a2f10022c"}, + {file = "SQLAlchemy-2.0.28-cp312-cp312-win_amd64.whl", hash = "sha256:b4a2cf92995635b64876dc141af0ef089c6eea7e05898d8d8865e71a326c0385"}, + {file = "SQLAlchemy-2.0.28-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e91b5e341f8c7f1e5020db8e5602f3ed045a29f8e27f7f565e0bdee3338f2c7"}, + {file = "SQLAlchemy-2.0.28-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45c7b78dfc7278329f27be02c44abc0d69fe235495bb8e16ec7ef1b1a17952db"}, + {file = "SQLAlchemy-2.0.28-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3eba73ef2c30695cb7eabcdb33bb3d0b878595737479e152468f3ba97a9c22a4"}, + {file = "SQLAlchemy-2.0.28-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5df5d1dafb8eee89384fb7a1f79128118bc0ba50ce0db27a40750f6f91aa99d5"}, + {file = "SQLAlchemy-2.0.28-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2858bbab1681ee5406650202950dc8f00e83b06a198741b7c656e63818633526"}, + {file = "SQLAlchemy-2.0.28-cp37-cp37m-win32.whl", hash = "sha256:9461802f2e965de5cff80c5a13bc945abea7edaa1d29360b485c3d2b56cdb075"}, + {file = "SQLAlchemy-2.0.28-cp37-cp37m-win_amd64.whl", hash = "sha256:a6bec1c010a6d65b3ed88c863d56b9ea5eeefdf62b5e39cafd08c65f5ce5198b"}, + {file = "SQLAlchemy-2.0.28-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:843a882cadebecc655a68bd9a5b8aa39b3c52f4a9a5572a3036fb1bb2ccdc197"}, + {file = "SQLAlchemy-2.0.28-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:dbb990612c36163c6072723523d2be7c3eb1517bbdd63fe50449f56afafd1133"}, + {file = "SQLAlchemy-2.0.28-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7e4baf9161d076b9a7e432fce06217b9bd90cfb8f1d543d6e8c4595627edb9"}, + {file = "SQLAlchemy-2.0.28-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0a5354cb4de9b64bccb6ea33162cb83e03dbefa0d892db88a672f5aad638a75"}, + {file = "SQLAlchemy-2.0.28-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:fffcc8edc508801ed2e6a4e7b0d150a62196fd28b4e16ab9f65192e8186102b6"}, + {file = "SQLAlchemy-2.0.28-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aca7b6d99a4541b2ebab4494f6c8c2f947e0df4ac859ced575238e1d6ca5716b"}, + {file = "SQLAlchemy-2.0.28-cp38-cp38-win32.whl", hash = "sha256:8c7f10720fc34d14abad5b647bc8202202f4948498927d9f1b4df0fb1cf391b7"}, + {file = "SQLAlchemy-2.0.28-cp38-cp38-win_amd64.whl", hash = 
"sha256:243feb6882b06a2af68ecf4bec8813d99452a1b62ba2be917ce6283852cf701b"}, + {file = "SQLAlchemy-2.0.28-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fc4974d3684f28b61b9a90fcb4c41fb340fd4b6a50c04365704a4da5a9603b05"}, + {file = "SQLAlchemy-2.0.28-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87724e7ed2a936fdda2c05dbd99d395c91ea3c96f029a033a4a20e008dd876bf"}, + {file = "SQLAlchemy-2.0.28-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68722e6a550f5de2e3cfe9da6afb9a7dd15ef7032afa5651b0f0c6b3adb8815d"}, + {file = "SQLAlchemy-2.0.28-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:328529f7c7f90adcd65aed06a161851f83f475c2f664a898af574893f55d9e53"}, + {file = "SQLAlchemy-2.0.28-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:df40c16a7e8be7413b885c9bf900d402918cc848be08a59b022478804ea076b8"}, + {file = "SQLAlchemy-2.0.28-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:426f2fa71331a64f5132369ede5171c52fd1df1bd9727ce621f38b5b24f48750"}, + {file = "SQLAlchemy-2.0.28-cp39-cp39-win32.whl", hash = "sha256:33157920b233bc542ce497a81a2e1452e685a11834c5763933b440fedd1d8e2d"}, + {file = "SQLAlchemy-2.0.28-cp39-cp39-win_amd64.whl", hash = "sha256:2f60843068e432311c886c5f03c4664acaef507cf716f6c60d5fde7265be9d7b"}, + {file = "SQLAlchemy-2.0.28-py3-none-any.whl", hash = "sha256:78bb7e8da0183a8301352d569900d9d3594c48ac21dc1c2ec6b3121ed8b6c986"}, + {file = "SQLAlchemy-2.0.28.tar.gz", hash = "sha256:dd53b6c4e6d960600fd6532b79ee28e2da489322fcf6648738134587faf767b6"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +typing-extensions = ">=4.6.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] + [[package]] name = "tomli" version = "2.0.1" @@ -908,6 +1957,17 @@ files = [ {file = "types_cachetools-5.3.0.7-py3-none-any.whl", hash = "sha256:98c069dc7fc087b1b061703369c80751b0a0fc561f6fb072b554e5eee23773a0"}, ] +[[package]] +name = "types-pytz" +version = "2024.1.0.20240203" +description = "Typing stubs for pytz" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-pytz-2024.1.0.20240203.tar.gz", hash = "sha256:c93751ee20dfc6e054a0148f8f5227b9a00b79c90a4d3c9f464711a73179c89e"}, + {file = "types_pytz-2024.1.0.20240203-py3-none-any.whl", hash = "sha256:9679eef0365db3af91ef7722c199dbb75ee5c1b67e3c4dd7bfbeb1b8a71c21a3"}, +] + 
 [[package]]
 name = "typing-extensions"
 version = "4.10.0"
@@ -1058,4 +2118,4 @@ multidict = ">=4.0"
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.10"
-content-hash = "98437771d3bb81792186c952105888fb44215dc87f2a0004db79db8f58dd2814"
+content-hash = "fab479eaed289416f2650819e2e8a46bc970c8a1c53c256f448b1d124179272a"
diff --git a/airbyte-ci/connectors/live-tests/pyproject.toml b/airbyte-ci/connectors/live-tests/pyproject.toml
index d3e0f2276a85..34cc358cc5d3 100644
--- a/airbyte-ci/connectors/live-tests/pyproject.toml
+++ b/airbyte-ci/connectors/live-tests/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"

 [tool.poetry]
 name = "live-tests"
-version = "0.2.0"
+version = "0.3.0"
 description = "Contains utilities for testing connectors against live data."
 authors = ["Airbyte "]
 license = "MIT"
@@ -24,6 +24,7 @@ pytest-asyncio = "~=0.23.5"
 pydash = "~=7.0.7"
 docker = ">=6,<7"
 asyncclick = "^8.1.7.1"
+connection-retriever = {git = "https://github.com/airbytehq/airbyte-platform-internal.git", rev = "augustin/03-06-create_connection-retriever_tool", subdirectory = "tools/connection-retriever"}

 [tool.poetry.scripts]
 live-tests = "live_tests.cli:live_tests"
@@ -32,13 +33,14 @@
 ruff = "^0.3.0"
 mypy = "^1.8.0"
 types-cachetools = "^5.3.0.7"
+pandas-stubs = "^2.2.0.240218"

 [tool.poe.tasks]
 test = "pytest tests"
 lint = "ruff check src"
-format = "ruff format src"
 type_check = "mypy src --disallow-untyped-defs"
-pre-push = ["format", "lint", "test", "type_check"]
+pre-push = ["lint", "test", "type_check"]

-[tool.airbyte_ci]
-poe_tasks = ["test", "lint", "type_check"]
+# Can't run CI at the moment because connection-retriever is not public
+# [tool.airbyte_ci]
+#poe_tasks = ["test", "lint", "type_check"]
diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/__init__.py b/airbyte-ci/connectors/live-tests/src/live_tests/__init__.py
index f70ecfc3a89e..51502a263eae 100644
--- a/airbyte-ci/connectors/live-tests/src/live_tests/__init__.py
+++ b/airbyte-ci/connectors/live-tests/src/live_tests/__init__.py
@@ -1 +1,2 @@
 # Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+
diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/base_backend.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/base_backend.py
index f6005120c216..f009b8272275 100644
--- a/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/base_backend.py
+++ b/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/base_backend.py
@@ -12,5 +12,5 @@ class BaseBackend(ABC):
     """

     @abstractmethod
-    async def write(self, airbyte_messages: Iterable[AirbyteMessage]) -> None:
+    def write(self, airbyte_messages: Iterable[AirbyteMessage]) -> None:
         ...
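With write() now synchronous across the backend hierarchy, implementing a new backend reduces to a plain method override, with no event loop involved. A minimal sketch follows; the PrintBackend name is invented for illustration, and the AirbyteMessage import path is assumed to be the airbyte_protocol pydantic model this package already depends on:

from typing import Iterable

from airbyte_protocol.models import AirbyteMessage  # type: ignore

from live_tests.commons.backends.base_backend import BaseBackend


class PrintBackend(BaseBackend):
    # Toy backend: dumps each message to stdout instead of a file.
    def write(self, airbyte_messages: Iterable[AirbyteMessage]) -> None:
        for message in airbyte_messages:
            print(message.json())  # pydantic v1 serialization, assumed here


backend = PrintBackend()
backend.write([])  # callers now invoke write() directly, no await needed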
diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/file_backend.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/file_backend.py
index a7bea3cb184d..948073332705 100644
--- a/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/file_backend.py
+++ b/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/file_backend.py
@@ -33,7 +33,7 @@ class FileBackend(BaseBackend):
     def __init__(self, output_directory: Path):
         self._output_directory = output_directory

-    async def write(self, airbyte_messages: Iterable[AirbyteMessage]) -> None:
+    def write(self, airbyte_messages: Iterable[AirbyteMessage]) -> None:
         """
         Write AirbyteMessages to the appropriate file.
diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/connection_objects_retrieval.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/connection_objects_retrieval.py
new file mode 100644
index 000000000000..9803b6a00c9d
--- /dev/null
+++ b/airbyte-ci/connectors/live-tests/src/live_tests/commons/connection_objects_retrieval.py
@@ -0,0 +1,125 @@
+# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
+
+import json
+import logging
+from pathlib import Path
+from typing import Dict, Optional, Set
+
+from connection_retriever import ConnectionObject, retrieve_objects  # type: ignore
+
+from .models import Command, ConfiguredAirbyteCatalog, ConnectionObjects, SecretDict
+
+LOGGER = logging.getLogger(__name__)
+
+
+def parse_config(config: Dict | str | None) -> Optional[SecretDict]:
+    if not config:
+        return None
+    if isinstance(config, str):
+        return SecretDict(json.loads(config))
+    else:
+        return SecretDict(config)
+
+
+def parse_catalog(catalog: Dict | str | None) -> Optional[ConfiguredAirbyteCatalog]:
+    if not catalog:
+        return None
+    if isinstance(catalog, str):
+        return ConfiguredAirbyteCatalog.parse_obj(json.loads(catalog))
+    else:
+        return ConfiguredAirbyteCatalog.parse_obj(catalog)
+
+
+def parse_state(state: Dict | str | None) -> Optional[Dict]:
+    if not state:
+        return None
+    if isinstance(state, str):
+        return json.loads(state)
+    else:
+        return state
+
+
+CONNECTION_OBJECT_TYPE_TO_PARSER = {
+    ConnectionObject.SOURCE_CONFIG: parse_config,
+    ConnectionObject.CONFIGURED_CATALOG: parse_catalog,
+    ConnectionObject.STATE: parse_state,
+}
+
+
+def get_connector_config_from_path(config_path: Path) -> Optional[SecretDict]:
+    return parse_config(config_path.read_text())
+
+
+def get_state_from_path(state_path: Path) -> Optional[Dict]:
+    return parse_state(state_path.read_text())
+
+
+def get_catalog_from_path(path: Path) -> Optional[ConfiguredAirbyteCatalog]:
+    return parse_catalog(path.read_text())
+
+
+COMMAND_TO_REQUIRED_OBJECT_TYPES = {
+    Command.SPEC: set(),
+    Command.CHECK: {ConnectionObject.SOURCE_CONFIG},
+    Command.DISCOVER: {ConnectionObject.SOURCE_CONFIG},
+    Command.READ: {ConnectionObject.SOURCE_CONFIG, ConnectionObject.CONFIGURED_CATALOG},
+    Command.READ_WITH_STATE: {
+        ConnectionObject.SOURCE_CONFIG,
+        ConnectionObject.CONFIGURED_CATALOG,
+        ConnectionObject.STATE,
+    },
+}
+
+
+def get_connection_objects(
+    requested_objects: Set[ConnectionObject],
+    connection_id: Optional[str],
+    custom_config_path: Optional[Path],
+    custom_catalog_path: Optional[Path],
+    custom_state_path: Optional[Path],
+    retrieval_reason: Optional[str],
+) -> ConnectionObjects:
+    """This function retrieves the values of the connection objects.
+    It checks that the required objects are available and raises a ValueError if they are not.
+    If a connection_id is provided, it retrieves the connection objects from the connection.
+    If custom objects are provided, they override the retrieved objects.
+
+    Args:
+        requested_objects (Set[ConnectionObject]): The set of requested connection objects.
+        connection_id (Optional[str]): The connection id to retrieve the connection objects for.
+        custom_config_path (Optional[Path]): The local path to the custom config to use.
+        custom_catalog_path (Optional[Path]): The local path to the custom catalog to use.
+        custom_state_path (Optional[Path]): The local path to the custom state to use.
+        retrieval_reason (Optional[str]): The reason to access the connection objects.
+    Raises:
+        ValueError: If a required object is missing for the command.
+        ValueError: If a retrieval reason is missing when passing a connection id.
+    Returns:
+        ConnectionObjects: The values of the connection objects.
+    """
+
+    custom_config = get_connector_config_from_path(custom_config_path) if custom_config_path else None
+    custom_catalog = get_catalog_from_path(custom_catalog_path) if custom_catalog_path else None
+    custom_state = get_state_from_path(custom_state_path) if custom_state_path else None
+
+    if not connection_id:
+        connection_object = ConnectionObjects(source_config=custom_config, catalog=custom_catalog, state=custom_state)
+    else:
+        if not retrieval_reason:
+            raise ValueError("A retrieval reason is required to access the connection objects when passing a connection id.")
+        retrieved_objects = retrieve_objects(connection_id, requested_objects, retrieval_reason=retrieval_reason)
+        retrieved_config = parse_config(retrieved_objects.get(ConnectionObject.SOURCE_CONFIG))
+        retrieved_catalog = parse_catalog(retrieved_objects.get(ConnectionObject.CONFIGURED_CATALOG))
+        retrieved_state = parse_state(retrieved_objects.get(ConnectionObject.STATE))
+        connection_object = ConnectionObjects(
+            source_config=custom_config if custom_config else retrieved_config,
+            catalog=custom_catalog if custom_catalog else retrieved_catalog,
+            state=custom_state if custom_state else retrieved_state,
+        )
+    if not connection_object.source_config and ConnectionObject.SOURCE_CONFIG in requested_objects:
+        raise ValueError("A source config is required to run the command.")
+    if not connection_object.catalog and ConnectionObject.CONFIGURED_CATALOG in requested_objects:
+        raise ValueError("A catalog is required to run the command.")
+    if not connection_object.state and ConnectionObject.STATE in requested_objects:
+        raise ValueError("A state is required to run the command.")
+    return connection_object
+from __future__ import annotations import time from dataclasses import dataclass, field @@ -50,35 +51,35 @@ def __contains__(self, key: Any) -> bool: def __repr__(self) -> str: return repr(self.data) - def __or__(self, other: "UserDict" | dict) -> "UserDict": + def __or__(self, other: UserDict | dict) -> UserDict: if isinstance(other, UserDict): return self.__class__(self.data | other.data) # type: ignore if isinstance(other, dict): return self.__class__(self.data | other) # type: ignore return NotImplemented - def __ror__(self, other: "UserDict" | dict) -> "UserDict": + def __ror__(self, other: UserDict | dict) -> UserDict: if isinstance(other, UserDict): return self.__class__(other.data | self.data) # type: ignore if isinstance(other, dict): return self.__class__(other | self.data) # type: ignore return NotImplemented - def __ior__(self, other: "UserDict" | dict) -> "UserDict": + def __ior__(self, other: UserDict | dict) -> UserDict: if isinstance(other, UserDict): self.data |= other.data # type: ignore else: self.data |= other # type: ignore return self - def __copy__(self) -> "UserDict": + def __copy__(self) -> UserDict: inst = self.__class__.__new__(self.__class__) inst.__dict__.update(self.__dict__) # Create a copy and avoid triggering descriptors inst.__dict__["data"] = self.__dict__["data"].copy() return inst - def copy(self) -> "UserDict": + def copy(self) -> UserDict: if self.__class__ is UserDict: return UserDict(self.data.copy()) # type: ignore import copy @@ -93,7 +94,7 @@ def copy(self) -> "UserDict": return c @classmethod - def fromkeys(cls, iterable: Iterable, value: Optional[Any] = None) -> "UserDict": + def fromkeys(cls, iterable: Iterable, value: Optional[Any] = None) -> UserDict: d = cls() for key in iterable: d[key] = value @@ -233,5 +234,12 @@ async def save_to_disk(self, output_dir: Path) -> None: # Make backends use customizable airbyte_messages_dir = final_dir / "airbyte_messages" airbyte_messages_dir.mkdir(parents=True, exist_ok=True) - await FileBackend(airbyte_messages_dir).write(self.execution_result.airbyte_messages) + FileBackend(airbyte_messages_dir).write(self.execution_result.airbyte_messages) self.saved_path = final_dir + + +@dataclass(kw_only=True) +class ConnectionObjects: + source_config: Optional[SecretDict] + catalog: Optional[ConfiguredAirbyteCatalog] + state: Optional[Dict] diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/utils.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/utils.py index 4b62defb821a..4437ea0f2e18 100644 --- a/airbyte-ci/connectors/live-tests/src/live_tests/commons/utils.py +++ b/airbyte-ci/connectors/live-tests/src/live_tests/commons/utils.py @@ -1,32 +1,12 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-import json -from pathlib import Path -from typing import Dict, List, Optional +from typing import List import dagger -from live_tests.commons.connector_runner import SecretDict, get_connector_container +from live_tests.commons.connector_runner import get_connector_container from live_tests.commons.models import ConnectorUnderTest -def get_connector_config(path: Optional[str | Path]) -> Optional[SecretDict]: - if path is None: - return None - return SecretDict(_read_json(path)) - - -def get_state(path: Optional[str | Path]) -> Optional[Dict]: - if path is None: - return None - return _read_json(path) - - -def _read_json(path: Path | str) -> Dict: - with open(str(path), "r") as file: - contents = file.read() - return json.loads(contents) - - async def get_connector_under_test(dagger_client: dagger.Client, connector_image_name: str) -> ConnectorUnderTest: dagger_container = await get_connector_container(dagger_client, connector_image_name) return ConnectorUnderTest(connector_image_name, dagger_container) diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/debug/cli.py b/airbyte-ci/connectors/live-tests/src/live_tests/debug/cli.py index f42633a87e95..d7ddb545d69f 100644 --- a/airbyte-ci/connectors/live-tests/src/live_tests/debug/cli.py +++ b/airbyte-ci/connectors/live-tests/src/live_tests/debug/cli.py @@ -6,10 +6,10 @@ import asyncclick as click import dagger -from airbyte_protocol.models import ConfiguredAirbyteCatalog # type: ignore +from live_tests.commons.connection_objects_retrieval import COMMAND_TO_REQUIRED_OBJECT_TYPES, get_connection_objects from live_tests.commons.connector_runner import ConnectorRunner from live_tests.commons.models import Command, ExecutionInputs, ExecutionReport -from live_tests.commons.utils import get_connector_config, get_connector_under_test, get_state +from live_tests.commons.utils import get_connector_under_test from live_tests.debug import DAGGER_CONFIG @@ -22,6 +22,25 @@ type=click.Choice([c.value for c in Command]), callback=lambda _, __, value: Command(value), ) +@click.option("--connection-id", type=str, required=False, default=None) +@click.option( + "--config-path", + type=click.Path(file_okay=True, readable=True, dir_okay=False, resolve_path=True, path_type=Path), + required=False, + default=None, +) +@click.option( + "--catalog-path", + type=click.Path(file_okay=True, readable=True, dir_okay=False, resolve_path=True, path_type=Path), + required=False, + default=None, +) +@click.option( + "--state-path", + type=click.Path(file_okay=True, readable=True, dir_okay=False, resolve_path=True, path_type=Path), + required=False, + default=None, +) @click.option( "-c", "--connector-image", @@ -38,21 +57,6 @@ default=Path("live_tests_debug_reports"), type=click.Path(file_okay=False, dir_okay=True, resolve_path=True, path_type=Path), ) -@click.option( - "--config-path", - help="Path to the connector config.", - type=click.Path(exists=True, file_okay=True, dir_okay=False, resolve_path=True, path_type=Path), -) -@click.option( - "--catalog-path", - help="Path to the connector catalog.", - type=click.Path(exists=True, file_okay=True, dir_okay=False, resolve_path=True, path_type=Path), -) -@click.option( - "--state-path", - help="Path to the connector state.", - type=click.Path(exists=True, file_okay=True, dir_okay=False, resolve_path=True, path_type=Path), -) @click.option( "-hc", "--http-cache", @@ -67,24 +71,41 @@ async def debug_cmd( ctx: click.Context, command: Command, + connection_id: Optional[str], + config_path: Optional[Path], + catalog_path: 
Optional[Path], + state_path: Optional[Path], connector_images: List[str], output_directory: Path, - config_path: Optional[str], - catalog_path: Optional[str], - state_path: Optional[str], enable_http_cache: bool, ) -> None: output_directory.mkdir(parents=True, exist_ok=True) debug_session_start_time = int(time.time()) + if connection_id: + retrieval_reason = click.prompt("👮‍♂️ Please provide a reason for accessing the connection objects. This will be logged") + else: + retrieval_reason = None + + try: + connection_objects = get_connection_objects( + COMMAND_TO_REQUIRED_OBJECT_TYPES[command], + connection_id, + config_path, + catalog_path, + state_path, + retrieval_reason, + ) + except ValueError as e: + raise click.UsageError(str(e)) async with dagger.Connection(config=DAGGER_CONFIG) as dagger_client: for connector_image in connector_images: try: execution_inputs = ExecutionInputs( connector_under_test=await get_connector_under_test(dagger_client, connector_image), command=command, - config=get_connector_config(config_path), - catalog=ConfiguredAirbyteCatalog.parse_file(catalog_path) if catalog_path else None, - state=get_state(state_path) if state_path else None, + config=connection_objects.source_config, + catalog=connection_objects.catalog, + state=connection_objects.state, environment_variables=None, enable_http_cache=enable_http_cache, ) diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/conftest.py b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/conftest.py index ba60ce4a21b4..d0686ddb6aa0 100644 --- a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/conftest.py +++ b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/conftest.py @@ -10,9 +10,18 @@ import dagger import pytest from airbyte_protocol.models import ConfiguredAirbyteCatalog # type: ignore +from live_tests.commons.connection_objects_retrieval import ConnectionObject, get_connection_objects from live_tests.commons.connector_runner import ConnectorRunner -from live_tests.commons.models import Command, ConnectorUnderTest, ExecutionInputs, ExecutionReport, ExecutionResult, SecretDict -from live_tests.commons.utils import get_connector_config, get_connector_under_test, get_state +from live_tests.commons.models import ( + Command, + ConnectionObjects, + ConnectorUnderTest, + ExecutionInputs, + ExecutionReport, + ExecutionResult, + SecretDict, +) +from live_tests.commons.utils import get_connector_under_test if TYPE_CHECKING: from _pytest.config import Config @@ -20,7 +29,10 @@ from _pytest.fixtures import SubRequest from pytest_sugar import SugarTerminalReporter # type: ignore +## CONSTS LOGGER = logging.getLogger("regression_tests") +MAIN_OUTPUT_DIRECTORY = Path("/tmp/regression_tests_artifacts") + # It's used by Dagger and it's very verbose logging.getLogger("httpx").setLevel(logging.ERROR) @@ -32,11 +44,6 @@ ## PYTEST HOOKS def pytest_addoption(parser: Parser) -> None: - parser.addoption( - "--output-directory", - default="./regression_tests_artifacts", - help="Path to a directory where the test execution reports will be stored", - ) parser.addoption( "--connector-image", help="The connector image name on which the regression tests will run: e.g. airbyte/source-faker", @@ -51,15 +58,20 @@ def pytest_addoption(parser: Parser) -> None: default="dev", help="The target version used for regression testing. Defaults to dev", ) + parser.addoption( + "--deny-confirmation", + default=False, + help="Always deny confirmation prompts.
Useful for test development. Defaults to False", + ) parser.addoption("--config-path") parser.addoption("--catalog-path") parser.addoption("--state-path") + parser.addoption("--connection-id") def pytest_configure(config: Config) -> None: start_timestamp = int(time.time()) - main_output_directory = Path(config.option.output_directory) - test_artifacts_directory = main_output_directory / f"session_{start_timestamp}" + test_artifacts_directory = MAIN_OUTPUT_DIRECTORY / f"session_{start_timestamp}" test_artifacts_directory.mkdir(parents=True, exist_ok=True) dagger_log_path = test_artifacts_directory / "dagger.log" config.stash[SESSION_START_TIMESTAMP] = start_timestamp @@ -121,6 +133,14 @@ def get_option_or_fail(request: SubRequest, option: str) -> str: pytest.fail(f"Missing required option: {option}") +def ask_for_confirmation(message: str, always_deny: bool = False) -> None: + if always_deny: + pytest.skip("Skipped by user.") + if not os.environ.get("CI"): + if not input(f"{message}. Do you want to continue? [y/N]: ").lower().strip() == "y": + pytest.skip("Skipped by user.") + + ## FIXTURES @@ -139,6 +159,11 @@ def test_artifacts_directory(request: SubRequest) -> Path: return request.config.stash[TEST_ARTIFACT_DIRECTORY] +@pytest.fixture(scope="session") +def deny_confirmation(request: SubRequest) -> bool: + return bool(request.config.getoption("--deny-confirmation")) + + @pytest.fixture(scope="session") def connector_image(request: SubRequest) -> str: return get_option_or_fail(request, "--connector-image") @@ -150,24 +175,88 @@ def control_version(request: SubRequest) -> str: @pytest.fixture(scope="session") -def target_version(request: SubRequest) -> str: - return get_option_or_fail(request, "--target-version") +def target_version(control_version: str, request: SubRequest) -> str: + target_version = get_option_or_fail(request, "--target-version") + if target_version == control_version: + pytest.fail(f"Control and target versions are the same: {control_version}. 
Please provide different versions.") + return target_version + + +@pytest.fixture(scope="session") +def connection_id(request: SubRequest) -> Optional[str]: + return request.config.getoption("--connection-id") @pytest.fixture(scope="session") -def catalog(request: SubRequest) -> Optional[ConfiguredAirbyteCatalog]: - catalog_path = get_option_or_fail(request, "--catalog-path") - return ConfiguredAirbyteCatalog.parse_file(catalog_path) if catalog_path else None +def custom_source_config_path(request: SubRequest) -> Optional[Path]: + if config_path := request.config.getoption("--config-path"): + return Path(config_path) + return None @pytest.fixture(scope="session") -def connector_config(request: SubRequest) -> Optional[SecretDict]: - return get_connector_config(get_option_or_fail(request, "--config-path")) +def custom_catalog_path(request: SubRequest) -> Optional[Path]: + if catalog_path := request.config.getoption("--catalog-path"): + return Path(catalog_path) + return None @pytest.fixture(scope="session") -def state(request: SubRequest) -> Optional[dict]: - return get_state(get_option_or_fail(request, "--state-path")) +def custom_state_path(request: SubRequest) -> Optional[Path]: + if state_path := request.config.getoption("--state-path"): + return Path(state_path) + return None + + +@pytest.fixture(scope="session") +def retrieval_reason( + connection_id: Optional[str], + connector_image: str, + control_version: str, + target_version: str, +) -> Optional[str]: + if connection_id: + return f"Running regression tests on connection {connection_id} for connector {connector_image} on the control ({control_version}) and target versions ({target_version})." + return None + + +@pytest.fixture(scope="session") +def connection_objects( + connection_id: Optional[str], + custom_source_config_path: Optional[Path], + custom_catalog_path: Optional[Path], + custom_state_path: Optional[Path], + retrieval_reason: Optional[str], +) -> ConnectionObjects: + return get_connection_objects( + { + ConnectionObject.SOURCE_CONFIG, + ConnectionObject.CONFIGURED_CATALOG, + ConnectionObject.STATE, + }, + connection_id, + custom_source_config_path, + custom_catalog_path, + custom_state_path, + retrieval_reason, + ) + + +@pytest.fixture(scope="session") +def connector_config(connection_objects: ConnectionObjects) -> Optional[SecretDict]: + return connection_objects.source_config + + +@pytest.fixture(scope="session") +def catalog( + connection_objects: ConnectionObjects, +) -> Optional[ConfiguredAirbyteCatalog]: + return connection_objects.catalog + + +@pytest.fixture(scope="session") +def state(connection_objects: ConnectionObjects) -> Optional[Dict]: + return connection_objects.state @pytest.fixture(scope="session") @@ -449,7 +538,12 @@ async def read_control_execution_result( read_control_execution_inputs: ExecutionInputs, read_control_connector_runner: ConnectorRunner, session_start_timestamp: int, + deny_confirmation: bool, ) -> ExecutionResult: + ask_for_confirmation( + f"{request.node.name} will run a full refresh read on control connector. 
It might induce rate limits or costs on source", + deny_confirmation, + ) logging.info(f"Running read for control connector {read_control_execution_inputs.connector_under_test.name}") execution_result = await read_control_connector_runner.run() execution_report = await persist_report( @@ -477,7 +571,12 @@ async def read_target_execution_result( read_target_execution_inputs: ExecutionInputs, read_target_connector_runner: ConnectorRunner, session_start_timestamp: int, + deny_confirmation: bool, ) -> ExecutionResult: + ask_for_confirmation( + f"{request.node.name} will run a full refresh read on target connector. It might induce rate limits or costs on source", + deny_confirmation, + ) logging.info(f"Running read for target connector {read_target_execution_inputs.connector_under_test.name}") execution_result = await read_target_connector_runner.run() execution_report = await persist_report( @@ -539,7 +638,12 @@ async def read_with_state_control_execution_result( read_with_state_control_execution_inputs: ExecutionInputs, read_with_state_control_connector_runner: ConnectorRunner, session_start_timestamp: int, + deny_confirmation: bool, ) -> ExecutionResult: + ask_for_confirmation( + f"{request.node.name} will run an incremental read on control connector. It might induce rate limits or costs on source", + deny_confirmation, + ) logging.info(f"Running read with state for control connector {read_with_state_control_execution_inputs.connector_under_test.name}") execution_result = await read_with_state_control_connector_runner.run() execution_report = await persist_report( @@ -570,7 +674,12 @@ async def read_with_state_target_execution_result( read_with_state_target_execution_inputs: ExecutionInputs, read_with_state_target_connector_runner: ConnectorRunner, session_start_timestamp: int, + deny_confirmation: bool, ) -> ExecutionResult: + ask_for_confirmation( + f"{request.node.name} will run an incremental read on target connector. 
It might induce rate limits or costs on source", + deny_confirmation, + ) logging.info(f"Running read with state for target connector {read_with_state_target_execution_inputs.connector_under_test.name}") execution_result = await read_with_state_target_connector_runner.run() execution_report = await persist_report( diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/pytest.ini b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/pytest.ini index 060aaa5a285f..92e77339fe7d 100644 --- a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/pytest.ini +++ b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/pytest.ini @@ -1,4 +1,5 @@ [pytest] +addopts = --capture=no console_output_style = progress log_cli = True log_cli_level= INFO diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/test_expected_records.py b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/test_expected_records.py index 74a8c26db977..5d16dbf3b727 100644 --- a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/test_expected_records.py +++ b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/test_expected_records.py @@ -12,9 +12,9 @@ # This test is very basic and just used as a demonstration before porting the "real" expected records tests from VA async def test_all_records_are_produced_in_target_version( - read_control_execution_result: ExecutionResult, - read_target_execution_result: ExecutionResult, + read_with_state_control_execution_result: ExecutionResult, + read_with_state_target_execution_result: ExecutionResult, ) -> None: - control_records = list(make_comparable_records(filter_records(read_control_execution_result.airbyte_messages))) - target_records = list(make_comparable_records(filter_records(read_target_execution_result.airbyte_messages))) + control_records = list(make_comparable_records(filter_records(read_with_state_control_execution_result.airbyte_messages))) + target_records = list(make_comparable_records(filter_records(read_with_state_target_execution_result.airbyte_messages))) assert target_records == control_records diff --git a/airbyte-ci/connectors/live-tests/tests/backends/test_file_backend.py b/airbyte-ci/connectors/live-tests/tests/backends/test_file_backend.py index 0de07435efb4..fda723c5b6a5 100644 --- a/airbyte-ci/connectors/live-tests/tests/backends/test_file_backend.py +++ b/airbyte-ci/connectors/live-tests/tests/backends/test_file_backend.py @@ -16,7 +16,6 @@ from live_tests.commons.backends import FileBackend -@pytest.mark.asyncio @pytest.mark.parametrize( "messages, expected_writes", [ @@ -62,9 +61,9 @@ ), ], ) -async def test_write(tmp_path, messages, expected_writes): +def test_write(tmp_path, messages, expected_writes): backend = FileBackend(tmp_path) - await backend.write(messages) + backend.write(messages) for expected_file, expected_content in expected_writes: expected_path = Path(tmp_path / expected_file) assert expected_path.exists() diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/__init__.py index 43bcee98b388..9edf8c640de6 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/__init__.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/__init__.py @@ -12,7 +12,8 @@ "airbyte-ci/connectors/connector_ops", "airbyte-ci/connectors/connectors_qa", "airbyte-ci/connectors/ci_credentials", - "airbyte-ci/connectors/live-tests", + # This 
will move to a different repo + #"airbyte-ci/connectors/live-tests", "airbyte-ci/connectors/metadata_service/lib", "airbyte-ci/connectors/metadata_service/orchestrator", "airbyte-integrations/bases/connector-acceptance-test" From 4e05272b0ff8c3e4908cc29afcdf107ea63f197b Mon Sep 17 00:00:00 2001 From: Augustin Date: Mon, 11 Mar 2024 11:32:39 +0100 Subject: [PATCH 154/172] live-tests: add duckdb backend (#35923) --- airbyte-ci/connectors/live-tests/README.md | 14 +- airbyte-ci/connectors/live-tests/poetry.lock | 159 +++++++++++++++++- .../connectors/live-tests/pyproject.toml | 4 +- .../live_tests/commons/backends/__init__.py | 3 +- .../commons/backends/duckdb_backend.py | 21 +++ .../commons/backends/file_backend.py | 47 +++++- .../src/live_tests/commons/models.py | 4 +- .../tests/backends/test_file_backend.py | 2 +- 8 files changed, 240 insertions(+), 14 deletions(-) create mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/duckdb_backend.py diff --git a/airbyte-ci/connectors/live-tests/README.md b/airbyte-ci/connectors/live-tests/README.md index f057330dd5c0..d921114cedb1 100644 --- a/airbyte-ci/connectors/live-tests/README.md +++ b/airbyte-ci/connectors/live-tests/README.md @@ -54,6 +54,7 @@ It will write artifacts to an output directory: * `stdout.log`: The collected standard output following the command execution * `stderr.log`: The collected standard error following the command execution * `http_dump.mitm`: An `mitmproxy` http stream log. Can be consumed with `mitmweb` (version `9.0.1`) for debugging. +* `airbyte_messages/duck.db`: A DuckDB database containing the messages produced by the connector. * `airbyte_messages`: A directory containing `.jsonl` files for each message type (logs, records, traces, controls, states etc.) produced by the connector. #### Example @@ -76,18 +77,20 @@ live_tests_debug_reports └── read ├── dev │   ├── airbyte_messages + | │ ├── duck.db # DuckDB database │   │   ├── logs.jsonl │   │   ├── pokemon_records.jsonl │   │   └── traces.jsonl │   ├── http_dump.mitm # Consume it with mitmweb --rfile http_dump.mitm │   ├── stderr.log │   └── stdout.log └── latest ├── airbyte_messages + │ ├── duck.db # DuckDB database │   ├── logs.jsonl │   ├── pokemon_records.jsonl │   └── traces.jsonl ├── http_dump.mitm # Consume it with mitmweb --rfile http_dump.mitm ├── stderr.log └── stdout.log @@ -144,6 +147,9 @@ You can also pass local connection objects path to override the live connection ## Changelog +### 0.4.0 +Introduce DuckDB to store the messages produced by the connector. + ### 0.3.0 Pass connection id to the regression tests suite.
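For reference, here is a minimal sketch of how the DuckDB artifact described in the README above can be inspected after a debug session. It assumes the `duckdb` Python package is installed and that a `read` session has already produced the artifacts; the path below is illustrative, and table names follow the `.jsonl` file stems (e.g. `records`), so a table only exists if the corresponding file was written.

```python
# Open a debug session's DuckDB artifact read-only and count records per stream.
# The path is illustrative; point it at your own session's airbyte_messages dir.
import duckdb

conn = duckdb.connect("live_tests_debug_reports/read/dev/airbyte_messages/duck.db", read_only=True)
print(conn.sql("SELECT stream, COUNT(*) AS record_count FROM records GROUP BY stream").fetchall())
conn.close()
```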
diff --git a/airbyte-ci/connectors/live-tests/poetry.lock b/airbyte-ci/connectors/live-tests/poetry.lock index b1844e7dcfa3..e59b7b54c38e 100644 --- a/airbyte-ci/connectors/live-tests/poetry.lock +++ b/airbyte-ci/connectors/live-tests/poetry.lock @@ -612,6 +612,62 @@ websocket-client = ">=0.32.0" [package.extras] ssh = ["paramiko (>=2.4.3)"] +[[package]] +name = "duckdb" +version = "0.10.0" +description = "DuckDB in-process database" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "duckdb-0.10.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:bd0ffb3fddef0f72a150e4d76e10942a84a1a0447d10907df1621b90d6668060"}, + {file = "duckdb-0.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f3d709d5c7c1a12b5e10d0b05fa916c670cd2b50178e3696faa0cc16048a1745"}, + {file = "duckdb-0.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9114aa22ec5d591a20ce5184be90f49d8e5b5348ceaab21e102c54560d07a5f8"}, + {file = "duckdb-0.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77a37877efadf39caf7cadde0f430fedf762751b9c54750c821e2f1316705a21"}, + {file = "duckdb-0.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87cbc9e1d9c3fc9f14307bea757f99f15f46843c0ab13a6061354410824ed41f"}, + {file = "duckdb-0.10.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f0bfec79fed387201550517d325dff4fad2705020bc139d936cab08b9e845662"}, + {file = "duckdb-0.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c5622134d2d9796b15e09de810e450859d4beb46d9b861357ec9ae40a61b775c"}, + {file = "duckdb-0.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:089ee8e831ccaef1b73fc89c43b661567175eed0115454880bafed5e35cda702"}, + {file = "duckdb-0.10.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a05af63747f1d7021995f0811c333dee7316cec3b06c0d3e4741b9bdb678dd21"}, + {file = "duckdb-0.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:072d6eba5d8a59e0069a8b5b4252fed8a21f9fe3f85a9129d186a39b3d0aea03"}, + {file = "duckdb-0.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a77b85668f59b919042832e4659538337f1c7f197123076c5311f1c9cf077df7"}, + {file = "duckdb-0.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96a666f1d2da65d03199a977aec246920920a5ea1da76b70ae02bd4fb1ffc48c"}, + {file = "duckdb-0.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ec76a4262b783628d26612d184834852d9c92fb203e91af789100c17e3d7173"}, + {file = "duckdb-0.10.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:009dd9d2cdbd3b061a9efbdfc79f2d1a8377bcf49f1e5f430138621f8c083a6c"}, + {file = "duckdb-0.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:878f06766088090dad4a2e5ee0081555242b2e8dcb29415ecc97e388cf0cf8d8"}, + {file = "duckdb-0.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:713ff0a1fb63a6d60f454acf67f31656549fb5d63f21ac68314e4f522daa1a89"}, + {file = "duckdb-0.10.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9c0ee450dfedfb52dd4957244e31820feef17228da31af6d052979450a80fd19"}, + {file = "duckdb-0.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ff79b2ea9994398b545c0d10601cd73565fbd09f8951b3d8003c7c5c0cebc7cb"}, + {file = "duckdb-0.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6bdf1aa71b924ef651062e6b8ff9981ad85bec89598294af8a072062c5717340"}, + {file = "duckdb-0.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d0265bbc8216be3ced7b377ba8847128a3fc0ef99798a3c4557c1b88e3a01c23"}, + {file = "duckdb-0.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d418a315a07707a693bd985274c0f8c4dd77015d9ef5d8d3da4cc1942fd82e0"}, + {file = "duckdb-0.10.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2828475a292e68c71855190b818aded6bce7328f79e38c04a0c75f8f1c0ceef0"}, + {file = "duckdb-0.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c3aaeaae2eba97035c65f31ffdb18202c951337bf2b3d53d77ce1da8ae2ecf51"}, + {file = "duckdb-0.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:c51790aaaea97d8e4a58a114c371ed8d2c4e1ca7cbf29e3bdab6d8ccfc5afc1e"}, + {file = "duckdb-0.10.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8af1ae7cc77a12206b6c47ade191882cc8f49f750bb3e72bb86ac1d4fa89926a"}, + {file = "duckdb-0.10.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa4f7e8e8dc0e376aeb280b83f2584d0e25ec38985c27d19f3107b2edc4f4a97"}, + {file = "duckdb-0.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28ae942a79fad913defa912b56483cd7827a4e7721f4ce4bc9025b746ecb3c89"}, + {file = "duckdb-0.10.0-cp37-cp37m-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:01b57802898091455ca2a32c1335aac1e398da77c99e8a96a1e5de09f6a0add9"}, + {file = "duckdb-0.10.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:52e1ad4a55fa153d320c367046b9500578192e01c6d04308ba8b540441736f2c"}, + {file = "duckdb-0.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:904c47d04095af745e989c853f0bfc0776913dfc40dfbd2da7afdbbb5f67fed0"}, + {file = "duckdb-0.10.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:184ae7ea5874f3b8fa51ab0f1519bdd088a0b78c32080ee272b1d137e2c8fd9c"}, + {file = "duckdb-0.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bd33982ecc9bac727a032d6cedced9f19033cbad56647147408891eb51a6cb37"}, + {file = "duckdb-0.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f59bf0949899105dd5f8864cb48139bfb78454a8c017b8258ba2b5e90acf7afc"}, + {file = "duckdb-0.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:395f3b18948001e35dceb48a4423d574e38656606d033eef375408b539e7b076"}, + {file = "duckdb-0.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b8eb2b803be7ee1df70435c33b03a4598cdaf676cd67ad782b288dcff65d781"}, + {file = "duckdb-0.10.0-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:31b2ddd331801064326c8e3587a4db8a31d02aef11332c168f45b3bd92effb41"}, + {file = "duckdb-0.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c8b89e76a041424b8c2026c5dc1f74b53fbbc6c6f650d563259885ab2e7d093d"}, + {file = "duckdb-0.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:79084a82f16c0a54f6bfb7ded5600400c2daa90eb0d83337d81a56924eaee5d4"}, + {file = "duckdb-0.10.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:79799b3a270dcd9070f677ba510f1e66b112df3068425691bac97c5e278929c7"}, + {file = "duckdb-0.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e8fc394bfe3434920cdbcfbdd0ac3ba40902faa1dbda088db0ba44003a45318a"}, + {file = "duckdb-0.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c116605551b4abf5786243a59bcef02bd69cc51837d0c57cafaa68cdc428aa0c"}, + {file = "duckdb-0.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3191170c3b0a43b0c12644800326f5afdea00d5a4621d59dbbd0c1059139e140"}, + {file = "duckdb-0.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:fee69a50eb93c72dc77e7ab1fabe0c38d21a52c5da44a86aa217081e38f9f1bd"}, + {file = "duckdb-0.10.0-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c5f449e87dacb16b0d145dbe65fa6fdb5a55b2b6911a46d74876e445dd395bac"}, + {file = "duckdb-0.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4487d0df221b17ea4177ad08131bc606b35f25cfadf890987833055b9d10cdf6"}, + {file = "duckdb-0.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:c099ae2ff8fe939fda62da81704f91e2f92ac45e48dc0e37c679c9d243d01e65"}, + {file = "duckdb-0.10.0.tar.gz", hash = "sha256:c02bcc128002aa79e3c9d89b9de25e062d1096a8793bc0d7932317b7977f6845"}, +] + [[package]] name = "exceptiongroup" version = "1.2.0" @@ -727,12 +783,12 @@ files = [ google-auth = ">=2.14.1,<3.0.dev0" googleapis-common-protos = ">=1.56.2,<2.0.dev0" grpcio = [ - {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, + {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, ] grpcio-status = [ - {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, + {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, ] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" requests = ">=2.18.0,<3.0.0.dev0" @@ -850,8 +906,8 @@ google-cloud-audit-log = ">=0.1.0,<1.0.0dev" google-cloud-core = ">=2.0.0,<3.0.0dev" grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" proto-plus = [ - {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""}, {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, + {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""}, ] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" @@ -1425,6 +1481,79 @@ files = [ {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, ] +[[package]] +name = "pandas" +version = "2.2.1" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas-2.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8df8612be9cd1c7797c93e1c5df861b2ddda0b48b08f2c3eaa0702cf88fb5f88"}, + {file = "pandas-2.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0f573ab277252ed9aaf38240f3b54cfc90fff8e5cab70411ee1d03f5d51f3944"}, + {file = "pandas-2.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f02a3a6c83df4026e55b63c1f06476c9aa3ed6af3d89b4f04ea656ccdaaaa359"}, + {file = "pandas-2.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c38ce92cb22a4bea4e3929429aa1067a454dcc9c335799af93ba9be21b6beb51"}, + {file = "pandas-2.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c2ce852e1cf2509a69e98358e8458775f89599566ac3775e70419b98615f4b06"}, + {file = "pandas-2.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:53680dc9b2519cbf609c62db3ed7c0b499077c7fefda564e330286e619ff0dd9"}, + {file = "pandas-2.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:94e714a1cca63e4f5939cdce5f29ba8d415d85166be3441165edd427dc9f6bc0"}, + {file = "pandas-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f821213d48f4ab353d20ebc24e4faf94ba40d76680642fb7ce2ea31a3ad94f9b"}, + {file = "pandas-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c70e00c2d894cb230e5c15e4b1e1e6b2b478e09cf27cc593a11ef955b9ecc81a"}, + {file = "pandas-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e97fbb5387c69209f134893abc788a6486dbf2f9e511070ca05eed4b930b1b02"}, + {file = "pandas-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101d0eb9c5361aa0146f500773395a03839a5e6ecde4d4b6ced88b7e5a1a6403"}, + {file = "pandas-2.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7d2ed41c319c9fb4fd454fe25372028dfa417aacb9790f68171b2e3f06eae8cd"}, + {file = "pandas-2.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:af5d3c00557d657c8773ef9ee702c61dd13b9d7426794c9dfeb1dc4a0bf0ebc7"}, + {file = "pandas-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:06cf591dbaefb6da9de8472535b185cba556d0ce2e6ed28e21d919704fef1a9e"}, + {file = "pandas-2.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:88ecb5c01bb9ca927ebc4098136038519aa5d66b44671861ffab754cae75102c"}, + {file = "pandas-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:04f6ec3baec203c13e3f8b139fb0f9f86cd8c0b94603ae3ae8ce9a422e9f5bee"}, + {file = "pandas-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a935a90a76c44fe170d01e90a3594beef9e9a6220021acfb26053d01426f7dc2"}, + {file = "pandas-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c391f594aae2fd9f679d419e9a4d5ba4bce5bb13f6a989195656e7dc4b95c8f0"}, + {file = "pandas-2.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9d1265545f579edf3f8f0cb6f89f234f5e44ba725a34d86535b1a1d38decbccc"}, + {file = "pandas-2.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11940e9e3056576ac3244baef2fedade891977bcc1cb7e5cc8f8cc7d603edc89"}, + {file = "pandas-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:4acf681325ee1c7f950d058b05a820441075b0dd9a2adf5c4835b9bc056bf4fb"}, + {file = "pandas-2.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9bd8a40f47080825af4317d0340c656744f2bfdb6819f818e6ba3cd24c0e1397"}, + {file = "pandas-2.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:df0c37ebd19e11d089ceba66eba59a168242fc6b7155cba4ffffa6eccdfb8f16"}, + {file = "pandas-2.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:739cc70eaf17d57608639e74d63387b0d8594ce02f69e7a0b046f117974b3019"}, + {file = "pandas-2.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9d3558d263073ed95e46f4650becff0c5e1ffe0fc3a015de3c79283dfbdb3df"}, + {file = "pandas-2.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4aa1d8707812a658debf03824016bf5ea0d516afdea29b7dc14cf687bc4d4ec6"}, + {file = "pandas-2.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:76f27a809cda87e07f192f001d11adc2b930e93a2b0c4a236fde5429527423be"}, + {file = "pandas-2.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:1ba21b1d5c0e43416218db63037dbe1a01fc101dc6e6024bcad08123e48004ab"}, + {file = "pandas-2.2.1.tar.gz", hash = "sha256:0ab90f87093c13f3e8fa45b48ba9f39181046e8f3317d3aadb2fffbb1b978572"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.22.4,<2", markers = 
"python_version < \"3.11\""}, + {version = ">=1.23.2,<2", markers = "python_version == \"3.11\""}, + {version = ">=1.26.0,<2", markers = "python_version >= \"3.12\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + [[package]] name = "pandas-stubs" version = "2.2.0.240218" @@ -1710,6 +1839,17 @@ files = [ [package.extras] cli = ["click (>=5.0)"] +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + [[package]] name = "pywin32" version = "306" @@ -1979,6 +2119,17 @@ files = [ {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] +[[package]] +name = "tzdata" +version = "2024.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, +] + 
[[package]] name = "urllib3" version = "2.2.1" @@ -2118,4 +2269,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "fab479eaed289416f2650819e2e8a46bc970c8a1c53c256f448b1d124179272a" +content-hash = "c0543d0df16fa30be68276e18005fea320d5ef9d4086588101a51323ab26edae" diff --git a/airbyte-ci/connectors/live-tests/pyproject.toml b/airbyte-ci/connectors/live-tests/pyproject.toml index 34cc358cc5d3..8a37737fa3e7 100644 --- a/airbyte-ci/connectors/live-tests/pyproject.toml +++ b/airbyte-ci/connectors/live-tests/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "live-tests" -version = "0.3.0" +version = "0.4.0" description = "Contains utilities for testing connectors against live data." authors = ["Airbyte "] license = "MIT" @@ -25,6 +25,8 @@ pydash = "~=7.0.7" docker = ">=6,<7" asyncclick = "^8.1.7.1" connection-retriever = {git = "https://github.com/airbytehq/airbyte-platform-internal.git", rev = "augustin/03-06-create_connection-retriever_tool", subdirectory = "tools/connection-retriever"} +duckdb = "^0.10.0" +pandas = "^2.2.1" [tool.poetry.scripts] live-tests = "live_tests.cli:live_tests" diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/__init__.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/__init__.py index 9a1b7d627ed3..f65344dad82c 100644 --- a/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/__init__.py +++ b/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/__init__.py @@ -2,5 +2,6 @@ from .base_backend import BaseBackend from .file_backend import FileBackend +from .duckdb_backend import DuckDbBackend -__all__ = ["BaseBackend", "FileBackend"] +__all__ = ["BaseBackend", "FileBackend", "DuckDbBackend"] diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/duckdb_backend.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/duckdb_backend.py new file mode 100644 index 000000000000..3f824b5d8d54 --- /dev/null +++ b/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/duckdb_backend.py @@ -0,0 +1,21 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
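+# Note: this backend builds on FileBackend: write() first persists the incoming +# AirbyteMessages as .jsonl files on disk, then loads each file that exists into +# a DuckDB database (duck.db), creating one table per .jsonl file stem.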
+ + +from typing import Iterable + +import duckdb +from airbyte_protocol.models import AirbyteMessage # type: ignore +from live_tests.commons.backends.file_backend import FileBackend + + +class DuckDbBackend(FileBackend): + DUCK_DB_FILE_NAME = "duck.db" + + def write(self, airbyte_messages: Iterable[AirbyteMessage]) -> None: + # Use the FileBackend to write the messages to disk as jsonl files + super().write(airbyte_messages) + duck_db_conn = duckdb.connect(f"{self._output_directory}/{self.DUCK_DB_FILE_NAME}") + for jsonl_file in self.jsonl_files: + if jsonl_file.exists(): + duck_db_conn.sql(f"CREATE TABLE {jsonl_file.stem} AS SELECT * FROM read_json_auto('{jsonl_file}')") + duck_db_conn.close() diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/file_backend.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/file_backend.py index 948073332705..7c7ec64bfdda 100644 --- a/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/file_backend.py +++ b/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/file_backend.py @@ -33,6 +33,51 @@ class FileBackend(BaseBackend): def __init__(self, output_directory: Path): self._output_directory = output_directory + @property + def jsonl_specs_path(self) -> Path: + return (self._output_directory / self.RELATIVE_SPECS_PATH).resolve() + + @property + def jsonl_catalogs_path(self) -> Path: + return (self._output_directory / self.RELATIVE_CATALOGS_PATH).resolve() + + @property + def jsonl_connection_status_path(self) -> Path: + return (self._output_directory / self.RELATIVE_CONNECTION_STATUS_PATH).resolve() + + @property + def jsonl_records_path(self) -> Path: + return (self._output_directory / self.RELATIVE_RECORDS_PATH).resolve() + + @property + def jsonl_states_path(self) -> Path: + return (self._output_directory / self.RELATIVE_STATES_PATH).resolve() + + @property + def jsonl_traces_path(self) -> Path: + return (self._output_directory / self.RELATIVE_TRACES_PATH).resolve() + + @property + def jsonl_logs_path(self) -> Path: + return (self._output_directory / self.RELATIVE_LOGS_PATH).resolve() + + @property + def jsonl_controls_path(self) -> Path: + return (self._output_directory / self.RELATIVE_CONTROLS_PATH).resolve() + + @property + def jsonl_files(self) -> Iterable[Path]: + return [ + self.jsonl_catalogs_path, + self.jsonl_connection_status_path, + self.jsonl_records_path, + self.jsonl_specs_path, + self.jsonl_states_path, + self.jsonl_traces_path, + self.jsonl_logs_path, + self.jsonl_controls_path, + ] + def write(self, airbyte_messages: Iterable[AirbyteMessage]) -> None: """ Write AirbyteMessages to the appropriate file. 
@@ -74,7 +119,7 @@ def _get_filepath_and_message(self, message: AirbyteMessage) -> Tuple[str, str]: # TODO: once we have a comparator and/or database backend implemented we can remove this for key_path in self.RECORD_PATHS_TO_POP: pydash.objects.unset(record, key_path) - return f"{message.record.stream}_{self.RELATIVE_RECORDS_PATH}", json.dumps(record) + return self.RELATIVE_RECORDS_PATH, json.dumps(record) elif message.type == AirbyteMessageType.SPEC: return self.RELATIVE_SPECS_PATH, message.spec.json() diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/models.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/models.py index 34c8d21a81e2..a4c51f2d85f0 100644 --- a/airbyte-ci/connectors/live-tests/src/live_tests/commons/models.py +++ b/airbyte-ci/connectors/live-tests/src/live_tests/commons/models.py @@ -11,7 +11,7 @@ import dagger from airbyte_protocol.models import AirbyteMessage # type: ignore from airbyte_protocol.models import ConfiguredAirbyteCatalog # type: ignore -from live_tests.commons.backends import FileBackend +from live_tests.commons.backends import DuckDbBackend from pydantic import ValidationError @@ -234,7 +234,7 @@ async def save_to_disk(self, output_dir: Path) -> None: # Make backends use customizable airbyte_messages_dir = final_dir / "airbyte_messages" airbyte_messages_dir.mkdir(parents=True, exist_ok=True) - FileBackend(airbyte_messages_dir).write(self.execution_result.airbyte_messages) + DuckDbBackend(final_dir / airbyte_messages_dir).write(self.execution_result.airbyte_messages) self.saved_path = final_dir diff --git a/airbyte-ci/connectors/live-tests/tests/backends/test_file_backend.py b/airbyte-ci/connectors/live-tests/tests/backends/test_file_backend.py index fda723c5b6a5..1cc4526b99e6 100644 --- a/airbyte-ci/connectors/live-tests/tests/backends/test_file_backend.py +++ b/airbyte-ci/connectors/live-tests/tests/backends/test_file_backend.py @@ -46,7 +46,7 @@ '{"status": "SUCCEEDED", "message": null}\n', ), ( - "test_stream_records.jsonl", + "records.jsonl", '{"namespace": null, "stream": "test_stream", "data": {}, "meta": null}\n', ), ( From 46bf510df2decc12b4c7b6a4f3bc008d38ac0efd Mon Sep 17 00:00:00 2001 From: Anatolii Yatsuk <35109939+tolik0@users.noreply.github.com> Date: Mon, 11 Mar 2024 15:58:10 +0200 Subject: [PATCH 155/172] =?UTF-8?q?=F0=9F=93=9D=20Source=20Amazon=20Ads:?= =?UTF-8?q?=20Update=20tags=20in=20metadata.yaml=20(#35951)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- airbyte-integrations/connectors/source-amazon-ads/metadata.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airbyte-integrations/connectors/source-amazon-ads/metadata.yaml b/airbyte-integrations/connectors/source-amazon-ads/metadata.yaml index 1325ac9d18da..16c1738b0e99 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/metadata.yaml +++ b/airbyte-integrations/connectors/source-amazon-ads/metadata.yaml @@ -52,5 +52,5 @@ data: supportLevel: certified tags: - language:python - - cdk:python + - cdk:low-code metadataSpecVersion: "1.0" From a61f18438081d54e17847013535a399ef2f90d6a Mon Sep 17 00:00:00 2001 From: Ryan Waskewich <156025126+rwask@users.noreply.github.com> Date: Mon, 11 Mar 2024 10:14:01 -0400 Subject: [PATCH 156/172] =?UTF-8?q?Update=20on-kubernetes-via-helm.md=20-?= =?UTF-8?q?=200.52.0=20helm=20charts=20upgrade=20informa=E2=80=A6=20(#3593?= =?UTF-8?q?5)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- 
.../on-kubernetes-via-helm.md | 198 ++++++++++++++---- 1 file changed, 161 insertions(+), 37 deletions(-) diff --git a/docs/deploying-airbyte/on-kubernetes-via-helm.md b/docs/deploying-airbyte/on-kubernetes-via-helm.md index bf6c72e0dd59..762ed86d96da 100644 --- a/docs/deploying-airbyte/on-kubernetes-via-helm.md +++ b/docs/deploying-airbyte/on-kubernetes-via-helm.md @@ -130,43 +130,6 @@ After specifying your own configuration, run the following command: ```text helm install --values path/to/values.yaml %release_name% airbyte/airbyte ``` - -## Migrate from old charts to new ones - -Starting from `0.39.37-alpha` we've revisited helm charts structure and separated all components of airbyte into their own independent charts, thus by allowing our developers to test single component without deploying airbyte as a whole and by upgrading single component at a time. - -In most cases upgrade from older monolith chart to a new one should go without any issue, but if you've configured custom logging or specified custom configuration of DB or Logging then follow the instructions listed below - -### Minio migration - -Since the latest release of bitnami/minio chart, they've changed the way of setting up the credentials for accessing the minio. (written mid-2022) - -Going forward in new version you need to specify the following values in values yaml for user/password instead old one - -Before: - -```text -minio: - rootUser: airbyte-user - rootPassword: airbyte-password-123 -``` - -After: - -```text -minio: - auth: - rootUser: minio - rootPassword: minio123 - -``` - -Before upgrading the chart update values.yaml as stated above and then run: - -- Get the old rootPassword by running `export ROOT_PASSWORD=$(kubectl get secret --namespace "default" %release_name%-minio -o jsonpath="{.data.root-password}" | base64 -d)` -- Perform upgrade of chart by running `helm upgrade %release_name% airbyte/airbyte --set auth.rootPassword=$ROOT_PASSWORD` - - If you get an error about setting the auth.rootPassword, then you forgot to update the `values.yaml` file - ### External Logs with S3 ::info @@ -437,3 +400,164 @@ Upgrade the chart by running: ```shell helm upgrade --install %RELEASE_NAME% airbyte/airbyte -n --values /path/to/values.yaml --version 0.50.13 ``` + +## Migrate from old chart to Airbyte v0.52.0 and latest chart version + +To assist with upgrading to Airbyte App version 0.52.0 and higher with the latest Helm Charts, we've simplified and consolidated several configuration options. Here's a breakdown of the changes: + +**Application.yaml Updates**: +- We've streamlined the configuration for logs and state storage. +- Instead of separate configurations for logs and state, we now have a unified storage configuration. +- The proposed changes involve specifying the storage type and bucket names directly, along with credentials where necessary. + +**Helm Configuration Updates:** +- The global configuration now includes a simplified storage section specifying the type and bucket names for logs, state, and workload output. +- Credentials for MinIO are now set directly in the Helm values, ensuring smoother integration. +- Unused configurations have been removed, and configurations have been aligned with the simplified application.yaml. + +**Technical Details and Renaming:** +- We've renamed or consolidated several environment variables for clarity and consistency. +- Unused methods and classes have been removed, ensuring a cleaner codebase. 
+- Some configurations have been split into separate files for better management and compatibility with different storage options. + +**Additional Changes:** +- We've added support for workload output storage explicitly, improving flexibility and clarity in configuration. +- The Helm charts have been updated to reflect these changes, removing or replacing old environment variables for storage configuration. +- These changes aim to simplify configuration management and improve the overall user experience during upgrades. Please review these updates and let us know if you have any questions or need further assistance. + +### **Migration Steps** + +This guide aims to assist customers upgrading to the latest version of the Airbyte Helm charts, specifically those using custom configurations for external logging and databases with AWS (S3) and GCS (Google Cloud Buckets). + +### **For AWS S3 Users** + +#### **Prerequisites** + +- Access to your Kubernetes cluster where Airbyte is deployed. +- Helm and kubectl installed and configured on your machine. +- Existing Airbyte deployment using AWS S3 for storage and AWS Secrets Manager for secret management. + +#### **Migration Steps** + +1. **Creating or Updating Kubernetes Secrets** + + If using AWS access keys, create a Kubernetes secret to store these credentials. If relying on an IAM role from an instance profile, this step can be skipped. + + Apply the following Kubernetes manifest, replacing `<your-aws-access-key-id>` and `<your-aws-secret-access-key>` with your actual AWS credentials: + + ```yaml + apiVersion: v1 + kind: Secret + metadata: + name: airbyte-config-secrets + type: Opaque + stringData: + aws-secret-manager-access-key-id: <your-aws-access-key-id> + aws-secret-manager-secret-access-key: <your-aws-secret-access-key> + ``` + +2. **Update Airbyte Configuration** + + In your `airbyte.yml` configuration file, add the following configuration, adjusting `<aws-region>` to match your AWS region: + + ```yaml + global: + storage: + type: s3 + storageSecretName: airbyte-config-secrets + bucket: + log: airbyte-storage + state: airbyte-storage + workloadOutput: airbyte-storage + s3: + region: <aws-region> + authenticationType: credentials # Use "credentials" or "instanceProfile" + accessKeyIdSecretKey: aws-secret-manager-access-key-id # Omit if using instanceProfile + secretAccessKeySecretKey: aws-secret-manager-secret-access-key # Omit if using instanceProfile + + secretsManager: + type: awsSecretManager + storageSecretName: airbyte-config-secrets + awsSecretManager: + region: <aws-region> + authenticationType: credentials # Use "credentials" or "instanceProfile" + accessKeyIdSecretKey: aws-secret-manager-access-key-id # Omit if using instanceProfile + secretAccessKeySecretKey: aws-secret-manager-secret-access-key # Omit if using instanceProfile + tags: + - key: team + value: deployment + - key: business-unit + value: engineering + ``` + +3. **Remove Deprecated Configuration from `values.yaml`** + + Edit your `values.yaml` or `airbyte-pro-values.yaml` files to remove any deprecated storage and secrets manager environment variables related to S3 and AWS Secrets Manager. Ensure configurations like `state.storage.type: "S3"` and AWS access keys under `server.extraEnv` and `worker.extraEnv` are removed. + +### **For GCS Users** + +#### **Prerequisites** + +- Access to your Kubernetes cluster where Airbyte is deployed. +- Helm and kubectl installed and configured on your machine. +- Existing Airbyte deployment using Google Cloud Storage (GCS) and Google Secret Manager (GSM) for secret management. + +#### **Migration Steps** + +1.
+### **For GCS Users**
+
+#### **Prerequisites**
+
+- Access to your Kubernetes cluster where Airbyte is deployed.
+- Helm and kubectl installed and configured on your machine.
+- Existing Airbyte deployment using Google Cloud Storage (GCS) and Google Secret Manager (GSM) for secret management.
+
+#### **Migration Steps**
+
+1. **Setting Up or Updating Kubernetes Secrets**
+
+   For Google Secret Manager, you may use existing credentials or create new ones. Apply a Kubernetes manifest like the one below, replacing `<your-gcp-credentials-json-blob>` with your GCP credentials JSON blob:
+
+   ```yaml
+   apiVersion: v1
+   kind: Secret
+   metadata:
+     name: gcp-cred-secrets
+   type: Opaque
+   stringData:
+     gcp.json: <your-gcp-credentials-json-blob>
+   ```
+
+   Or use `kubectl` to create the secret directly from a file:
+
+   ```sh
+   kubectl create secret generic gcp-cred-secrets --from-file=gcp.json=<your-credentials-file>.json
+   ```
+
+2. **Update Airbyte Configuration**
+
+   In your `airbyte.yml` configuration file, add the following configuration, adjusting `<your-gcp-project-id>` to match your GCP project ID:
+
+   ```yaml
+   global:
+     storage:
+       type: gcs
+       storageSecretName: gcp-cred-secrets
+       bucket:
+         log: airbyte-storage
+         state: airbyte-storage
+         workloadOutput: airbyte-storage
+       gcs:
+         authenticationType: credentials
+         projectId: <your-gcp-project-id>
+         credentialsPath: /secrets/gcs-log-creds/gcp.json
+
+     secretsManager:
+       type: googleSecretManager
+       storageSecretName: gcp-cred-secrets
+       googleSecretManager:
+         authenticationType: credentials
+         projectId: <your-gcp-project-id>
+         credentialsSecretKey: gcp.json
+   ```
+
+3. **Remove Deprecated Configuration from `values.yaml`**
+
+   Edit your `values.yaml` files to remove any deprecated storage and secrets manager environment variables related to GCS. Ensure configurations like `global.state.storage.type: "GCS"` and GCS credentials paths under `extraEnv` are removed.
+
+This guide ensures that you leverage the latest Helm chart configurations for Airbyte, aligning with best practices for managing storage and secrets in Kubernetes environments for AWS and GCS users.
+
+
+
+
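Before rolling the upgrade for GCS, it is worth confirming that the secret really carries the `gcp.json` key referenced by `credentialsSecretKey`; a minimal check, assuming the release lives in an `airbyte` namespace (a placeholder):

```shell
# The key printed here must match credentialsSecretKey (gcp.json)
kubectl get secret gcp-cred-secrets -n airbyte -o jsonpath='{.data.gcp\.json}' | base64 -d | head -c 80

# Then upgrade the release as usual
helm upgrade --install airbyte airbyte/airbyte -n airbyte --values ./values.yaml
```

From 3791a43a5735cec39600ba9aeb9daca0b26d3c7a Mon Sep 17 00:00:00 2001
From: Baz
Date: Mon, 11 Mar 2024 17:26:04 +0200
Subject: [PATCH 157/172] =?UTF-8?q?=F0=9F=90=9B=20Source=20Shopify:=20Fixe?=
 =?UTF-8?q?d=20bug=20when=20`start=20date`=20was=20not=20provided,=20but?=
 =?UTF-8?q?=20the=20stream=20was=20using=20it=20(#35952)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../connectors/source-shopify/metadata.yaml   |  2 +-
 .../connectors/source-shopify/pyproject.toml  |  2 +-
 .../source_shopify/config_migrations.py       | 93 +++++++++++++++++++
 .../source-shopify/source_shopify/run.py      |  6 +-
 .../source_shopify/streams/base_streams.py    | 27 +++---
 .../unit_tests/graphql_bulk/test_job.py       | 34 +++++++
 .../test_migrations/test_config.json          |  8 ++
 .../test_migrations/test_config_migrations.py | 70 ++++++++++++++
 .../test_migrations/test_new_config.json      |  9 ++
 docs/integrations/sources/shopify.md          |  1 +
 10 files changed, 238 insertions(+), 14 deletions(-)
 create mode 100644 airbyte-integrations/connectors/source-shopify/source_shopify/config_migrations.py
 create mode 100644 airbyte-integrations/connectors/source-shopify/unit_tests/test_migrations/test_config.json
 create mode 100644 airbyte-integrations/connectors/source-shopify/unit_tests/test_migrations/test_config_migrations.py
 create mode 100644 airbyte-integrations/connectors/source-shopify/unit_tests/test_migrations/test_new_config.json

diff --git a/airbyte-integrations/connectors/source-shopify/metadata.yaml b/airbyte-integrations/connectors/source-shopify/metadata.yaml
index 195c39907928..9d5d4352d7f7 100644
--- a/airbyte-integrations/connectors/source-shopify/metadata.yaml
+++ b/airbyte-integrations/connectors/source-shopify/metadata.yaml
@@ -11,7 +11,7 @@ data:
   connectorSubtype: api
   connectorType: source
   definitionId: 9da77001-af33-4bcd-be46-6252bf9342b9
-  dockerImageTag: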
2.0.0
+  dockerImageTag: 2.0.1
   dockerRepository: airbyte/source-shopify
   documentationUrl: https://docs.airbyte.com/integrations/sources/shopify
   githubIssueLabel: source-shopify
diff --git a/airbyte-integrations/connectors/source-shopify/pyproject.toml b/airbyte-integrations/connectors/source-shopify/pyproject.toml
index e26cb727a7e7..a5d0f39d7b8f 100644
--- a/airbyte-integrations/connectors/source-shopify/pyproject.toml
+++ b/airbyte-integrations/connectors/source-shopify/pyproject.toml
@@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",]
 build-backend = "poetry.core.masonry.api"
 
 [tool.poetry]
-version = "2.0.0"
+version = "2.0.1"
 name = "source-shopify"
 description = "Source CDK implementation for Shopify."
 authors = [ "Airbyte ",]
diff --git a/airbyte-integrations/connectors/source-shopify/source_shopify/config_migrations.py b/airbyte-integrations/connectors/source-shopify/source_shopify/config_migrations.py
new file mode 100644
index 000000000000..f1bd53270d70
--- /dev/null
+++ b/airbyte-integrations/connectors/source-shopify/source_shopify/config_migrations.py
@@ -0,0 +1,93 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
+
+
+from typing import Any, List, Mapping
+
+from airbyte_cdk.config_observation import create_connector_config_control_message
+from airbyte_cdk.entrypoint import AirbyteEntrypoint
+from airbyte_cdk.sources import Source
+from airbyte_cdk.sources.message import InMemoryMessageRepository, MessageRepository
+
+
+class MigrateConfig:
+    """
+    This class is responsible for migrating the config at runtime,
+    while providing backward compatibility when falling back to the previous source version.
+
+    Specifically, starting from `2.0.1`, the `start_date` property must not be missing, None, or the string `"None"`:
+        > "start_date": "2020-01-01"
+    whereas in `2.0.0`, some older configs omitted `start_date` entirely, since it was not required:
+        > {...}
+    """
+
+    message_repository: MessageRepository = InMemoryMessageRepository()
+    migrate_key: str = "start_date"
+    # default spec value for the `start_date` is `2020-01-01`
+    default_start_date_value: str = "2020-01-01"
+
+    @classmethod
+    def should_migrate(cls, config: Mapping[str, Any]) -> bool:
+        """
+        This method determines whether the config should be migrated to have the new structure with `start_date`,
+        based on the source spec.
+
+        Returns:
+            > True, if the transformation is necessary
+            > False, otherwise.
+        """
+        # If the config was already migrated, there is no need to do this again.
+        # But if the customer corrected the old config while on a previous version
+        # and then switched back to the new version, we should migrate the modified old config again.
+        none_values: List[str] = [None, "None"]
+        key_not_present_in_config = cls.migrate_key not in config
+        key_present_in_config_but_invalid = cls.migrate_key in config and config.get(cls.migrate_key) in none_values
+
+        if key_not_present_in_config:
+            return True
+        elif key_present_in_config_but_invalid:
+            return True
+        else:
+            return False
+
+    @classmethod
+    def modify_config(cls, config: Mapping[str, Any], source: Source = None) -> Mapping[str, Any]:
+        config[cls.migrate_key] = cls.default_start_date_value
+        return config
+
+    @classmethod
+    def modify_and_save(cls, config_path: str, source: Source, config: Mapping[str, Any]) -> Mapping[str, Any]:
+        # modify the config
+        migrated_config = cls.modify_config(config, source)
+        # save the config
+        source.write_config(migrated_config, config_path)
+        # return modified config
+        return migrated_config
+
+    @classmethod
+    def emit_control_message(cls, migrated_config: Mapping[str, Any]) -> None:
+        # add the Airbyte Control Message to message repo
+        cls.message_repository.emit_message(create_connector_config_control_message(migrated_config))
+        # emit the Airbyte Control Message from message queue to stdout
+        for message in cls.message_repository._message_queue:
+            print(message.json(exclude_unset=True))
+
+    @classmethod
+    def migrate(cls, args: List[str], source: Source) -> None:
+        """
+        This method checks the input args, determines whether the config should be migrated,
+        transforms it if necessary, and emits the CONTROL message.
+        """
+        # get config path
+        config_path = AirbyteEntrypoint(source).extract_config(args)
+        # proceed only if `--config` arg is provided
+        if config_path:
+            # read the existing config
+            config = source.read_config(config_path)
+            # migration check
+            if cls.should_migrate(config):
+                cls.emit_control_message(
+                    cls.modify_and_save(config_path, source, config),
+                )
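A minimal sketch of how `should_migrate` classifies configs, following the logic of the class above:

```python
from source_shopify.config_migrations import MigrateConfig

# `start_date` missing entirely -> migration is required
assert MigrateConfig.should_migrate({"shop": "airbyte-integration-test"}) is True

# `start_date` present but holding None / the string "None" -> migration is required
assert MigrateConfig.should_migrate({"start_date": None}) is True
assert MigrateConfig.should_migrate({"start_date": "None"}) is True

# a valid `start_date` -> config is already in the new shape, nothing to do
assert MigrateConfig.should_migrate({"start_date": "2021-06-01"}) is False
```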
diff --git a/airbyte-integrations/connectors/source-shopify/source_shopify/run.py b/airbyte-integrations/connectors/source-shopify/source_shopify/run.py
index 9c13e936ca71..c20aff249a57 100644
--- a/airbyte-integrations/connectors/source-shopify/source_shopify/run.py
+++ b/airbyte-integrations/connectors/source-shopify/source_shopify/run.py
@@ -6,10 +6,14 @@
 import sys
 
 from airbyte_cdk.entrypoint import launch
+from source_shopify.config_migrations import MigrateConfig
 
 from .source import SourceShopify
 
 
-def run():
+def run() -> None:
     source = SourceShopify()
+    # migrate config at runtime
+    MigrateConfig.migrate(sys.argv[1:], source)
+    # run the connector
     launch(source, sys.argv[1:])
diff --git a/airbyte-integrations/connectors/source-shopify/source_shopify/streams/base_streams.py b/airbyte-integrations/connectors/source-shopify/source_shopify/streams/base_streams.py
index 6f423a54ec31..da9eae0036dc 100644
--- a/airbyte-integrations/connectors/source-shopify/source_shopify/streams/base_streams.py
+++ b/airbyte-integrations/connectors/source-shopify/source_shopify/streams/base_streams.py
@@ -89,7 +89,9 @@ def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapp
             self.logger.warning(f"Unexpected error in `parse_response`: {e}, the actual response data: {response.text}")
             yield {}
 
-    def produce_records(self, records: Optional[Union[Iterable[Mapping[str, Any]], Mapping[str, Any]]] = None) -> Mapping[str, Any]:
+    def produce_records(
+        self, records: Optional[Union[Iterable[Mapping[str, Any]], Mapping[str, Any]]] = None
+    ) -> Iterable[Mapping[str, Any]]:
         # transform method was implemented according to issue 4841
         # Shopify API returns price fields as a string and it should be
converted to number # this solution designed to convert string into number, but in future can be modified for general purpose @@ -139,7 +141,7 @@ def get_json_schema(self) -> None: """ return {} - def produce_deleted_records_from_events(self, delete_events: Iterable[Mapping[str, Any]] = []) -> Mapping[str, Any]: + def produce_deleted_records_from_events(self, delete_events: Iterable[Mapping[str, Any]] = []) -> Iterable[Mapping[str, Any]]: for event in delete_events: yield { "id": event["subject_id"], @@ -218,7 +220,7 @@ def filter_records_newer_than_state( ) -> Iterable: # Getting records >= state if stream_state: - state_value = stream_state.get(self.cursor_field) + state_value = stream_state.get(self.cursor_field, self.default_state_comparison_value) for record in records_slice: if self.cursor_field in record: record_value = record.get(self.cursor_field, self.default_state_comparison_value) @@ -669,16 +671,19 @@ def get_updated_state( updated_state[self.parent_stream.name] = {self.parent_stream.cursor_field: latest_record.get(self.parent_stream.cursor_field)} return updated_state + def get_stream_state_value(self, stream_state: Optional[Mapping[str, Any]]) -> str: + if self.parent_stream_class: + # get parent stream state from the stream_state object. + parent_state = stream_state.get(self.parent_stream.name, {}) + if parent_state: + return parent_state.get(self.parent_stream.cursor_field, self.default_state_comparison_value) + else: + # get the stream state, if no `parent_stream_class` was assigned. + return stream_state.get(self.cursor_field, self.default_state_comparison_value) + def get_state_value(self, stream_state: Mapping[str, Any] = None) -> Optional[Union[str, int]]: if stream_state: - if self.parent_stream_class: - # get parent stream state from the stream_state object. - parent_state = stream_state.get(self.parent_stream.name, {}) - if parent_state: - return parent_state.get(self.parent_stream.cursor_field, self.default_state_comparison_value) - else: - # get the stream state, if no `parent_stream_class` was assigned. - return stream_state.get(self.cursor_field, self.default_state_comparison_value) + return self.get_stream_state_value(stream_state) else: # for majority of cases we fallback to start_date, otherwise. 
            return self.config.get("start_date")
diff --git a/airbyte-integrations/connectors/source-shopify/unit_tests/graphql_bulk/test_job.py b/airbyte-integrations/connectors/source-shopify/unit_tests/graphql_bulk/test_job.py
index fa5d50cd0bee..f0e80e1c5944 100644
--- a/airbyte-integrations/connectors/source-shopify/unit_tests/graphql_bulk/test_job.py
+++ b/airbyte-integrations/connectors/source-shopify/unit_tests/graphql_bulk/test_job.py
@@ -7,6 +7,7 @@
 import requests
 from source_shopify.shopify_graphql.bulk.exceptions import ShopifyBulkExceptions
 from source_shopify.shopify_graphql.bulk.job import ShopifyBulkStatus
+from source_shopify.streams.base_streams import IncrementalShopifyGraphQlBulkStream
 from source_shopify.streams.streams import (
     Collections,
     CustomerAddress,
@@ -263,3 +264,36 @@ def test_bulk_stream_parse_response(
         assert test_records == [expected_result]
     elif isinstance(expected_result, list):
         assert test_records == expected_result
+
+
+@pytest.mark.parametrize(
+    "stream, stream_state, with_start_date, expected",
+    [
+        (DiscountCodes, {}, True, "updated_at:>='2023-01-01T00:00:00+00:00'"),
+        # here the config migration is applied and the value should be "2020-01-01"
+        (DiscountCodes, {}, False, "updated_at:>='2020-01-01T00:00:00+00:00'"),
+        (DiscountCodes, {"updated_at": "2022-01-01T00:00:00Z"}, True, "updated_at:>='2022-01-01T00:00:00+00:00'"),
+        (DiscountCodes, {"updated_at": "2021-01-01T00:00:00Z"}, False, "updated_at:>='2021-01-01T00:00:00+00:00'"),
+    ],
+    ids=[
+        "No State, but Start Date",
+        "No State, No Start Date - should fallback to the default `2020-01-01`",
+        "With State, Start Date",
+        "With State, No Start Date",
+    ],
+)
+def test_stream_slices(
+    auth_config,
+    stream,
+    stream_state,
+    with_start_date,
+    expected,
+) -> None:
+    # simulating `None` for `start_date` and `config migration`
+    if not with_start_date:
+        auth_config["start_date"] = "2020-01-01"
+
+    stream = stream(auth_config)
+    test_result = list(stream.stream_slices(stream_state=stream_state))
+    test_query_from_slice = test_result[0].get("query")
+    assert expected in test_query_from_slice
diff --git a/airbyte-integrations/connectors/source-shopify/unit_tests/test_migrations/test_config.json b/airbyte-integrations/connectors/source-shopify/unit_tests/test_migrations/test_config.json
new file mode 100644
index 000000000000..327bb81c3b21
--- /dev/null
+++ b/airbyte-integrations/connectors/source-shopify/unit_tests/test_migrations/test_config.json
@@ -0,0 +1,8 @@
+{
+  "shop": "airbyte-integration-test",
+  "credentials": {
+    "auth_method": "api_password",
+    "api_password": "__api_password__"
+  },
+  "bulk_window_in_days": 1000
+}
diff --git a/airbyte-integrations/connectors/source-shopify/unit_tests/test_migrations/test_config_migrations.py b/airbyte-integrations/connectors/source-shopify/unit_tests/test_migrations/test_config_migrations.py
new file mode 100644
index 000000000000..de54e242294a
--- /dev/null
+++ b/airbyte-integrations/connectors/source-shopify/unit_tests/test_migrations/test_config_migrations.py
@@ -0,0 +1,70 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
+
+
+import json
+from typing import Any, Mapping
+
+from airbyte_cdk.models import OrchestratorType, Type
+from airbyte_cdk.sources import Source
+from source_shopify.config_migrations import MigrateConfig
+from source_shopify.source import SourceShopify
+
+# BASE ARGS
+CMD = "check"
+TEST_CONFIG_PATH = "unit_tests/test_migrations/test_config.json"
+NEW_TEST_CONFIG_PATH = "unit_tests/test_migrations/test_new_config.json"
+SOURCE_INPUT_ARGS = [CMD, "--config", TEST_CONFIG_PATH]
+SOURCE: Source = SourceShopify()
+
+
+# HELPERS
+def load_config(config_path: str = TEST_CONFIG_PATH) -> Mapping[str, Any]:
+    with open(config_path, "r") as config:
+        return json.load(config)
+
+
+def revert_migration(config_path: str = TEST_CONFIG_PATH) -> None:
+    with open(config_path, "r") as test_config:
+        config = json.load(test_config)
+        config.pop("start_date")
+        with open(config_path, "w") as updated_config:
+            config = json.dumps(config)
+            updated_config.write(config)
+
+
+def test_migrate_config() -> None:
+    migration_instance = MigrateConfig()
+    # original_config = load_config()
+    # migrate the test_config
+    migration_instance.migrate(SOURCE_INPUT_ARGS, SOURCE)
+    # load the updated config
+    test_migrated_config = load_config()
+    # check migrated property
+    assert "start_date" in test_migrated_config
+    # check the data type
+    assert isinstance(test_migrated_config["start_date"], str)
+    # check the migration should be skipped, once already done
+    assert not migration_instance.should_migrate(test_migrated_config)
+    # test CONTROL MESSAGE was emitted
+    control_msg = migration_instance.message_repository._message_queue[0]
+    assert control_msg.type == Type.CONTROL
+    assert control_msg.control.type == OrchestratorType.CONNECTOR_CONFIG
+    # check the migrated values
+    assert control_msg.control.connectorConfig.config["start_date"] == "2020-01-01"
+    # revert the test_config to the starting point
+    revert_migration()
+
+
+def test_config_is_reverted():
+    # check the test_config state; it has to be the same as before the tests
+    test_config = load_config()
+    # check the config no longer has the migrated property
+    assert "start_date" not in test_config
+
+
+def test_should_not_migrate_new_config():
+    new_config = load_config(NEW_TEST_CONFIG_PATH)
+    migration_instance = MigrateConfig()
+    assert not migration_instance.should_migrate(new_config)
diff --git a/airbyte-integrations/connectors/source-shopify/unit_tests/test_migrations/test_new_config.json b/airbyte-integrations/connectors/source-shopify/unit_tests/test_migrations/test_new_config.json
new file mode 100644
index 000000000000..1a68f9d86857
--- /dev/null
+++ b/airbyte-integrations/connectors/source-shopify/unit_tests/test_migrations/test_new_config.json
@@ -0,0 +1,9 @@
+{
+  "start_date": "2020-01-01",
+  "shop": "airbyte-integration-test",
+  "credentials": {
+    "auth_method": "api_password",
+    "api_password": "__api_password__"
+  },
+  "bulk_window_in_days": 1000
+}
diff --git a/docs/integrations/sources/shopify.md b/docs/integrations/sources/shopify.md
index 86b9ebdc35c3..546bf64f7aca 100644
--- a/docs/integrations/sources/shopify.md
+++ b/docs/integrations/sources/shopify.md
@@ -207,6 +207,7 @@ For all `Shopify GraphQL BULK` api requests these limitations are applied: https
 
 | Version | Date       | Pull Request                                              | Subject                                                                                                                           |
 | :------ | :--------- | :-------------------------------------------------------- | :--------------------------------------------------------------------------------------------------------------------------------- |
+| 2.0.1   | 2024-03-11 |
[35952](https://github.com/airbytehq/airbyte/pull/35952) | Fixed the issue when `start date` is missing but the `stream` required it | | 2.0.0 | 2024-02-12 | [32345](https://github.com/airbytehq/airbyte/pull/32345) | Fixed the issue with `state` causing the `substreams` to skip the records, made `metafield_*`: `collections, customers, draft_orders, locations, orders, product_images, product_variants, products`, and `fulfillment_orders, collections, discount_codes, inventory_levels, inventory_items, transactions_graphql, customer_address` streams to use `BULK Operations` instead of `REST`| | 1.1.8 | 2024-02-12 | [35166](https://github.com/airbytehq/airbyte/pull/35166) | Manage dependencies with Poetry. | | 1.1.7 | 2024-01-19 | [33804](https://github.com/airbytehq/airbyte/pull/33804) | Updated documentation with list of all supported streams | From 979b860062d78346dff3341b59a77c10015a7aaa Mon Sep 17 00:00:00 2001 From: Catherine Noll Date: Mon, 11 Mar 2024 12:58:30 -0400 Subject: [PATCH 158/172] Source S3: pin `transformers` transitive dependency (#35955) --- .../connectors/source-s3/metadata.yaml | 2 +- .../connectors/source-s3/poetry.lock | 523 ++++++++++++++++-- .../connectors/source-s3/pyproject.toml | 4 +- docs/integrations/sources/s3.md | 1 + 4 files changed, 472 insertions(+), 58 deletions(-) diff --git a/airbyte-integrations/connectors/source-s3/metadata.yaml b/airbyte-integrations/connectors/source-s3/metadata.yaml index a9378beb99b4..db3ff6ba959e 100644 --- a/airbyte-integrations/connectors/source-s3/metadata.yaml +++ b/airbyte-integrations/connectors/source-s3/metadata.yaml @@ -10,7 +10,7 @@ data: connectorSubtype: file connectorType: source definitionId: 69589781-7828-43c5-9f63-8925b1c1ccc2 - dockerImageTag: 4.5.9 + dockerImageTag: 4.5.10 dockerRepository: airbyte/source-s3 documentationUrl: https://docs.airbyte.com/integrations/sources/s3 githubIssueLabel: source-s3 diff --git a/airbyte-integrations/connectors/source-s3/poetry.lock b/airbyte-integrations/connectors/source-s3/poetry.lock index d2c5a06995b7..d9954223b8a2 100644 --- a/airbyte-integrations/connectors/source-s3/poetry.lock +++ b/airbyte-integrations/connectors/source-s3/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "airbyte-cdk" -version = "0.68.2" +version = "0.69.0" description = "A framework for writing Airbyte Connectors." 
optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.68.2.tar.gz", hash = "sha256:04c7557e72a2b2da6ffc8abc5196f16f2c5764738284931856c9210dd2d11998"}, - {file = "airbyte_cdk-0.68.2-py3-none-any.whl", hash = "sha256:bad36c9d9a6755fe5ec2d130fa779bdf7a9248abbc8736fa4da1f35d4a97cc8e"}, + {file = "airbyte-cdk-0.69.0.tar.gz", hash = "sha256:7b70961c74041e5463d7855d6cd0f83faa09cc0b1dcb1244a620c57ac8ea0939"}, + {file = "airbyte_cdk-0.69.0-py3-none-any.whl", hash = "sha256:ff11dd0a47efeb008455b58d5cda0b9966d706f251f0dbe793b2d39de5011ba8"}, ] [package.dependencies] @@ -140,17 +140,17 @@ lxml = ["lxml"] [[package]] name = "boto3" -version = "1.34.56" +version = "1.34.59" description = "The AWS SDK for Python" optional = false python-versions = ">= 3.8" files = [ - {file = "boto3-1.34.56-py3-none-any.whl", hash = "sha256:300888f0c1b6f32f27f85a9aa876f50f46514ec619647af7e4d20db74d339714"}, - {file = "boto3-1.34.56.tar.gz", hash = "sha256:b26928f9a21cf3649cea20a59061340f3294c6e7785ceb6e1a953eb8010dc3ba"}, + {file = "boto3-1.34.59-py3-none-any.whl", hash = "sha256:004e67b078be58d34469406f93cc8b95bc43becef4bbe44523a0b8e51f84c668"}, + {file = "boto3-1.34.59.tar.gz", hash = "sha256:162edf182e53c198137a28432a626dba103f787a8f5000ed4758b73ccd203fa0"}, ] [package.dependencies] -botocore = ">=1.34.56,<1.35.0" +botocore = ">=1.34.59,<1.35.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -159,13 +159,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.34.56" +version = "1.34.59" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">= 3.8" files = [ - {file = "botocore-1.34.56-py3-none-any.whl", hash = "sha256:fff66e22a5589c2d58fba57d1d95c334ce771895e831f80365f6cff6453285ec"}, - {file = "botocore-1.34.56.tar.gz", hash = "sha256:bffeb71ab21d47d4ecf947d9bdb2fbd1b0bbd0c27742cea7cf0b77b701c41d9f"}, + {file = "botocore-1.34.59-py3-none-any.whl", hash = "sha256:4bc112dafb1679ab571117593f7656604726a3da0e5ae5bad00ea772fa40e75c"}, + {file = "botocore-1.34.59.tar.gz", hash = "sha256:24edb4d21d7c97dea0c6c4a80d36b3809b1443a30b0bd5e317d6c319dfac823f"}, ] [package.dependencies] @@ -642,6 +642,22 @@ lz4 = ["lz4"] snappy = ["python-snappy"] zstandard = ["zstandard"] +[[package]] +name = "filelock" +version = "3.13.1" +description = "A platform independent file lock." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, + {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] + [[package]] name = "filetype" version = "1.2.0" @@ -653,6 +669,41 @@ files = [ {file = "filetype-1.2.0.tar.gz", hash = "sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb"}, ] +[[package]] +name = "fsspec" +version = "2024.2.0" +description = "File-system specification" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fsspec-2024.2.0-py3-none-any.whl", hash = "sha256:817f969556fa5916bc682e02ca2045f96ff7f586d45110fcb76022063ad2c7d8"}, + {file = "fsspec-2024.2.0.tar.gz", hash = "sha256:b6ad1a679f760dda52b1168c859d01b7b80648ea6f7f7c7f5a8a91dc3f3ecb84"}, +] + +[package.extras] +abfs = ["adlfs"] +adl = ["adlfs"] +arrow = ["pyarrow (>=1)"] +dask = ["dask", "distributed"] +devel = ["pytest", "pytest-cov"] +dropbox = ["dropbox", "dropboxdrivefs", "requests"] +full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] +fuse = ["fusepy"] +gcs = ["gcsfs"] +git = ["pygit2"] +github = ["requests"] +gs = ["gcsfs"] +gui = ["panel"] +hdfs = ["pyarrow (>=1)"] +http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"] +libarchive = ["libarchive-c"] +oci = ["ocifs"] +s3 = ["s3fs"] +sftp = ["paramiko"] +smb = ["smbprotocol"] +ssh = ["paramiko"] +tqdm = ["tqdm"] + [[package]] name = "genson" version = "1.2.2" @@ -663,6 +714,39 @@ files = [ {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, ] +[[package]] +name = "huggingface-hub" +version = "0.21.4" +description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "huggingface_hub-0.21.4-py3-none-any.whl", hash = "sha256:df37c2c37fc6c82163cdd8a67ede261687d80d1e262526d6c0ce73b6b3630a7b"}, + {file = "huggingface_hub-0.21.4.tar.gz", hash = "sha256:e1f4968c93726565a80edf6dc309763c7b546d0cfe79aa221206034d50155531"}, +] + +[package.dependencies] +filelock = "*" +fsspec = ">=2023.5.0" +packaging = ">=20.9" +pyyaml = ">=5.1" +requests = "*" +tqdm = ">=4.42.1" +typing-extensions = ">=3.7.4.3" + +[package.extras] +all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +cli = ["InquirerPy (==0.3.4)"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", 
"pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] +hf-transfer = ["hf-transfer (>=0.1.4)"] +inference = ["aiohttp", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)"] +quality = ["mypy (==1.5.1)", "ruff (>=0.1.3)"] +tensorflow = ["graphviz", "pydot", "tensorflow"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] +torch = ["safetensors", "torch"] +typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] + [[package]] name = "idna" version = "3.6" @@ -676,22 +760,22 @@ files = [ [[package]] name = "importlib-metadata" -version = "7.0.1" +version = "7.0.2" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, - {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, + {file = "importlib_metadata-7.0.2-py3-none-any.whl", hash = "sha256:f4bc4c0c070c490abf4ce96d715f68e95923320370efb66143df00199bb6c100"}, + {file = "importlib_metadata-7.0.2.tar.gz", hash = "sha256:198f568f3230878cb1b44fbd7975f87906c22336dba2e4a7f05278c281fbd792"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "iniconfig" @@ -1129,13 +1213,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -1401,47 +1485,47 @@ files = [ [[package]] name = "pyarrow" -version = "15.0.0" +version = "15.0.1" description = "Python library for Apache Arrow" optional = false python-versions = ">=3.8" files = [ 
- {file = "pyarrow-15.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:0a524532fd6dd482edaa563b686d754c70417c2f72742a8c990b322d4c03a15d"}, - {file = "pyarrow-15.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:60a6bdb314affa9c2e0d5dddf3d9cbb9ef4a8dddaa68669975287d47ece67642"}, - {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:66958fd1771a4d4b754cd385835e66a3ef6b12611e001d4e5edfcef5f30391e2"}, - {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f500956a49aadd907eaa21d4fff75f73954605eaa41f61cb94fb008cf2e00c6"}, - {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6f87d9c4f09e049c2cade559643424da84c43a35068f2a1c4653dc5b1408a929"}, - {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:85239b9f93278e130d86c0e6bb455dcb66fc3fd891398b9d45ace8799a871a1e"}, - {file = "pyarrow-15.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b8d43e31ca16aa6e12402fcb1e14352d0d809de70edd185c7650fe80e0769e3"}, - {file = "pyarrow-15.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:fa7cd198280dbd0c988df525e50e35b5d16873e2cdae2aaaa6363cdb64e3eec5"}, - {file = "pyarrow-15.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8780b1a29d3c8b21ba6b191305a2a607de2e30dab399776ff0aa09131e266340"}, - {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe0ec198ccc680f6c92723fadcb97b74f07c45ff3fdec9dd765deb04955ccf19"}, - {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036a7209c235588c2f07477fe75c07e6caced9b7b61bb897c8d4e52c4b5f9555"}, - {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2bd8a0e5296797faf9a3294e9fa2dc67aa7f10ae2207920dbebb785c77e9dbe5"}, - {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e8ebed6053dbe76883a822d4e8da36860f479d55a762bd9e70d8494aed87113e"}, - {file = "pyarrow-15.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:17d53a9d1b2b5bd7d5e4cd84d018e2a45bc9baaa68f7e6e3ebed45649900ba99"}, - {file = "pyarrow-15.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9950a9c9df24090d3d558b43b97753b8f5867fb8e521f29876aa021c52fda351"}, - {file = "pyarrow-15.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:003d680b5e422d0204e7287bb3fa775b332b3fce2996aa69e9adea23f5c8f970"}, - {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f75fce89dad10c95f4bf590b765e3ae98bcc5ba9f6ce75adb828a334e26a3d40"}, - {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca9cb0039923bec49b4fe23803807e4ef39576a2bec59c32b11296464623dc2"}, - {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9ed5a78ed29d171d0acc26a305a4b7f83c122d54ff5270810ac23c75813585e4"}, - {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:6eda9e117f0402dfcd3cd6ec9bfee89ac5071c48fc83a84f3075b60efa96747f"}, - {file = "pyarrow-15.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a3a6180c0e8f2727e6f1b1c87c72d3254cac909e609f35f22532e4115461177"}, - {file = "pyarrow-15.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:19a8918045993349b207de72d4576af0191beef03ea655d8bdb13762f0cd6eac"}, - {file = "pyarrow-15.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0ec076b32bacb6666e8813a22e6e5a7ef1314c8069d4ff345efa6246bc38593"}, - {file = 
"pyarrow-15.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5db1769e5d0a77eb92344c7382d6543bea1164cca3704f84aa44e26c67e320fb"}, - {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2617e3bf9df2a00020dd1c1c6dce5cc343d979efe10bc401c0632b0eef6ef5b"}, - {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:d31c1d45060180131caf10f0f698e3a782db333a422038bf7fe01dace18b3a31"}, - {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:c8c287d1d479de8269398b34282e206844abb3208224dbdd7166d580804674b7"}, - {file = "pyarrow-15.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:07eb7f07dc9ecbb8dace0f58f009d3a29ee58682fcdc91337dfeb51ea618a75b"}, - {file = "pyarrow-15.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:47af7036f64fce990bb8a5948c04722e4e3ea3e13b1007ef52dfe0aa8f23cf7f"}, - {file = "pyarrow-15.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93768ccfff85cf044c418bfeeafce9a8bb0cee091bd8fd19011aff91e58de540"}, - {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6ee87fd6892700960d90abb7b17a72a5abb3b64ee0fe8db6c782bcc2d0dc0b4"}, - {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:001fca027738c5f6be0b7a3159cc7ba16a5c52486db18160909a0831b063c4e4"}, - {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:d1c48648f64aec09accf44140dccb92f4f94394b8d79976c426a5b79b11d4fa7"}, - {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:972a0141be402bb18e3201448c8ae62958c9c7923dfaa3b3d4530c835ac81aed"}, - {file = "pyarrow-15.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:f01fc5cf49081426429127aa2d427d9d98e1cb94a32cb961d583a70b7c4504e6"}, - {file = "pyarrow-15.0.0.tar.gz", hash = "sha256:876858f549d540898f927eba4ef77cd549ad8d24baa3207cf1b72e5788b50e83"}, + {file = "pyarrow-15.0.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:c2ddb3be5ea938c329a84171694fc230b241ce1b6b0ff1a0280509af51c375fa"}, + {file = "pyarrow-15.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7543ea88a0ff72f8e6baaf9bfdbec2c62aeabdbede9e4a571c71cc3bc43b6302"}, + {file = "pyarrow-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1519e218a6941fc074e4501088d891afcb2adf77c236e03c34babcf3d6a0d1c7"}, + {file = "pyarrow-15.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28cafa86e1944761970d3b3fc0411b14ff9b5c2b73cd22aaf470d7a3976335f5"}, + {file = "pyarrow-15.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:be5c3d463e33d03eab496e1af7916b1d44001c08f0f458ad27dc16093a020638"}, + {file = "pyarrow-15.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:47b1eda15d3aa3f49a07b1808648e1397e5dc6a80a30bf87faa8e2d02dad7ac3"}, + {file = "pyarrow-15.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e524a31be7db22deebbbcf242b189063ab9a7652c62471d296b31bc6e3cae77b"}, + {file = "pyarrow-15.0.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:a476fefe8bdd56122fb0d4881b785413e025858803cc1302d0d788d3522b374d"}, + {file = "pyarrow-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:309e6191be385f2e220586bfdb643f9bb21d7e1bc6dd0a6963dc538e347b2431"}, + {file = "pyarrow-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83bc586903dbeb4365cbc72b602f99f70b96c5882e5dfac5278813c7d624ca3c"}, + {file = "pyarrow-15.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:07e652daac6d8b05280cd2af31c0fb61a4490ec6a53dc01588014d9fa3fdbee9"}, + {file = "pyarrow-15.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:abad2e08652df153a72177ce20c897d083b0c4ebeec051239e2654ddf4d3c996"}, + {file = "pyarrow-15.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cde663352bc83ad75ba7b3206e049ca1a69809223942362a8649e37bd22f9e3b"}, + {file = "pyarrow-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:1b6e237dd7a08482a8b8f3f6512d258d2460f182931832a8c6ef3953203d31e1"}, + {file = "pyarrow-15.0.1-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:7bd167536ee23192760b8c731d39b7cfd37914c27fd4582335ffd08450ff799d"}, + {file = "pyarrow-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7c08bb31eb2984ba5c3747d375bb522e7e536b8b25b149c9cb5e1c49b0ccb736"}, + {file = "pyarrow-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0f9c1d630ed2524bd1ddf28ec92780a7b599fd54704cd653519f7ff5aec177a"}, + {file = "pyarrow-15.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5186048493395220550bca7b524420471aac2d77af831f584ce132680f55c3df"}, + {file = "pyarrow-15.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:31dc30c7ec8958da3a3d9f31d6c3630429b2091ede0ecd0d989fd6bec129f0e4"}, + {file = "pyarrow-15.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:3f111a014fb8ac2297b43a74bf4495cc479a332908f7ee49cb7cbd50714cb0c1"}, + {file = "pyarrow-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:a6d1f7c15d7f68f08490d0cb34611497c74285b8a6bbeab4ef3fc20117310983"}, + {file = "pyarrow-15.0.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:9ad931b996f51c2f978ed517b55cb3c6078272fb4ec579e3da5a8c14873b698d"}, + {file = "pyarrow-15.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:738f6b53ab1c2f66b2bde8a1d77e186aeaab702d849e0dfa1158c9e2c030add3"}, + {file = "pyarrow-15.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c1c3fc16bc74e33bf8f1e5a212938ed8d88e902f372c4dac6b5bad328567d2f"}, + {file = "pyarrow-15.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1fa92512128f6c1b8dde0468c1454dd70f3bff623970e370d52efd4d24fd0be"}, + {file = "pyarrow-15.0.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:b4157f307c202cbbdac147d9b07447a281fa8e63494f7fc85081da351ec6ace9"}, + {file = "pyarrow-15.0.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:b75e7da26f383787f80ad76143b44844ffa28648fcc7099a83df1538c078d2f2"}, + {file = "pyarrow-15.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:3a99eac76ae14096c209850935057b9e8ce97a78397c5cde8724674774f34e5d"}, + {file = "pyarrow-15.0.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:dd532d3177e031e9b2d2df19fd003d0cc0520d1747659fcabbd4d9bb87de508c"}, + {file = "pyarrow-15.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ce8c89848fd37e5313fc2ce601483038ee5566db96ba0808d5883b2e2e55dc53"}, + {file = "pyarrow-15.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:862eac5e5f3b6477f7a92b2f27e560e1f4e5e9edfca9ea9da8a7478bb4abd5ce"}, + {file = "pyarrow-15.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f0ea3a29cd5cb99bf14c1c4533eceaa00ea8fb580950fb5a89a5c771a994a4e"}, + {file = "pyarrow-15.0.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:bb902f780cfd624b2e8fd8501fadab17618fdb548532620ef3d91312aaf0888a"}, + {file = "pyarrow-15.0.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = 
"sha256:4f87757f02735a6bb4ad2e1b98279ac45d53b748d5baf52401516413007c6999"}, + {file = "pyarrow-15.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:efd3816c7fbfcbd406ac0f69873cebb052effd7cdc153ae5836d1b00845845d7"}, + {file = "pyarrow-15.0.1.tar.gz", hash = "sha256:21d812548d39d490e0c6928a7c663f37b96bf764034123d4b4ab4530ecc757a9"}, ] [package.dependencies] @@ -2163,6 +2247,138 @@ botocore = ">=1.33.2,<2.0a.0" [package.extras] crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] +[[package]] +name = "safetensors" +version = "0.4.2" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "safetensors-0.4.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:69d8bb8384dc2cb5b72c36c4d6980771b293d1a1377b378763f5e37b6bb8d133"}, + {file = "safetensors-0.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3d420e19fcef96d0067f4de4699682b4bbd85fc8fea0bd45fcd961fdf3e8c82c"}, + {file = "safetensors-0.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ca54742122fa3c4821754adb67318e1cd25c3a22bbf0c5520d5176e77a099ac"}, + {file = "safetensors-0.4.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b47aa643afdfd66cf7ce4c184092ae734e15d10aba2c2948f24270211801c3c"}, + {file = "safetensors-0.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d88a16bbc330f27e7f2d4caaf6fb061ad0b8a756ecc4033260b0378e128ce8a2"}, + {file = "safetensors-0.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9223b8ac21085db614a510eb3445e7083cae915a9202357555fa939695d4f57"}, + {file = "safetensors-0.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce6cb86133dc8930a7ab5e7438545a7f205f7a1cdd5aaf108c1d0da6bdcfbc2b"}, + {file = "safetensors-0.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b8a628e0ae2bbc334b62952c384aa5f41621d01850f8d67b04a96b9c39dd7326"}, + {file = "safetensors-0.4.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:88d6beb7f811a081e0e5f1d9669fdac816c45340c04b1eaf7ebfda0ce93ea403"}, + {file = "safetensors-0.4.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b57fc5b1b54cb12d8690a58a4cf4b7144730d4bde9d98aa0e1dab6295a1cd579"}, + {file = "safetensors-0.4.2-cp310-none-win32.whl", hash = "sha256:9d87a1c98803c16cf113b9ba03f07b2dce5e8eabfd1811a7f7323fcaa2a1bf47"}, + {file = "safetensors-0.4.2-cp310-none-win_amd64.whl", hash = "sha256:18930ec1d1ecb526d3d9835abc2489b8f1530877518f0c541e77ef0b7abcbd99"}, + {file = "safetensors-0.4.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:c5dd2ed788730ed56b415d1a11c62026b8cc8c573f55a2092afb3ab383e94fff"}, + {file = "safetensors-0.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cc41791b33efb9c83a59b731619f3d15f543dfe71f3a793cb8fbf9bd5d0d5d71"}, + {file = "safetensors-0.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c888bf71d5ca12a720f1ed87d407c4918afa022fb247a6546d8fac15b1f112b"}, + {file = "safetensors-0.4.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e6b2feb4b47226a16a792e6fac3f49442714884a3d4c1008569d5068a3941be9"}, + {file = "safetensors-0.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f41cc0ee4b838ae8f4d8364a1b162067693d11a3893f0863be8c228d40e4d0ee"}, + {file = "safetensors-0.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:51b7228e46c0a483c40ba4b9470dea00fb1ff8685026bb4766799000f6328ac2"}, + {file = 
"safetensors-0.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02697f8f2be8ca3c37a4958702dbdb1864447ef765e18b5328a1617022dcf164"}, + {file = "safetensors-0.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:27fd8f65cf7c80e4280cae1ee6bcd85c483882f6580821abe71ee1a0d3dcfca7"}, + {file = "safetensors-0.4.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c487b5f113b0924c9534a07dc034830fb4ef05ce9bb6d78cfe016a7dedfe281f"}, + {file = "safetensors-0.4.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:da7f6483f3fe67ff39b3a55552552c67930ea10a36e9f2539d36fc205273d767"}, + {file = "safetensors-0.4.2-cp311-none-win32.whl", hash = "sha256:52a7012f6cb9cb4a132760b6308daede18a9f5f8952ce08adc7c67a7d865c2d8"}, + {file = "safetensors-0.4.2-cp311-none-win_amd64.whl", hash = "sha256:4d1361a097ac430b310ce9eed8ed4746edee33ddafdfbb965debc8966fc34dc2"}, + {file = "safetensors-0.4.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:77af8aa0edcc2863760fd6febbfdb82e88fd75d0e60c1ce4ba57208ba5e4a89b"}, + {file = "safetensors-0.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846666c1c5a8c8888d2dfda8d3921cb9cb8e2c5f78365be756c11021e75a0a2a"}, + {file = "safetensors-0.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f4bfc7ea19b446bfad41510d4b4c76101698c00caaa8a332c8edd8090a412ef"}, + {file = "safetensors-0.4.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:233436fd30f27ffeb3c3780d0b84f496518868445c7a8db003639a649cc98453"}, + {file = "safetensors-0.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7a09237a795d11cd11f9dae505d170a29b5616151db1e10c14f892b11caadc7d"}, + {file = "safetensors-0.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de01c9a3a3b7b69627d624ff69d9f11d28ce9908eea2fb6245adafa4b1d43df6"}, + {file = "safetensors-0.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c1f25c5069ee42a5bcffdc66c300a407941edd73f3239e9fdefd26216407391"}, + {file = "safetensors-0.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7a73b3649456d09ca8506140d44484b63154a7378434cc1e8719f8056550b224"}, + {file = "safetensors-0.4.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e1625a8d07d046e968bd5c4961810aba1225984e4fb9243626f9d04a06ed3fee"}, + {file = "safetensors-0.4.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f74c86b25615cb24ad4cff765a2eefc09d71bf0fed97588cf585aad9c38fbb4"}, + {file = "safetensors-0.4.2-cp312-none-win32.whl", hash = "sha256:8523b9c5777d771bcde5c2389c03f1cdf7ebe8797432a1bd5e345efe25c55987"}, + {file = "safetensors-0.4.2-cp312-none-win_amd64.whl", hash = "sha256:dcff0243e1737a21f83d664c63fed89d1f532c23fc6830d0427279fabd789ccb"}, + {file = "safetensors-0.4.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:96ad3d7d472612e26cbe413922b4fb13933310f0511d346ea5cc9a1e856e52eb"}, + {file = "safetensors-0.4.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:88250922401b5ae4e37de929178caf46be47ed16c817b2237b81679bec07c120"}, + {file = "safetensors-0.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d40443554142fc0ab30652d5cc8554c4b7a613513bde00373e18afd5de8cbe4b"}, + {file = "safetensors-0.4.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:27f53f70106224d32d874aacecbeb4a6e4c5b16a1d2006d0e876d97229086d71"}, + {file = "safetensors-0.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash 
= "sha256:cc068afe23734dfb26ce19db0a7877499ddf73b1d55ceb762417e8da4a1b05fb"}, + {file = "safetensors-0.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9be1918eb8d43a11a6f8806759fccfa0eeb0542b12924caba66af8a7800ad01a"}, + {file = "safetensors-0.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41911087d20a7bbd78cb4ad4f98aab0c431533107584df6635d8b54b99945573"}, + {file = "safetensors-0.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:50771c662aab909f31e94d048e76861fd027d66076ea773eef2e66c717766e24"}, + {file = "safetensors-0.4.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:13f2e57be007b7ea9329133d2399e6bdfcf1910f655440a4da17df3a45afcd30"}, + {file = "safetensors-0.4.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c772147e6395bc829842e0a98e1b30c67fe25d816299c28196488511d5a5e951"}, + {file = "safetensors-0.4.2-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:36239a0060b537a3e8c473df78cffee14c3ec4f51d5f1a853af99371a2fb2a35"}, + {file = "safetensors-0.4.2-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:d0cbb7664fad2c307f95195f951b7059e95dc23e0e1822e5978c8b500098543c"}, + {file = "safetensors-0.4.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b3e55adb6bd9dc1c2a341e72f48f075953fa35d173dd8e29a95b3b02d0d1462"}, + {file = "safetensors-0.4.2-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42f743b3cca863fba53ca57a193f510e5ec359b97f38c282437716b6768e4a25"}, + {file = "safetensors-0.4.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04e6af4a6dbeb06c4e6e7d46cf9c716cbc4cc5ef62584fd8a7c0fe558562df45"}, + {file = "safetensors-0.4.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a492ba21b5c8f14ee5ec9b20f42ba969e53ca1f909a4d04aad736b66a341dcc2"}, + {file = "safetensors-0.4.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b25b8233a1a85dc67e39838951cfb01595d792f3b7b644add63edb652992e030"}, + {file = "safetensors-0.4.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fd27e063fbdafe776f7b1714da59110e88f270e86db00788a8fd65f4eacfeba7"}, + {file = "safetensors-0.4.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1b6fa399f251bbeb52029bf5a0ac2878d7705dd3612a2f8895b48e9c11f0367d"}, + {file = "safetensors-0.4.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:de642d46b459e4afd5c2020b26c0d6d869a171ea00411897d5776c127cac74f0"}, + {file = "safetensors-0.4.2-cp37-none-win32.whl", hash = "sha256:77b72d17754c93bb68f3598182f14d78776e0b9b31682ca5bb2c7c5bd9a75267"}, + {file = "safetensors-0.4.2-cp37-none-win_amd64.whl", hash = "sha256:d36ee3244d461cd655aeef493792c3bccf4875282f8407fd9af99e9a41cf2530"}, + {file = "safetensors-0.4.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:16b6b3884f7876c6b3b23a742428223a7170a5a9dac819d8c12a1569422c4b5a"}, + {file = "safetensors-0.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ee25d311493fbbe0be9d395faee46e9d79e8948f461e388ff39e59875ed9a350"}, + {file = "safetensors-0.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eed8097968585cd752a1171f86fce9aa1d89a29033e5cd8bec5a502e29f6b7af"}, + {file = "safetensors-0.4.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:880e6865cf72cb67f9ab8d04a3c4b49dd95ae92fb1583929ce65aed94e1f685f"}, + {file = "safetensors-0.4.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:91290f83daf80ce6d1a7f629b244443c200060a80f908b29d879021409e5ea94"}, + {file = "safetensors-0.4.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3517d568486ab3508a7acc360b82d7a4a3e26b86efdf210a9ecd9d233c40708a"}, + {file = "safetensors-0.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1f43a77eb38540f782999e5dc5645164fe9027d3f0194f6c9a5126168017efa"}, + {file = "safetensors-0.4.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b684d9818aa5d63fddc65f7d0151968037d255d91adf74eba82125b41c680aaa"}, + {file = "safetensors-0.4.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ab1f5d84185f9fefaf21413efb764e4908057b8a9a0b987ede890c353490fd70"}, + {file = "safetensors-0.4.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2bd979642e6c3a517ef4b84ff36c2fee4015664fea05a61154fc565978347553"}, + {file = "safetensors-0.4.2-cp38-none-win32.whl", hash = "sha256:11be6e7afed29e5a5628f0aa6214e34bc194da73f558dc69fc7d56e07037422a"}, + {file = "safetensors-0.4.2-cp38-none-win_amd64.whl", hash = "sha256:2f7a6e5d29bd2cc340cffaa391fa437b1be9d21a2bd8b8724d2875d13a6ef2a9"}, + {file = "safetensors-0.4.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a5a921b4fe6925f9942adff3ebae8c16e0487908c54586a5a42f35b59fd69794"}, + {file = "safetensors-0.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b691727228c28f2d82d8a92b2bc26e7a1f129ee40b2f2a3185b5974e038ed47c"}, + {file = "safetensors-0.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91ca1056decc4e981248786e87b2a202d4841ee5f99d433f1adf3d44d4bcfa0e"}, + {file = "safetensors-0.4.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:55969fd2e6fdb38dc221b0ab380668c21b0efa12a7562db9924759faa3c51757"}, + {file = "safetensors-0.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6ae429bfaecc10ab5fe78c93009b3d1656c1581da560041e700eadb497dbe7a4"}, + {file = "safetensors-0.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff88f194fe4ac50b463a4a6f0c03af9ad72eb5d24ec6d6730af59522e37fedb"}, + {file = "safetensors-0.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a80cb48d0a447f8dd18e61813efa7d3f8f8d52edf0f05806abc0c59b83431f57"}, + {file = "safetensors-0.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b286fb7adfee70a4189898ac2342b8a67d5f493e6b21b0af89ca8eac1b967cbf"}, + {file = "safetensors-0.4.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ceeff9ddbab4f78738489eb6682867ae946178776f33699737b2129b5394dc1"}, + {file = "safetensors-0.4.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a26fae748a7488cb3aac381eddfa818c42052c87b5e689fb4c6e82ed58cec209"}, + {file = "safetensors-0.4.2-cp39-none-win32.whl", hash = "sha256:039a42ab33c9d68b39706fd38f1922ace26866eff246bf20271edb619f5f848b"}, + {file = "safetensors-0.4.2-cp39-none-win_amd64.whl", hash = "sha256:b3a3e1f5b85859e398773f064943b62a4059f225008a2a8ee6add1edcf77cacf"}, + {file = "safetensors-0.4.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:4e70d442ad17e8b153ef9095bf48ea64f15a66bf26dc2b6ca94660c154edbc24"}, + {file = "safetensors-0.4.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b90f1d9809caf4ff395951b4703295a68d12907f6945bbc3129e934ff8ae46f6"}, + {file = "safetensors-0.4.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c7ac9ad3728838006598e296b3ae9f27d80b489effd4685b92d97b3fc4c98f6"}, + {file = 
"safetensors-0.4.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5730d77e6ff7f4c7039e20913661ad0ea2f86c09e71c039e73dfdd1f394f08"}, + {file = "safetensors-0.4.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:44feb8cb156d6803dcd19fc6b81b27235f29b877660605a6ac35e1da7d64f0e4"}, + {file = "safetensors-0.4.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:523a241c33e7c827ab9a3a23760d75c7d062f43dfe55b6b019409f89b0fb52d1"}, + {file = "safetensors-0.4.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fb18300e8eb74291225214f26c9a8ae2110fd61a6c9b5a2ff4c4e0eb1bb9a998"}, + {file = "safetensors-0.4.2-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:fe5437ff9fb116e44f2ab558981249ae63f978392b4576e62fcfe167d353edbc"}, + {file = "safetensors-0.4.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9304a0934ced5a5d272f39de36291dc141dfc152d277f03fb4d65f2fb2ffa7c"}, + {file = "safetensors-0.4.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:160ba1b1e11cf874602c233ab80a14f588571d09556cbc3586900121d622b5ed"}, + {file = "safetensors-0.4.2-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04fcd6fcf7d9c13c7e5dc7e08de5e492ee4daa8f4ad74b4d8299d3eb0224292f"}, + {file = "safetensors-0.4.2-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:906d14c4a677d35834fb0f3a5455ef8305e1bba10a5e0f2e0f357b3d1ad989f2"}, + {file = "safetensors-0.4.2-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:df3fcdec0cd543084610d1f09c65cdb10fb3079f79bceddc092b0d187c6a265b"}, + {file = "safetensors-0.4.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5ca76f13fb1cef242ea3ad2cb37388e7d005994f42af8b44bee56ba48b2d45ce"}, + {file = "safetensors-0.4.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:278a1a3414c020785decdcd741c578725721274d2f9f787fcc930882e83b89cc"}, + {file = "safetensors-0.4.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05b5a461cc68ecd42d9d546e5e1268a39d8ede7934a68d1ce17c3c659cb829d6"}, + {file = "safetensors-0.4.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2341411412a41671d25e26bed59ec121e46bf4fadb8132895e610411c4b9681"}, + {file = "safetensors-0.4.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3497ac3895acf17c5f98197f1fa4769f09c5e7ede07fcb102f1c201e663e052c"}, + {file = "safetensors-0.4.2-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:01b5e71d3754d2201294f1eb7a6d59cce3a5702ff96d83d226571b2ca2183837"}, + {file = "safetensors-0.4.2-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3627dbd1ea488dd8046a0491de5087f3c0d641e7acc80c0189a33c69398f1cd1"}, + {file = "safetensors-0.4.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9d56f0ef53afad26ec54ceede78a43e9a23a076dadbbda7b44d304c591abf4c1"}, + {file = "safetensors-0.4.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b259ca73d42daf658a1bda463f1f83885ae4d93a60869be80d7f7dfcc9d8bbb5"}, + {file = "safetensors-0.4.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ebc3cd401e4eb54e7c0a70346be565e81942d9a41fafd5f4bf7ab3a55d10378"}, + {file = "safetensors-0.4.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5bc384a0309b706aa0425c93abb0390508a61bf029ce99c7d9df4220f25871a5"}, + {file = 
"safetensors-0.4.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:af2d8f7235d8a08fbccfb8394387890e7fa38942b349a94e6eff13c52ac98087"}, + {file = "safetensors-0.4.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0911315bbcc5289087d063c2c2c7ccd711ea97a7e557a7bce005ac2cf80146aa"}, + {file = "safetensors-0.4.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1efe31673be91832d73439a2af426743e1395fc9ef7b081914e9e1d567bd7b5f"}, + {file = "safetensors-0.4.2.tar.gz", hash = "sha256:acc85dcb09ec5e8aa787f588d7ad4d55c103f31e4ff060e17d92cc0e8b8cac73"}, +] + +[package.extras] +all = ["safetensors[jax]", "safetensors[numpy]", "safetensors[paddlepaddle]", "safetensors[pinned-tf]", "safetensors[quality]", "safetensors[testing]", "safetensors[torch]"] +dev = ["safetensors[all]"] +jax = ["flax (>=0.6.3)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "safetensors[numpy]"] +mlx = ["mlx (>=0.0.9)"] +numpy = ["numpy (>=1.21.6)"] +paddlepaddle = ["paddlepaddle (>=2.4.1)", "safetensors[numpy]"] +pinned-tf = ["safetensors[numpy]", "tensorflow (==2.11.0)"] +quality = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "isort (>=5.5.4)"] +tensorflow = ["safetensors[numpy]", "tensorflow (>=2.11.0)"] +testing = ["h5py (>=3.7.0)", "huggingface_hub (>=0.12.1)", "hypothesis (>=6.70.2)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "safetensors[numpy]", "setuptools_rust (>=1.5.2)"] +torch = ["safetensors[numpy]", "torch (>=1.10)"] + [[package]] name = "setuptools" version = "69.1.1" @@ -2238,6 +2454,133 @@ files = [ [package.extras] widechars = ["wcwidth"] +[[package]] +name = "tokenizers" +version = "0.15.2" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tokenizers-0.15.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:52f6130c9cbf70544287575a985bf44ae1bda2da7e8c24e97716080593638012"}, + {file = "tokenizers-0.15.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:054c1cc9c6d68f7ffa4e810b3d5131e0ba511b6e4be34157aa08ee54c2f8d9ee"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9b9b070fdad06e347563b88c278995735292ded1132f8657084989a4c84a6d5"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea621a7eef4b70e1f7a4e84dd989ae3f0eeb50fc8690254eacc08acb623e82f1"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cf7fd9a5141634fa3aa8d6b7be362e6ae1b4cda60da81388fa533e0b552c98fd"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44f2a832cd0825295f7179eaf173381dc45230f9227ec4b44378322d900447c9"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8b9ec69247a23747669ec4b0ca10f8e3dfb3545d550258129bd62291aabe8605"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b6a4c78da863ff26dbd5ad9a8ecc33d8a8d97b535172601cf00aee9d7ce9ce"}, + {file = "tokenizers-0.15.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5ab2a4d21dcf76af60e05af8063138849eb1d6553a0d059f6534357bce8ba364"}, + {file = "tokenizers-0.15.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a47acfac7e511f6bbfcf2d3fb8c26979c780a91e06fb5b9a43831b2c0153d024"}, + {file = "tokenizers-0.15.2-cp310-none-win32.whl", hash = "sha256:064ff87bb6acdbd693666de9a4b692add41308a2c0ec0770d6385737117215f2"}, + {file = 
"tokenizers-0.15.2-cp310-none-win_amd64.whl", hash = "sha256:3b919afe4df7eb6ac7cafd2bd14fb507d3f408db7a68c43117f579c984a73843"}, + {file = "tokenizers-0.15.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:89cd1cb93e4b12ff39bb2d626ad77e35209de9309a71e4d3d4672667b4b256e7"}, + {file = "tokenizers-0.15.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cfed5c64e5be23d7ee0f0e98081a25c2a46b0b77ce99a4f0605b1ec43dd481fa"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a907d76dcfda37023ba203ab4ceeb21bc5683436ebefbd895a0841fd52f6f6f2"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20ea60479de6fc7b8ae756b4b097572372d7e4032e2521c1bbf3d90c90a99ff0"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:48e2b9335be2bc0171df9281385c2ed06a15f5cf121c44094338306ab7b33f2c"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:112a1dd436d2cc06e6ffdc0b06d55ac019a35a63afd26475205cb4b1bf0bfbff"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4620cca5c2817177ee8706f860364cc3a8845bc1e291aaf661fb899e5d1c45b0"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccd73a82751c523b3fc31ff8194702e4af4db21dc20e55b30ecc2079c5d43cb7"}, + {file = "tokenizers-0.15.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:107089f135b4ae7817affe6264f8c7a5c5b4fd9a90f9439ed495f54fcea56fb4"}, + {file = "tokenizers-0.15.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0ff110ecc57b7aa4a594396525a3451ad70988e517237fe91c540997c4e50e29"}, + {file = "tokenizers-0.15.2-cp311-none-win32.whl", hash = "sha256:6d76f00f5c32da36c61f41c58346a4fa7f0a61be02f4301fd30ad59834977cc3"}, + {file = "tokenizers-0.15.2-cp311-none-win_amd64.whl", hash = "sha256:cc90102ed17271cf0a1262babe5939e0134b3890345d11a19c3145184b706055"}, + {file = "tokenizers-0.15.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f86593c18d2e6248e72fb91c77d413a815153b8ea4e31f7cd443bdf28e467670"}, + {file = "tokenizers-0.15.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0774bccc6608eca23eb9d620196687c8b2360624619623cf4ba9dc9bd53e8b51"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d0222c5b7c9b26c0b4822a82f6a7011de0a9d3060e1da176f66274b70f846b98"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3835738be1de66624fff2f4f6f6684775da4e9c00bde053be7564cbf3545cc66"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0143e7d9dcd811855c1ce1ab9bf5d96d29bf5e528fd6c7824d0465741e8c10fd"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db35825f6d54215f6b6009a7ff3eedee0848c99a6271c870d2826fbbedf31a38"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f5e64b0389a2be47091d8cc53c87859783b837ea1a06edd9d8e04004df55a5c"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e0480c452217edd35eca56fafe2029fb4d368b7c0475f8dfa3c5c9c400a7456"}, + {file = "tokenizers-0.15.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a33ab881c8fe70474980577e033d0bc9a27b7ab8272896e500708b212995d834"}, + {file = 
"tokenizers-0.15.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a308a607ca9de2c64c1b9ba79ec9a403969715a1b8ba5f998a676826f1a7039d"}, + {file = "tokenizers-0.15.2-cp312-none-win32.whl", hash = "sha256:b8fcfa81bcb9447df582c5bc96a031e6df4da2a774b8080d4f02c0c16b42be0b"}, + {file = "tokenizers-0.15.2-cp312-none-win_amd64.whl", hash = "sha256:38d7ab43c6825abfc0b661d95f39c7f8af2449364f01d331f3b51c94dcff7221"}, + {file = "tokenizers-0.15.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:38bfb0204ff3246ca4d5e726e8cc8403bfc931090151e6eede54d0e0cf162ef0"}, + {file = "tokenizers-0.15.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c861d35e8286a53e06e9e28d030b5a05bcbf5ac9d7229e561e53c352a85b1fc"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:936bf3842db5b2048eaa53dade907b1160f318e7c90c74bfab86f1e47720bdd6"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:620beacc3373277700d0e27718aa8b25f7b383eb8001fba94ee00aeea1459d89"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2735ecbbf37e52db4ea970e539fd2d450d213517b77745114f92867f3fc246eb"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:473c83c5e2359bb81b0b6fde870b41b2764fcdd36d997485e07e72cc3a62264a"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:968fa1fb3c27398b28a4eca1cbd1e19355c4d3a6007f7398d48826bbe3a0f728"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:865c60ae6eaebdde7da66191ee9b7db52e542ed8ee9d2c653b6d190a9351b980"}, + {file = "tokenizers-0.15.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7c0d8b52664ab2d4a8d6686eb5effc68b78608a9008f086a122a7b2996befbab"}, + {file = "tokenizers-0.15.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f33dfbdec3784093a9aebb3680d1f91336c56d86cc70ddf88708251da1fe9064"}, + {file = "tokenizers-0.15.2-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d44ba80988ff9424e33e0a49445072ac7029d8c0e1601ad25a0ca5f41ed0c1d6"}, + {file = "tokenizers-0.15.2-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:dce74266919b892f82b1b86025a613956ea0ea62a4843d4c4237be2c5498ed3a"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0ef06b9707baeb98b316577acb04f4852239d856b93e9ec3a299622f6084e4be"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c73e2e74bbb07910da0d37c326869f34113137b23eadad3fc00856e6b3d9930c"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4eeb12daf02a59e29f578a865f55d87cd103ce62bd8a3a5874f8fdeaa82e336b"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ba9f6895af58487ca4f54e8a664a322f16c26bbb442effd01087eba391a719e"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ccec77aa7150e38eec6878a493bf8c263ff1fa8a62404e16c6203c64c1f16a26"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3f40604f5042ff210ba82743dda2b6aa3e55aa12df4e9f2378ee01a17e2855e"}, + {file = "tokenizers-0.15.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5645938a42d78c4885086767c70923abad047163d809c16da75d6b290cb30bbe"}, + {file = 
"tokenizers-0.15.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:05a77cbfebe28a61ab5c3891f9939cc24798b63fa236d84e5f29f3a85a200c00"}, + {file = "tokenizers-0.15.2-cp37-none-win32.whl", hash = "sha256:361abdc068e8afe9c5b818769a48624687fb6aaed49636ee39bec4e95e1a215b"}, + {file = "tokenizers-0.15.2-cp37-none-win_amd64.whl", hash = "sha256:7ef789f83eb0f9baeb4d09a86cd639c0a5518528f9992f38b28e819df397eb06"}, + {file = "tokenizers-0.15.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4fe1f74a902bee74a3b25aff180fbfbf4f8b444ab37c4d496af7afd13a784ed2"}, + {file = "tokenizers-0.15.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c4b89038a684f40a6b15d6b09f49650ac64d951ad0f2a3ea9169687bbf2a8ba"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d05a1b06f986d41aed5f2de464c003004b2df8aaf66f2b7628254bcbfb72a438"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:508711a108684111ec8af89d3a9e9e08755247eda27d0ba5e3c50e9da1600f6d"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:daa348f02d15160cb35439098ac96e3a53bacf35885072611cd9e5be7d333daa"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:494fdbe5932d3416de2a85fc2470b797e6f3226c12845cadf054dd906afd0442"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2d60f5246f4da9373f75ff18d64c69cbf60c3bca597290cea01059c336d2470"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93268e788825f52de4c7bdcb6ebc1fcd4a5442c02e730faa9b6b08f23ead0e24"}, + {file = "tokenizers-0.15.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6fc7083ab404019fc9acafe78662c192673c1e696bd598d16dc005bd663a5cf9"}, + {file = "tokenizers-0.15.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:41e39b41e5531d6b2122a77532dbea60e171ef87a3820b5a3888daa847df4153"}, + {file = "tokenizers-0.15.2-cp38-none-win32.whl", hash = "sha256:06cd0487b1cbfabefb2cc52fbd6b1f8d4c37799bd6c6e1641281adaa6b2504a7"}, + {file = "tokenizers-0.15.2-cp38-none-win_amd64.whl", hash = "sha256:5179c271aa5de9c71712e31cb5a79e436ecd0d7532a408fa42a8dbfa4bc23fd9"}, + {file = "tokenizers-0.15.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:82f8652a74cc107052328b87ea8b34291c0f55b96d8fb261b3880216a9f9e48e"}, + {file = "tokenizers-0.15.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:02458bee6f5f3139f1ebbb6d042b283af712c0981f5bc50edf771d6b762d5e4f"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c9a09cd26cca2e1c349f91aa665309ddb48d71636370749414fbf67bc83c5343"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:158be8ea8554e5ed69acc1ce3fbb23a06060bd4bbb09029431ad6b9a466a7121"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ddba9a2b0c8c81633eca0bb2e1aa5b3a15362b1277f1ae64176d0f6eba78ab1"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ef5dd1d39797044642dbe53eb2bc56435308432e9c7907728da74c69ee2adca"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:454c203164e07a860dbeb3b1f4a733be52b0edbb4dd2e5bd75023ffa8b49403a"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0cf6b7f1d4dc59af960e6ffdc4faffe6460bbfa8dce27a58bf75755ffdb2526d"}, + {file = "tokenizers-0.15.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2ef09bbc16519f6c25d0c7fc0c6a33a6f62923e263c9d7cca4e58b8c61572afb"}, + {file = "tokenizers-0.15.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c9a2ebdd2ad4ec7a68e7615086e633857c85e2f18025bd05d2a4399e6c5f7169"}, + {file = "tokenizers-0.15.2-cp39-none-win32.whl", hash = "sha256:918fbb0eab96fe08e72a8c2b5461e9cce95585d82a58688e7f01c2bd546c79d0"}, + {file = "tokenizers-0.15.2-cp39-none-win_amd64.whl", hash = "sha256:524e60da0135e106b254bd71f0659be9f89d83f006ea9093ce4d1fab498c6d0d"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6a9b648a58281c4672212fab04e60648fde574877d0139cd4b4f93fe28ca8944"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:7c7d18b733be6bbca8a55084027f7be428c947ddf871c500ee603e375013ffba"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:13ca3611de8d9ddfbc4dc39ef54ab1d2d4aaa114ac8727dfdc6a6ec4be017378"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:237d1bf3361cf2e6463e6c140628e6406766e8b27274f5fcc62c747ae3c6f094"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67a0fe1e49e60c664915e9fb6b0cb19bac082ab1f309188230e4b2920230edb3"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4e022fe65e99230b8fd89ebdfea138c24421f91c1a4f4781a8f5016fd5cdfb4d"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d857be2df69763362ac699f8b251a8cd3fac9d21893de129bc788f8baaef2693"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:708bb3e4283177236309e698da5fcd0879ce8fd37457d7c266d16b550bcbbd18"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:64c35e09e9899b72a76e762f9854e8750213f67567787d45f37ce06daf57ca78"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1257f4394be0d3b00de8c9e840ca5601d0a4a8438361ce9c2b05c7d25f6057b"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02272fe48280e0293a04245ca5d919b2c94a48b408b55e858feae9618138aeda"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dc3ad9ebc76eabe8b1d7c04d38be884b8f9d60c0cdc09b0aa4e3bcf746de0388"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:32e16bdeffa7c4f46bf2152172ca511808b952701d13e7c18833c0b73cb5c23f"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:fb16ba563d59003028b678d2361a27f7e4ae0ab29c7a80690efa20d829c81fdb"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:2277c36d2d6cdb7876c274547921a42425b6810d38354327dd65a8009acf870c"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1cf75d32e8d250781940d07f7eece253f2fe9ecdb1dc7ba6e3833fa17b82fcbc"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1b3b31884dc8e9b21508bb76da80ebf7308fdb947a17affce815665d5c4d028"}, + {file = 
"tokenizers-0.15.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b10122d8d8e30afb43bb1fe21a3619f62c3e2574bff2699cf8af8b0b6c5dc4a3"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d88b96ff0fe8e91f6ef01ba50b0d71db5017fa4e3b1d99681cec89a85faf7bf7"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:37aaec5a52e959892870a7c47cef80c53797c0db9149d458460f4f31e2fb250e"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e2ea752f2b0fe96eb6e2f3adbbf4d72aaa1272079b0dfa1145507bd6a5d537e6"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:4b19a808d8799fda23504a5cd31d2f58e6f52f140380082b352f877017d6342b"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:64c86e5e068ac8b19204419ed8ca90f9d25db20578f5881e337d203b314f4104"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de19c4dc503c612847edf833c82e9f73cd79926a384af9d801dcf93f110cea4e"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea09acd2fe3324174063d61ad620dec3bcf042b495515f27f638270a7d466e8b"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cf27fd43472e07b57cf420eee1e814549203d56de00b5af8659cb99885472f1f"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:7ca22bd897537a0080521445d91a58886c8c04084a6a19e6c78c586e0cfa92a5"}, + {file = "tokenizers-0.15.2.tar.gz", hash = "sha256:e6e9c6e019dd5484be5beafc775ae6c925f4c69a3487040ed09b45e13df2cb91"}, +] + +[package.dependencies] +huggingface_hub = ">=0.16.4,<1.0" + +[package.extras] +dev = ["tokenizers[testing]"] +docs = ["setuptools_rust", "sphinx", "sphinx_rtd_theme"] +testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] + [[package]] name = "toml" version = "0.10.2" @@ -2269,6 +2612,74 @@ notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] telegram = ["requests"] +[[package]] +name = "transformers" +version = "4.38.2" +description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "transformers-4.38.2-py3-none-any.whl", hash = "sha256:c4029cb9f01b3dd335e52f364c52d2b37c65b4c78e02e6a08b1919c5c928573e"}, + {file = "transformers-4.38.2.tar.gz", hash = "sha256:c5fc7ad682b8a50a48b2a4c05d4ea2de5567adb1bdd00053619dbe5960857dd5"}, +] + +[package.dependencies] +filelock = "*" +huggingface-hub = ">=0.19.3,<1.0" +numpy = ">=1.17" +packaging = ">=20.0" +pyyaml = ">=5.1" +regex = "!=2019.12.17" +requests = "*" +safetensors = ">=0.4.1" +tokenizers = ">=0.14,<0.19" +tqdm = ">=4.27" + +[package.extras] +accelerate = ["accelerate (>=0.21.0)"] +agents = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "datasets (!=2.5.0)", "diffusers", "opencv-python", "sentencepiece (>=0.1.91,!=0.1.92)", "torch"] +all = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", 
"tf2onnx", "timm", "tokenizers (>=0.14,<0.19)", "torch", "torchaudio", "torchvision"] +audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] +codecarbon = ["codecarbon (==1.2.0)"] +deepspeed = ["accelerate (>=0.21.0)", "deepspeed (>=0.9.3)"] +deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.21.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "optuna", "parameterized", "protobuf", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] +dev = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "timm", "tokenizers (>=0.14,<0.19)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] +dev-tensorflow = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.14,<0.19)", "urllib3 (<2.0.0)"] +dev-torch = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp 
(>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "timeout-decorator", "timm", "tokenizers (>=0.14,<0.19)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] +docs = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "hf-doc-builder", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm", "tokenizers (>=0.14,<0.19)", "torch", "torchaudio", "torchvision"] +docs-specific = ["hf-doc-builder"] +flax = ["flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "optax (>=0.0.8,<=0.1.4)"] +flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] +ftfy = ["ftfy"] +integrations = ["optuna", "ray[tune] (>=2.7.0)", "sigopt"] +ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "rhoknp (>=1.1.0,<1.3.1)", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] +modelcreation = ["cookiecutter (==1.7.3)"] +natten = ["natten (>=0.14.6,<0.15.0)"] +onnx = ["onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "tf2onnx"] +onnxruntime = ["onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] +optuna = ["optuna"] +quality = ["GitPython (<3.1.19)", "datasets (!=2.5.0)", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "ruff (==0.1.5)", "urllib3 (<2.0.0)"] +ray = ["ray[tune] (>=2.7.0)"] +retrieval = ["datasets (!=2.5.0)", "faiss-cpu"] +sagemaker = ["sagemaker (>=2.31.0)"] +sentencepiece = ["protobuf", "sentencepiece (>=0.1.91,!=0.1.92)"] +serving = ["fastapi", "pydantic", "starlette", "uvicorn"] +sigopt = ["sigopt"] +sklearn = ["scikit-learn"] +speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] +testing = ["GitPython (<3.1.19)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "parameterized", "protobuf", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "tensorboard", "timeout-decorator"] +tf = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx"] +tf-cpu = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow-cpu (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx"] +tf-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] +timm = ["timm"] +tokenizers = ["tokenizers (>=0.14,<0.19)"] +torch = ["accelerate (>=0.21.0)", "torch"] +torch-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] +torch-vision = ["Pillow (>=10.0.1,<=15.0)", "torchvision"] +torchhub = ["filelock", "huggingface-hub (>=0.19.3,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers 
(>=0.14,<0.19)", "torch", "tqdm (>=4.27)"] +video = ["av (==9.2.0)", "decord (==0.6.0)"] +vision = ["Pillow (>=10.0.1,<=15.0)"] + [[package]] name = "typing-extensions" version = "4.10.0" @@ -2598,4 +3009,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "e1d8b49218f24ab7ceb86c74a9374287891f17e6481215e1049163a61df28f85" +content-hash = "b6c92390488dfc5f6510aec79f69af60a6dc15305761deb37d1a51a4aecec96b" diff --git a/airbyte-integrations/connectors/source-s3/pyproject.toml b/airbyte-integrations/connectors/source-s3/pyproject.toml index 44c319fbf268..aae8ee51137f 100644 --- a/airbyte-integrations/connectors/source-s3/pyproject.toml +++ b/airbyte-integrations/connectors/source-s3/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "4.5.9" +version = "4.5.10" name = "source-s3" description = "Source implementation for S3." authors = [ "Airbyte ",] @@ -21,6 +21,8 @@ pytz = "==2024.1" wcmatch = "==8.4" python-snappy = "==0.6.1" dill = "==0.3.4" +# override transitive dependency that had a vulnerability https://nvd.nist.gov/vuln/detail/CVE-2023-6730 +transformers = "4.38.2" [tool.poetry.scripts] source-s3 = "source_s3.run:run" diff --git a/docs/integrations/sources/s3.md b/docs/integrations/sources/s3.md index 298f921fdfa0..1376a6d15082 100644 --- a/docs/integrations/sources/s3.md +++ b/docs/integrations/sources/s3.md @@ -325,6 +325,7 @@ To perform the text extraction from PDF and Docx files, the connector uses the [ | Version | Date | Pull Request | Subject | |:--------|:-----------|:----------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------| +| 4.5.10 | 2024-03-11 | [35955](https://github.com/airbytehq/airbyte/pull/35955) | Pin `transformers` transitive dependency | | 4.5.9 | 2024-03-06 | [35857](https://github.com/airbytehq/airbyte/pull/35857) | Bump poetry.lock to upgrade transitive dependency | | 4.5.8 | 2024-03-04 | [35808](https://github.com/airbytehq/airbyte/pull/35808) | Use cached AWS client | | 4.5.7 | 2024-02-23 | [34895](https://github.com/airbytehq/airbyte/pull/34895) | Run incremental syncs with concurrency | From 27d9207ce4e662deef9438b225629456368fca79 Mon Sep 17 00:00:00 2001 From: terencecho <3916587+terencecho@users.noreply.github.com> Date: Mon, 11 Mar 2024 11:00:11 -0700 Subject: [PATCH 159/172] Update enterprise setup guide for storage (#35966) --- docs/enterprise-setup/implementation-guide.md | 64 +++++++++++++++---- 1 file changed, 53 insertions(+), 11 deletions(-) diff --git a/docs/enterprise-setup/implementation-guide.md b/docs/enterprise-setup/implementation-guide.md index 0806e590c33f..e44d644c6caa 100644 --- a/docs/enterprise-setup/implementation-guide.md +++ b/docs/enterprise-setup/implementation-guide.md @@ -219,7 +219,46 @@ For Self-Managed Enterprise deployments, we recommend spinning up standalone log
External log storage setup steps

-To do this, add external log storage details to your `airbyte.yml` file. This disables the default internal Minio instance (`airbyte/minio`), and configures the external log database:
+If you are using credentials such as AWS access keys, the keys must be stored in Kubernetes secrets. The secret store and secret keys will be referenced in the `airbyte.yml` file. Here is an example of a Kubernetes secret manifest that you can apply to your cluster.
+
+
+
+```yaml
+apiVersion: v1
+kind: Secret
+metadata:
+  name: airbyte-config-secrets
+type: Opaque
+stringData:
+## Storage Secrets
+  # S3
+  aws-secret-manager-access-key-id: AKIMSOSBNEOQ6SLTQSP
+  aws-secret-manager-secret-access-key: 3MQU9CIk8LhHTEA1sd69KoKW+le93UmAz/i/N6fk
+```
+
+
+
+```yaml
+apiVersion: v1
+kind: Secret
+metadata:
+  name: gcp-cred-secrets
+type: Opaque
+stringData:
+  gcp.json: 
+```
+
+Or, use `kubectl` to create the secret directly from the credentials JSON file:
+```
+kubectl create secret generic gcp-cred-secrets --from-file=gcp.json=.json
+```
+
+
+
+Next, add external log storage details to your `airbyte.yml` file. This disables the default internal Minio instance (`airbyte/minio`), and configures the external log database:

@@ -235,13 +274,12 @@ global:
     log: airbyte-bucket
     state: airbyte-bucket
     workloadOutput: airbyte-bucket
-
+  storageSecretName: airbyte-config-secrets # name of the kube secret ref
   s3:
-    region: "" ## e.g. us-east-1
-    accessKeyExistingSecret: ## The name of an existing Kubernetes secret containing the AWS Access Key.
-    accessKeyExistingSecretKey: ## The Kubernetes secret key containing the AWS Access Key.
-    secretKeyExistingSecret: ## The name of an existing Kubernetes secret containing the AWS Secret Access Key.
-    secretKeyExistingSecretKey: ## The Kubernetes secret key containing the AWS Secret Access Key.
+    region: "" ## Default region required. e.g. us-east-1
+    authenticationType: credentials # credentials | instanceProfile
+    accessKeyIdSecretKey: aws-secret-manager-access-key-id # not necessary if using instanceProfile creds
+    secretAccessKeySecretKey: aws-secret-manager-secret-access-key # not necessary if using instanceProfile creds
 ```

 Then, ensure your access key is tied to an IAM user with the [following policies](https://docs.aws.amazon.com/AmazonS3/latest/userguide/example-policies-s3.html#iam-policy-ex0), allowing the user access to S3 storage:
@@ -276,7 +314,7 @@ Then, ensure your access key is tied to an IAM user with the [following policies
 ```
-
+
 ```yaml
@@ -290,10 +328,11 @@ global:
     log: airbyte-bucket
     state: airbyte-bucket
    workloadOutput: airbyte-bucket
-
+  storageSecretName: gcp-cred-secrets
  gcs:
-    credentials: ""
-    credentialsJson: "" ## Base64 encoded json GCP credentials file contents.
+    authenticationType: credentials
+    project: 
+    credentialsPath: /secrets/gcs-log-creds/gcp.json
 ```

 Note that the `credentials` and `credentialsJson` fields are mutually exclusive.
@@ -422,6 +461,7 @@ helm install \
   "airbyte-enterprise" \
   "airbyte/airbyte" \
   --set-file airbyteYml="./airbyte.yml"
+--values ./airbyte.yml
 ```
 The default release name is `airbyte-enterprise`. You can change this by modifying the above `helm upgrade` command.
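Before running `helm install`, it can save a debugging cycle to confirm that the secret referenced by `storageSecretName` actually exists and carries the key names that `airbyte.yml` expects. The snippet below is a minimal illustrative sketch, not part of the official guide: it assumes the `kubernetes` Python client, an active kubeconfig context, and the example secret/key names from the manifests above; substitute your own secret name and namespace.

```python
# Pre-install sanity check (illustrative sketch, not official Airbyte tooling).
# Assumes `pip install kubernetes`, a working kubeconfig context, and the
# example names used above (`airbyte-config-secrets`, aws-secret-manager-* keys).
from kubernetes import client, config
from kubernetes.client.exceptions import ApiException

EXPECTED_KEYS = {
    "aws-secret-manager-access-key-id",
    "aws-secret-manager-secret-access-key",
}


def check_storage_secret(name: str = "airbyte-config-secrets", namespace: str = "default") -> None:
    config.load_kube_config()  # uses the current kubeconfig context
    try:
        secret = client.CoreV1Api().read_namespaced_secret(name, namespace)
    except ApiException as err:
        raise SystemExit(f"Could not read secret {name!r} in {namespace!r}: {err.reason}")
    missing = EXPECTED_KEYS - set((secret.data or {}).keys())
    if missing:
        raise SystemExit(f"Secret {name!r} is missing keys: {sorted(missing)}")
    print(f"Secret {name!r} contains all keys referenced by airbyte.yml.")


if __name__ == "__main__":
    check_storage_secret()
```

If any key is missing, re-apply the manifest (or re-run the `kubectl create secret` command) before installing the chart.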
@@ -439,6 +479,7 @@ helm upgrade \
 --install "airbyte-enterprise" \
 "airbyte/airbyte" \
 --set-file airbyteYml="./airbyte.yml"
+--values ./airbyte.yml
 ```

 ## Customizing your Deployment

@@ -454,6 +495,7 @@ helm upgrade \
   "airbyte/airbyte" \
   --set-file airbyteYml="./airbyte.yml" \
   --values path/to/values.yaml
+  --values ./airbyte.yml
 ```

 ### Customizing your Service Account

From b09e5602b3bc70f1d87284ebde26fe38ee6c0bfc Mon Sep 17 00:00:00 2001
From: Natalie Kwong <38087517+nataliekwong@users.noreply.github.com>
Date: Mon, 11 Mar 2024 14:04:43 -0700
Subject: [PATCH 160/172] [Docs] Edit recharge wording (#35975)

---
 docs/cloud/managing-airbyte-cloud/manage-credits.md | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/docs/cloud/managing-airbyte-cloud/manage-credits.md b/docs/cloud/managing-airbyte-cloud/manage-credits.md
index 8f04f6ffa788..f8d90a0cbde5 100644
--- a/docs/cloud/managing-airbyte-cloud/manage-credits.md
+++ b/docs/cloud/managing-airbyte-cloud/manage-credits.md
@@ -42,17 +42,17 @@ To purchase credits directly through the UI,
 :::
 
-## Automatic reload of credits (Beta)
+## Automatic reload of credits
 
-You can enroll in automatic top-ups of your credit balance. This is a beta feature for those who do not want to manually add credits each time.
+You can enroll in automatic top-ups of your credit balance. This feature is for those who do not want to manually add credits each time.
 
 To enroll, [email us](mailto:billing@airbyte.io) with:
 
-1. A link to your workspace that you'd like to enable this feature for.
+1. A link to your workspace or organization that you'd like to enable this feature for.
 2. **Recharge threshold** The credit balance below which you would like the automatic top-up to occur.
 3. **Recharge balance** The amount of credits you would like to refill to.
 
-As an example, if the recharge threshold is 10 credits and recharge balance is 30 credits, anytime your workspace's credit balance dipped below 10 credits, Airbyte will automatically add enough credits to bring the balance back to 30 credits by charging the difference between your credit balance and 30 credits.
+As an example, if the recharge threshold is 10 credits and recharge balance is 30 credits, anytime your credit balance dips below 10 credits, Airbyte will automatically add enough credits to bring the balance back to 30 credits by charging the difference between your credit balance and 30 credits.
 
 To take a real example, if:
 1. The credit balance reached 3 credits.
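The recharge arithmetic described in the reworded paragraph above reduces to a single rule: once the balance dips below the threshold, charge the difference up to the recharge balance. A minimal illustrative sketch follows; this is not Airbyte billing code, and the function name and default values are invented here to mirror the documented example.

```python
# Illustrative only -- not Airbyte billing code. Mirrors the docs' example:
# recharge threshold of 10 credits, recharge balance of 30 credits.
def credits_to_charge(balance: float, threshold: float = 10.0, recharge_to: float = 30.0) -> float:
    """Return how many credits an automatic reload would purchase."""
    return recharge_to - balance if balance < threshold else 0.0


assert credits_to_charge(3.0) == 27.0   # balance fell to 3 -> buy 27 credits
assert credits_to_charge(15.0) == 0.0   # still above the threshold -> no charge
```

With the documented values, a balance of 3 credits triggers a 27-credit purchase, which matches the "real example" the docs walk through.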
From ee6b1a0ee60f356bf367f67d3bdcccd8f20a39c2 Mon Sep 17 00:00:00 2001 From: Akash Kulkarni <113392464+akashkulk@users.noreply.github.com> Date: Mon, 11 Mar 2024 15:31:43 -0700 Subject: [PATCH 161/172] [source-postgres] : Add retries back to the DBZ connector (#35904) --- airbyte-cdk/java/airbyte-cdk/README.md | 1 + .../airbyte-cdk/core/src/main/resources/version.properties | 2 +- .../debezium/internals/DebeziumPropertiesManager.java | 5 +---- .../debezium/internals/DebeziumRecordIterator.java | 2 +- airbyte-integrations/connectors/source-postgres/build.gradle | 2 +- .../connectors/source-postgres/metadata.yaml | 2 +- docs/integrations/sources/postgres.md | 1 + 7 files changed, 7 insertions(+), 8 deletions(-) diff --git a/airbyte-cdk/java/airbyte-cdk/README.md b/airbyte-cdk/java/airbyte-cdk/README.md index e158953abc5b..56877e09a400 100644 --- a/airbyte-cdk/java/airbyte-cdk/README.md +++ b/airbyte-cdk/java/airbyte-cdk/README.md @@ -166,6 +166,7 @@ MavenLocal debugging steps: | Version | Date | Pull Request | Subject | |:--------|:-----------|:-----------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 0.23.19 | 2024-03-11 | [\#35904](https://github.com/airbytehq/airbyte/pull/35904) | Add retries to the debezium engine. | | 0.23.18 | 2024-03-07 | [\#35899](https://github.com/airbytehq/airbyte/pull/35899) | Null check when retrieving destination state | | 0.23.16 | 2024-03-06 | [\#35842](https://github.com/airbytehq/airbyte/pull/35842) | Improve logging in debezium processing. | | 0.23.15 | 2024-03-05 | [\#35827](https://github.com/airbytehq/airbyte/pull/35827) | improving the Junit interceptor. | diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties index c6b68bc8f36f..66c3969cb410 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties @@ -1 +1 @@ -version=0.23.18 +version=0.23.19 diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/debezium/internals/DebeziumPropertiesManager.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/debezium/internals/DebeziumPropertiesManager.java index 4bae69e9999b..61c57c1a1025 100644 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/debezium/internals/DebeziumPropertiesManager.java +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/debezium/internals/DebeziumPropertiesManager.java @@ -45,10 +45,7 @@ public Properties getDebeziumProperties( props.setProperty("max.batch.size", "2048"); props.setProperty("max.queue.size", "8192"); - // Disabling retries because debezium startup time might exceed our 60-second wait limit - // The maximum number of retries on connection errors before failing (-1 = no limit, 0 = disabled, > - // 0 = num of retries). 
-    props.setProperty("errors.max.retries", "0");
+    props.setProperty("errors.max.retries", "5");
     // This property must be strictly less than errors.retry.delay.max.ms
     // (https://github.com/debezium/debezium/blob/bcc7d49519a4f07d123c616cfa45cd6268def0b9/debezium-core/src/main/java/io/debezium/util/DelayStrategy.java#L135)
     props.setProperty("errors.retry.delay.initial.ms", "299");
diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/debezium/internals/DebeziumRecordIterator.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/debezium/internals/DebeziumRecordIterator.java
index 09ccae30c926..aa0dd90ea5ab 100644
--- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/debezium/internals/DebeziumRecordIterator.java
+++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/debezium/internals/DebeziumRecordIterator.java
@@ -64,7 +64,7 @@ public DebeziumRecordIterator(final LinkedBlockingQueue(1);
     this.receivedFirstRecord = false;
diff --git a/airbyte-integrations/connectors/source-postgres/build.gradle b/airbyte-integrations/connectors/source-postgres/build.gradle
index df72ad0baa1b..22ce22cdb18a 100644
--- a/airbyte-integrations/connectors/source-postgres/build.gradle
+++ b/airbyte-integrations/connectors/source-postgres/build.gradle
@@ -12,7 +12,7 @@ java {
 }
 
 airbyteJavaConnector {
-    cdkVersionRequired = '0.23.17'
+    cdkVersionRequired = '0.23.19'
     features = ['db-sources', 'datastore-postgres']
     useLocalCdk = false
 }
diff --git a/airbyte-integrations/connectors/source-postgres/metadata.yaml b/airbyte-integrations/connectors/source-postgres/metadata.yaml
index f5bc3ca80c87..dc56f616b2b3 100644
--- a/airbyte-integrations/connectors/source-postgres/metadata.yaml
+++ b/airbyte-integrations/connectors/source-postgres/metadata.yaml
@@ -9,7 +9,7 @@ data:
   connectorSubtype: database
   connectorType: source
   definitionId: decd338e-5647-4c0b-adf4-da0e75f5a750
-  dockerImageTag: 3.3.15
+  dockerImageTag: 3.3.16
   dockerRepository: airbyte/source-postgres
   documentationUrl: https://docs.airbyte.com/integrations/sources/postgres
   githubIssueLabel: source-postgres
diff --git a/docs/integrations/sources/postgres.md b/docs/integrations/sources/postgres.md
index 6ac689d6cc57..68b30aceea0f 100644
--- a/docs/integrations/sources/postgres.md
+++ b/docs/integrations/sources/postgres.md
@@ -292,6 +292,7 @@ According to Postgres [documentation](https://www.postgresql.org/docs/14/datatyp
 
 | Version | Date       | Pull Request                                             | Subject |
 |---------|------------|----------------------------------------------------------|----------------------------------------------------------------------------------------------|
+| 3.3.16  | 2024-03-11 | [35904](https://github.com/airbytehq/airbyte/pull/35904) | Adopt Java CDK 0.23.19 - debezium retries. |
 | 3.3.15  | 2024-02-29 | [34724](https://github.com/airbytehq/airbyte/pull/34724) | Add record count in state message. |
 | 3.3.14  | 2024-03-06 | [35842](https://github.com/airbytehq/airbyte/pull/35842) | Add logging to understand cases with a large number of records with the same LSN. |
 | 3.3.12  | 2024-02-22 | [35569](https://github.com/airbytehq/airbyte/pull/35569) | Fix logging bug.
| From b1f3b8a36f43a9eb1963cffcfff67dbe6cc60fe4 Mon Sep 17 00:00:00 2001 From: Brian Lai <51336873+brianjlai@users.noreply.github.com> Date: Mon, 11 Mar 2024 19:09:08 -0400 Subject: [PATCH 162/172] [airbyte-cdk] entrypoint wrapper should use per-stream state not legacy format (#35976) --- .../python/airbyte_cdk/test/entrypoint_wrapper.py | 2 +- .../unit_tests/test/test_entrypoint_wrapper.py | 15 +++++++++------ 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/airbyte-cdk/python/airbyte_cdk/test/entrypoint_wrapper.py b/airbyte-cdk/python/airbyte_cdk/test/entrypoint_wrapper.py index 612b2742ea1e..767a13a75ed2 100644 --- a/airbyte-cdk/python/airbyte_cdk/test/entrypoint_wrapper.py +++ b/airbyte-cdk/python/airbyte_cdk/test/entrypoint_wrapper.py @@ -74,7 +74,7 @@ def most_recent_state(self) -> Any: state_messages = self._get_message_by_types([Type.STATE]) if not state_messages: raise ValueError("Can't provide most recent state as there are no state messages") - return state_messages[-1].state.data + return state_messages[-1].state.stream @property def logs(self) -> List[AirbyteMessage]: diff --git a/airbyte-cdk/python/unit_tests/test/test_entrypoint_wrapper.py b/airbyte-cdk/python/unit_tests/test/test_entrypoint_wrapper.py index d0564cdf93f6..35a8b300fb12 100644 --- a/airbyte-cdk/python/unit_tests/test/test_entrypoint_wrapper.py +++ b/airbyte-cdk/python/unit_tests/test/test_entrypoint_wrapper.py @@ -3,7 +3,7 @@ import json import logging import os -from typing import Any, Iterator, List +from typing import Any, Iterator, List, Mapping from unittest import TestCase from unittest.mock import Mock, patch @@ -16,7 +16,9 @@ AirbyteLogMessage, AirbyteMessage, AirbyteRecordMessage, + AirbyteStateBlob, AirbyteStateMessage, + AirbyteStreamState, AirbyteStreamStatus, AirbyteStreamStatusTraceMessage, AirbyteTraceMessage, @@ -28,8 +30,8 @@ ) -def _a_state_message(state: Any) -> AirbyteMessage: - return AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(data=state)) +def _a_state_message(stream_name: str, stream_state: Mapping[str, Any]) -> AirbyteMessage: + return AirbyteMessage(type=Type.STATE, state=AirbyteStateMessage(stream=AirbyteStreamState(stream_descriptor=StreamDescriptor(name=stream_name), stream_state=AirbyteStateBlob(**stream_state)))) def _a_status_message(stream_name: str, status: AirbyteStreamStatus) -> AirbyteMessage: @@ -49,7 +51,7 @@ def _a_status_message(stream_name: str, status: AirbyteStreamStatus) -> AirbyteM _A_RECORD = AirbyteMessage( type=Type.RECORD, record=AirbyteRecordMessage(stream="stream", data={"record key": "record value"}, emitted_at=0) ) -_A_STATE_MESSAGE = _a_state_message({"state key": "state value for _A_STATE_MESSAGE"}) +_A_STATE_MESSAGE = _a_state_message("stream_name", {"state key": "state value for _A_STATE_MESSAGE"}) _A_LOG = AirbyteMessage(type=Type.LOG, log=AirbyteLogMessage(level=Level.INFO, message="This is an Airbyte log message")) _AN_ERROR_MESSAGE = AirbyteMessage( type=Type.TRACE, @@ -176,8 +178,9 @@ def test_given_state_message_and_records_when_read_then_output_has_records_and_s @patch("airbyte_cdk.test.entrypoint_wrapper.AirbyteEntrypoint") def test_given_many_state_messages_and_records_when_read_then_output_has_records_and_state_message(self, entrypoint): - last_emitted_state = {"last state key": "last state value"} - entrypoint.return_value.run.return_value = _to_entrypoint_output([_A_STATE_MESSAGE, _a_state_message(last_emitted_state)]) + state_value = {"state_key": "last state value"} + last_emitted_state = 
AirbyteStreamState(stream_descriptor=StreamDescriptor(name="stream_name"), stream_state=AirbyteStateBlob(**state_value)) + entrypoint.return_value.run.return_value = _to_entrypoint_output([_A_STATE_MESSAGE, _a_state_message("stream_name", state_value)]) output = read(self._a_source, _A_CONFIG, _A_CATALOG, _A_STATE) From c073e647f96b0df2573e7addae6f78681bb2d558 Mon Sep 17 00:00:00 2001 From: brianjlai Date: Mon, 11 Mar 2024 23:23:42 +0000 Subject: [PATCH 163/172] =?UTF-8?q?=F0=9F=A4=96=20Bump=20patch=20version?= =?UTF-8?q?=20of=20Python=20CDK?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- airbyte-cdk/python/.bumpversion.cfg | 2 +- airbyte-cdk/python/CHANGELOG.md | 3 +++ airbyte-cdk/python/Dockerfile | 4 ++-- airbyte-cdk/python/setup.py | 2 +- 4 files changed, 7 insertions(+), 4 deletions(-) diff --git a/airbyte-cdk/python/.bumpversion.cfg b/airbyte-cdk/python/.bumpversion.cfg index 7c7b62120931..365441db2f7d 100644 --- a/airbyte-cdk/python/.bumpversion.cfg +++ b/airbyte-cdk/python/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.69.0 +current_version = 0.69.1 commit = False [bumpversion:file:setup.py] diff --git a/airbyte-cdk/python/CHANGELOG.md b/airbyte-cdk/python/CHANGELOG.md index 057a14e08e3b..6a9b7f6979c1 100644 --- a/airbyte-cdk/python/CHANGELOG.md +++ b/airbyte-cdk/python/CHANGELOG.md @@ -1,5 +1,8 @@ # Changelog +## 0.69.1 +Update mock server test entrypoint wrapper to use per-stream state + ## 0.69.0 Include recordCount in stream state messages and final state message for full refresh syncs diff --git a/airbyte-cdk/python/Dockerfile b/airbyte-cdk/python/Dockerfile index dc0a81963486..c151e421317c 100644 --- a/airbyte-cdk/python/Dockerfile +++ b/airbyte-cdk/python/Dockerfile @@ -10,7 +10,7 @@ RUN apk --no-cache upgrade \ && apk --no-cache add tzdata build-base # install airbyte-cdk -RUN pip install --prefix=/install airbyte-cdk==0.69.0 +RUN pip install --prefix=/install airbyte-cdk==0.69.1 # build a clean environment FROM base @@ -32,5 +32,5 @@ ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] # needs to be the same as CDK -LABEL io.airbyte.version=0.69.0 +LABEL io.airbyte.version=0.69.1 LABEL io.airbyte.name=airbyte/source-declarative-manifest diff --git a/airbyte-cdk/python/setup.py b/airbyte-cdk/python/setup.py index 90400d67ef96..89cf0bd2cb85 100644 --- a/airbyte-cdk/python/setup.py +++ b/airbyte-cdk/python/setup.py @@ -36,7 +36,7 @@ name="airbyte-cdk", # The version of the airbyte-cdk package is used at runtime to validate manifests. That validation must be # updated if our semver format changes such as using release candidate versions. 
- version="0.69.0", + version="0.69.1", description="A framework for writing Airbyte Connectors.", long_description=README, long_description_content_type="text/markdown", From a8b5296760d29815dae7c0efe9b0def35b434415 Mon Sep 17 00:00:00 2001 From: Catherine Noll Date: Mon, 11 Mar 2024 20:27:03 -0400 Subject: [PATCH 164/172] Source Microsoft Onedrive: pin transformers transitive dependency (#35956) --- .../source-microsoft-onedrive/metadata.yaml | 2 +- .../source-microsoft-onedrive/poetry.lock | 429 +++++++++++++++++- .../source-microsoft-onedrive/pyproject.toml | 4 +- .../sources/microsoft-onedrive.md | 1 + 4 files changed, 425 insertions(+), 11 deletions(-) diff --git a/airbyte-integrations/connectors/source-microsoft-onedrive/metadata.yaml b/airbyte-integrations/connectors/source-microsoft-onedrive/metadata.yaml index dae4db06bebd..ec2d958e7e74 100644 --- a/airbyte-integrations/connectors/source-microsoft-onedrive/metadata.yaml +++ b/airbyte-integrations/connectors/source-microsoft-onedrive/metadata.yaml @@ -20,7 +20,7 @@ data: connectorSubtype: file connectorType: source definitionId: 01d1c685-fd4a-4837-8f4c-93fe5a0d2188 - dockerImageTag: 0.1.8 + dockerImageTag: 0.1.9 dockerRepository: airbyte/source-microsoft-onedrive githubIssueLabel: source-microsoft-onedrive icon: microsoft-onedrive.svg diff --git a/airbyte-integrations/connectors/source-microsoft-onedrive/poetry.lock b/airbyte-integrations/connectors/source-microsoft-onedrive/poetry.lock index f9e32cc272b6..9c0383c5574a 100644 --- a/airbyte-integrations/connectors/source-microsoft-onedrive/poetry.lock +++ b/airbyte-integrations/connectors/source-microsoft-onedrive/poetry.lock @@ -566,6 +566,22 @@ lz4 = ["lz4"] snappy = ["python-snappy"] zstandard = ["zstandard"] +[[package]] +name = "filelock" +version = "3.13.1" +description = "A platform independent file lock." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, + {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] + [[package]] name = "filetype" version = "1.2.0" @@ -577,6 +593,41 @@ files = [ {file = "filetype-1.2.0.tar.gz", hash = "sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb"}, ] +[[package]] +name = "fsspec" +version = "2024.2.0" +description = "File-system specification" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fsspec-2024.2.0-py3-none-any.whl", hash = "sha256:817f969556fa5916bc682e02ca2045f96ff7f586d45110fcb76022063ad2c7d8"}, + {file = "fsspec-2024.2.0.tar.gz", hash = "sha256:b6ad1a679f760dda52b1168c859d01b7b80648ea6f7f7c7f5a8a91dc3f3ecb84"}, +] + +[package.extras] +abfs = ["adlfs"] +adl = ["adlfs"] +arrow = ["pyarrow (>=1)"] +dask = ["dask", "distributed"] +devel = ["pytest", "pytest-cov"] +dropbox = ["dropbox", "dropboxdrivefs", "requests"] +full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] +fuse = ["fusepy"] +gcs = ["gcsfs"] +git = ["pygit2"] +github = ["requests"] +gs = ["gcsfs"] +gui = ["panel"] +hdfs = ["pyarrow (>=1)"] +http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"] +libarchive = ["libarchive-c"] +oci = ["ocifs"] +s3 = ["s3fs"] +sftp = ["paramiko"] +smb = ["smbprotocol"] +ssh = ["paramiko"] +tqdm = ["tqdm"] + [[package]] name = "genson" version = "1.2.2" @@ -587,6 +638,39 @@ files = [ {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, ] +[[package]] +name = "huggingface-hub" +version = "0.21.4" +description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "huggingface_hub-0.21.4-py3-none-any.whl", hash = "sha256:df37c2c37fc6c82163cdd8a67ede261687d80d1e262526d6c0ce73b6b3630a7b"}, + {file = "huggingface_hub-0.21.4.tar.gz", hash = "sha256:e1f4968c93726565a80edf6dc309763c7b546d0cfe79aa221206034d50155531"}, +] + +[package.dependencies] +filelock = "*" +fsspec = ">=2023.5.0" +packaging = ">=20.9" +pyyaml = ">=5.1" +requests = "*" +tqdm = ">=4.42.1" +typing-extensions = ">=3.7.4.3" + +[package.extras] +all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +cli = ["InquirerPy (==0.3.4)"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", 
"pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] +hf-transfer = ["hf-transfer (>=0.1.4)"] +inference = ["aiohttp", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)"] +quality = ["mypy (==1.5.1)", "ruff (>=0.1.3)"] +tensorflow = ["graphviz", "pydot", "tensorflow"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] +torch = ["safetensors", "torch"] +typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] + [[package]] name = "idna" version = "3.6" @@ -600,22 +684,22 @@ files = [ [[package]] name = "importlib-metadata" -version = "7.0.1" +version = "7.0.2" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, - {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, + {file = "importlib_metadata-7.0.2-py3-none-any.whl", hash = "sha256:f4bc4c0c070c490abf4ce96d715f68e95923320370efb66143df00199bb6c100"}, + {file = "importlib_metadata-7.0.2.tar.gz", hash = "sha256:198f568f3230878cb1b44fbd7975f87906c22336dba2e4a7f05278c281fbd792"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "iniconfig" @@ -1037,13 +1121,13 @@ ntlmprovider = ["requests-ntlm"] [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -1897,6 +1981,138 @@ six = "*" fixture = ["fixtures"] test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] +[[package]] +name = "safetensors" 
+version = "0.4.2" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "safetensors-0.4.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:69d8bb8384dc2cb5b72c36c4d6980771b293d1a1377b378763f5e37b6bb8d133"}, + {file = "safetensors-0.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3d420e19fcef96d0067f4de4699682b4bbd85fc8fea0bd45fcd961fdf3e8c82c"}, + {file = "safetensors-0.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ca54742122fa3c4821754adb67318e1cd25c3a22bbf0c5520d5176e77a099ac"}, + {file = "safetensors-0.4.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b47aa643afdfd66cf7ce4c184092ae734e15d10aba2c2948f24270211801c3c"}, + {file = "safetensors-0.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d88a16bbc330f27e7f2d4caaf6fb061ad0b8a756ecc4033260b0378e128ce8a2"}, + {file = "safetensors-0.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9223b8ac21085db614a510eb3445e7083cae915a9202357555fa939695d4f57"}, + {file = "safetensors-0.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce6cb86133dc8930a7ab5e7438545a7f205f7a1cdd5aaf108c1d0da6bdcfbc2b"}, + {file = "safetensors-0.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b8a628e0ae2bbc334b62952c384aa5f41621d01850f8d67b04a96b9c39dd7326"}, + {file = "safetensors-0.4.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:88d6beb7f811a081e0e5f1d9669fdac816c45340c04b1eaf7ebfda0ce93ea403"}, + {file = "safetensors-0.4.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b57fc5b1b54cb12d8690a58a4cf4b7144730d4bde9d98aa0e1dab6295a1cd579"}, + {file = "safetensors-0.4.2-cp310-none-win32.whl", hash = "sha256:9d87a1c98803c16cf113b9ba03f07b2dce5e8eabfd1811a7f7323fcaa2a1bf47"}, + {file = "safetensors-0.4.2-cp310-none-win_amd64.whl", hash = "sha256:18930ec1d1ecb526d3d9835abc2489b8f1530877518f0c541e77ef0b7abcbd99"}, + {file = "safetensors-0.4.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:c5dd2ed788730ed56b415d1a11c62026b8cc8c573f55a2092afb3ab383e94fff"}, + {file = "safetensors-0.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cc41791b33efb9c83a59b731619f3d15f543dfe71f3a793cb8fbf9bd5d0d5d71"}, + {file = "safetensors-0.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c888bf71d5ca12a720f1ed87d407c4918afa022fb247a6546d8fac15b1f112b"}, + {file = "safetensors-0.4.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e6b2feb4b47226a16a792e6fac3f49442714884a3d4c1008569d5068a3941be9"}, + {file = "safetensors-0.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f41cc0ee4b838ae8f4d8364a1b162067693d11a3893f0863be8c228d40e4d0ee"}, + {file = "safetensors-0.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:51b7228e46c0a483c40ba4b9470dea00fb1ff8685026bb4766799000f6328ac2"}, + {file = "safetensors-0.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02697f8f2be8ca3c37a4958702dbdb1864447ef765e18b5328a1617022dcf164"}, + {file = "safetensors-0.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:27fd8f65cf7c80e4280cae1ee6bcd85c483882f6580821abe71ee1a0d3dcfca7"}, + {file = "safetensors-0.4.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c487b5f113b0924c9534a07dc034830fb4ef05ce9bb6d78cfe016a7dedfe281f"}, + {file = "safetensors-0.4.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash 
= "sha256:da7f6483f3fe67ff39b3a55552552c67930ea10a36e9f2539d36fc205273d767"}, + {file = "safetensors-0.4.2-cp311-none-win32.whl", hash = "sha256:52a7012f6cb9cb4a132760b6308daede18a9f5f8952ce08adc7c67a7d865c2d8"}, + {file = "safetensors-0.4.2-cp311-none-win_amd64.whl", hash = "sha256:4d1361a097ac430b310ce9eed8ed4746edee33ddafdfbb965debc8966fc34dc2"}, + {file = "safetensors-0.4.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:77af8aa0edcc2863760fd6febbfdb82e88fd75d0e60c1ce4ba57208ba5e4a89b"}, + {file = "safetensors-0.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846666c1c5a8c8888d2dfda8d3921cb9cb8e2c5f78365be756c11021e75a0a2a"}, + {file = "safetensors-0.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f4bfc7ea19b446bfad41510d4b4c76101698c00caaa8a332c8edd8090a412ef"}, + {file = "safetensors-0.4.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:233436fd30f27ffeb3c3780d0b84f496518868445c7a8db003639a649cc98453"}, + {file = "safetensors-0.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7a09237a795d11cd11f9dae505d170a29b5616151db1e10c14f892b11caadc7d"}, + {file = "safetensors-0.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de01c9a3a3b7b69627d624ff69d9f11d28ce9908eea2fb6245adafa4b1d43df6"}, + {file = "safetensors-0.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c1f25c5069ee42a5bcffdc66c300a407941edd73f3239e9fdefd26216407391"}, + {file = "safetensors-0.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7a73b3649456d09ca8506140d44484b63154a7378434cc1e8719f8056550b224"}, + {file = "safetensors-0.4.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e1625a8d07d046e968bd5c4961810aba1225984e4fb9243626f9d04a06ed3fee"}, + {file = "safetensors-0.4.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f74c86b25615cb24ad4cff765a2eefc09d71bf0fed97588cf585aad9c38fbb4"}, + {file = "safetensors-0.4.2-cp312-none-win32.whl", hash = "sha256:8523b9c5777d771bcde5c2389c03f1cdf7ebe8797432a1bd5e345efe25c55987"}, + {file = "safetensors-0.4.2-cp312-none-win_amd64.whl", hash = "sha256:dcff0243e1737a21f83d664c63fed89d1f532c23fc6830d0427279fabd789ccb"}, + {file = "safetensors-0.4.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:96ad3d7d472612e26cbe413922b4fb13933310f0511d346ea5cc9a1e856e52eb"}, + {file = "safetensors-0.4.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:88250922401b5ae4e37de929178caf46be47ed16c817b2237b81679bec07c120"}, + {file = "safetensors-0.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d40443554142fc0ab30652d5cc8554c4b7a613513bde00373e18afd5de8cbe4b"}, + {file = "safetensors-0.4.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:27f53f70106224d32d874aacecbeb4a6e4c5b16a1d2006d0e876d97229086d71"}, + {file = "safetensors-0.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cc068afe23734dfb26ce19db0a7877499ddf73b1d55ceb762417e8da4a1b05fb"}, + {file = "safetensors-0.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9be1918eb8d43a11a6f8806759fccfa0eeb0542b12924caba66af8a7800ad01a"}, + {file = "safetensors-0.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41911087d20a7bbd78cb4ad4f98aab0c431533107584df6635d8b54b99945573"}, + {file = "safetensors-0.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:50771c662aab909f31e94d048e76861fd027d66076ea773eef2e66c717766e24"}, + {file = "safetensors-0.4.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:13f2e57be007b7ea9329133d2399e6bdfcf1910f655440a4da17df3a45afcd30"}, + {file = "safetensors-0.4.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c772147e6395bc829842e0a98e1b30c67fe25d816299c28196488511d5a5e951"}, + {file = "safetensors-0.4.2-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:36239a0060b537a3e8c473df78cffee14c3ec4f51d5f1a853af99371a2fb2a35"}, + {file = "safetensors-0.4.2-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:d0cbb7664fad2c307f95195f951b7059e95dc23e0e1822e5978c8b500098543c"}, + {file = "safetensors-0.4.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b3e55adb6bd9dc1c2a341e72f48f075953fa35d173dd8e29a95b3b02d0d1462"}, + {file = "safetensors-0.4.2-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42f743b3cca863fba53ca57a193f510e5ec359b97f38c282437716b6768e4a25"}, + {file = "safetensors-0.4.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04e6af4a6dbeb06c4e6e7d46cf9c716cbc4cc5ef62584fd8a7c0fe558562df45"}, + {file = "safetensors-0.4.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a492ba21b5c8f14ee5ec9b20f42ba969e53ca1f909a4d04aad736b66a341dcc2"}, + {file = "safetensors-0.4.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b25b8233a1a85dc67e39838951cfb01595d792f3b7b644add63edb652992e030"}, + {file = "safetensors-0.4.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fd27e063fbdafe776f7b1714da59110e88f270e86db00788a8fd65f4eacfeba7"}, + {file = "safetensors-0.4.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1b6fa399f251bbeb52029bf5a0ac2878d7705dd3612a2f8895b48e9c11f0367d"}, + {file = "safetensors-0.4.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:de642d46b459e4afd5c2020b26c0d6d869a171ea00411897d5776c127cac74f0"}, + {file = "safetensors-0.4.2-cp37-none-win32.whl", hash = "sha256:77b72d17754c93bb68f3598182f14d78776e0b9b31682ca5bb2c7c5bd9a75267"}, + {file = "safetensors-0.4.2-cp37-none-win_amd64.whl", hash = "sha256:d36ee3244d461cd655aeef493792c3bccf4875282f8407fd9af99e9a41cf2530"}, + {file = "safetensors-0.4.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:16b6b3884f7876c6b3b23a742428223a7170a5a9dac819d8c12a1569422c4b5a"}, + {file = "safetensors-0.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ee25d311493fbbe0be9d395faee46e9d79e8948f461e388ff39e59875ed9a350"}, + {file = "safetensors-0.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eed8097968585cd752a1171f86fce9aa1d89a29033e5cd8bec5a502e29f6b7af"}, + {file = "safetensors-0.4.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:880e6865cf72cb67f9ab8d04a3c4b49dd95ae92fb1583929ce65aed94e1f685f"}, + {file = "safetensors-0.4.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91290f83daf80ce6d1a7f629b244443c200060a80f908b29d879021409e5ea94"}, + {file = "safetensors-0.4.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3517d568486ab3508a7acc360b82d7a4a3e26b86efdf210a9ecd9d233c40708a"}, + {file = "safetensors-0.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1f43a77eb38540f782999e5dc5645164fe9027d3f0194f6c9a5126168017efa"}, + {file = "safetensors-0.4.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:b684d9818aa5d63fddc65f7d0151968037d255d91adf74eba82125b41c680aaa"}, + {file = "safetensors-0.4.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ab1f5d84185f9fefaf21413efb764e4908057b8a9a0b987ede890c353490fd70"}, + {file = "safetensors-0.4.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2bd979642e6c3a517ef4b84ff36c2fee4015664fea05a61154fc565978347553"}, + {file = "safetensors-0.4.2-cp38-none-win32.whl", hash = "sha256:11be6e7afed29e5a5628f0aa6214e34bc194da73f558dc69fc7d56e07037422a"}, + {file = "safetensors-0.4.2-cp38-none-win_amd64.whl", hash = "sha256:2f7a6e5d29bd2cc340cffaa391fa437b1be9d21a2bd8b8724d2875d13a6ef2a9"}, + {file = "safetensors-0.4.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a5a921b4fe6925f9942adff3ebae8c16e0487908c54586a5a42f35b59fd69794"}, + {file = "safetensors-0.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b691727228c28f2d82d8a92b2bc26e7a1f129ee40b2f2a3185b5974e038ed47c"}, + {file = "safetensors-0.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91ca1056decc4e981248786e87b2a202d4841ee5f99d433f1adf3d44d4bcfa0e"}, + {file = "safetensors-0.4.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:55969fd2e6fdb38dc221b0ab380668c21b0efa12a7562db9924759faa3c51757"}, + {file = "safetensors-0.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6ae429bfaecc10ab5fe78c93009b3d1656c1581da560041e700eadb497dbe7a4"}, + {file = "safetensors-0.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff88f194fe4ac50b463a4a6f0c03af9ad72eb5d24ec6d6730af59522e37fedb"}, + {file = "safetensors-0.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a80cb48d0a447f8dd18e61813efa7d3f8f8d52edf0f05806abc0c59b83431f57"}, + {file = "safetensors-0.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b286fb7adfee70a4189898ac2342b8a67d5f493e6b21b0af89ca8eac1b967cbf"}, + {file = "safetensors-0.4.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ceeff9ddbab4f78738489eb6682867ae946178776f33699737b2129b5394dc1"}, + {file = "safetensors-0.4.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a26fae748a7488cb3aac381eddfa818c42052c87b5e689fb4c6e82ed58cec209"}, + {file = "safetensors-0.4.2-cp39-none-win32.whl", hash = "sha256:039a42ab33c9d68b39706fd38f1922ace26866eff246bf20271edb619f5f848b"}, + {file = "safetensors-0.4.2-cp39-none-win_amd64.whl", hash = "sha256:b3a3e1f5b85859e398773f064943b62a4059f225008a2a8ee6add1edcf77cacf"}, + {file = "safetensors-0.4.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:4e70d442ad17e8b153ef9095bf48ea64f15a66bf26dc2b6ca94660c154edbc24"}, + {file = "safetensors-0.4.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b90f1d9809caf4ff395951b4703295a68d12907f6945bbc3129e934ff8ae46f6"}, + {file = "safetensors-0.4.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c7ac9ad3728838006598e296b3ae9f27d80b489effd4685b92d97b3fc4c98f6"}, + {file = "safetensors-0.4.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5730d77e6ff7f4c7039e20913661ad0ea2f86c09e71c039e73dfdd1f394f08"}, + {file = "safetensors-0.4.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:44feb8cb156d6803dcd19fc6b81b27235f29b877660605a6ac35e1da7d64f0e4"}, + {file = "safetensors-0.4.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:523a241c33e7c827ab9a3a23760d75c7d062f43dfe55b6b019409f89b0fb52d1"}, + {file = 
"safetensors-0.4.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fb18300e8eb74291225214f26c9a8ae2110fd61a6c9b5a2ff4c4e0eb1bb9a998"}, + {file = "safetensors-0.4.2-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:fe5437ff9fb116e44f2ab558981249ae63f978392b4576e62fcfe167d353edbc"}, + {file = "safetensors-0.4.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9304a0934ced5a5d272f39de36291dc141dfc152d277f03fb4d65f2fb2ffa7c"}, + {file = "safetensors-0.4.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:160ba1b1e11cf874602c233ab80a14f588571d09556cbc3586900121d622b5ed"}, + {file = "safetensors-0.4.2-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04fcd6fcf7d9c13c7e5dc7e08de5e492ee4daa8f4ad74b4d8299d3eb0224292f"}, + {file = "safetensors-0.4.2-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:906d14c4a677d35834fb0f3a5455ef8305e1bba10a5e0f2e0f357b3d1ad989f2"}, + {file = "safetensors-0.4.2-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:df3fcdec0cd543084610d1f09c65cdb10fb3079f79bceddc092b0d187c6a265b"}, + {file = "safetensors-0.4.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5ca76f13fb1cef242ea3ad2cb37388e7d005994f42af8b44bee56ba48b2d45ce"}, + {file = "safetensors-0.4.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:278a1a3414c020785decdcd741c578725721274d2f9f787fcc930882e83b89cc"}, + {file = "safetensors-0.4.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05b5a461cc68ecd42d9d546e5e1268a39d8ede7934a68d1ce17c3c659cb829d6"}, + {file = "safetensors-0.4.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2341411412a41671d25e26bed59ec121e46bf4fadb8132895e610411c4b9681"}, + {file = "safetensors-0.4.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3497ac3895acf17c5f98197f1fa4769f09c5e7ede07fcb102f1c201e663e052c"}, + {file = "safetensors-0.4.2-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:01b5e71d3754d2201294f1eb7a6d59cce3a5702ff96d83d226571b2ca2183837"}, + {file = "safetensors-0.4.2-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3627dbd1ea488dd8046a0491de5087f3c0d641e7acc80c0189a33c69398f1cd1"}, + {file = "safetensors-0.4.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9d56f0ef53afad26ec54ceede78a43e9a23a076dadbbda7b44d304c591abf4c1"}, + {file = "safetensors-0.4.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b259ca73d42daf658a1bda463f1f83885ae4d93a60869be80d7f7dfcc9d8bbb5"}, + {file = "safetensors-0.4.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ebc3cd401e4eb54e7c0a70346be565e81942d9a41fafd5f4bf7ab3a55d10378"}, + {file = "safetensors-0.4.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5bc384a0309b706aa0425c93abb0390508a61bf029ce99c7d9df4220f25871a5"}, + {file = "safetensors-0.4.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:af2d8f7235d8a08fbccfb8394387890e7fa38942b349a94e6eff13c52ac98087"}, + {file = "safetensors-0.4.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0911315bbcc5289087d063c2c2c7ccd711ea97a7e557a7bce005ac2cf80146aa"}, + {file = "safetensors-0.4.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1efe31673be91832d73439a2af426743e1395fc9ef7b081914e9e1d567bd7b5f"}, + {file = "safetensors-0.4.2.tar.gz", hash = 
"sha256:acc85dcb09ec5e8aa787f588d7ad4d55c103f31e4ff060e17d92cc0e8b8cac73"}, +] + +[package.extras] +all = ["safetensors[jax]", "safetensors[numpy]", "safetensors[paddlepaddle]", "safetensors[pinned-tf]", "safetensors[quality]", "safetensors[testing]", "safetensors[torch]"] +dev = ["safetensors[all]"] +jax = ["flax (>=0.6.3)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "safetensors[numpy]"] +mlx = ["mlx (>=0.0.9)"] +numpy = ["numpy (>=1.21.6)"] +paddlepaddle = ["paddlepaddle (>=2.4.1)", "safetensors[numpy]"] +pinned-tf = ["safetensors[numpy]", "tensorflow (==2.11.0)"] +quality = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "isort (>=5.5.4)"] +tensorflow = ["safetensors[numpy]", "tensorflow (>=2.11.0)"] +testing = ["h5py (>=3.7.0)", "huggingface_hub (>=0.12.1)", "hypothesis (>=6.70.2)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "safetensors[numpy]", "setuptools_rust (>=1.5.2)"] +torch = ["safetensors[numpy]", "torch (>=1.10)"] + [[package]] name = "setuptools" version = "69.1.1" @@ -1970,6 +2186,133 @@ files = [ [package.extras] widechars = ["wcwidth"] +[[package]] +name = "tokenizers" +version = "0.15.2" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tokenizers-0.15.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:52f6130c9cbf70544287575a985bf44ae1bda2da7e8c24e97716080593638012"}, + {file = "tokenizers-0.15.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:054c1cc9c6d68f7ffa4e810b3d5131e0ba511b6e4be34157aa08ee54c2f8d9ee"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9b9b070fdad06e347563b88c278995735292ded1132f8657084989a4c84a6d5"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea621a7eef4b70e1f7a4e84dd989ae3f0eeb50fc8690254eacc08acb623e82f1"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cf7fd9a5141634fa3aa8d6b7be362e6ae1b4cda60da81388fa533e0b552c98fd"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44f2a832cd0825295f7179eaf173381dc45230f9227ec4b44378322d900447c9"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8b9ec69247a23747669ec4b0ca10f8e3dfb3545d550258129bd62291aabe8605"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b6a4c78da863ff26dbd5ad9a8ecc33d8a8d97b535172601cf00aee9d7ce9ce"}, + {file = "tokenizers-0.15.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5ab2a4d21dcf76af60e05af8063138849eb1d6553a0d059f6534357bce8ba364"}, + {file = "tokenizers-0.15.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a47acfac7e511f6bbfcf2d3fb8c26979c780a91e06fb5b9a43831b2c0153d024"}, + {file = "tokenizers-0.15.2-cp310-none-win32.whl", hash = "sha256:064ff87bb6acdbd693666de9a4b692add41308a2c0ec0770d6385737117215f2"}, + {file = "tokenizers-0.15.2-cp310-none-win_amd64.whl", hash = "sha256:3b919afe4df7eb6ac7cafd2bd14fb507d3f408db7a68c43117f579c984a73843"}, + {file = "tokenizers-0.15.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:89cd1cb93e4b12ff39bb2d626ad77e35209de9309a71e4d3d4672667b4b256e7"}, + {file = "tokenizers-0.15.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cfed5c64e5be23d7ee0f0e98081a25c2a46b0b77ce99a4f0605b1ec43dd481fa"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:a907d76dcfda37023ba203ab4ceeb21bc5683436ebefbd895a0841fd52f6f6f2"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20ea60479de6fc7b8ae756b4b097572372d7e4032e2521c1bbf3d90c90a99ff0"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:48e2b9335be2bc0171df9281385c2ed06a15f5cf121c44094338306ab7b33f2c"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:112a1dd436d2cc06e6ffdc0b06d55ac019a35a63afd26475205cb4b1bf0bfbff"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4620cca5c2817177ee8706f860364cc3a8845bc1e291aaf661fb899e5d1c45b0"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccd73a82751c523b3fc31ff8194702e4af4db21dc20e55b30ecc2079c5d43cb7"}, + {file = "tokenizers-0.15.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:107089f135b4ae7817affe6264f8c7a5c5b4fd9a90f9439ed495f54fcea56fb4"}, + {file = "tokenizers-0.15.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0ff110ecc57b7aa4a594396525a3451ad70988e517237fe91c540997c4e50e29"}, + {file = "tokenizers-0.15.2-cp311-none-win32.whl", hash = "sha256:6d76f00f5c32da36c61f41c58346a4fa7f0a61be02f4301fd30ad59834977cc3"}, + {file = "tokenizers-0.15.2-cp311-none-win_amd64.whl", hash = "sha256:cc90102ed17271cf0a1262babe5939e0134b3890345d11a19c3145184b706055"}, + {file = "tokenizers-0.15.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f86593c18d2e6248e72fb91c77d413a815153b8ea4e31f7cd443bdf28e467670"}, + {file = "tokenizers-0.15.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0774bccc6608eca23eb9d620196687c8b2360624619623cf4ba9dc9bd53e8b51"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d0222c5b7c9b26c0b4822a82f6a7011de0a9d3060e1da176f66274b70f846b98"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3835738be1de66624fff2f4f6f6684775da4e9c00bde053be7564cbf3545cc66"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0143e7d9dcd811855c1ce1ab9bf5d96d29bf5e528fd6c7824d0465741e8c10fd"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db35825f6d54215f6b6009a7ff3eedee0848c99a6271c870d2826fbbedf31a38"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f5e64b0389a2be47091d8cc53c87859783b837ea1a06edd9d8e04004df55a5c"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e0480c452217edd35eca56fafe2029fb4d368b7c0475f8dfa3c5c9c400a7456"}, + {file = "tokenizers-0.15.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a33ab881c8fe70474980577e033d0bc9a27b7ab8272896e500708b212995d834"}, + {file = "tokenizers-0.15.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a308a607ca9de2c64c1b9ba79ec9a403969715a1b8ba5f998a676826f1a7039d"}, + {file = "tokenizers-0.15.2-cp312-none-win32.whl", hash = "sha256:b8fcfa81bcb9447df582c5bc96a031e6df4da2a774b8080d4f02c0c16b42be0b"}, + {file = "tokenizers-0.15.2-cp312-none-win_amd64.whl", hash = "sha256:38d7ab43c6825abfc0b661d95f39c7f8af2449364f01d331f3b51c94dcff7221"}, + {file = "tokenizers-0.15.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = 
"sha256:38bfb0204ff3246ca4d5e726e8cc8403bfc931090151e6eede54d0e0cf162ef0"}, + {file = "tokenizers-0.15.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c861d35e8286a53e06e9e28d030b5a05bcbf5ac9d7229e561e53c352a85b1fc"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:936bf3842db5b2048eaa53dade907b1160f318e7c90c74bfab86f1e47720bdd6"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:620beacc3373277700d0e27718aa8b25f7b383eb8001fba94ee00aeea1459d89"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2735ecbbf37e52db4ea970e539fd2d450d213517b77745114f92867f3fc246eb"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:473c83c5e2359bb81b0b6fde870b41b2764fcdd36d997485e07e72cc3a62264a"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:968fa1fb3c27398b28a4eca1cbd1e19355c4d3a6007f7398d48826bbe3a0f728"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:865c60ae6eaebdde7da66191ee9b7db52e542ed8ee9d2c653b6d190a9351b980"}, + {file = "tokenizers-0.15.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7c0d8b52664ab2d4a8d6686eb5effc68b78608a9008f086a122a7b2996befbab"}, + {file = "tokenizers-0.15.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f33dfbdec3784093a9aebb3680d1f91336c56d86cc70ddf88708251da1fe9064"}, + {file = "tokenizers-0.15.2-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d44ba80988ff9424e33e0a49445072ac7029d8c0e1601ad25a0ca5f41ed0c1d6"}, + {file = "tokenizers-0.15.2-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:dce74266919b892f82b1b86025a613956ea0ea62a4843d4c4237be2c5498ed3a"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0ef06b9707baeb98b316577acb04f4852239d856b93e9ec3a299622f6084e4be"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c73e2e74bbb07910da0d37c326869f34113137b23eadad3fc00856e6b3d9930c"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4eeb12daf02a59e29f578a865f55d87cd103ce62bd8a3a5874f8fdeaa82e336b"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ba9f6895af58487ca4f54e8a664a322f16c26bbb442effd01087eba391a719e"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ccec77aa7150e38eec6878a493bf8c263ff1fa8a62404e16c6203c64c1f16a26"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3f40604f5042ff210ba82743dda2b6aa3e55aa12df4e9f2378ee01a17e2855e"}, + {file = "tokenizers-0.15.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5645938a42d78c4885086767c70923abad047163d809c16da75d6b290cb30bbe"}, + {file = "tokenizers-0.15.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:05a77cbfebe28a61ab5c3891f9939cc24798b63fa236d84e5f29f3a85a200c00"}, + {file = "tokenizers-0.15.2-cp37-none-win32.whl", hash = "sha256:361abdc068e8afe9c5b818769a48624687fb6aaed49636ee39bec4e95e1a215b"}, + {file = "tokenizers-0.15.2-cp37-none-win_amd64.whl", hash = "sha256:7ef789f83eb0f9baeb4d09a86cd639c0a5518528f9992f38b28e819df397eb06"}, + {file = "tokenizers-0.15.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = 
"sha256:4fe1f74a902bee74a3b25aff180fbfbf4f8b444ab37c4d496af7afd13a784ed2"}, + {file = "tokenizers-0.15.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c4b89038a684f40a6b15d6b09f49650ac64d951ad0f2a3ea9169687bbf2a8ba"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d05a1b06f986d41aed5f2de464c003004b2df8aaf66f2b7628254bcbfb72a438"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:508711a108684111ec8af89d3a9e9e08755247eda27d0ba5e3c50e9da1600f6d"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:daa348f02d15160cb35439098ac96e3a53bacf35885072611cd9e5be7d333daa"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:494fdbe5932d3416de2a85fc2470b797e6f3226c12845cadf054dd906afd0442"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2d60f5246f4da9373f75ff18d64c69cbf60c3bca597290cea01059c336d2470"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93268e788825f52de4c7bdcb6ebc1fcd4a5442c02e730faa9b6b08f23ead0e24"}, + {file = "tokenizers-0.15.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6fc7083ab404019fc9acafe78662c192673c1e696bd598d16dc005bd663a5cf9"}, + {file = "tokenizers-0.15.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:41e39b41e5531d6b2122a77532dbea60e171ef87a3820b5a3888daa847df4153"}, + {file = "tokenizers-0.15.2-cp38-none-win32.whl", hash = "sha256:06cd0487b1cbfabefb2cc52fbd6b1f8d4c37799bd6c6e1641281adaa6b2504a7"}, + {file = "tokenizers-0.15.2-cp38-none-win_amd64.whl", hash = "sha256:5179c271aa5de9c71712e31cb5a79e436ecd0d7532a408fa42a8dbfa4bc23fd9"}, + {file = "tokenizers-0.15.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:82f8652a74cc107052328b87ea8b34291c0f55b96d8fb261b3880216a9f9e48e"}, + {file = "tokenizers-0.15.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:02458bee6f5f3139f1ebbb6d042b283af712c0981f5bc50edf771d6b762d5e4f"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c9a09cd26cca2e1c349f91aa665309ddb48d71636370749414fbf67bc83c5343"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:158be8ea8554e5ed69acc1ce3fbb23a06060bd4bbb09029431ad6b9a466a7121"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ddba9a2b0c8c81633eca0bb2e1aa5b3a15362b1277f1ae64176d0f6eba78ab1"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ef5dd1d39797044642dbe53eb2bc56435308432e9c7907728da74c69ee2adca"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:454c203164e07a860dbeb3b1f4a733be52b0edbb4dd2e5bd75023ffa8b49403a"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cf6b7f1d4dc59af960e6ffdc4faffe6460bbfa8dce27a58bf75755ffdb2526d"}, + {file = "tokenizers-0.15.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2ef09bbc16519f6c25d0c7fc0c6a33a6f62923e263c9d7cca4e58b8c61572afb"}, + {file = "tokenizers-0.15.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c9a2ebdd2ad4ec7a68e7615086e633857c85e2f18025bd05d2a4399e6c5f7169"}, + {file = "tokenizers-0.15.2-cp39-none-win32.whl", hash = 
"sha256:918fbb0eab96fe08e72a8c2b5461e9cce95585d82a58688e7f01c2bd546c79d0"}, + {file = "tokenizers-0.15.2-cp39-none-win_amd64.whl", hash = "sha256:524e60da0135e106b254bd71f0659be9f89d83f006ea9093ce4d1fab498c6d0d"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6a9b648a58281c4672212fab04e60648fde574877d0139cd4b4f93fe28ca8944"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:7c7d18b733be6bbca8a55084027f7be428c947ddf871c500ee603e375013ffba"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:13ca3611de8d9ddfbc4dc39ef54ab1d2d4aaa114ac8727dfdc6a6ec4be017378"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:237d1bf3361cf2e6463e6c140628e6406766e8b27274f5fcc62c747ae3c6f094"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67a0fe1e49e60c664915e9fb6b0cb19bac082ab1f309188230e4b2920230edb3"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4e022fe65e99230b8fd89ebdfea138c24421f91c1a4f4781a8f5016fd5cdfb4d"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d857be2df69763362ac699f8b251a8cd3fac9d21893de129bc788f8baaef2693"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:708bb3e4283177236309e698da5fcd0879ce8fd37457d7c266d16b550bcbbd18"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:64c35e09e9899b72a76e762f9854e8750213f67567787d45f37ce06daf57ca78"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1257f4394be0d3b00de8c9e840ca5601d0a4a8438361ce9c2b05c7d25f6057b"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02272fe48280e0293a04245ca5d919b2c94a48b408b55e858feae9618138aeda"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dc3ad9ebc76eabe8b1d7c04d38be884b8f9d60c0cdc09b0aa4e3bcf746de0388"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:32e16bdeffa7c4f46bf2152172ca511808b952701d13e7c18833c0b73cb5c23f"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:fb16ba563d59003028b678d2361a27f7e4ae0ab29c7a80690efa20d829c81fdb"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:2277c36d2d6cdb7876c274547921a42425b6810d38354327dd65a8009acf870c"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1cf75d32e8d250781940d07f7eece253f2fe9ecdb1dc7ba6e3833fa17b82fcbc"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1b3b31884dc8e9b21508bb76da80ebf7308fdb947a17affce815665d5c4d028"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b10122d8d8e30afb43bb1fe21a3619f62c3e2574bff2699cf8af8b0b6c5dc4a3"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d88b96ff0fe8e91f6ef01ba50b0d71db5017fa4e3b1d99681cec89a85faf7bf7"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:37aaec5a52e959892870a7c47cef80c53797c0db9149d458460f4f31e2fb250e"}, + {file = 
"tokenizers-0.15.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e2ea752f2b0fe96eb6e2f3adbbf4d72aaa1272079b0dfa1145507bd6a5d537e6"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:4b19a808d8799fda23504a5cd31d2f58e6f52f140380082b352f877017d6342b"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:64c86e5e068ac8b19204419ed8ca90f9d25db20578f5881e337d203b314f4104"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de19c4dc503c612847edf833c82e9f73cd79926a384af9d801dcf93f110cea4e"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea09acd2fe3324174063d61ad620dec3bcf042b495515f27f638270a7d466e8b"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cf27fd43472e07b57cf420eee1e814549203d56de00b5af8659cb99885472f1f"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:7ca22bd897537a0080521445d91a58886c8c04084a6a19e6c78c586e0cfa92a5"}, + {file = "tokenizers-0.15.2.tar.gz", hash = "sha256:e6e9c6e019dd5484be5beafc775ae6c925f4c69a3487040ed09b45e13df2cb91"}, +] + +[package.dependencies] +huggingface_hub = ">=0.16.4,<1.0" + +[package.extras] +dev = ["tokenizers[testing]"] +docs = ["setuptools_rust", "sphinx", "sphinx_rtd_theme"] +testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] + [[package]] name = "toml" version = "0.10.2" @@ -2001,6 +2344,74 @@ notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] telegram = ["requests"] +[[package]] +name = "transformers" +version = "4.38.2" +description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "transformers-4.38.2-py3-none-any.whl", hash = "sha256:c4029cb9f01b3dd335e52f364c52d2b37c65b4c78e02e6a08b1919c5c928573e"}, + {file = "transformers-4.38.2.tar.gz", hash = "sha256:c5fc7ad682b8a50a48b2a4c05d4ea2de5567adb1bdd00053619dbe5960857dd5"}, +] + +[package.dependencies] +filelock = "*" +huggingface-hub = ">=0.19.3,<1.0" +numpy = ">=1.17" +packaging = ">=20.0" +pyyaml = ">=5.1" +regex = "!=2019.12.17" +requests = "*" +safetensors = ">=0.4.1" +tokenizers = ">=0.14,<0.19" +tqdm = ">=4.27" + +[package.extras] +accelerate = ["accelerate (>=0.21.0)"] +agents = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "datasets (!=2.5.0)", "diffusers", "opencv-python", "sentencepiece (>=0.1.91,!=0.1.92)", "torch"] +all = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm", "tokenizers (>=0.14,<0.19)", "torch", "torchaudio", "torchvision"] +audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] +codecarbon = ["codecarbon (==1.2.0)"] +deepspeed = ["accelerate (>=0.21.0)", "deepspeed (>=0.9.3)"] +deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.21.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder 
(>=0.3.0)", "nltk", "optuna", "parameterized", "protobuf", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] +dev = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "timm", "tokenizers (>=0.14,<0.19)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] +dev-tensorflow = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.14,<0.19)", "urllib3 (<2.0.0)"] +dev-torch = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "timeout-decorator", "timm", "tokenizers (>=0.14,<0.19)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] +docs = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", 
"av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "hf-doc-builder", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm", "tokenizers (>=0.14,<0.19)", "torch", "torchaudio", "torchvision"] +docs-specific = ["hf-doc-builder"] +flax = ["flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "optax (>=0.0.8,<=0.1.4)"] +flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] +ftfy = ["ftfy"] +integrations = ["optuna", "ray[tune] (>=2.7.0)", "sigopt"] +ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "rhoknp (>=1.1.0,<1.3.1)", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] +modelcreation = ["cookiecutter (==1.7.3)"] +natten = ["natten (>=0.14.6,<0.15.0)"] +onnx = ["onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "tf2onnx"] +onnxruntime = ["onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] +optuna = ["optuna"] +quality = ["GitPython (<3.1.19)", "datasets (!=2.5.0)", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "ruff (==0.1.5)", "urllib3 (<2.0.0)"] +ray = ["ray[tune] (>=2.7.0)"] +retrieval = ["datasets (!=2.5.0)", "faiss-cpu"] +sagemaker = ["sagemaker (>=2.31.0)"] +sentencepiece = ["protobuf", "sentencepiece (>=0.1.91,!=0.1.92)"] +serving = ["fastapi", "pydantic", "starlette", "uvicorn"] +sigopt = ["sigopt"] +sklearn = ["scikit-learn"] +speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] +testing = ["GitPython (<3.1.19)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "parameterized", "protobuf", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "tensorboard", "timeout-decorator"] +tf = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx"] +tf-cpu = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow-cpu (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx"] +tf-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] +timm = ["timm"] +tokenizers = ["tokenizers (>=0.14,<0.19)"] +torch = ["accelerate (>=0.21.0)", "torch"] +torch-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] +torch-vision = ["Pillow (>=10.0.1,<=15.0)", "torchvision"] +torchhub = ["filelock", "huggingface-hub (>=0.19.3,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.14,<0.19)", "torch", "tqdm (>=4.27)"] +video = ["av (==9.2.0)", "decord (==0.6.0)"] +vision = ["Pillow (>=10.0.1,<=15.0)"] + [[package]] name = "typing-extensions" version = "4.10.0" @@ -2275,4 +2686,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "eee8676fc20f015e8f2496c3cb4c46ef8e8d81d828f49466448868efadb0b53a" +content-hash = 
"47d4d7c22bd95e2ee9c3543f485f76e05899c49e02fdbe1abcfff2cb8b5c93d1" diff --git a/airbyte-integrations/connectors/source-microsoft-onedrive/pyproject.toml b/airbyte-integrations/connectors/source-microsoft-onedrive/pyproject.toml index 234d789c1ec4..505f87187623 100644 --- a/airbyte-integrations/connectors/source-microsoft-onedrive/pyproject.toml +++ b/airbyte-integrations/connectors/source-microsoft-onedrive/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "0.1.8" +version = "0.1.9" name = "source-microsoft-onedrive" description = "Source implementation for Microsoft OneDrive." authors = [ "Airbyte ",] @@ -20,6 +20,8 @@ python = "^3.9,<3.12" Office365-REST-Python-Client = "==2.5.5" smart-open = "==6.4.0" msal = "==1.25.0" +# override transitive dependency that had a vulnerability https://nvd.nist.gov/vuln/detail/CVE-2023-6730 +transformers = "4.38.2" [tool.poetry.scripts] source-microsoft-onedrive = "source_microsoft_onedrive.run:run" diff --git a/docs/integrations/sources/microsoft-onedrive.md b/docs/integrations/sources/microsoft-onedrive.md index 4c7ce624213e..7e74518c79e4 100644 --- a/docs/integrations/sources/microsoft-onedrive.md +++ b/docs/integrations/sources/microsoft-onedrive.md @@ -121,6 +121,7 @@ The connector is restricted by normal Microsoft Graph [requests limitation](http | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:------------------------------------------------------------------------| +| 0.1.9 | 2024-03-11 | [35956](https://github.com/airbytehq/airbyte/pull/35956) | Pin `transformers` transitive dependency | | 0.1.8 | 2024-03-06 | [35858](https://github.com/airbytehq/airbyte/pull/35858) | Bump poetry.lock to upgrade transitive dependency | | 0.1.7 | 2024-03-0q | [34936](https://github.com/airbytehq/airbyte/pull/34936) | Enable in Cloud | | 0.1.6 | 2024-02-06 | [34936](https://github.com/airbytehq/airbyte/pull/34936) | Bump CDK version to avoid missing SyncMode errors | From c7e0d74f91fd3dea2e029cafa0965851ee1278d0 Mon Sep 17 00:00:00 2001 From: Stephane Geneix <147216312+stephane-airbyte@users.noreply.github.com> Date: Mon, 11 Mar 2024 21:56:37 -0700 Subject: [PATCH 165/172] fix junit versionning mess (#35977) --- build.gradle | 13 +++++++------ deps.toml | 9 ++------- 2 files changed, 9 insertions(+), 13 deletions(-) diff --git a/build.gradle b/build.gradle index 02755ee877b0..17020d7ddb14 100644 --- a/build.gradle +++ b/build.gradle @@ -145,23 +145,24 @@ allprojects { testFixturesAnnotationProcessor lombok // JUnit dependencies. 
- def vAssertJ = "3.21.0" - def vJUnit = "5.9.1" - def vJUnitJupiter = "5.10.0" - def vJUnitPioneer = "1.7.1" + def vAssertJ = "3.25.3" + def vJUnit = "5.10.2" + def vJUnitJupiter = "5.11.0" + + testFixturesImplementation platform("org.junit:junit-bom:${vJUnit}") testFixturesImplementation "org.junit.jupiter:junit-jupiter-api:${vJUnit}" testFixturesImplementation "org.junit.jupiter:junit-jupiter-params:${vJUnit}" testFixturesImplementation "org.mockito:mockito-junit-jupiter:${vJUnitJupiter}" testFixturesImplementation "org.assertj:assertj-core:${vAssertJ}" - testFixturesImplementation "org.junit-pioneer:junit-pioneer:${vJUnitPioneer}" + testImplementation platform("org.junit:junit-bom:${vJUnit}") testImplementation "org.junit.jupiter:junit-jupiter-api:${vJUnit}" testImplementation "org.junit.jupiter:junit-jupiter-params:${vJUnit}" testImplementation "org.mockito:mockito-junit-jupiter:${vJUnitJupiter}" testImplementation "org.assertj:assertj-core:${vAssertJ}" - testImplementation "org.junit-pioneer:junit-pioneer:${vJUnitPioneer}" + testRuntimeOnly platform("org.junit:junit-bom:${vJUnit}") testRuntimeOnly "org.junit.jupiter:junit-jupiter-engine:${vJUnit}" // Spotbugs dependencies. diff --git a/deps.toml b/deps.toml index 1caea5603cc8..c815cfb50258 100644 --- a/deps.toml +++ b/deps.toml @@ -9,7 +9,7 @@ glassfish_version = "2.31" hikaricp = "5.0.1" jmh = "1.36" jooq = "3.13.4" -junit-jupiter = "5.9.1" +junit-bom = "5.10.1" kotlin = "1.9.0" log4j = "2.21.1" lombok = "1.18.30" @@ -19,6 +19,7 @@ segment = "2.1.1" slf4j = "2.0.9" temporal = "1.17.0" debezium = "2.4.0.Final" +mockito-version = "5.11.0" [libraries] airbyte-protocol = { module = "io.airbyte.airbyte-protocol:protocol-models", version.ref = "airbyte-protocol" } @@ -70,11 +71,7 @@ jooq = { module = "org.jooq:jooq", version.ref = "jooq" } jooq-codegen = { module = "org.jooq:jooq-codegen", version.ref = "jooq" } jooq-meta = { module = "org.jooq:jooq-meta", version.ref = "jooq" } jul-to-slf4j = { module = "org.slf4j:jul-to-slf4j", version.ref = "slf4j" } -junit-jupiter-api = { module = "org.junit.jupiter:junit-jupiter-api", version.ref = "junit-jupiter" } -junit-jupiter-engine = { module = "org.junit.jupiter:junit-jupiter-engine", version.ref = "junit-jupiter" } -junit-jupiter-params = { module = "org.junit.jupiter:junit-jupiter-params", version.ref = "junit-jupiter" } junit-jupiter-system-stubs = { module = "uk.org.webcompere:system-stubs-jupiter", version = "2.0.1" } -junit-pioneer = { module = "org.junit-pioneer:junit-pioneer", version = "1.7.1" } kotlin-logging = { module = "io.github.oshai:kotlin-logging-jvm", version = "5.1.0" } kotlinx-cli = { module = "org.jetbrains.kotlinx:kotlinx-cli", version = "0.3.5" } kotlinx-cli-jvm = { module = "org.jetbrains.kotlinx:kotlinx-cli-jvm", version = "0.3.5" } @@ -87,7 +84,6 @@ log4j-over-slf4j = { module = "org.slf4j:log4j-over-slf4j", version.ref = "slf4j log4j-web = { module = "org.apache.logging.log4j:log4j-web", version.ref = "log4j" } lombok = { module = "org.projectlombok:lombok", version.ref = "lombok" } micrometer-statsd = { module = "io.micrometer:micrometer-registry-statsd", version = "1.9.3" } -mockito-junit-jupiter = { module = "org.mockito:mockito-junit-jupiter", version = "5.10.0" } mockk = { module = "io.mockk:mockk", version = "1.13.3" } mongo-driver-sync = { module = "org.mongodb:mongodb-driver-sync", version = "4.10.2" } otel-bom = { module = "io.opentelemetry:opentelemetry-bom", version = "1.14.0" } @@ -117,7 +113,6 @@ debezium-postgres = { module = 
"io.debezium:debezium-connector-postgres", versio apache = ["apache-commons", "apache-commons-lang"] datadog = ["datadog-trace-api", "datadog-trace-ot"] jackson = ["jackson-databind", "jackson-annotations", "jackson-dataformat", "jackson-datatype"] -junit = ["junit-jupiter-api", "junit-jupiter-params", "mockito-junit-jupiter"] log4j = ["log4j-api", "log4j-core", "log4j-slf4j-impl", "log4j-slf4j2-impl", "log4j-web"] slf4j = ["jul-to-slf4j", "jcl-over-slf4j", "log4j-over-slf4j"] temporal = ["temporal-sdk", "temporal-serviceclient"] From d804ff8faf0fd56003d9b9fb115abfd907335f55 Mon Sep 17 00:00:00 2001 From: Augustin Date: Tue, 12 Mar 2024 07:34:20 +0100 Subject: [PATCH 166/172] migrate live-test to a different repo (#35948) --- airbyte-ci/connectors/live-tests/.gitignore | 1 - airbyte-ci/connectors/live-tests/README.md | 160 -- airbyte-ci/connectors/live-tests/poetry.lock | 2272 ----------------- .../connectors/live-tests/pyproject.toml | 48 - .../live-tests/src/live_tests/__init__.py | 2 - .../live-tests/src/live_tests/cli.py | 13 - .../src/live_tests/commons/__init__.py | 1 - .../live_tests/commons/backends/__init__.py | 7 - .../commons/backends/base_backend.py | 16 - .../commons/backends/duckdb_backend.py | 21 - .../commons/backends/file_backend.py | 149 -- .../commons/connection_objects_retrieval.py | 125 - .../live_tests/commons/connector_runner.py | 289 --- .../src/live_tests/commons/models.py | 245 -- .../src/live_tests/commons/utils.py | 17 - .../src/live_tests/debug/__init__.py | 10 - .../live-tests/src/live_tests/debug/cli.py | 116 - .../live_tests/regression_tests/__init__.py | 1 - .../live_tests/regression_tests/conftest.py | 693 ----- .../live_tests/regression_tests/pytest.ini | 5 - .../regression_tests/test_expected_records.py | 20 - .../src/live_tests/regression_tests/utils.py | 19 - .../connectors/live-tests/tests/__init__.py | 1 - .../live-tests/tests/backends/__init__.py | 0 .../tests/backends/test_file_backend.py | 71 - 25 files changed, 4302 deletions(-) delete mode 100644 airbyte-ci/connectors/live-tests/.gitignore delete mode 100644 airbyte-ci/connectors/live-tests/README.md delete mode 100644 airbyte-ci/connectors/live-tests/poetry.lock delete mode 100644 airbyte-ci/connectors/live-tests/pyproject.toml delete mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/__init__.py delete mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/cli.py delete mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/commons/__init__.py delete mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/__init__.py delete mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/base_backend.py delete mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/duckdb_backend.py delete mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/file_backend.py delete mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/commons/connection_objects_retrieval.py delete mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/commons/connector_runner.py delete mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/commons/models.py delete mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/commons/utils.py delete mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/debug/__init__.py delete mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/debug/cli.py delete mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/__init__.py delete mode 100644 
airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/conftest.py delete mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/pytest.ini delete mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/test_expected_records.py delete mode 100644 airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/utils.py delete mode 100644 airbyte-ci/connectors/live-tests/tests/__init__.py delete mode 100644 airbyte-ci/connectors/live-tests/tests/backends/__init__.py delete mode 100644 airbyte-ci/connectors/live-tests/tests/backends/test_file_backend.py diff --git a/airbyte-ci/connectors/live-tests/.gitignore b/airbyte-ci/connectors/live-tests/.gitignore deleted file mode 100644 index 452eecef73dd..000000000000 --- a/airbyte-ci/connectors/live-tests/.gitignore +++ /dev/null @@ -1 +0,0 @@ -regression_tests_artifacts diff --git a/airbyte-ci/connectors/live-tests/README.md b/airbyte-ci/connectors/live-tests/README.md deleted file mode 100644 index d921114cedb1..000000000000 --- a/airbyte-ci/connectors/live-tests/README.md +++ /dev/null @@ -1,160 +0,0 @@ -# Connector Live Testing - -This project contains utilities for running connector tests against live data. - -## Requirements -* `docker` -* `Python ^3.10` -* `pipx` -* `poetry` - -## Install -```bash -# From airbyte-ci/connectors/live-tests -pipx install . -# To install in editable mode for development -pipx install . --force --editable -``` - -## Commands - -### `debug` - -``` -Usage: live-tests debug [OPTIONS] {check|discover|read|read-with-state|spec} - - Run a specific command on one or multiple connectors and persists the - outputs to local storage. - -Options: - --connection-id TEXT - --config-path FILE - --catalog-path FILE - --state-path FILE - -c, --connector-image TEXT Docker image name of the connector to debug - (e.g. `airbyte/source-faker:latest`, - `airbyte/source-faker:dev`) [required] - -o, --output-directory DIRECTORY - Directory in which connector output and test - results should be stored. Defaults to the - current directory. - -hc, --http-cache Use the HTTP cache for the connector. - --help Show this message and exit. -``` - -This command is made to run any of the following connector commands against one or multiple connector images. - -**Available connector commands:** -* `spec` -* `check` -* `discover` -* `read` or `read_with_state` (requires a `--state-path` to be passed) - -It will write artifacts to an output directory: -* `stdout.log`: The collected standard output following the command execution -* `stderr.log`: The collected standard error following the c -* `http_dump.txt`: An `mitmproxy` http stream log. Can be consumed with `mitmweb` (version `9.0.1`) for debugging. -* `airbyte_messages.db`: A DuckDB database containing the messages produced by the connector. -* `airbyte_messages`: A directory containing `.jsonl` files for each message type (logs, records, traces, controls, states etc.) produced by the connector. 
-
-#### Example
-Let's run `debug` to check the output of `read` on two different versions of the same connector:
-
-```bash
-live-tests debug read \
---connector-image=airbyte/source-pokeapi:dev \
---connector-image=airbyte/source-pokeapi:latest \
---config-path=poke_config.json \
---catalog-path=configured_catalog.json
-```
-
-It will store the results in a `live_tests_debug_reports` directory under the current working directory:
-
-```
-live_tests_debug_reports
-└── 1709547771
-    └── source-pokeapi
-        └── read
-            ├── dev
-            │   ├── airbyte_messages
-            │   │   ├── duck.db # DuckDB database
-            │   │   ├── logs.jsonl
-            │   │   ├── records.jsonl
-            │   │   └── traces.jsonl
-            │   ├── http_dump.mitm # Consume it with mitmweb --rfile http_dump.mitm
-            │   ├── stderr.log
-            │   └── stdout.log
-            └── latest
-                ├── airbyte_messages
-                │   ├── duck.db # DuckDB database
-                │   ├── logs.jsonl
-                │   ├── records.jsonl
-                │   └── traces.jsonl
-                ├── http_dump.mitm # Consume it with mitmweb --rfile http_dump.mitm
-                ├── stderr.log
-                └── stdout.log
-```
-
-You can also run the `debug` command on a live connection by passing the `--connection-id` option:
-
-```bash
-live-tests debug read \
---connector-image=airbyte/source-pokeapi:dev \
---connector-image=airbyte/source-pokeapi:latest \
---connection-id=<CONNECTION_ID>
-```
-
-##### Consuming `http_dump.mitm`
-You can install [`mitmproxy`](https://mitmproxy.org/):
-```bash
-pipx install mitmproxy
-```
-
-And run:
-```bash
-mitmweb --rfile=http_dump.mitm
-```
-
-## Regression tests
-We created a regression test suite that compares the outputs of connector commands across different versions of the same connector.
-
-You can run the existing test suites with the following command:
-
-#### With local connection objects (`config.json`, `catalog.json`, `state.json`)
-```bash
-poetry run pytest src/live_tests/regression_tests \
---connector-image=airbyte/source-faker \
---config-path=<PATH_TO_CONFIG> \
---catalog-path=<PATH_TO_CATALOG> \
---target-version=dev \
---control-version=latest
-```
-
-#### Using a live connection
-The live connection objects will be fetched.
-
-```bash
-poetry run pytest src/live_tests/regression_tests \
---connector-image=airbyte/source-faker \
---connection-id=<CONNECTION_ID> \
---target-version=dev \
---control-version=latest
-```
-
-You can also pass local connection object paths with `--config-path`, `--state-path` or `--catalog-path` to override the live connection objects.
-
-## Changelog
-
-### 0.4.0
-Introduce DuckDB to store the messages produced by the connector.
-
-### 0.3.0
-Pass connection id to the regression tests suite.
-
-### 0.2.0
-Declare the regression tests suite.
-
-### 0.1.0
-Implement initial primitives and a `debug` command to run connector commands and persist the outputs to local storage.
diff --git a/airbyte-ci/connectors/live-tests/poetry.lock b/airbyte-ci/connectors/live-tests/poetry.lock
deleted file mode 100644
index e59b7b54c38e..000000000000
--- a/airbyte-ci/connectors/live-tests/poetry.lock
+++ /dev/null
@@ -1,2272 +0,0 @@
-# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
- -[[package]] -name = "aiohttp" -version = "3.9.3" -description = "Async http client/server framework (asyncio)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:939677b61f9d72a4fa2a042a5eee2a99a24001a67c13da113b2e30396567db54"}, - {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f5cd333fcf7590a18334c90f8c9147c837a6ec8a178e88d90a9b96ea03194cc"}, - {file = "aiohttp-3.9.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82e6aa28dd46374f72093eda8bcd142f7771ee1eb9d1e223ff0fa7177a96b4a5"}, - {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f56455b0c2c7cc3b0c584815264461d07b177f903a04481dfc33e08a89f0c26b"}, - {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bca77a198bb6e69795ef2f09a5f4c12758487f83f33d63acde5f0d4919815768"}, - {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e083c285857b78ee21a96ba1eb1b5339733c3563f72980728ca2b08b53826ca5"}, - {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab40e6251c3873d86ea9b30a1ac6d7478c09277b32e14745d0d3c6e76e3c7e29"}, - {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df822ee7feaaeffb99c1a9e5e608800bd8eda6e5f18f5cfb0dc7eeb2eaa6bbec"}, - {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:acef0899fea7492145d2bbaaaec7b345c87753168589cc7faf0afec9afe9b747"}, - {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cd73265a9e5ea618014802ab01babf1940cecb90c9762d8b9e7d2cc1e1969ec6"}, - {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a78ed8a53a1221393d9637c01870248a6f4ea5b214a59a92a36f18151739452c"}, - {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:6b0e029353361f1746bac2e4cc19b32f972ec03f0f943b390c4ab3371840aabf"}, - {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7cf5c9458e1e90e3c390c2639f1017a0379a99a94fdfad3a1fd966a2874bba52"}, - {file = "aiohttp-3.9.3-cp310-cp310-win32.whl", hash = "sha256:3e59c23c52765951b69ec45ddbbc9403a8761ee6f57253250c6e1536cacc758b"}, - {file = "aiohttp-3.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:055ce4f74b82551678291473f66dc9fb9048a50d8324278751926ff0ae7715e5"}, - {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6b88f9386ff1ad91ace19d2a1c0225896e28815ee09fc6a8932fded8cda97c3d"}, - {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c46956ed82961e31557b6857a5ca153c67e5476972e5f7190015018760938da2"}, - {file = "aiohttp-3.9.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07b837ef0d2f252f96009e9b8435ec1fef68ef8b1461933253d318748ec1acdc"}, - {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad46e6f620574b3b4801c68255492e0159d1712271cc99d8bdf35f2043ec266"}, - {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ed3e046ea7b14938112ccd53d91c1539af3e6679b222f9469981e3dac7ba1ce"}, - {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:039df344b45ae0b34ac885ab5b53940b174530d4dd8a14ed8b0e2155b9dddccb"}, - {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7943c414d3a8d9235f5f15c22ace69787c140c80b718dcd57caaade95f7cd93b"}, - {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84871a243359bb42c12728f04d181a389718710129b36b6aad0fc4655a7647d4"}, - {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5eafe2c065df5401ba06821b9a054d9cb2848867f3c59801b5d07a0be3a380ae"}, - {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9d3c9b50f19704552f23b4eaea1fc082fdd82c63429a6506446cbd8737823da3"}, - {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:f033d80bc6283092613882dfe40419c6a6a1527e04fc69350e87a9df02bbc283"}, - {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:2c895a656dd7e061b2fd6bb77d971cc38f2afc277229ce7dd3552de8313a483e"}, - {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1f5a71d25cd8106eab05f8704cd9167b6e5187bcdf8f090a66c6d88b634802b4"}, - {file = "aiohttp-3.9.3-cp311-cp311-win32.whl", hash = "sha256:50fca156d718f8ced687a373f9e140c1bb765ca16e3d6f4fe116e3df7c05b2c5"}, - {file = "aiohttp-3.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:5fe9ce6c09668063b8447f85d43b8d1c4e5d3d7e92c63173e6180b2ac5d46dd8"}, - {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:38a19bc3b686ad55804ae931012f78f7a534cce165d089a2059f658f6c91fa60"}, - {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:770d015888c2a598b377bd2f663adfd947d78c0124cfe7b959e1ef39f5b13869"}, - {file = "aiohttp-3.9.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee43080e75fc92bf36219926c8e6de497f9b247301bbf88c5c7593d931426679"}, - {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52df73f14ed99cee84865b95a3d9e044f226320a87af208f068ecc33e0c35b96"}, - {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc9b311743a78043b26ffaeeb9715dc360335e5517832f5a8e339f8a43581e4d"}, - {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b955ed993491f1a5da7f92e98d5dad3c1e14dc175f74517c4e610b1f2456fb11"}, - {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:504b6981675ace64c28bf4a05a508af5cde526e36492c98916127f5a02354d53"}, - {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6fe5571784af92b6bc2fda8d1925cccdf24642d49546d3144948a6a1ed58ca5"}, - {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ba39e9c8627edc56544c8628cc180d88605df3892beeb2b94c9bc857774848ca"}, - {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e5e46b578c0e9db71d04c4b506a2121c0cb371dd89af17a0586ff6769d4c58c1"}, - {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:938a9653e1e0c592053f815f7028e41a3062e902095e5a7dc84617c87267ebd5"}, - {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:c3452ea726c76e92f3b9fae4b34a151981a9ec0a4847a627c43d71a15ac32aa6"}, - {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ff30218887e62209942f91ac1be902cc80cddb86bf00fbc6783b7a43b2bea26f"}, - {file = "aiohttp-3.9.3-cp312-cp312-win32.whl", hash = "sha256:38f307b41e0bea3294a9a2a87833191e4bcf89bb0365e83a8be3a58b31fb7f38"}, - {file = "aiohttp-3.9.3-cp312-cp312-win_amd64.whl", hash = 
"sha256:b791a3143681a520c0a17e26ae7465f1b6f99461a28019d1a2f425236e6eedb5"}, - {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0ed621426d961df79aa3b963ac7af0d40392956ffa9be022024cd16297b30c8c"}, - {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7f46acd6a194287b7e41e87957bfe2ad1ad88318d447caf5b090012f2c5bb528"}, - {file = "aiohttp-3.9.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:feeb18a801aacb098220e2c3eea59a512362eb408d4afd0c242044c33ad6d542"}, - {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f734e38fd8666f53da904c52a23ce517f1b07722118d750405af7e4123933511"}, - {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b40670ec7e2156d8e57f70aec34a7216407848dfe6c693ef131ddf6e76feb672"}, - {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fdd215b7b7fd4a53994f238d0f46b7ba4ac4c0adb12452beee724ddd0743ae5d"}, - {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:017a21b0df49039c8f46ca0971b3a7fdc1f56741ab1240cb90ca408049766168"}, - {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e99abf0bba688259a496f966211c49a514e65afa9b3073a1fcee08856e04425b"}, - {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:648056db9a9fa565d3fa851880f99f45e3f9a771dd3ff3bb0c048ea83fb28194"}, - {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8aacb477dc26797ee089721536a292a664846489c49d3ef9725f992449eda5a8"}, - {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:522a11c934ea660ff8953eda090dcd2154d367dec1ae3c540aff9f8a5c109ab4"}, - {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:5bce0dc147ca85caa5d33debc4f4d65e8e8b5c97c7f9f660f215fa74fc49a321"}, - {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b4af9f25b49a7be47c0972139e59ec0e8285c371049df1a63b6ca81fdd216a2"}, - {file = "aiohttp-3.9.3-cp38-cp38-win32.whl", hash = "sha256:298abd678033b8571995650ccee753d9458dfa0377be4dba91e4491da3f2be63"}, - {file = "aiohttp-3.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:69361bfdca5468c0488d7017b9b1e5ce769d40b46a9f4a2eed26b78619e9396c"}, - {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0fa43c32d1643f518491d9d3a730f85f5bbaedcbd7fbcae27435bb8b7a061b29"}, - {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:835a55b7ca49468aaaac0b217092dfdff370e6c215c9224c52f30daaa735c1c1"}, - {file = "aiohttp-3.9.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06a9b2c8837d9a94fae16c6223acc14b4dfdff216ab9b7202e07a9a09541168f"}, - {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abf151955990d23f84205286938796c55ff11bbfb4ccfada8c9c83ae6b3c89a3"}, - {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59c26c95975f26e662ca78fdf543d4eeaef70e533a672b4113dd888bd2423caa"}, - {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f95511dd5d0e05fd9728bac4096319f80615aaef4acbecb35a990afebe953b0e"}, - {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:595f105710293e76b9dc09f52e0dd896bd064a79346234b521f6b968ffdd8e58"}, - {file = 
"aiohttp-3.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7c8b816c2b5af5c8a436df44ca08258fc1a13b449393a91484225fcb7545533"}, - {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f1088fa100bf46e7b398ffd9904f4808a0612e1d966b4aa43baa535d1b6341eb"}, - {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f59dfe57bb1ec82ac0698ebfcdb7bcd0e99c255bd637ff613760d5f33e7c81b3"}, - {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:361a1026c9dd4aba0109e4040e2aecf9884f5cfe1b1b1bd3d09419c205e2e53d"}, - {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:363afe77cfcbe3a36353d8ea133e904b108feea505aa4792dad6585a8192c55a"}, - {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e2c45c208c62e955e8256949eb225bd8b66a4c9b6865729a786f2aa79b72e9d"}, - {file = "aiohttp-3.9.3-cp39-cp39-win32.whl", hash = "sha256:f7217af2e14da0856e082e96ff637f14ae45c10a5714b63c77f26d8884cf1051"}, - {file = "aiohttp-3.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:27468897f628c627230dba07ec65dc8d0db566923c48f29e084ce382119802bc"}, - {file = "aiohttp-3.9.3.tar.gz", hash = "sha256:90842933e5d1ff760fae6caca4b2b3edba53ba8f4b71e95dacf2818a2aca06f7"}, -] - -[package.dependencies] -aiosignal = ">=1.1.2" -async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} -attrs = ">=17.3.0" -frozenlist = ">=1.1.1" -multidict = ">=4.5,<7.0" -yarl = ">=1.0,<2.0" - -[package.extras] -speedups = ["Brotli", "aiodns", "brotlicffi"] - -[[package]] -name = "aiosignal" -version = "1.3.1" -description = "aiosignal: a list of registered asynchronous callbacks" -optional = false -python-versions = ">=3.7" -files = [ - {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, - {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, -] - -[package.dependencies] -frozenlist = ">=1.1.0" - -[[package]] -name = "airbyte-protocol-models" -version = "0.7.0" -description = "Declares the Airbyte Protocol." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "airbyte_protocol_models-0.7.0-py3-none-any.whl", hash = "sha256:0b038134f12eff2c5f8265751a6915f5d247fb15d62c878bdeb1a6fefe1eb59a"}, - {file = "airbyte_protocol_models-0.7.0.tar.gz", hash = "sha256:e084970365ff5c245d3dbfa58d0d2134e8f97455835e5a08dfd9be77b4be016c"}, -] - -[package.dependencies] -pydantic = ">=1.9.2,<2.0.0" - -[[package]] -name = "anyio" -version = "4.3.0" -description = "High level compatibility layer for multiple asynchronous event loop implementations" -optional = false -python-versions = ">=3.8" -files = [ - {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, - {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, -] - -[package.dependencies] -exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} -idna = ">=2.8" -sniffio = ">=1.1" -typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} - -[package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] - -[[package]] -name = "asn1crypto" -version = "1.5.1" -description = "Fast ASN.1 parser and serializer with definitions for private keys, public keys, certificates, CRL, OCSP, CMS, PKCS#3, PKCS#7, PKCS#8, PKCS#12, PKCS#5, X.509 and TSP" -optional = false -python-versions = "*" -files = [ - {file = "asn1crypto-1.5.1-py2.py3-none-any.whl", hash = "sha256:db4e40728b728508912cbb3d44f19ce188f218e9eba635821bb4b68564f8fd67"}, - {file = "asn1crypto-1.5.1.tar.gz", hash = "sha256:13ae38502be632115abf8a24cbe5f4da52e3b5231990aff31123c805306ccb9c"}, -] - -[[package]] -name = "async-timeout" -version = "4.0.3" -description = "Timeout context manager for asyncio programs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, -] - -[[package]] -name = "asyncclick" -version = "8.1.7.1" -description = "Composable command line interface toolkit, async version" -optional = false -python-versions = ">=3.7" -files = [ - {file = "asyncclick-8.1.7.1-py3-none-any.whl", hash = "sha256:e0fea5f0223ac45cfc26153cc80a58cc65fc077ac8de79be49248c918e8c3422"}, - {file = "asyncclick-8.1.7.1.tar.gz", hash = "sha256:a47b61258a689212cf9463fbf3b4cc52d05bfd03185f6ead2315fc03fd17ef75"}, -] - -[package.dependencies] -anyio = "*" -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "attrs" -version = "23.2.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.7" -files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, -] - -[package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", 
"zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] -tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] - -[[package]] -name = "backoff" -version = "2.2.1" -description = "Function decoration for backoff and retry" -optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, - {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, -] - -[[package]] -name = "beartype" -version = "0.17.2" -description = "Unbearably fast runtime type checking in pure Python." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "beartype-0.17.2-py3-none-any.whl", hash = "sha256:c22b21e1f785cfcf5c4d3d13070f532b6243a3ad67e68d2298ff08d539847dce"}, - {file = "beartype-0.17.2.tar.gz", hash = "sha256:e911e1ae7de4bccd15745f7643609d8732f64de5c2fb844e89cbbed1c5a8d495"}, -] - -[package.extras] -all = ["typing-extensions (>=3.10.0.0)"] -dev = ["autoapi (>=0.9.0)", "coverage (>=5.5)", "equinox", "mypy (>=0.800)", "numpy", "pandera", "pydata-sphinx-theme (<=0.7.2)", "pytest (>=4.0.0)", "sphinx", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)", "tox (>=3.20.1)", "typing-extensions (>=3.10.0.0)"] -doc-rtd = ["autoapi (>=0.9.0)", "pydata-sphinx-theme (<=0.7.2)", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)"] -test-tox = ["equinox", "mypy (>=0.800)", "numpy", "pandera", "pytest (>=4.0.0)", "sphinx", "typing-extensions (>=3.10.0.0)"] -test-tox-coverage = ["coverage (>=5.5)"] - -[[package]] -name = "cachetools" -version = "5.3.3" -description = "Extensible memoizing collections and decorators" -optional = false -python-versions = ">=3.7" -files = [ - {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, - {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, -] - -[[package]] -name = "cattrs" -version = "23.2.3" -description = "Composable complex class support for attrs and dataclasses." -optional = false -python-versions = ">=3.8" -files = [ - {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, - {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, -] - -[package.dependencies] -attrs = ">=23.1.0" -exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} - -[package.extras] -bson = ["pymongo (>=4.4.0)"] -cbor2 = ["cbor2 (>=5.4.6)"] -msgpack = ["msgpack (>=1.0.5)"] -orjson = ["orjson (>=3.9.2)"] -pyyaml = ["pyyaml (>=6.0)"] -tomlkit = ["tomlkit (>=0.11.8)"] -ujson = ["ujson (>=5.7.0)"] - -[[package]] -name = "certifi" -version = "2024.2.2" -description = "Python package for providing Mozilla's CA Bundle." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, -] - -[[package]] -name = "cffi" -version = "1.16.0" -description = "Foreign Function Interface for Python calling C code." -optional = false -python-versions = ">=3.8" -files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = 
"cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = 
"cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, -] - -[package.dependencies] -pycparser = "*" - -[[package]] -name = "charset-normalizer" -version = "3.3.2" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = 
"charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - 
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, -] - -[[package]] -name = "click" -version = "8.1.7" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "cloud-sql-python-connector" -version = "1.7.0" -description = "The Cloud SQL Python Connector is a library that can be used alongside a database driver to allow users with sufficient permissions to connect to a Cloud SQL database without having to manually allowlist IPs or manage SSL certificates." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "cloud-sql-python-connector-1.7.0.tar.gz", hash = "sha256:2eca34feba117ba6ab35872e51e86c2dbd3aea7f56edf626e7c64739233ed803"}, - {file = "cloud_sql_python_connector-1.7.0-py2.py3-none-any.whl", hash = "sha256:db74750365c33216dd3a7c62e6bed9a69bece75561a7ed518090a44434b673d6"}, -] - -[package.dependencies] -aiohttp = "*" -cryptography = ">=42.0.0" -google-auth = "*" -pg8000 = {version = ">=1.30.4", optional = true, markers = "extra == \"pg8000\""} -Requests = "*" - -[package.extras] -asyncpg = ["asyncpg (>=0.29.0)"] -pg8000 = ["pg8000 (>=1.30.4)"] -pymysql = ["PyMySQL (>=1.1.0)"] -pytds = ["python-tds (>=1.15.0)"] - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "connection-retriever" -version = "0.1.0" -description = "A tool to retrieve connection information from our Airbyte Cloud config api database" -optional = false -python-versions = "^3.10" -files = [] -develop = false - -[package.dependencies] -click = "^8.1.7" -cloud-sql-python-connector = {version = "^1.7.0", extras = ["pg8000"]} -google-cloud-iam = "^2.14.3" -google-cloud-logging = "^3.9.0" -google-cloud-secret-manager = "^2.18.3" -python-dotenv = "^1.0.1" -sqlalchemy = "^2.0.28" - -[package.source] -type = "git" -url = "https://github.com/airbytehq/airbyte-platform-internal.git" -reference = "augustin/03-06-create_connection-retriever_tool" -resolved_reference = "32315e491594c2e55829166e7b36d7ce1118be1c" -subdirectory = "tools/connection-retriever" - -[[package]] -name = "cryptography" -version = "42.0.5" -description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, - {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, - {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, - {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, - {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, - {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, - {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = 
"sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, - {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, -] - -[package.dependencies] -cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} - -[package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] -nox = ["nox"] -pep8test = ["check-sdist", "click", "mypy", "ruff"] -sdist = ["build"] -ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] -test-randomorder = ["pytest-randomly"] - -[[package]] -name = "dagger-io" -version = "0.9.6" -description = "A client package for running Dagger pipelines in Python." -optional = false -python-versions = ">=3.10" -files = [ - {file = "dagger_io-0.9.6-py3-none-any.whl", hash = "sha256:e2f1e4bbc252071a314fa5b0bad11a910433a9ee043972b716f6fcc5f9fc8236"}, - {file = "dagger_io-0.9.6.tar.gz", hash = "sha256:147b5a33c44d17f602a4121679893655e91308beb8c46a466afed39cf40f789b"}, -] - -[package.dependencies] -anyio = ">=3.6.2" -beartype = ">=0.11.0" -cattrs = ">=22.2.0" -gql = ">=3.4.0" -graphql-core = ">=3.2.3" -httpx = ">=0.23.1" -platformdirs = ">=2.6.2" -rich = ">=10.11.0" -typing-extensions = ">=4.8.0" - -[[package]] -name = "docker" -version = "6.1.3" -description = "A Python library for the Docker Engine API." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "docker-6.1.3-py3-none-any.whl", hash = "sha256:aecd2277b8bf8e506e484f6ab7aec39abe0038e29fa4a6d3ba86c3fe01844ed9"}, - {file = "docker-6.1.3.tar.gz", hash = "sha256:aa6d17830045ba5ef0168d5eaa34d37beeb113948c413affe1d5991fc11f9a20"}, -] - -[package.dependencies] -packaging = ">=14.0" -pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} -requests = ">=2.26.0" -urllib3 = ">=1.26.0" -websocket-client = ">=0.32.0" - -[package.extras] -ssh = ["paramiko (>=2.4.3)"] - -[[package]] -name = "duckdb" -version = "0.10.0" -description = "DuckDB in-process database" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "duckdb-0.10.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:bd0ffb3fddef0f72a150e4d76e10942a84a1a0447d10907df1621b90d6668060"}, - {file = "duckdb-0.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f3d709d5c7c1a12b5e10d0b05fa916c670cd2b50178e3696faa0cc16048a1745"}, - {file = "duckdb-0.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9114aa22ec5d591a20ce5184be90f49d8e5b5348ceaab21e102c54560d07a5f8"}, - {file = "duckdb-0.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77a37877efadf39caf7cadde0f430fedf762751b9c54750c821e2f1316705a21"}, - {file = "duckdb-0.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87cbc9e1d9c3fc9f14307bea757f99f15f46843c0ab13a6061354410824ed41f"}, - {file = "duckdb-0.10.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f0bfec79fed387201550517d325dff4fad2705020bc139d936cab08b9e845662"}, - {file = "duckdb-0.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c5622134d2d9796b15e09de810e450859d4beb46d9b861357ec9ae40a61b775c"}, - {file = "duckdb-0.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:089ee8e831ccaef1b73fc89c43b661567175eed0115454880bafed5e35cda702"}, - {file = "duckdb-0.10.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a05af63747f1d7021995f0811c333dee7316cec3b06c0d3e4741b9bdb678dd21"}, - {file = "duckdb-0.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:072d6eba5d8a59e0069a8b5b4252fed8a21f9fe3f85a9129d186a39b3d0aea03"}, - {file = "duckdb-0.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a77b85668f59b919042832e4659538337f1c7f197123076c5311f1c9cf077df7"}, - {file = "duckdb-0.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96a666f1d2da65d03199a977aec246920920a5ea1da76b70ae02bd4fb1ffc48c"}, - {file = "duckdb-0.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ec76a4262b783628d26612d184834852d9c92fb203e91af789100c17e3d7173"}, - {file = "duckdb-0.10.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:009dd9d2cdbd3b061a9efbdfc79f2d1a8377bcf49f1e5f430138621f8c083a6c"}, - {file = "duckdb-0.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:878f06766088090dad4a2e5ee0081555242b2e8dcb29415ecc97e388cf0cf8d8"}, - {file = "duckdb-0.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:713ff0a1fb63a6d60f454acf67f31656549fb5d63f21ac68314e4f522daa1a89"}, - {file = "duckdb-0.10.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9c0ee450dfedfb52dd4957244e31820feef17228da31af6d052979450a80fd19"}, - {file = "duckdb-0.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ff79b2ea9994398b545c0d10601cd73565fbd09f8951b3d8003c7c5c0cebc7cb"}, - {file = "duckdb-0.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:6bdf1aa71b924ef651062e6b8ff9981ad85bec89598294af8a072062c5717340"}, - {file = "duckdb-0.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0265bbc8216be3ced7b377ba8847128a3fc0ef99798a3c4557c1b88e3a01c23"}, - {file = "duckdb-0.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d418a315a07707a693bd985274c0f8c4dd77015d9ef5d8d3da4cc1942fd82e0"}, - {file = "duckdb-0.10.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2828475a292e68c71855190b818aded6bce7328f79e38c04a0c75f8f1c0ceef0"}, - {file = "duckdb-0.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c3aaeaae2eba97035c65f31ffdb18202c951337bf2b3d53d77ce1da8ae2ecf51"}, - {file = "duckdb-0.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:c51790aaaea97d8e4a58a114c371ed8d2c4e1ca7cbf29e3bdab6d8ccfc5afc1e"}, - {file = "duckdb-0.10.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8af1ae7cc77a12206b6c47ade191882cc8f49f750bb3e72bb86ac1d4fa89926a"}, - {file = "duckdb-0.10.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa4f7e8e8dc0e376aeb280b83f2584d0e25ec38985c27d19f3107b2edc4f4a97"}, - {file = "duckdb-0.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28ae942a79fad913defa912b56483cd7827a4e7721f4ce4bc9025b746ecb3c89"}, - {file = "duckdb-0.10.0-cp37-cp37m-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:01b57802898091455ca2a32c1335aac1e398da77c99e8a96a1e5de09f6a0add9"}, - {file = "duckdb-0.10.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:52e1ad4a55fa153d320c367046b9500578192e01c6d04308ba8b540441736f2c"}, - {file = "duckdb-0.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:904c47d04095af745e989c853f0bfc0776913dfc40dfbd2da7afdbbb5f67fed0"}, - {file = "duckdb-0.10.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:184ae7ea5874f3b8fa51ab0f1519bdd088a0b78c32080ee272b1d137e2c8fd9c"}, - {file = "duckdb-0.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bd33982ecc9bac727a032d6cedced9f19033cbad56647147408891eb51a6cb37"}, - {file = "duckdb-0.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f59bf0949899105dd5f8864cb48139bfb78454a8c017b8258ba2b5e90acf7afc"}, - {file = "duckdb-0.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:395f3b18948001e35dceb48a4423d574e38656606d033eef375408b539e7b076"}, - {file = "duckdb-0.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b8eb2b803be7ee1df70435c33b03a4598cdaf676cd67ad782b288dcff65d781"}, - {file = "duckdb-0.10.0-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:31b2ddd331801064326c8e3587a4db8a31d02aef11332c168f45b3bd92effb41"}, - {file = "duckdb-0.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c8b89e76a041424b8c2026c5dc1f74b53fbbc6c6f650d563259885ab2e7d093d"}, - {file = "duckdb-0.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:79084a82f16c0a54f6bfb7ded5600400c2daa90eb0d83337d81a56924eaee5d4"}, - {file = "duckdb-0.10.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:79799b3a270dcd9070f677ba510f1e66b112df3068425691bac97c5e278929c7"}, - {file = "duckdb-0.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e8fc394bfe3434920cdbcfbdd0ac3ba40902faa1dbda088db0ba44003a45318a"}, - {file = "duckdb-0.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c116605551b4abf5786243a59bcef02bd69cc51837d0c57cafaa68cdc428aa0c"}, - {file = "duckdb-0.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:3191170c3b0a43b0c12644800326f5afdea00d5a4621d59dbbd0c1059139e140"}, - {file = "duckdb-0.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fee69a50eb93c72dc77e7ab1fabe0c38d21a52c5da44a86aa217081e38f9f1bd"}, - {file = "duckdb-0.10.0-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c5f449e87dacb16b0d145dbe65fa6fdb5a55b2b6911a46d74876e445dd395bac"}, - {file = "duckdb-0.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4487d0df221b17ea4177ad08131bc606b35f25cfadf890987833055b9d10cdf6"}, - {file = "duckdb-0.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:c099ae2ff8fe939fda62da81704f91e2f92ac45e48dc0e37c679c9d243d01e65"}, - {file = "duckdb-0.10.0.tar.gz", hash = "sha256:c02bcc128002aa79e3c9d89b9de25e062d1096a8793bc0d7932317b7977f6845"}, -] - -[[package]] -name = "exceptiongroup" -version = "1.2.0" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, -] - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "frozenlist" -version = "1.4.1" -description = "A list-like structure which implements collections.abc.MutableSequence" -optional = false -python-versions = ">=3.8" -files = [ - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, - {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, - {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, - {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, - {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, - {file 
= "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, - {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, - {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, - 
{file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, - {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, - {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, - {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, - {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, - {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, - {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, -] - -[[package]] -name = "google-api-core" -version = "2.17.1" -description = "Google API client core library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-api-core-2.17.1.tar.gz", hash = "sha256:9df18a1f87ee0df0bc4eea2770ebc4228392d8cc4066655b320e2cfccb15db95"}, - {file = "google_api_core-2.17.1-py3-none-any.whl", hash = "sha256:610c5b90092c360736baccf17bd3efbcb30dd380e7a6dc28a71059edb8bd0d8e"}, -] - -[package.dependencies] -google-auth = ">=2.14.1,<3.0.dev0" 
-googleapis-common-protos = ">=1.56.2,<2.0.dev0" -grpcio = [ - {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, - {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, -] -grpcio-status = [ - {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, - {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, -] -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" -requests = ">=2.18.0,<3.0.0.dev0" - -[package.extras] -grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] -grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] -grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] - -[[package]] -name = "google-auth" -version = "2.28.1" -description = "Google Authentication Library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-auth-2.28.1.tar.gz", hash = "sha256:34fc3046c257cedcf1622fc4b31fc2be7923d9b4d44973d481125ecc50d83885"}, - {file = "google_auth-2.28.1-py2.py3-none-any.whl", hash = "sha256:25141e2d7a14bfcba945f5e9827f98092716e99482562f15306e5b026e21aa72"}, -] - -[package.dependencies] -cachetools = ">=2.0.0,<6.0" -pyasn1-modules = ">=0.2.1" -rsa = ">=3.1.4,<5" - -[package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] -enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] -pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] -reauth = ["pyu2f (>=0.1.5)"] -requests = ["requests (>=2.20.0,<3.0.0.dev0)"] - -[[package]] -name = "google-cloud-appengine-logging" -version = "1.4.3" -description = "Google Cloud Appengine Logging API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-cloud-appengine-logging-1.4.3.tar.gz", hash = "sha256:fb504e6199fe8de85baa9d31cecf6776877851fe58867de603317ec7cc739987"}, - {file = "google_cloud_appengine_logging-1.4.3-py2.py3-none-any.whl", hash = "sha256:8e30af51d853f219caf29e8b8b342b9ce8214b29f334dafae38d39aaaff7d372"}, -] - -[package.dependencies] -google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} -google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev" -proto-plus = ">=1.22.3,<2.0.0dev" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" - -[[package]] -name = "google-cloud-audit-log" -version = "0.2.5" -description = "Google Cloud Audit Protos" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-cloud-audit-log-0.2.5.tar.gz", hash = "sha256:86e2faba3383adc8fd04a5bd7fd4f960b3e4aedaa7ed950f2f891ce16902eb6b"}, - {file = "google_cloud_audit_log-0.2.5-py2.py3-none-any.whl", hash = "sha256:18b94d4579002a450b7902cd2e8b8fdcb1ea2dd4df3b41f8f82be6d9f7fcd746"}, -] - -[package.dependencies] -googleapis-common-protos = ">=1.56.2,<2.0dev" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" - -[[package]] -name = "google-cloud-core" -version = "2.4.1" -description = "Google Cloud API 
client core library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-cloud-core-2.4.1.tar.gz", hash = "sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073"}, - {file = "google_cloud_core-2.4.1-py2.py3-none-any.whl", hash = "sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61"}, -] - -[package.dependencies] -google-api-core = ">=1.31.6,<2.0.dev0 || >2.3.0,<3.0.0dev" -google-auth = ">=1.25.0,<3.0dev" - -[package.extras] -grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"] - -[[package]] -name = "google-cloud-iam" -version = "2.14.3" -description = "Google Cloud Iam API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-cloud-iam-2.14.3.tar.gz", hash = "sha256:c82e993f8a9219c5ba1fce139c34aed6f019dd5f9b45ce956d5430583d2af26e"}, - {file = "google_cloud_iam-2.14.3-py2.py3-none-any.whl", hash = "sha256:61b8555fd14240b050611d7fe9833f276202a306e4003e01fc7fb7d70d23e6c4"}, -] - -[package.dependencies] -google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} -google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev" -proto-plus = ">=1.22.3,<2.0.0dev" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" - -[[package]] -name = "google-cloud-logging" -version = "3.9.0" -description = "Stackdriver Logging API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-cloud-logging-3.9.0.tar.gz", hash = "sha256:4decb1b0bed4a0e3c0e58a376646e6002d6be7cad039e3466822e8665072ea33"}, - {file = "google_cloud_logging-3.9.0-py2.py3-none-any.whl", hash = "sha256:094a2db068ff7f38c9e0c1017673fa49c0768fbae02769e03e06baa30f138b87"}, -] - -[package.dependencies] -google-api-core = {version = ">=1.33.2,<2.0.dev0 || >=2.8.dev0,<3.0.0dev", extras = ["grpc"]} -google-cloud-appengine-logging = ">=0.1.0,<2.0.0dev" -google-cloud-audit-log = ">=0.1.0,<1.0.0dev" -google-cloud-core = ">=2.0.0,<3.0.0dev" -grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" -proto-plus = [ - {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, - {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""}, -] -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" - -[[package]] -name = "google-cloud-secret-manager" -version = "2.18.3" -description = "Google Cloud Secret Manager API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-cloud-secret-manager-2.18.3.tar.gz", hash = "sha256:1db2f409324536e34f985081d389e3974ca3a3668df7845cad0be03ab8c0fa7d"}, - {file = "google_cloud_secret_manager-2.18.3-py2.py3-none-any.whl", hash = "sha256:4d4af82bddd9099ebdbe79e0c6b68f6c6cabea8323a3c1275bcead8f56310fb7"}, -] - -[package.dependencies] -google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} -google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev" -grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" -proto-plus = ">=1.22.3,<2.0.0dev" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" - -[[package]] -name = "googleapis-common-protos" -version = "1.62.0" 
-description = "Common protobufs used in Google APIs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "googleapis-common-protos-1.62.0.tar.gz", hash = "sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"}, - {file = "googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"}, -] - -[package.dependencies] -grpcio = {version = ">=1.44.0,<2.0.0.dev0", optional = true, markers = "extra == \"grpc\""} -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" - -[package.extras] -grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] - -[[package]] -name = "gql" -version = "3.5.0" -description = "GraphQL client for Python" -optional = false -python-versions = "*" -files = [ - {file = "gql-3.5.0-py2.py3-none-any.whl", hash = "sha256:70dda5694a5b194a8441f077aa5fb70cc94e4ec08016117523f013680901ecb7"}, - {file = "gql-3.5.0.tar.gz", hash = "sha256:ccb9c5db543682b28f577069950488218ed65d4ac70bb03b6929aaadaf636de9"}, -] - -[package.dependencies] -anyio = ">=3.0,<5" -backoff = ">=1.11.1,<3.0" -graphql-core = ">=3.2,<3.3" -yarl = ">=1.6,<2.0" - -[package.extras] -aiohttp = ["aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)"] -all = ["aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)", "botocore (>=1.21,<2)", "httpx (>=0.23.1,<1)", "requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)", "websockets (>=10,<12)"] -botocore = ["botocore (>=1.21,<2)"] -dev = ["aiofiles", "aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)", "black (==22.3.0)", "botocore (>=1.21,<2)", "check-manifest (>=0.42,<1)", "flake8 (==3.8.1)", "httpx (>=0.23.1,<1)", "isort (==4.3.21)", "mock (==4.0.2)", "mypy (==0.910)", "parse (==1.15.0)", "pytest (==7.4.2)", "pytest-asyncio (==0.21.1)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)", "sphinx (>=5.3.0,<6)", "sphinx-argparse (==0.2.5)", "sphinx-rtd-theme (>=0.4,<1)", "types-aiofiles", "types-mock", "types-requests", "vcrpy (==4.4.0)", "websockets (>=10,<12)"] -httpx = ["httpx (>=0.23.1,<1)"] -requests = ["requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)"] -test = ["aiofiles", "aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)", "botocore (>=1.21,<2)", "httpx (>=0.23.1,<1)", "mock (==4.0.2)", "parse (==1.15.0)", "pytest (==7.4.2)", "pytest-asyncio (==0.21.1)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)", "vcrpy (==4.4.0)", "websockets (>=10,<12)"] -test-no-transport = ["aiofiles", "mock (==4.0.2)", "parse (==1.15.0)", "pytest (==7.4.2)", "pytest-asyncio (==0.21.1)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "vcrpy (==4.4.0)"] -websockets = ["websockets (>=10,<12)"] - -[[package]] -name = "graphql-core" -version = "3.2.3" -description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL." 
-optional = false -python-versions = ">=3.6,<4" -files = [ - {file = "graphql-core-3.2.3.tar.gz", hash = "sha256:06d2aad0ac723e35b1cb47885d3e5c45e956a53bc1b209a9fc5369007fe46676"}, - {file = "graphql_core-3.2.3-py3-none-any.whl", hash = "sha256:5766780452bd5ec8ba133f8bf287dc92713e3868ddd83aee4faab9fc3e303dc3"}, -] - -[[package]] -name = "greenlet" -version = "3.0.3" -description = "Lightweight in-process concurrent programming" -optional = false -python-versions = ">=3.7" -files = [ - {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, - {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, - {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, - {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, - {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, - {file = 
"greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, - {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, - {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, - {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, - {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, - {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, - {file = 
"greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, - {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, - {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, - {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, - {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, - {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, - {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, -] - -[package.extras] -docs = ["Sphinx", "furo"] -test = ["objgraph", "psutil"] - -[[package]] -name = "grpc-google-iam-v1" -version = "0.13.0" -description = "IAM API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "grpc-google-iam-v1-0.13.0.tar.gz", hash = "sha256:fad318608b9e093258fbf12529180f400d1c44453698a33509cc6ecf005b294e"}, - {file = "grpc_google_iam_v1-0.13.0-py2.py3-none-any.whl", hash = "sha256:53902e2af7de8df8c1bd91373d9be55b0743ec267a7428ea638db3775becae89"}, -] - -[package.dependencies] -googleapis-common-protos = {version = ">=1.56.0,<2.0.0dev", extras = ["grpc"]} -grpcio = ">=1.44.0,<2.0.0dev" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" - -[[package]] -name = "grpcio" -version = "1.62.0" -description = "HTTP/2-based RPC framework" -optional = false -python-versions = ">=3.7" -files = [ - {file = 
"grpcio-1.62.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:136ffd79791b1eddda8d827b607a6285474ff8a1a5735c4947b58c481e5e4271"}, - {file = "grpcio-1.62.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:d6a56ba703be6b6267bf19423d888600c3f574ac7c2cc5e6220af90662a4d6b0"}, - {file = "grpcio-1.62.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:4cd356211579043fce9f52acc861e519316fff93980a212c8109cca8f47366b6"}, - {file = "grpcio-1.62.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e803e9b58d8f9b4ff0ea991611a8d51b31c68d2e24572cd1fe85e99e8cc1b4f8"}, - {file = "grpcio-1.62.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4c04fe33039b35b97c02d2901a164bbbb2f21fb9c4e2a45a959f0b044c3512c"}, - {file = "grpcio-1.62.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:95370c71b8c9062f9ea033a0867c4c73d6f0ff35113ebd2618171ec1f1e903e0"}, - {file = "grpcio-1.62.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c912688acc05e4ff012c8891803659d6a8a8b5106f0f66e0aed3fb7e77898fa6"}, - {file = "grpcio-1.62.0-cp310-cp310-win32.whl", hash = "sha256:821a44bd63d0f04e33cf4ddf33c14cae176346486b0df08b41a6132b976de5fc"}, - {file = "grpcio-1.62.0-cp310-cp310-win_amd64.whl", hash = "sha256:81531632f93fece32b2762247c4c169021177e58e725494f9a746ca62c83acaa"}, - {file = "grpcio-1.62.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:3fa15850a6aba230eed06b236287c50d65a98f05054a0f01ccedf8e1cc89d57f"}, - {file = "grpcio-1.62.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:36df33080cd7897623feff57831eb83c98b84640b016ce443305977fac7566fb"}, - {file = "grpcio-1.62.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:7a195531828b46ea9c4623c47e1dc45650fc7206f8a71825898dd4c9004b0928"}, - {file = "grpcio-1.62.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab140a3542bbcea37162bdfc12ce0d47a3cda3f2d91b752a124cc9fe6776a9e2"}, - {file = "grpcio-1.62.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f9d6c3223914abb51ac564dc9c3782d23ca445d2864321b9059d62d47144021"}, - {file = "grpcio-1.62.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:fbe0c20ce9a1cff75cfb828b21f08d0a1ca527b67f2443174af6626798a754a4"}, - {file = "grpcio-1.62.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38f69de9c28c1e7a8fd24e4af4264726637b72f27c2099eaea6e513e7142b47e"}, - {file = "grpcio-1.62.0-cp311-cp311-win32.whl", hash = "sha256:ce1aafdf8d3f58cb67664f42a617af0e34555fe955450d42c19e4a6ad41c84bd"}, - {file = "grpcio-1.62.0-cp311-cp311-win_amd64.whl", hash = "sha256:eef1d16ac26c5325e7d39f5452ea98d6988c700c427c52cbc7ce3201e6d93334"}, - {file = "grpcio-1.62.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:8aab8f90b2a41208c0a071ec39a6e5dbba16fd827455aaa070fec241624ccef8"}, - {file = "grpcio-1.62.0-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:62aa1659d8b6aad7329ede5d5b077e3d71bf488d85795db517118c390358d5f6"}, - {file = "grpcio-1.62.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:0d7ae7fc7dbbf2d78d6323641ded767d9ec6d121aaf931ec4a5c50797b886532"}, - {file = "grpcio-1.62.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f359d635ee9428f0294bea062bb60c478a8ddc44b0b6f8e1f42997e5dc12e2ee"}, - {file = "grpcio-1.62.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d48e5b1f8f4204889f1acf30bb57c30378e17c8d20df5acbe8029e985f735c"}, - {file = "grpcio-1.62.0-cp312-cp312-musllinux_1_1_i686.whl", hash = 
"sha256:662d3df5314ecde3184cf87ddd2c3a66095b3acbb2d57a8cada571747af03873"}, - {file = "grpcio-1.62.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:92cdb616be44c8ac23a57cce0243af0137a10aa82234f23cd46e69e115071388"}, - {file = "grpcio-1.62.0-cp312-cp312-win32.whl", hash = "sha256:0b9179478b09ee22f4a36b40ca87ad43376acdccc816ce7c2193a9061bf35701"}, - {file = "grpcio-1.62.0-cp312-cp312-win_amd64.whl", hash = "sha256:614c3ed234208e76991992342bab725f379cc81c7dd5035ee1de2f7e3f7a9842"}, - {file = "grpcio-1.62.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:7e1f51e2a460b7394670fdb615e26d31d3260015154ea4f1501a45047abe06c9"}, - {file = "grpcio-1.62.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:bcff647e7fe25495e7719f779cc219bbb90b9e79fbd1ce5bda6aae2567f469f2"}, - {file = "grpcio-1.62.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:56ca7ba0b51ed0de1646f1735154143dcbdf9ec2dbe8cc6645def299bb527ca1"}, - {file = "grpcio-1.62.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e84bfb2a734e4a234b116be208d6f0214e68dcf7804306f97962f93c22a1839"}, - {file = "grpcio-1.62.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c1488b31a521fbba50ae86423f5306668d6f3a46d124f7819c603979fc538c4"}, - {file = "grpcio-1.62.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:98d8f4eb91f1ce0735bf0b67c3b2a4fea68b52b2fd13dc4318583181f9219b4b"}, - {file = "grpcio-1.62.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b3d3d755cfa331d6090e13aac276d4a3fb828bf935449dc16c3d554bf366136b"}, - {file = "grpcio-1.62.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a33f2bfd8a58a02aab93f94f6c61279be0f48f99fcca20ebaee67576cd57307b"}, - {file = "grpcio-1.62.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:5e709f7c8028ce0443bddc290fb9c967c1e0e9159ef7a030e8c21cac1feabd35"}, - {file = "grpcio-1.62.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:2f3d9a4d0abb57e5f49ed5039d3ed375826c2635751ab89dcc25932ff683bbb6"}, - {file = "grpcio-1.62.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:62ccb92f594d3d9fcd00064b149a0187c246b11e46ff1b7935191f169227f04c"}, - {file = "grpcio-1.62.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:921148f57c2e4b076af59a815467d399b7447f6e0ee10ef6d2601eb1e9c7f402"}, - {file = "grpcio-1.62.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f897b16190b46bc4d4aaf0a32a4b819d559a37a756d7c6b571e9562c360eed72"}, - {file = "grpcio-1.62.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1bc8449084fe395575ed24809752e1dc4592bb70900a03ca42bf236ed5bf008f"}, - {file = "grpcio-1.62.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81d444e5e182be4c7856cd33a610154fe9ea1726bd071d07e7ba13fafd202e38"}, - {file = "grpcio-1.62.0-cp38-cp38-win32.whl", hash = "sha256:88f41f33da3840b4a9bbec68079096d4caf629e2c6ed3a72112159d570d98ebe"}, - {file = "grpcio-1.62.0-cp38-cp38-win_amd64.whl", hash = "sha256:fc2836cb829895ee190813446dce63df67e6ed7b9bf76060262c55fcd097d270"}, - {file = "grpcio-1.62.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:fcc98cff4084467839d0a20d16abc2a76005f3d1b38062464d088c07f500d170"}, - {file = "grpcio-1.62.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:0d3dee701e48ee76b7d6fbbba18ba8bc142e5b231ef7d3d97065204702224e0e"}, - {file = "grpcio-1.62.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:b7a6be562dd18e5d5bec146ae9537f20ae1253beb971c0164f1e8a2f5a27e829"}, - {file = "grpcio-1.62.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:29cb592c4ce64a023712875368bcae13938c7f03e99f080407e20ffe0a9aa33b"}, - {file = "grpcio-1.62.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1eda79574aec8ec4d00768dcb07daba60ed08ef32583b62b90bbf274b3c279f7"}, - {file = "grpcio-1.62.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7eea57444a354ee217fda23f4b479a4cdfea35fb918ca0d8a0e73c271e52c09c"}, - {file = "grpcio-1.62.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0e97f37a3b7c89f9125b92d22e9c8323f4e76e7993ba7049b9f4ccbe8bae958a"}, - {file = "grpcio-1.62.0-cp39-cp39-win32.whl", hash = "sha256:39cd45bd82a2e510e591ca2ddbe22352e8413378852ae814549c162cf3992a93"}, - {file = "grpcio-1.62.0-cp39-cp39-win_amd64.whl", hash = "sha256:b71c65427bf0ec6a8b48c68c17356cb9fbfc96b1130d20a07cb462f4e4dcdcd5"}, - {file = "grpcio-1.62.0.tar.gz", hash = "sha256:748496af9238ac78dcd98cce65421f1adce28c3979393e3609683fcd7f3880d7"}, -] - -[package.extras] -protobuf = ["grpcio-tools (>=1.62.0)"] - -[[package]] -name = "grpcio-status" -version = "1.62.0" -description = "Status proto mapping for gRPC" -optional = false -python-versions = ">=3.6" -files = [ - {file = "grpcio-status-1.62.0.tar.gz", hash = "sha256:0d693e9c09880daeaac060d0c3dba1ae470a43c99e5d20dfeafd62cf7e08a85d"}, - {file = "grpcio_status-1.62.0-py3-none-any.whl", hash = "sha256:3baac03fcd737310e67758c4082a188107f771d32855bce203331cd4c9aa687a"}, -] - -[package.dependencies] -googleapis-common-protos = ">=1.5.5" -grpcio = ">=1.62.0" -protobuf = ">=4.21.6" - -[[package]] -name = "h11" -version = "0.14.0" -description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -optional = false -python-versions = ">=3.7" -files = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, -] - -[[package]] -name = "httpcore" -version = "1.0.4" -description = "A minimal low-level HTTP client." -optional = false -python-versions = ">=3.8" -files = [ - {file = "httpcore-1.0.4-py3-none-any.whl", hash = "sha256:ac418c1db41bade2ad53ae2f3834a3a0f5ae76b56cf5aa497d2d033384fc7d73"}, - {file = "httpcore-1.0.4.tar.gz", hash = "sha256:cb2839ccfcba0d2d3c1131d3c3e26dfc327326fbe7a5dc0dbfe9f6c9151bb022"}, -] - -[package.dependencies] -certifi = "*" -h11 = ">=0.13,<0.15" - -[package.extras] -asyncio = ["anyio (>=4.0,<5.0)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.25.0)"] - -[[package]] -name = "httpx" -version = "0.27.0" -description = "The next generation HTTP client." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, - {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, -] - -[package.dependencies] -anyio = "*" -certifi = "*" -httpcore = "==1.*" -idna = "*" -sniffio = "*" - -[package.extras] -brotli = ["brotli", "brotlicffi"] -cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] - -[[package]] -name = "idna" -version = "3.6" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, -] - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "markdown-it-py" -version = "3.0.0" -description = "Python port of markdown-it. Markdown parsing, done right!" -optional = false -python-versions = ">=3.8" -files = [ - {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, - {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, -] - -[package.dependencies] -mdurl = ">=0.1,<1.0" - -[package.extras] -benchmarking = ["psutil", "pytest", "pytest-benchmark"] -code-style = ["pre-commit (>=3.0,<4.0)"] -compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] -linkify = ["linkify-it-py (>=1,<3)"] -plugins = ["mdit-py-plugins"] -profiling = ["gprof2dot"] -rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] -testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] - -[[package]] -name = "mdurl" -version = "0.1.2" -description = "Markdown URL utilities" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, - {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, -] - -[[package]] -name = "multidict" -version = "6.0.5" -description = "multidict implementation" -optional = false -python-versions = ">=3.7" -files = [ - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, - {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, - {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = 
"sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, - {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, - {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, - {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, - {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, - {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, - {file = 
"multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, - {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, - {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, - {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, - {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, - {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, - {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, - {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, - {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, -] - -[[package]] -name = "mypy" -version = "1.8.0" -description = "Optional static typing for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, - {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, - {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, - {file = 
"mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, - {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, - {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, - {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, - {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, - {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, - {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, - {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, - {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, - {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, - {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, - {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, - {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, - {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, - {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, - {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, - {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, - {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, - {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, - {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, - {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, - {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, - {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, - {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, -] - -[package.dependencies] -mypy-extensions = ">=1.0.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.1.0" - 
-[package.extras] -dmypy = ["psutil (>=4.0)"] -install-types = ["pip"] -mypyc = ["setuptools (>=50)"] -reports = ["lxml"] - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." -optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - -[[package]] -name = "numpy" -version = "1.26.4" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.9" -files = [ - {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, - {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, - {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, - {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, - {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, - {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, - {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, - {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, - {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, - {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, - {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, -] - -[[package]] -name = "packaging" -version = "23.2" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.7" -files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, -] - -[[package]] -name = "pandas" -version = "2.2.1" -description = "Powerful data structures for data analysis, time series, and statistics" -optional = false -python-versions = ">=3.9" -files = [ - {file = "pandas-2.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8df8612be9cd1c7797c93e1c5df861b2ddda0b48b08f2c3eaa0702cf88fb5f88"}, - {file = "pandas-2.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0f573ab277252ed9aaf38240f3b54cfc90fff8e5cab70411ee1d03f5d51f3944"}, - {file = "pandas-2.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f02a3a6c83df4026e55b63c1f06476c9aa3ed6af3d89b4f04ea656ccdaaaa359"}, - {file = "pandas-2.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c38ce92cb22a4bea4e3929429aa1067a454dcc9c335799af93ba9be21b6beb51"}, - {file = "pandas-2.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c2ce852e1cf2509a69e98358e8458775f89599566ac3775e70419b98615f4b06"}, - {file = "pandas-2.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:53680dc9b2519cbf609c62db3ed7c0b499077c7fefda564e330286e619ff0dd9"}, - {file = "pandas-2.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:94e714a1cca63e4f5939cdce5f29ba8d415d85166be3441165edd427dc9f6bc0"}, - {file = "pandas-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f821213d48f4ab353d20ebc24e4faf94ba40d76680642fb7ce2ea31a3ad94f9b"}, - {file = "pandas-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c70e00c2d894cb230e5c15e4b1e1e6b2b478e09cf27cc593a11ef955b9ecc81a"}, - {file = "pandas-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e97fbb5387c69209f134893abc788a6486dbf2f9e511070ca05eed4b930b1b02"}, - {file = "pandas-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101d0eb9c5361aa0146f500773395a03839a5e6ecde4d4b6ced88b7e5a1a6403"}, - {file = "pandas-2.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7d2ed41c319c9fb4fd454fe25372028dfa417aacb9790f68171b2e3f06eae8cd"}, - {file = "pandas-2.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:af5d3c00557d657c8773ef9ee702c61dd13b9d7426794c9dfeb1dc4a0bf0ebc7"}, - {file = "pandas-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:06cf591dbaefb6da9de8472535b185cba556d0ce2e6ed28e21d919704fef1a9e"}, - {file = "pandas-2.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:88ecb5c01bb9ca927ebc4098136038519aa5d66b44671861ffab754cae75102c"}, - {file = "pandas-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:04f6ec3baec203c13e3f8b139fb0f9f86cd8c0b94603ae3ae8ce9a422e9f5bee"}, - {file = "pandas-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a935a90a76c44fe170d01e90a3594beef9e9a6220021acfb26053d01426f7dc2"}, - {file = "pandas-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c391f594aae2fd9f679d419e9a4d5ba4bce5bb13f6a989195656e7dc4b95c8f0"}, - {file = "pandas-2.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9d1265545f579edf3f8f0cb6f89f234f5e44ba725a34d86535b1a1d38decbccc"}, - {file = "pandas-2.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11940e9e3056576ac3244baef2fedade891977bcc1cb7e5cc8f8cc7d603edc89"}, - {file = "pandas-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:4acf681325ee1c7f950d058b05a820441075b0dd9a2adf5c4835b9bc056bf4fb"}, - {file = "pandas-2.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9bd8a40f47080825af4317d0340c656744f2bfdb6819f818e6ba3cd24c0e1397"}, - {file = "pandas-2.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:df0c37ebd19e11d089ceba66eba59a168242fc6b7155cba4ffffa6eccdfb8f16"}, - {file = "pandas-2.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:739cc70eaf17d57608639e74d63387b0d8594ce02f69e7a0b046f117974b3019"}, - {file = "pandas-2.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9d3558d263073ed95e46f4650becff0c5e1ffe0fc3a015de3c79283dfbdb3df"}, - {file = "pandas-2.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4aa1d8707812a658debf03824016bf5ea0d516afdea29b7dc14cf687bc4d4ec6"}, - {file = 
"pandas-2.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:76f27a809cda87e07f192f001d11adc2b930e93a2b0c4a236fde5429527423be"}, - {file = "pandas-2.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:1ba21b1d5c0e43416218db63037dbe1a01fc101dc6e6024bcad08123e48004ab"}, - {file = "pandas-2.2.1.tar.gz", hash = "sha256:0ab90f87093c13f3e8fa45b48ba9f39181046e8f3317d3aadb2fffbb1b978572"}, -] - -[package.dependencies] -numpy = [ - {version = ">=1.22.4,<2", markers = "python_version < \"3.11\""}, - {version = ">=1.23.2,<2", markers = "python_version == \"3.11\""}, - {version = ">=1.26.0,<2", markers = "python_version >= \"3.12\""}, -] -python-dateutil = ">=2.8.2" -pytz = ">=2020.1" -tzdata = ">=2022.7" - -[package.extras] -all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] -aws = ["s3fs (>=2022.11.0)"] -clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] -compression = ["zstandard (>=0.19.0)"] -computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] -consortium-standard = ["dataframe-api-compat (>=0.1.7)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] -feather = ["pyarrow (>=10.0.1)"] -fss = ["fsspec (>=2022.11.0)"] -gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] -hdf5 = ["tables (>=3.8.0)"] -html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] -mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] -parquet = ["pyarrow (>=10.0.1)"] -performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] -plot = ["matplotlib (>=3.6.3)"] -postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] -pyarrow = ["pyarrow (>=10.0.1)"] -spss = ["pyreadstat (>=1.2.0)"] -sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] -test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.9.2)"] - -[[package]] -name = "pandas-stubs" -version = "2.2.0.240218" -description = "Type annotations for pandas" -optional = false -python-versions = ">=3.9" -files = [ - {file = "pandas_stubs-2.2.0.240218-py3-none-any.whl", hash = "sha256:e97478320add9b958391b15a56c5f1bf29da656d5b747d28bbe708454b3a1fe6"}, - {file = "pandas_stubs-2.2.0.240218.tar.gz", hash = "sha256:63138c12eec715d66d48611bdd922f31cd7c78bcadd19384c3bd61fd3720a11a"}, -] - -[package.dependencies] -numpy = {version = ">=1.26.0", markers = "python_version < \"3.13\""} -types-pytz = ">=2022.1.1" - -[[package]] -name = "pg8000" -version = "1.30.5" -description = "PostgreSQL interface library" -optional = false -python-versions = ">=3.8" 
-files = [ - {file = "pg8000-1.30.5-py3-none-any.whl", hash = "sha256:1abf18da652b0ad8e9cbfe57ed841c350b5330c33d8151303555db1fe5ce57f8"}, - {file = "pg8000-1.30.5.tar.gz", hash = "sha256:072f7ad00cd723695cb2e9fc02c1dfb84c781455e97b8de6f4c4281eea08078c"}, -] - -[package.dependencies] -python-dateutil = ">=2.8.2" -scramp = ">=1.4.4" - -[[package]] -name = "platformdirs" -version = "4.2.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -optional = false -python-versions = ">=3.8" -files = [ - {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, - {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, -] - -[package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] - -[[package]] -name = "pluggy" -version = "1.4.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "proto-plus" -version = "1.23.0" -description = "Beautiful, Pythonic protocol buffers." -optional = false -python-versions = ">=3.6" -files = [ - {file = "proto-plus-1.23.0.tar.gz", hash = "sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2"}, - {file = "proto_plus-1.23.0-py3-none-any.whl", hash = "sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c"}, -] - -[package.dependencies] -protobuf = ">=3.19.0,<5.0.0dev" - -[package.extras] -testing = ["google-api-core[grpc] (>=1.31.5)"] - -[[package]] -name = "protobuf" -version = "4.25.3" -description = "" -optional = false -python-versions = ">=3.8" -files = [ - {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, - {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, - {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, - {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, - {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, - {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, - {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, - {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, - {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, - {file = "protobuf-4.25.3-py3-none-any.whl", 
hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, - {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, -] - -[[package]] -name = "pyasn1" -version = "0.5.1" -description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ - {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"}, - {file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"}, -] - -[[package]] -name = "pyasn1-modules" -version = "0.3.0" -description = "A collection of ASN.1-based protocols modules" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ - {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, - {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, -] - -[package.dependencies] -pyasn1 = ">=0.4.6,<0.6.0" - -[[package]] -name = "pycparser" -version = "2.21" -description = "C parser in Python" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, -] - -[[package]] -name = "pydantic" -version = "1.10.14" -description = "Data validation and settings management using python type hints" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, - {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, - {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, - {file = 
"pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, - {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, - {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, - {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, - {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash 
= "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, - {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, - {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, - {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, -] - -[package.dependencies] -typing-extensions = ">=4.2.0" - -[package.extras] -dotenv = ["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] - -[[package]] -name = "pydash" -version = "7.0.7" -description = "The kitchen sink of Python utility libraries for doing \"stuff\" in a functional way. Based on the Lo-Dash Javascript library." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydash-7.0.7-py3-none-any.whl", hash = "sha256:c3c5b54eec0a562e0080d6f82a14ad4d5090229847b7e554235b5c1558c745e1"}, - {file = "pydash-7.0.7.tar.gz", hash = "sha256:cc935d5ac72dd41fb4515bdf982e7c864c8b5eeea16caffbab1936b849aaa49a"}, -] - -[package.dependencies] -typing-extensions = ">=3.10,<4.6.0 || >4.6.0" - -[package.extras] -dev = ["black", "build", "coverage", "docformatter", "flake8", "flake8-black", "flake8-bugbear", "flake8-isort", "furo", "invoke", "isort", "mypy", "pylint", "pytest", "pytest-cov", "pytest-mypy-testing", "sphinx", "sphinx-autodoc-typehints", "tox", "twine", "wheel"] - -[[package]] -name = "pygments" -version = "2.17.2" -description = "Pygments is a syntax highlighting package written in Python." -optional = false -python-versions = ">=3.7" -files = [ - {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, - {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, -] - -[package.extras] -plugins = ["importlib-metadata"] -windows-terminal = ["colorama (>=0.4.6)"] - -[[package]] -name = "pytest" -version = "8.0.2" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest-8.0.2-py3-none-any.whl", hash = "sha256:edfaaef32ce5172d5466b5127b42e0d6d35ebbe4453f0e3505d96afd93f6b096"}, - {file = "pytest-8.0.2.tar.gz", hash = "sha256:d4051d623a2e0b7e51960ba963193b09ce6daeb9759a451844a21e4ddedfc1bd"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=1.3.0,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} - -[package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] - -[[package]] -name = "pytest-asyncio" -version = "0.23.5" -description = "Pytest support for asyncio" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest-asyncio-0.23.5.tar.gz", hash = "sha256:3a048872a9c4ba14c3e90cc1aa20cbc2def7d01c7c8db3777ec281ba9c057675"}, - {file = "pytest_asyncio-0.23.5-py3-none-any.whl", hash = "sha256:4e7093259ba018d58ede7d5315131d21923a60f8a6e9ee266ce1589685c89eac"}, -] - -[package.dependencies] -pytest = ">=7.0.0,<9" - -[package.extras] -docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] -testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] - -[[package]] -name = "python-dateutil" -version 
= "2.9.0.post0" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, - {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "python-dotenv" -version = "1.0.1" -description = "Read key-value pairs from a .env file and set them as environment variables" -optional = false -python-versions = ">=3.8" -files = [ - {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, - {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, -] - -[package.extras] -cli = ["click (>=5.0)"] - -[[package]] -name = "pytz" -version = "2024.1" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, -] - -[[package]] -name = "pywin32" -version = "306" -description = "Python for Window Extensions" -optional = false -python-versions = "*" -files = [ - {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, - {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, - {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, - {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, - {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, - {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, - {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, - {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, - {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, - {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, - {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, - {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, - {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, - {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, -] - -[[package]] -name = "requests" -version = "2.31.0" -description = "Python HTTP for Humans." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "rich" -version = "13.7.1" -description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, - {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, -] - -[package.dependencies] -markdown-it-py = ">=2.2.0" -pygments = ">=2.13.0,<3.0.0" - -[package.extras] -jupyter = ["ipywidgets (>=7.5.1,<9)"] - -[[package]] -name = "rsa" -version = "4.9" -description = "Pure-Python RSA implementation" -optional = false -python-versions = ">=3.6,<4" -files = [ - {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, - {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, -] - -[package.dependencies] -pyasn1 = ">=0.1.3" - -[[package]] -name = "ruff" -version = "0.3.1" -description = "An extremely fast Python linter and code formatter, written in Rust." -optional = false -python-versions = ">=3.7" -files = [ - {file = "ruff-0.3.1-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:6b82e3937d0d76554cd5796bc3342a7d40de44494d29ff490022d7a52c501744"}, - {file = "ruff-0.3.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ae7954c8f692b70e6a206087ae3988acc9295d84c550f8d90b66c62424c16771"}, - {file = "ruff-0.3.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b730f56ccf91225da0f06cfe421e83b8cc27b2a79393db9c3df02ed7e2bbc01"}, - {file = "ruff-0.3.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c78bfa85637668f47bd82aa2ae17de2b34221ac23fea30926f6409f9e37fc927"}, - {file = "ruff-0.3.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6abaad602d6e6daaec444cbf4d9364df0a783e49604c21499f75bb92237d4af"}, - {file = "ruff-0.3.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5f0c21b6914c3c9a25a59497cbb1e5b6c2d8d9beecc9b8e03ee986e24eee072e"}, - {file = "ruff-0.3.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:434c3fc72e6311c85cd143c4c448b0e60e025a9ac1781e63ba222579a8c29200"}, - {file = "ruff-0.3.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78a7025e6312cbba496341da5062e7cdd47d95f45c1b903e635cdeb1ba5ec2b9"}, - {file = "ruff-0.3.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52b02bb46f1a79b0c1fa93f6495bc7e77e4ef76e6c28995b4974a20ed09c0833"}, - {file = "ruff-0.3.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:11b5699c42f7d0b771c633d620f2cb22e727fb226273aba775a91784a9ed856c"}, - {file = "ruff-0.3.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:54e5dca3e411772b51194b3102b5f23b36961e8ede463776b289b78180df71a0"}, - {file = 
"ruff-0.3.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:951efb610c5844e668bbec4f71cf704f8645cf3106e13f283413969527ebfded"}, - {file = "ruff-0.3.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:09c7333b25e983aabcf6e38445252cff0b4745420fc3bda45b8fce791cc7e9ce"}, - {file = "ruff-0.3.1-py3-none-win32.whl", hash = "sha256:d937f9b99ebf346e0606c3faf43c1e297a62ad221d87ef682b5bdebe199e01f6"}, - {file = "ruff-0.3.1-py3-none-win_amd64.whl", hash = "sha256:c0318a512edc9f4e010bbaab588b5294e78c5cdc9b02c3d8ab2d77c7ae1903e3"}, - {file = "ruff-0.3.1-py3-none-win_arm64.whl", hash = "sha256:d3b60e44240f7e903e6dbae3139a65032ea4c6f2ad99b6265534ff1b83c20afa"}, - {file = "ruff-0.3.1.tar.gz", hash = "sha256:d30db97141fc2134299e6e983a6727922c9e03c031ae4883a6d69461de722ae7"}, -] - -[[package]] -name = "scramp" -version = "1.4.4" -description = "An implementation of the SCRAM protocol." -optional = false -python-versions = ">=3.7" -files = [ - {file = "scramp-1.4.4-py3-none-any.whl", hash = "sha256:b142312df7c2977241d951318b7ee923d6b7a4f75ba0f05b621ece1ed616faa3"}, - {file = "scramp-1.4.4.tar.gz", hash = "sha256:b7022a140040f33cf863ab2657917ed05287a807b917950489b89b9f685d59bc"}, -] - -[package.dependencies] -asn1crypto = ">=1.5.1" - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "sniffio" -version = "1.3.1" -description = "Sniff out which async library your code is running under" -optional = false -python-versions = ">=3.7" -files = [ - {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, - {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, -] - -[[package]] -name = "sqlalchemy" -version = "2.0.28" -description = "Database Abstraction Library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "SQLAlchemy-2.0.28-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0b148ab0438f72ad21cb004ce3bdaafd28465c4276af66df3b9ecd2037bf252"}, - {file = "SQLAlchemy-2.0.28-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bbda76961eb8f27e6ad3c84d1dc56d5bc61ba8f02bd20fcf3450bd421c2fcc9c"}, - {file = "SQLAlchemy-2.0.28-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feea693c452d85ea0015ebe3bb9cd15b6f49acc1a31c28b3c50f4db0f8fb1e71"}, - {file = "SQLAlchemy-2.0.28-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5da98815f82dce0cb31fd1e873a0cb30934971d15b74e0d78cf21f9e1b05953f"}, - {file = "SQLAlchemy-2.0.28-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4a5adf383c73f2d49ad15ff363a8748319ff84c371eed59ffd0127355d6ea1da"}, - {file = "SQLAlchemy-2.0.28-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56856b871146bfead25fbcaed098269d90b744eea5cb32a952df00d542cdd368"}, - {file = "SQLAlchemy-2.0.28-cp310-cp310-win32.whl", hash = "sha256:943aa74a11f5806ab68278284a4ddd282d3fb348a0e96db9b42cb81bf731acdc"}, - {file = "SQLAlchemy-2.0.28-cp310-cp310-win_amd64.whl", hash = "sha256:c6c4da4843e0dabde41b8f2e8147438330924114f541949e6318358a56d1875a"}, - {file = "SQLAlchemy-2.0.28-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:46a3d4e7a472bfff2d28db838669fc437964e8af8df8ee1e4548e92710929adc"}, - {file = "SQLAlchemy-2.0.28-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3dd67b5d69794cfe82862c002512683b3db038b99002171f624712fa71aeaa"}, - {file = "SQLAlchemy-2.0.28-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61e2e41656a673b777e2f0cbbe545323dbe0d32312f590b1bc09da1de6c2a02"}, - {file = "SQLAlchemy-2.0.28-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0315d9125a38026227f559488fe7f7cee1bd2fbc19f9fd637739dc50bb6380b2"}, - {file = "SQLAlchemy-2.0.28-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:af8ce2d31679006e7b747d30a89cd3ac1ec304c3d4c20973f0f4ad58e2d1c4c9"}, - {file = "SQLAlchemy-2.0.28-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:81ba314a08c7ab701e621b7ad079c0c933c58cdef88593c59b90b996e8b58fa5"}, - {file = "SQLAlchemy-2.0.28-cp311-cp311-win32.whl", hash = "sha256:1ee8bd6d68578e517943f5ebff3afbd93fc65f7ef8f23becab9fa8fb315afb1d"}, - {file = "SQLAlchemy-2.0.28-cp311-cp311-win_amd64.whl", hash = "sha256:ad7acbe95bac70e4e687a4dc9ae3f7a2f467aa6597049eeb6d4a662ecd990bb6"}, - {file = "SQLAlchemy-2.0.28-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d3499008ddec83127ab286c6f6ec82a34f39c9817f020f75eca96155f9765097"}, - {file = "SQLAlchemy-2.0.28-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9b66fcd38659cab5d29e8de5409cdf91e9986817703e1078b2fdaad731ea66f5"}, - {file = "SQLAlchemy-2.0.28-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bea30da1e76cb1acc5b72e204a920a3a7678d9d52f688f087dc08e54e2754c67"}, - {file = "SQLAlchemy-2.0.28-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:124202b4e0edea7f08a4db8c81cc7859012f90a0d14ba2bf07c099aff6e96462"}, - {file = "SQLAlchemy-2.0.28-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e23b88c69497a6322b5796c0781400692eca1ae5532821b39ce81a48c395aae9"}, - {file = "SQLAlchemy-2.0.28-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b6303bfd78fb3221847723104d152e5972c22367ff66edf09120fcde5ddc2e2"}, - {file = "SQLAlchemy-2.0.28-cp312-cp312-win32.whl", hash = "sha256:a921002be69ac3ab2cf0c3017c4e6a3377f800f1fca7f254c13b5f1a2f10022c"}, - {file = "SQLAlchemy-2.0.28-cp312-cp312-win_amd64.whl", hash = "sha256:b4a2cf92995635b64876dc141af0ef089c6eea7e05898d8d8865e71a326c0385"}, - {file = "SQLAlchemy-2.0.28-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e91b5e341f8c7f1e5020db8e5602f3ed045a29f8e27f7f565e0bdee3338f2c7"}, - {file = "SQLAlchemy-2.0.28-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45c7b78dfc7278329f27be02c44abc0d69fe235495bb8e16ec7ef1b1a17952db"}, - {file = "SQLAlchemy-2.0.28-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3eba73ef2c30695cb7eabcdb33bb3d0b878595737479e152468f3ba97a9c22a4"}, - {file = "SQLAlchemy-2.0.28-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5df5d1dafb8eee89384fb7a1f79128118bc0ba50ce0db27a40750f6f91aa99d5"}, - {file = "SQLAlchemy-2.0.28-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2858bbab1681ee5406650202950dc8f00e83b06a198741b7c656e63818633526"}, - {file = "SQLAlchemy-2.0.28-cp37-cp37m-win32.whl", hash = "sha256:9461802f2e965de5cff80c5a13bc945abea7edaa1d29360b485c3d2b56cdb075"}, - {file = "SQLAlchemy-2.0.28-cp37-cp37m-win_amd64.whl", hash = "sha256:a6bec1c010a6d65b3ed88c863d56b9ea5eeefdf62b5e39cafd08c65f5ce5198b"}, - {file = "SQLAlchemy-2.0.28-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:843a882cadebecc655a68bd9a5b8aa39b3c52f4a9a5572a3036fb1bb2ccdc197"}, - {file = "SQLAlchemy-2.0.28-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:dbb990612c36163c6072723523d2be7c3eb1517bbdd63fe50449f56afafd1133"}, - {file = "SQLAlchemy-2.0.28-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7e4baf9161d076b9a7e432fce06217b9bd90cfb8f1d543d6e8c4595627edb9"}, - {file = "SQLAlchemy-2.0.28-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0a5354cb4de9b64bccb6ea33162cb83e03dbefa0d892db88a672f5aad638a75"}, - {file = "SQLAlchemy-2.0.28-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:fffcc8edc508801ed2e6a4e7b0d150a62196fd28b4e16ab9f65192e8186102b6"}, - {file = "SQLAlchemy-2.0.28-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aca7b6d99a4541b2ebab4494f6c8c2f947e0df4ac859ced575238e1d6ca5716b"}, - {file = "SQLAlchemy-2.0.28-cp38-cp38-win32.whl", hash = "sha256:8c7f10720fc34d14abad5b647bc8202202f4948498927d9f1b4df0fb1cf391b7"}, - {file = "SQLAlchemy-2.0.28-cp38-cp38-win_amd64.whl", hash = "sha256:243feb6882b06a2af68ecf4bec8813d99452a1b62ba2be917ce6283852cf701b"}, - {file = "SQLAlchemy-2.0.28-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fc4974d3684f28b61b9a90fcb4c41fb340fd4b6a50c04365704a4da5a9603b05"}, - {file = "SQLAlchemy-2.0.28-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87724e7ed2a936fdda2c05dbd99d395c91ea3c96f029a033a4a20e008dd876bf"}, - {file = "SQLAlchemy-2.0.28-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68722e6a550f5de2e3cfe9da6afb9a7dd15ef7032afa5651b0f0c6b3adb8815d"}, - {file = "SQLAlchemy-2.0.28-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:328529f7c7f90adcd65aed06a161851f83f475c2f664a898af574893f55d9e53"}, - {file = "SQLAlchemy-2.0.28-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:df40c16a7e8be7413b885c9bf900d402918cc848be08a59b022478804ea076b8"}, - {file = "SQLAlchemy-2.0.28-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:426f2fa71331a64f5132369ede5171c52fd1df1bd9727ce621f38b5b24f48750"}, - {file = "SQLAlchemy-2.0.28-cp39-cp39-win32.whl", hash = "sha256:33157920b233bc542ce497a81a2e1452e685a11834c5763933b440fedd1d8e2d"}, - {file = "SQLAlchemy-2.0.28-cp39-cp39-win_amd64.whl", hash = "sha256:2f60843068e432311c886c5f03c4664acaef507cf716f6c60d5fde7265be9d7b"}, - {file = "SQLAlchemy-2.0.28-py3-none-any.whl", hash = "sha256:78bb7e8da0183a8301352d569900d9d3594c48ac21dc1c2ec6b3121ed8b6c986"}, - {file = "SQLAlchemy-2.0.28.tar.gz", hash = "sha256:dd53b6c4e6d960600fd6532b79ee28e2da489322fcf6648738134587faf767b6"}, -] - -[package.dependencies] -greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} -typing-extensions = ">=4.6.0" - -[package.extras] -aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] -aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] -mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)"] -mysql = ["mysqlclient (>=1.4.0)"] -mysql-connector = ["mysql-connector-python"] -oracle = ["cx_oracle (>=8)"] -oracle-oracledb = 
["oracledb (>=1.0.1)"] -postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.29.1)"] -postgresql-psycopg = ["psycopg (>=3.0.7)"] -postgresql-psycopg2binary = ["psycopg2-binary"] -postgresql-psycopg2cffi = ["psycopg2cffi"] -postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] -pymysql = ["pymysql"] -sqlcipher = ["sqlcipher3_binary"] - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - -[[package]] -name = "types-cachetools" -version = "5.3.0.7" -description = "Typing stubs for cachetools" -optional = false -python-versions = ">=3.7" -files = [ - {file = "types-cachetools-5.3.0.7.tar.gz", hash = "sha256:27c982cdb9cf3fead8b0089ee6b895715ecc99dac90ec29e2cab56eb1aaf4199"}, - {file = "types_cachetools-5.3.0.7-py3-none-any.whl", hash = "sha256:98c069dc7fc087b1b061703369c80751b0a0fc561f6fb072b554e5eee23773a0"}, -] - -[[package]] -name = "types-pytz" -version = "2024.1.0.20240203" -description = "Typing stubs for pytz" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-pytz-2024.1.0.20240203.tar.gz", hash = "sha256:c93751ee20dfc6e054a0148f8f5227b9a00b79c90a4d3c9f464711a73179c89e"}, - {file = "types_pytz-2024.1.0.20240203-py3-none-any.whl", hash = "sha256:9679eef0365db3af91ef7722c199dbb75ee5c1b67e3c4dd7bfbeb1b8a71c21a3"}, -] - -[[package]] -name = "typing-extensions" -version = "4.10.0" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, - {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, -] - -[[package]] -name = "tzdata" -version = "2024.1" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, -] - -[[package]] -name = "urllib3" -version = "2.2.1" -description = "HTTP library with thread-safe connection pooling, file post, and more." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, - {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -h2 = ["h2 (>=4,<5)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "websocket-client" -version = "1.7.0" -description = "WebSocket client for Python with low level API options" -optional = false -python-versions = ">=3.8" -files = [ - {file = "websocket-client-1.7.0.tar.gz", hash = "sha256:10e511ea3a8c744631d3bd77e61eb17ed09304c413ad42cf6ddfa4c7787e8fe6"}, - {file = "websocket_client-1.7.0-py3-none-any.whl", hash = "sha256:f4c3d22fec12a2461427a29957ff07d35098ee2d976d3ba244e688b8b4057588"}, -] - -[package.extras] -docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] -optional = ["python-socks", "wsaccel"] -test = ["websockets"] - -[[package]] -name = "yarl" -version = "1.9.4" -description = "Yet another URL library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, - {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, - {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", 
hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, - {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, - {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, - {file = 
"yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, - {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, - {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, - {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, - {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, - {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, - {file = 
"yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, - {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, - {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, - {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, - {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, - {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, - {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, -] - -[package.dependencies] -idna = ">=2.0" -multidict = ">=4.0" - -[metadata] -lock-version = "2.0" -python-versions = "^3.10" -content-hash = "c0543d0df16fa30be68276e18005fea320d5ef9d4086588101a51323ab26edae" diff --git a/airbyte-ci/connectors/live-tests/pyproject.toml b/airbyte-ci/connectors/live-tests/pyproject.toml deleted file mode 100644 index 8a37737fa3e7..000000000000 --- a/airbyte-ci/connectors/live-tests/pyproject.toml +++ /dev/null @@ -1,48 +0,0 @@ -[build-system] -requires = ["poetry-core>=1.0.0"] -build-backend = "poetry.core.masonry.api" - -[tool.poetry] -name = "live-tests" -version = "0.4.0" -description = "Contains utilities for testing connectors against live data." -authors = ["Airbyte "] -license = "MIT" -homepage = "https://github.com/airbytehq/airbyte" -readme = "README.md" -packages = [ - { include = "live_tests", from = "src" }, -] - -[tool.poetry.dependencies] -python = "^3.10" -airbyte-protocol-models = "<1.0.0" -cachetools = "~=5.3.3" -dagger-io = "==0.9.6" -pydantic = "*" -pytest-asyncio = "~=0.23.5" -pydash = "~=7.0.7" -docker = ">=6,<7" -asyncclick = "^8.1.7.1" -connection-retriever = {git = "https://github.com/airbytehq/airbyte-platform-internal.git", rev = "augustin/03-06-create_connection-retriever_tool", subdirectory = "tools/connection-retriever"} -duckdb = "^0.10.0" -pandas = "^2.2.1" - -[tool.poetry.scripts] -live-tests = "live_tests.cli:live_tests" - -[tool.poetry.group.dev.dependencies] -ruff = "^0.3.0" -mypy = "^1.8.0" -types-cachetools = "^5.3.0.7" -pandas-stubs = "^2.2.0.240218" - -[tool.poe.tasks] -test = "pytest tests" -lint = "ruff check src" -type_check = "mypy src --disallow-untyped-defs" -pre-push = ["lint", "test", "type_check"] - -# Can't run CI at the moment because connection-retriever is not public -# [tool.airbyte_ci] -#poe_tasks = ["test", "lint", "type_check"] diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/__init__.py b/airbyte-ci/connectors/live-tests/src/live_tests/__init__.py deleted file mode 100644 index 51502a263eae..000000000000 --- a/airbyte-ci/connectors/live-tests/src/live_tests/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/cli.py b/airbyte-ci/connectors/live-tests/src/live_tests/cli.py deleted file mode 100644 index 5c7e22e56dad..000000000000 --- a/airbyte-ci/connectors/live-tests/src/live_tests/cli.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- -import asyncclick as click -from live_tests.debug.cli import debug_cmd - - -@click.group() -@click.pass_context -async def live_tests(ctx: click.Context) -> None: - pass - - -live_tests.add_command(debug_cmd) diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/__init__.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/__init__.py deleted file mode 100644 index f70ecfc3a89e..000000000000 --- a/airbyte-ci/connectors/live-tests/src/live_tests/commons/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/__init__.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/__init__.py deleted file mode 100644 index f65344dad82c..000000000000 --- a/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -from .base_backend import BaseBackend -from .file_backend import FileBackend -from .duckdb_backend import DuckDbBackend - -__all__ = ["BaseBackend", "FileBackend", "DuckDbBackend"] diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/base_backend.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/base_backend.py deleted file mode 100644 index f009b8272275..000000000000 --- a/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/base_backend.py +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -from abc import ABC, abstractmethod -from typing import Iterable - -from airbyte_protocol.models import AirbyteMessage # type: ignore - - -class BaseBackend(ABC): - """ - Interface to be shared between the file backend and the database backend(s) - """ - - @abstractmethod - def write(self, airbyte_messages: Iterable[AirbyteMessage]) -> None: - ... diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/duckdb_backend.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/duckdb_backend.py deleted file mode 100644 index 3f824b5d8d54..000000000000 --- a/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/duckdb_backend.py +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - - -from typing import Iterable - -import duckdb -from airbyte_protocol.models import AirbyteMessage # type: ignore -from live_tests.commons.backends.file_backend import FileBackend - - -class DuckDbBackend(FileBackend): - DUCK_DB_FILE_NAME = "duck.db" - - def write(self, airbyte_messages: Iterable[AirbyteMessage]) -> None: - # Use the FileBackend to write the messages to disk as jsonl files - super().write(airbyte_messages) - duck_db_conn = duckdb.connect(f"{self._output_directory}/{self.DUCK_DB_FILE_NAME}") - for jsonl_file in self.jsonl_files: - if jsonl_file.exists(): - duck_db_conn.sql(f"CREATE TABLE {jsonl_file.stem} AS SELECT * FROM read_json_auto('{jsonl_file}')") - duck_db_conn.close() diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/file_backend.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/file_backend.py deleted file mode 100644 index 7c7ec64bfdda..000000000000 --- a/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends/file_backend.py +++ /dev/null @@ -1,149 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
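> Editor's note: the DuckDbBackend deleted above reuses the FileBackend to write JSONL files, then materializes each file as a DuckDB table named after the file stem. A minimal, self-contained sketch of that load step, assuming the `duckdb` package is installed; the output path and sample record are illustrative.

```python
# Sketch of the DuckDbBackend idea: write JSONL, then load it into DuckDB.
from pathlib import Path

import duckdb

output_dir = Path("/tmp/live_tests_demo")
output_dir.mkdir(parents=True, exist_ok=True)
records = output_dir / "records.jsonl"
records.write_text('{"stream": "users", "data": {"id": 1}}\n')

conn = duckdb.connect(str(output_dir / "duck.db"))
# read_json_auto infers the table schema from the JSONL content.
conn.sql(f"CREATE TABLE {records.stem} AS SELECT * FROM read_json_auto('{records}')")
print(conn.sql("SELECT * FROM records").fetchall())
conn.close()
```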
- -import json -from pathlib import Path -from typing import Any, Iterable, TextIO, Tuple - -import pydash -from airbyte_protocol.models import AirbyteMessage # type: ignore -from airbyte_protocol.models import Type as AirbyteMessageType -from cachetools import LRUCache, cached -from live_tests.commons.backends.base_backend import BaseBackend - - -class FileDescriptorLRUCache(LRUCache): - def popitem(self) -> Tuple[Any, Any]: - filepath, fd = LRUCache.popitem(self) - fd.close() # type: ignore # Close the file descriptor when it's evicted from the cache - return filepath, fd - - -class FileBackend(BaseBackend): - RELATIVE_CATALOGS_PATH = "catalog.jsonl" - RELATIVE_CONNECTION_STATUS_PATH = "connection_status.jsonl" - RELATIVE_RECORDS_PATH = "records.jsonl" - RELATIVE_SPECS_PATH = "spec.jsonl" - RELATIVE_STATES_PATH = "states.jsonl" - RELATIVE_TRACES_PATH = "traces.jsonl" - RELATIVE_LOGS_PATH = "logs.jsonl" - RELATIVE_CONTROLS_PATH = "controls.jsonl" - RECORD_PATHS_TO_POP = ["emitted_at"] - CACHE = FileDescriptorLRUCache(maxsize=250) - - def __init__(self, output_directory: Path): - self._output_directory = output_directory - - @property - def jsonl_specs_path(self) -> Path: - return (self._output_directory / self.RELATIVE_SPECS_PATH).resolve() - - @property - def jsonl_catalogs_path(self) -> Path: - return (self._output_directory / self.RELATIVE_CATALOGS_PATH).resolve() - - @property - def jsonl_connection_status_path(self) -> Path: - return (self._output_directory / self.RELATIVE_CONNECTION_STATUS_PATH).resolve() - - @property - def jsonl_records_path(self) -> Path: - return (self._output_directory / self.RELATIVE_RECORDS_PATH).resolve() - - @property - def jsonl_states_path(self) -> Path: - return (self._output_directory / self.RELATIVE_STATES_PATH).resolve() - - @property - def jsonl_traces_path(self) -> Path: - return (self._output_directory / self.RELATIVE_TRACES_PATH).resolve() - - @property - def jsonl_logs_path(self) -> Path: - return (self._output_directory / self.RELATIVE_LOGS_PATH).resolve() - - @property - def jsonl_controls_path(self) -> Path: - return (self._output_directory / self.RELATIVE_CONTROLS_PATH).resolve() - - @property - def jsonl_files(self) -> Iterable[Path]: - return [ - self.jsonl_catalogs_path, - self.jsonl_connection_status_path, - self.jsonl_records_path, - self.jsonl_specs_path, - self.jsonl_states_path, - self.jsonl_traces_path, - self.jsonl_logs_path, - self.jsonl_controls_path, - ] - - def write(self, airbyte_messages: Iterable[AirbyteMessage]) -> None: - """ - Write AirbyteMessages to the appropriate file. - - Catalogs, connection status messages, specs, trace messages, logs, and control messages are all written to their - own file (e.g. "catalog.jsonl", "spec.jsonl"). - - Records and state messages are further subdivided, with one file per stream (e.g. "my_stream_records.jsonl", - "my_stream_states.jsonl"). Streams with global state are stored in a "_global_states.jsonl" file. - - We use an LRU cache here to manage open file objects, in order to limit the number of concurrently open file - descriptors. This mitigates the risk of hitting limits on the number of open file descriptors, particularly for - connections with a high number of streams. The cache is designed to automatically close files upon eviction. 
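> Editor's note: the FileDescriptorLRUCache defined above caps the number of concurrently open file descriptors by closing handles as they are evicted. A standalone sketch of the same pattern with cachetools; the maxsize and file names are illustrative.

```python
# Sketch of an LRU cache that closes file descriptors on eviction.
from cachetools import LRUCache, cached


class FileDescriptorLRUCache(LRUCache):
    def popitem(self):
        filepath, fd = super().popitem()
        fd.close()  # close the handle when it is evicted from the cache
        return filepath, fd


CACHE = FileDescriptorLRUCache(maxsize=2)


@cached(cache=CACHE)
def open_for_append(path: str):
    return open(path, "a")


for name in ("/tmp/a.jsonl", "/tmp/b.jsonl", "/tmp/c.jsonl"):
    # Opening the third file evicts (and closes) the least recently used one.
    open_for_append(name).write("{}\n")

for f in CACHE.values():
    f.close()
```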
- """ - - @cached(cache=self.CACHE) - def _open_file(path: Path) -> TextIO: - return open(path, "a") - - try: - for _message in airbyte_messages: - if not isinstance(_message, AirbyteMessage): - continue - filepath, message = self._get_filepath_and_message(_message) - _open_file(self._output_directory / filepath).write(f"{message}\n") - finally: - for f in self.CACHE.values(): - f.close() - - def _get_filepath_and_message(self, message: AirbyteMessage) -> Tuple[str, str]: - if message.type == AirbyteMessageType.CATALOG: - return self.RELATIVE_CATALOGS_PATH, message.catalog.json() - - elif message.type == AirbyteMessageType.CONNECTION_STATUS: - return self.RELATIVE_CONNECTION_STATUS_PATH, message.connectionStatus.json() - - elif message.type == AirbyteMessageType.RECORD: - record = json.loads(message.record.json()) - # TODO: once we have a comparator and/or database backend implemented we can remove this - for key_path in self.RECORD_PATHS_TO_POP: - pydash.objects.unset(record, key_path) - return self.RELATIVE_RECORDS_PATH, json.dumps(record) - - elif message.type == AirbyteMessageType.SPEC: - return self.RELATIVE_SPECS_PATH, message.spec.json() - - elif message.type == AirbyteMessageType.STATE: - if message.state.stream and message.state.stream.stream_descriptor: - stream_name = message.state.stream.stream_descriptor.name - stream_namespace = message.state.stream.stream_descriptor.namespace - filepath = ( - f"{stream_name}_{stream_namespace}_{self.RELATIVE_STATES_PATH}" - if stream_namespace - else f"{stream_name}_{self.RELATIVE_STATES_PATH}" - ) - else: - filepath = f"_global_{self.RELATIVE_STATES_PATH}" - return filepath, message.state.json() - - elif message.type == AirbyteMessageType.TRACE: - return self.RELATIVE_TRACES_PATH, message.trace.json() - - elif message.type == AirbyteMessageType.LOG: - return self.RELATIVE_LOGS_PATH, message.log.json() - - elif message.type == AirbyteMessageType.CONTROL: - return self.RELATIVE_CONTROLS_PATH, message.control.json() - - raise NotImplementedError(f"No handling for AirbyteMessage type {message.type} has been implemented. This is unexpected.") diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/connection_objects_retrieval.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/connection_objects_retrieval.py deleted file mode 100644 index 9803b6a00c9d..000000000000 --- a/airbyte-ci/connectors/live-tests/src/live_tests/commons/connection_objects_retrieval.py +++ /dev/null @@ -1,125 +0,0 @@ -# Copyright (c) 2024 Airbyte, Inc., all rights reserved. 
- -import json -import logging -from pathlib import Path -from typing import Dict, Optional, Set - -from connection_retriever import ConnectionObject, retrieve_objects # type: ignore - -from .models import Command, ConfiguredAirbyteCatalog, ConnectionObjects, SecretDict - -LOGGER = logging.getLogger(__name__) - - -def parse_config(config: Dict | str | None) -> Optional[SecretDict]: - if not config: - return None - if isinstance(config, str): - return SecretDict(json.loads(config)) - else: - return SecretDict(config) - - -def parse_catalog(catalog: Dict | str | None) -> Optional[ConfiguredAirbyteCatalog]: - if not catalog: - return None - if isinstance(catalog, str): - return ConfiguredAirbyteCatalog.parse_obj(json.loads(catalog)) - else: - return ConfiguredAirbyteCatalog.parse_obj(catalog) - - -def parse_state(state: Dict | str | None) -> Optional[Dict]: - if not state: - return None - if isinstance(state, str): - return json.loads(state) - else: - return state - - -CONNECTION_OBJECT_TYPE_TO_PARSER = { - ConnectionObject.SOURCE_CONFIG: parse_config, - ConnectionObject.CONFIGURED_CATALOG: parse_catalog, - ConnectionObject.STATE: parse_state, -} - - -def get_connector_config_from_path(config_path: Path) -> Optional[SecretDict]: - return parse_config(config_path.read_text()) - - -def get_state_from_path(state_path: Path) -> Optional[Dict]: - return parse_state(state_path.read_text()) - - -def get_catalog_from_path(path: Path) -> Optional[ConfiguredAirbyteCatalog]: - return parse_catalog(path.read_text()) - - -COMMAND_TO_REQUIRED_OBJECT_TYPES = { - Command.SPEC: set(), - Command.CHECK: {ConnectionObject.SOURCE_CONFIG}, - Command.DISCOVER: {ConnectionObject.SOURCE_CONFIG}, - Command.READ: {ConnectionObject.SOURCE_CONFIG, ConnectionObject.CONFIGURED_CATALOG}, - Command.READ_WITH_STATE: { - ConnectionObject.SOURCE_CONFIG, - ConnectionObject.CONFIGURED_CATALOG, - ConnectionObject.STATE, - }, -} - - -def get_connection_objects( - requested_objects: Set[ConnectionObject], - connection_id: Optional[str], - custom_config_path: Optional[Path], - custom_catalog_path: Optional[Path], - custom_state_path: Optional[Path], - retrieval_reason: Optional[str], -) -> ConnectionObjects: - """This function retrieves the connection objects values. - It checks that the required objects are available and raises a UsageError if they are not. - If a connection_id is provided, it retrieves the connection objects from the connection. - If custom objects are provided, it overrides the retrieved objects with them. - - Args: - requested_objects (Set[ConnectionObject]): The set of requested connection objects. - connection_id (Optional[str]): The connection id to retrieve the connection objects for. - custom_config_path (Optional[Path]): The local path to the custom config to use. - custom_catalog_path (Optional[Path]): The local path to the custom catalog to use. - custom_state_path (Optional[Path]): The local path to the custom state to use. - retrieval_reason (Optional[str]): The reason to access the connection objects. - Raises: - click.UsageError: If a required object is missing for the command. - click.UsageError: If a retrieval reason is missing when passing a connection id. - Returns: - ConnectionObjects: The connection objects values. 
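> Editor's note: the parse helpers above all follow the same permissive pattern: accept an already-parsed object, a JSON string, or None, and normalize the input. `parse_state` reproduced as a runnable sketch.

```python
# Sketch of the permissive input-parsing pattern from the deleted module.
import json
from typing import Optional


def parse_state(state: dict | str | None) -> Optional[dict]:
    if not state:
        return None
    if isinstance(state, str):
        return json.loads(state)
    return state


assert parse_state(None) is None
assert parse_state('{"cursor": "2024-01-01"}') == {"cursor": "2024-01-01"}
assert parse_state({"cursor": "2024-01-01"}) == {"cursor": "2024-01-01"}
```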
- """ - - custom_config = get_connector_config_from_path(custom_config_path) if custom_config_path else None - custom_catalog = get_catalog_from_path(custom_catalog_path) if custom_catalog_path else None - custom_state = get_state_from_path(custom_state_path) if custom_state_path else None - - if not connection_id: - connection_object = ConnectionObjects(source_config=custom_config, catalog=custom_catalog, state=custom_state) - else: - if not retrieval_reason: - raise ValueError("A retrieval reason is required to access the connection objects when passing a connection id.") - retrieved_objects = retrieve_objects(connection_id, requested_objects, retrieval_reason=retrieval_reason) - retrieved_config = parse_config(retrieved_objects.get(ConnectionObject.SOURCE_CONFIG)) - retrieved_catalog = parse_catalog(retrieved_objects.get(ConnectionObject.CONFIGURED_CATALOG)) - retrieved_state = parse_state(retrieved_objects.get(ConnectionObject.STATE)) - connection_object = ConnectionObjects( - source_config=custom_config if custom_config else retrieved_config, - catalog=custom_catalog if custom_catalog else retrieved_catalog, - state=custom_state if custom_state else retrieved_state, - ) - if not connection_object.source_config and ConnectionObject.SOURCE_CONFIG in requested_objects: - raise ValueError("A source config is required to run the command.") - if not connection_object.catalog and ConnectionObject.CONFIGURED_CATALOG in requested_objects: - raise ValueError("A catalog is required to run the command.") - if not connection_object.state and ConnectionObject.STATE in requested_objects: - raise ValueError("A state is required to run the command.") - return connection_object diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/connector_runner.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/connector_runner.py deleted file mode 100644 index 11134353ce90..000000000000 --- a/airbyte-ci/connectors/live-tests/src/live_tests/commons/connector_runner.py +++ /dev/null @@ -1,289 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -import hashlib -import json -import logging -import os -import uuid -from pathlib import Path -from typing import Dict, List, Optional - -import dagger -import docker # type: ignore -import pytest -from airbyte_protocol.models import ConfiguredAirbyteCatalog # type: ignore -from live_tests.commons.models import Command, ConnectorUnderTest, ExecutionResult, SecretDict - - -async def get_container_from_id(dagger_client: dagger.Client, container_id: str) -> dagger.Container: - """Get a dagger container from its id. - Please remind that container id are not persistent and can change between Dagger sessions. 
- - Args: - dagger_client (dagger.Client): The dagger client to use to import the connector image - """ - try: - return await dagger_client.container(id=dagger.ContainerID(container_id)) - except dagger.DaggerError as e: - pytest.exit(f"Failed to load connector container: {e}") - - -async def get_container_from_tarball_path(dagger_client: dagger.Client, tarball_path: Path) -> dagger.Container: - if not tarball_path.exists(): - pytest.exit(f"Connector image tarball {tarball_path} does not exist") - container_under_test_tar_file = ( - dagger_client.host().directory(str(tarball_path.parent), include=tarball_path.name).file(tarball_path.name) - ) - try: - return await dagger_client.container().import_(container_under_test_tar_file) - except dagger.DaggerError as e: - pytest.exit(f"Failed to import connector image from tarball: {e}") - - -async def get_container_from_local_image(dagger_client: dagger.Client, local_image_name: str) -> Optional[dagger.Container]: - """Get a dagger container from a local image. - It will use Docker python client to export the image to a tarball and then import it into dagger. - - Args: - dagger_client (dagger.Client): The dagger client to use to import the connector image - local_image_name (str): The name of the local image to import - - Returns: - Optional[dagger.Container]: The dagger container for the local image or None if the image does not exist - """ - docker_client = docker.from_env() - - try: - image = docker_client.images.get(local_image_name) - except docker.errors.ImageNotFound: - return None - - image_digest = image.id.replace("sha256:", "") - tarball_path = Path(f"/tmp/{image_digest}.tar") - if not tarball_path.exists(): - logging.info(f"Exporting local connector image {local_image_name} to tarball {tarball_path}") - with open(tarball_path, "wb") as f: - for chunk in image.save(named=True): - f.write(chunk) - return await get_container_from_tarball_path(dagger_client, tarball_path) - - -async def get_container_from_dockerhub_image(dagger_client: dagger.Client, dockerhub_image_name: str) -> dagger.Container: - """Get a dagger container from a dockerhub image. - - Args: - dagger_client (dagger.Client): The dagger client to use to import the connector image - dockerhub_image_name (str): The name of the dockerhub image to import - - Returns: - dagger.Container: The dagger container for the dockerhub image - """ - try: - return await dagger_client.container().from_(dockerhub_image_name) - except dagger.DaggerError as e: - pytest.exit(f"Failed to import connector image from DockerHub: {e}") - - -async def get_connector_container(dagger_client: dagger.Client, image_name_with_tag: str) -> dagger.Container: - """Get a dagger container for the connector image to test. 
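> Editor's note: `get_container_from_local_image` above uses the Docker SDK to export a local image as a tarball before importing it into Dagger. A sketch of just the export step, assuming the `docker` package and a running daemon; the image name is illustrative.

```python
# Sketch: export a local Docker image to a tarball with the Docker SDK.
from pathlib import Path

import docker

client = docker.from_env()
try:
    image = client.images.get("airbyte/source-faker:latest")
except docker.errors.ImageNotFound:
    image = None

if image is not None:
    digest = image.id.replace("sha256:", "")
    tarball = Path(f"/tmp/{digest}.tar")
    if not tarball.exists():
        with open(tarball, "wb") as f:
            # image.save streams the image as chunks of a tar archive.
            for chunk in image.save(named=True):
                f.write(chunk)
```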
- - Args: - dagger_client (dagger.Client): The dagger client to use to import the connector image - image_name_with_tag (str): The docker image name and tag of the connector image to test - - Returns: - dagger.Container: The dagger container for the connector image to test - """ - # If a container_id.txt file is available, we'll use it to load the connector container - # We use a txt file as container ids can be too long to be passed as env vars - # It's used for dagger-in-dagger use case with airbyte-ci, when the connector container is built via an upstream dagger operation - connector_container_id_path = Path("/tmp/container_id.txt") - if connector_container_id_path.exists(): - # If the CONNECTOR_CONTAINER_ID env var is set, we'll use it to load the connector container - return await get_container_from_id(dagger_client, connector_container_id_path.read_text()) - - # If the CONNECTOR_UNDER_TEST_IMAGE_TAR_PATH env var is set, we'll use it to import the connector image from the tarball - if connector_image_tarball_path := os.environ.get("CONNECTOR_UNDER_TEST_IMAGE_TAR_PATH"): - tarball_path = Path(connector_image_tarball_path) - return await get_container_from_tarball_path(dagger_client, tarball_path) - - # Let's try to load the connector container from a local image - if connector_container := await get_container_from_local_image(dagger_client, image_name_with_tag): - return connector_container - - # If we get here, we'll try to pull the connector image from DockerHub - return await get_container_from_dockerhub_image(dagger_client, image_name_with_tag) - - -class ConnectorRunner: - IN_CONTAINER_CONFIG_PATH = "/data/config.json" - IN_CONTAINER_CATALOG_PATH = "/data/catalog.json" - IN_CONTAINER_STATE_PATH = "/data/state.json" - MITMPROXY_IMAGE = "mitmproxy/mitmproxy:9.0.1" - HTTP_DUMP_FILE_NAME = "http_dump.mitm" - - def __init__( - self, - dagger_client: dagger.Client, - connector_under_test: ConnectorUnderTest, - command: Command, - config: Optional[SecretDict] = None, - catalog: Optional[ConfiguredAirbyteCatalog] = None, - state: Optional[Dict] = None, - environment_variables: Optional[Dict] = None, - enable_http_cache: bool = True, - ): - self.dagger_client = dagger_client - self.connector_under_test = connector_under_test - self.command = command - self.config = config - self.catalog = catalog - self.state = state - self.environment_variables = environment_variables if environment_variables else {} - self.enable_http_cache = enable_http_cache - self.full_command: List[str] = self._get_full_command(command) - - @property - def _connector_under_test_container(self) -> dagger.Container: - return self.connector_under_test.container - - def _get_full_command(self, command: Command) -> List[str]: - if command is Command.SPEC: - return ["spec"] - elif command is Command.CHECK: - return ["check", "--config", self.IN_CONTAINER_CONFIG_PATH] - elif command is Command.DISCOVER: - return ["discover", "--config", self.IN_CONTAINER_CONFIG_PATH] - elif command is Command.READ: - return [ - "read", - "--config", - self.IN_CONTAINER_CONFIG_PATH, - "--catalog", - self.IN_CONTAINER_CATALOG_PATH, - ] - elif command is Command.READ_WITH_STATE: - return [ - "read", - "--config", - self.IN_CONTAINER_CONFIG_PATH, - "--catalog", - self.IN_CONTAINER_CATALOG_PATH, - "--state", - self.IN_CONTAINER_STATE_PATH, - ] - else: - raise NotImplementedError(f"The connector runner does not support the {command} command") - - async def get_container_env_variable_value(self, name: str) -> Optional[str]: - return await 
self._connector_under_test_container.env_variable(name) - - async def get_container_label(self, label: str) -> Optional[str]: - return await self._connector_under_test_container.label(label) - - async def get_container_entrypoint(self) -> str: - entrypoint = await self._connector_under_test_container.entrypoint() - assert entrypoint, "The connector container has no entrypoint" - return " ".join(entrypoint) - - async def run( - self, - raise_on_container_error: bool = True, - ) -> ExecutionResult: - container = self._connector_under_test_container - # Do not cache downstream dagger layers - container = container.with_env_variable("CACHEBUSTER", str(uuid.uuid4())) - for env_var_name, env_var_value in self.environment_variables.items(): - container = container.with_env_variable(env_var_name, env_var_value) - if self.config: - container = container.with_new_file(self.IN_CONTAINER_CONFIG_PATH, contents=json.dumps(dict(self.config))) - if self.state: - container = container.with_new_file(self.IN_CONTAINER_STATE_PATH, contents=json.dumps(self.state)) - if self.catalog: - container = container.with_new_file(self.IN_CONTAINER_CATALOG_PATH, contents=self.catalog.json()) - if self.enable_http_cache: - container = await self._bind_connector_container_to_proxy(container) - - executed_container = await container.with_exec(self.full_command).sync() - - return ExecutionResult( - stdout=await executed_container.stdout(), - stderr=await executed_container.stderr(), - executed_container=executed_container, - http_dump=await self._retrieve_http_dump() if self.enable_http_cache else None, - ) - - def _get_http_dumps_cache_volume(self) -> dagger.CacheVolume: - config_data = self.config.data if self.config else None - proxy_cache_key = hashlib.md5((self.connector_under_test.name + str(config_data)).encode("utf-8")).hexdigest() - return self.dagger_client.cache_volume(f"{self.MITMPROXY_IMAGE}{proxy_cache_key}") - - def _get_mitmproxy_dir_cache(self) -> dagger.CacheVolume: - return self.dagger_client.cache_volume(self.MITMPROXY_IMAGE) - - async def _get_proxy_container( - self, - ) -> dagger.Container: - proxy_container = ( - self.dagger_client.container() - .from_(self.MITMPROXY_IMAGE) - .with_exec(["mkdir", "-p", "/home/mitmproxy/.mitmproxy"], skip_entrypoint=True) - .with_mounted_cache("/dumps", self._get_http_dumps_cache_volume()) - .with_mounted_cache("/home/mitmproxy/.mitmproxy", self._get_mitmproxy_dir_cache()) - ) - previous_dump_files = ( - await proxy_container.with_env_variable("CACHEBUSTER", str(uuid.uuid4())) - .with_exec(["ls", "/dumps"], skip_entrypoint=True) - .stdout() - ).splitlines() - if self.HTTP_DUMP_FILE_NAME in previous_dump_files: - command = [ - "mitmweb", - "--server-replay", - f"/dumps/{self.HTTP_DUMP_FILE_NAME}", - ] - else: - command = [ - "mitmweb", - "--save-stream-file", - f"/dumps/{self.HTTP_DUMP_FILE_NAME}", - ] - - return proxy_container.with_exec(command) - - async def _bind_connector_container_to_proxy(self, container: dagger.Container) -> dagger.Container: - proxy_srv = await self._get_proxy_container() - proxy_host, proxy_port = "proxy_server", 8080 - cert_path_in_volume = "/mitmproxy_dir/mitmproxy-ca.pem" - requests_cert_path = "/usr/local/lib/python3.9/site-packages/certifi/cacert.pem" - ca_certificate_path = "/usr/local/share/ca-certificates/mitmproxy.crt" - - return ( - container.with_service_binding(proxy_host, proxy_srv.with_exposed_port(proxy_port).as_service()) - .with_mounted_cache("/mitmproxy_dir", self._get_mitmproxy_dir_cache()) - .with_exec(["cp", 
cert_path_in_volume, requests_cert_path], skip_entrypoint=True) - .with_exec(["cp", cert_path_in_volume, ca_certificate_path], skip_entrypoint=True) - .with_env_variable("REQUESTS_CA_BUNDLE", requests_cert_path) - .with_exec(["update-ca-certificates"], skip_entrypoint=True) - .with_env_variable("http_proxy", f"{proxy_host}:{proxy_port}") - .with_env_variable("https_proxy", f"{proxy_host}:{proxy_port}") - ) - - async def _retrieve_http_dump(self) -> dagger.File: - return await ( - self.dagger_client.container() - .from_("alpine:latest") - .with_mounted_cache("/dumps", self._get_http_dumps_cache_volume()) - .with_exec(["mkdir", "/to_export"]) - .with_exec( - [ - "cp", - "-r", - f"/dumps/{self.HTTP_DUMP_FILE_NAME}", - f"/to_export/{self.HTTP_DUMP_FILE_NAME}", - ] - ) - .file(f"/to_export/{self.HTTP_DUMP_FILE_NAME}") - ) diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/models.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/models.py deleted file mode 100644 index a4c51f2d85f0..000000000000 --- a/airbyte-ci/connectors/live-tests/src/live_tests/commons/models.py +++ /dev/null @@ -1,245 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -from __future__ import annotations - -import time -from dataclasses import dataclass, field -from enum import Enum -from pathlib import Path -from typing import Any, Dict, Iterable, Iterator, List, MutableMapping, Optional, Tuple - -import _collections_abc -import dagger -from airbyte_protocol.models import AirbyteMessage # type: ignore -from airbyte_protocol.models import ConfiguredAirbyteCatalog # type: ignore -from live_tests.commons.backends import DuckDbBackend -from pydantic import ValidationError - - -class UserDict(_collections_abc.MutableMapping): # type: ignore - # Start by filling-out the abstract methods - def __init__(self, _dict: Optional[MutableMapping] = None, **kwargs: Any): - self.data: MutableMapping = {} - if _dict is not None: - self.update(_dict) - if kwargs: - self.update(kwargs) - - def __len__(self) -> int: - return len(self.data) - - def __getitem__(self, key: Any) -> Any: - if key in self.data: - return self.data[key] - if hasattr(self.__class__, "__missing__"): - return self.__class__.__missing__(self, key) - raise KeyError(key) - - def __setitem__(self, key: Any, item: Any) -> None: - self.data[key] = item - - def __delitem__(self, key: Any) -> None: - del self.data[key] - - def __iter__(self) -> Iterator: - return iter(self.data) - - # Modify __contains__ to work correctly when __missing__ is present - def __contains__(self, key: Any) -> bool: - return key in self.data - - # Now, add the methods in dicts but not in MutableMapping - def __repr__(self) -> str: - return repr(self.data) - - def __or__(self, other: UserDict | dict) -> UserDict: - if isinstance(other, UserDict): - return self.__class__(self.data | other.data) # type: ignore - if isinstance(other, dict): - return self.__class__(self.data | other) # type: ignore - return NotImplemented - - def __ror__(self, other: UserDict | dict) -> UserDict: - if isinstance(other, UserDict): - return self.__class__(other.data | self.data) # type: ignore - if isinstance(other, dict): - return self.__class__(other | self.data) # type: ignore - return NotImplemented - - def __ior__(self, other: UserDict | dict) -> UserDict: - if isinstance(other, UserDict): - self.data |= other.data # type: ignore - else: - self.data |= other # type: ignore - return self - - def __copy__(self) -> UserDict: - inst = self.__class__.__new__(self.__class__) - 
inst.__dict__.update(self.__dict__) - # Create a copy and avoid triggering descriptors - inst.__dict__["data"] = self.__dict__["data"].copy() - return inst - - def copy(self) -> UserDict: - if self.__class__ is UserDict: - return UserDict(self.data.copy()) # type: ignore - import copy - - data = self.data - try: - self.data = {} - c = copy.copy(self) - finally: - self.data = data - c.update(self) - return c - - @classmethod - def fromkeys(cls, iterable: Iterable, value: Optional[Any] = None) -> UserDict: - d = cls() - for key in iterable: - d[key] = value - return d - - -class SecretDict(UserDict): - def __str__(self) -> str: - return f"{self.__class__.__name__}(******)" - - def __repr__(self) -> str: - return str(self) - - -class Command(Enum): - CHECK = "check" - DISCOVER = "discover" - READ = "read" - READ_WITH_STATE = "read-with-state" - SPEC = "spec" - - -@dataclass -class ConnectorUnderTest: - image_name: str - container: dagger.Container - - @property - def name(self) -> str: - return self.image_name.replace("airbyte/", "").split(":")[0] - - @property - def version(self) -> str: - return self.image_name.replace("airbyte/", "").split(":")[1] - - -@dataclass -class ExecutionInputs: - connector_under_test: ConnectorUnderTest - command: Command - config: Optional[SecretDict] = None - catalog: Optional[ConfiguredAirbyteCatalog] = None - state: Optional[Dict] = None - environment_variables: Optional[Dict] = None - enable_http_cache: bool = True - - def to_dict(self) -> dict: - return { - "connector_under_test": self.connector_under_test, - "command": self.command, - "config": self.config, - "catalog": self.catalog, - "state": self.state, - "environment_variables": self.environment_variables, - "enable_http_cache": self.enable_http_cache, - } - - def raise_if_missing_attr_for_command(self, attribute: str) -> None: - if getattr(self, attribute) is None: - raise ValueError(f"We need a {attribute} to run the {self.command.value} command") - - def __post_init__(self) -> None: - if self.command is Command.CHECK: - self.raise_if_missing_attr_for_command("config") - if self.command is Command.DISCOVER: - self.raise_if_missing_attr_for_command("config") - if self.command is Command.READ: - self.raise_if_missing_attr_for_command("config") - self.raise_if_missing_attr_for_command("catalog") - if self.command is Command.READ_WITH_STATE: - self.raise_if_missing_attr_for_command("config") - self.raise_if_missing_attr_for_command("catalog") - self.raise_if_missing_attr_for_command("state") - - -@dataclass -class ExecutionResult: - stdout: str - stderr: str - executed_container: dagger.Container - http_dump: Optional[dagger.File] - airbyte_messages: List[AirbyteMessage] = field(default_factory=list) - airbyte_messages_parsing_errors: List[Tuple[Exception, str]] = field(default_factory=list) - - def __post_init__(self) -> None: - self.airbyte_messages, self.airbyte_messages_parsing_errors = self.parse_airbyte_messages_from_command_output(self.stdout) - - @staticmethod - def parse_airbyte_messages_from_command_output( - command_output: str, - ) -> Tuple[List[AirbyteMessage], List[Tuple[Exception, str]]]: - airbyte_messages: List[AirbyteMessage] = [] - parsing_errors: List[Tuple[Exception, str]] = [] - for line in command_output.splitlines(): - try: - airbyte_messages.append(AirbyteMessage.parse_raw(line)) - except ValidationError as e: - parsing_errors.append((e, line)) - return airbyte_messages, parsing_errors - - -@dataclass -class ExecutionReport: - execution_inputs: ExecutionInputs - execution_result: 
ExecutionResult - created_at: int = field(default_factory=lambda: int(time.time())) - saved_path: Optional[Path] = None - - @property - def report_dir(self) -> str: - return f"{self.execution_inputs.connector_under_test.name}/{self.execution_inputs.command.value}/{self.execution_inputs.connector_under_test.version}/" - - @property - def stdout_filename(self) -> str: - return "stdout.log" - - @property - def stderr_filename(self) -> str: - return "stderr.log" - - @property - def http_dump_filename(self) -> str: - return "http_dump.mitm" - - async def save_to_disk(self, output_dir: Path) -> None: - final_dir = output_dir / self.report_dir - final_dir.mkdir(parents=True, exist_ok=True) - stdout_file_path = final_dir / self.stdout_filename - stdout_file_path.write_text(self.execution_result.stdout) - - stderr_file_path = final_dir / self.stderr_filename - stderr_file_path.write_text(self.execution_result.stderr) - if self.execution_result.http_dump: - http_dump_file_path = final_dir / self.http_dump_filename - await self.execution_result.http_dump.export(str(http_dump_file_path.resolve())) - # TODO merge ExecutionReport.save_to_disk and Backend.write? - # Make backends use customizable - airbyte_messages_dir = final_dir / "airbyte_messages" - airbyte_messages_dir.mkdir(parents=True, exist_ok=True) - DuckDbBackend(final_dir / airbyte_messages_dir).write(self.execution_result.airbyte_messages) - self.saved_path = final_dir - - -@dataclass(kw_only=True) -class ConnectionObjects: - source_config: Optional[SecretDict] - catalog: Optional[ConfiguredAirbyteCatalog] - state: Optional[Dict] diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/commons/utils.py b/airbyte-ci/connectors/live-tests/src/live_tests/commons/utils.py deleted file mode 100644 index 4437ea0f2e18..000000000000 --- a/airbyte-ci/connectors/live-tests/src/live_tests/commons/utils.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -from typing import List - -import dagger -from live_tests.commons.connector_runner import get_connector_container -from live_tests.commons.models import ConnectorUnderTest - - -async def get_connector_under_test(dagger_client: dagger.Client, connector_image_name: str) -> ConnectorUnderTest: - dagger_container = await get_connector_container(dagger_client, connector_image_name) - return ConnectorUnderTest(connector_image_name, dagger_container) - - -def sh_dash_c(lines: List[str]) -> List[str]: - """Wrap sequence of commands in shell for safe usage of dagger Container's with_exec method.""" - return ["sh", "-c", " && ".join(["set -o xtrace"] + lines)] diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/debug/__init__.py b/airbyte-ci/connectors/live-tests/src/live_tests/debug/__init__.py deleted file mode 100644 index 85a4c5094f0e..000000000000 --- a/airbyte-ci/connectors/live-tests/src/live_tests/debug/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
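> Editor's note: the `sh_dash_c` helper deleted above folds several shell commands into a single `sh -c` invocation, with `set -o xtrace` so each command is echoed as it runs; this keeps Dagger's `with_exec` to one call. Reproduced as a runnable sketch with an illustrative usage.

```python
# Sketch of the sh_dash_c helper from the deleted utils module.
from typing import List


def sh_dash_c(lines: List[str]) -> List[str]:
    """Wrap a sequence of commands in a shell for safe use with with_exec."""
    return ["sh", "-c", " && ".join(["set -o xtrace"] + lines)]


print(sh_dash_c(["mkdir -p /data", "ls /data"]))
# ['sh', '-c', 'set -o xtrace && mkdir -p /data && ls /data']
```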
- -import dagger -import os -import sys - -DAGGER_EXEC_TIMEOUT = dagger.Timeout( - int(os.environ.get("DAGGER_EXEC_TIMEOUT", "3600")) -) # One hour by default -DAGGER_CONFIG = dagger.Config(timeout=DAGGER_EXEC_TIMEOUT, log_output=sys.stderr) diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/debug/cli.py b/airbyte-ci/connectors/live-tests/src/live_tests/debug/cli.py deleted file mode 100644 index d7ddb545d69f..000000000000 --- a/airbyte-ci/connectors/live-tests/src/live_tests/debug/cli.py +++ /dev/null @@ -1,116 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -import time -from pathlib import Path -from typing import List, Optional - -import asyncclick as click -import dagger -from live_tests.commons.connection_objects_retrieval import COMMAND_TO_REQUIRED_OBJECT_TYPES, get_connection_objects -from live_tests.commons.connector_runner import ConnectorRunner -from live_tests.commons.models import Command, ExecutionInputs, ExecutionReport -from live_tests.commons.utils import get_connector_under_test -from live_tests.debug import DAGGER_CONFIG - - -@click.command( - "debug", - help="Run a specific command on one or multiple connectors and persist the outputs to local storage.", -) -@click.argument( - "command", - type=click.Choice([c.value for c in Command]), - callback=lambda _, __, value: Command(value), -) -@click.option("--connection-id", type=str, required=False, default=None) -@click.option( - "--config-path", - type=click.Path(file_okay=True, readable=True, dir_okay=False, resolve_path=True, path_type=Path), - required=False, - default=None, -) -@click.option( - "--catalog-path", - type=click.Path(file_okay=True, readable=True, dir_okay=False, resolve_path=True, path_type=Path), - required=False, - default=None, -) -@click.option( - "--state-path", - type=click.Path(file_okay=True, readable=True, dir_okay=False, resolve_path=True, path_type=Path), - required=False, - default=None, -) -@click.option( - "-c", - "--connector-image", - "connector_images", - help="Docker image name of the connector to debug (e.g. `airbyte/source-faker:latest`, `airbyte/source-faker:dev`)", - multiple=True, - type=str, - required=True, -) -@click.option( - "-o", - "--output-directory", - help="Directory in which connector output and test results should be stored. Defaults to the current directory.", - default=Path("live_tests_debug_reports"), - type=click.Path(file_okay=False, dir_okay=True, resolve_path=True, path_type=Path), -) -@click.option( - "-hc", - "--http-cache", - "enable_http_cache", - help="Use the HTTP cache for the connector.", - default=True, - is_flag=True, - type=bool, -) -# TODO: add an env var option to pass to the connector -@click.pass_context -async def debug_cmd( - ctx: click.Context, - command: Command, - connection_id: Optional[str], - config_path: Optional[Path], - catalog_path: Optional[Path], - state_path: Optional[Path], - connector_images: List[str], - output_directory: Path, - enable_http_cache: bool, -) -> None: - output_directory.mkdir(parents=True, exist_ok=True) - debug_session_start_time = int(time.time()) - if connection_id: - retrieval_reason = click.prompt("👮‍♂️ Please provide a reason for accessing the connection objects. This will be logged") - else: - retrieval_reason = None - - try: - connection_objects = get_connection_objects( - COMMAND_TO_REQUIRED_OBJECT_TYPES[command], - connection_id, - config_path, - catalog_path, - state_path, - retrieval_reason, - ) - except ValueError as e: - raise click.UsageError(str(e)) - async with dagger.Connection(config=DAGGER_CONFIG) as dagger_client: - for connector_image in connector_images: - try: - execution_inputs = ExecutionInputs( - connector_under_test=await get_connector_under_test(dagger_client, connector_image), - command=command, - config=connection_objects.source_config, - catalog=connection_objects.catalog, - state=connection_objects.state, - environment_variables=None, - enable_http_cache=enable_http_cache, - ) - except ValueError as e: - raise click.UsageError(str(e)) - execution_result = await ConnectorRunner(dagger_client, **execution_inputs.to_dict()).run() - execution_report = ExecutionReport(execution_inputs, execution_result, created_at=debug_session_start_time) - await execution_report.save_to_disk(output_directory)
diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/__init__.py b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/__init__.py deleted file mode 100644 index f70ecfc3a89e..000000000000 --- a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/conftest.py b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/conftest.py deleted file mode 100644 index d0686ddb6aa0..000000000000 --- a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/conftest.py +++ /dev/null @@ -1,693 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -from __future__ import annotations - -import logging -import os -import time -from pathlib import Path -from typing import TYPE_CHECKING, AsyncIterable, Callable, Dict, List, Optional - -import dagger -import pytest -from airbyte_protocol.models import ConfiguredAirbyteCatalog  # type: ignore -from live_tests.commons.connection_objects_retrieval import ConnectionObject, get_connection_objects -from live_tests.commons.connector_runner import ConnectorRunner -from live_tests.commons.models import ( - Command, - ConnectionObjects, - ConnectorUnderTest, - ExecutionInputs, - ExecutionReport, - ExecutionResult, - SecretDict, -) -from live_tests.commons.utils import get_connector_under_test - -if TYPE_CHECKING: - from _pytest.config import Config - from _pytest.config.argparsing import Parser - from _pytest.fixtures import SubRequest - from pytest_sugar import SugarTerminalReporter  # type: ignore - -## CONSTS -LOGGER = logging.getLogger("regression_tests") -MAIN_OUTPUT_DIRECTORY = Path("/tmp/regression_tests_artifacts") - -# It's used by Dagger and is very verbose -logging.getLogger("httpx").setLevel(logging.ERROR) - -## STASH KEYS -SESSION_START_TIMESTAMP = pytest.StashKey[int]() -TEST_ARTIFACT_DIRECTORY = pytest.StashKey[Path]() -DAGGER_LOG_PATH = pytest.StashKey[Path]() - - -## PYTEST HOOKS -def pytest_addoption(parser: Parser) -> None: - parser.addoption( - "--connector-image", - help="The connector image name on which the regression tests will run: e.g. airbyte/source-faker", - ) - parser.addoption( - "--control-version", - default="latest", - help="The control version used for regression testing. Defaults to latest", - )
 - parser.addoption( - "--target-version", - default="dev", - help="The target version used for regression testing. Defaults to dev", - ) - parser.addoption( - "--deny-confirmation", - default=False, - help="Always deny confirmation prompts. Useful for test development. Defaults to False", - ) - parser.addoption("--config-path") - parser.addoption("--catalog-path") - parser.addoption("--state-path") - parser.addoption("--connection-id") - - -def pytest_configure(config: Config) -> None: - start_timestamp = int(time.time()) - test_artifacts_directory = MAIN_OUTPUT_DIRECTORY / f"session_{start_timestamp}" - test_artifacts_directory.mkdir(parents=True, exist_ok=True) - dagger_log_path = test_artifacts_directory / "dagger.log" - config.stash[SESSION_START_TIMESTAMP] = start_timestamp - config.stash[TEST_ARTIFACT_DIRECTORY] = test_artifacts_directory - dagger_log_path.touch() - config.stash[DAGGER_LOG_PATH] = dagger_log_path - - -def pytest_terminal_summary(terminalreporter: SugarTerminalReporter, exitstatus: int, config: Config) -> None: - terminalreporter.ensure_newline() - terminalreporter.section("Test artifacts", sep="=", bold=True, blue=True) - terminalreporter.line(f"All test artifacts for this session should be available in {config.stash[TEST_ARTIFACT_DIRECTORY].resolve()}") - terminalreporter.section("Dagger logs", sep=".") - terminalreporter.line(f"Dagger logs are stored in {config.stash[DAGGER_LOG_PATH]}") - artifact_subsection: Dict[str, List[str]] = {} - for report in terminalreporter.reports: - properties_dict = { - record_property_key: record_property_value for record_property_key, record_property_value in report.user_properties - } - if "control_execution_report" in properties_dict or "target_execution_report" in properties_dict: - artifact_subsection[report.head_line] = [] - if "control_execution_report" in properties_dict: - artifact_subsection[report.head_line].append( - f"Control execution artifacts stored in {properties_dict['control_execution_report'].saved_path}" - ) - if "target_execution_report" in properties_dict: - artifact_subsection[report.head_line].append( - f"Target execution artifacts stored in {properties_dict['target_execution_report'].saved_path}" - ) - - if artifact_subsection: - terminalreporter.ensure_newline() - for section, artifact_lines in artifact_subsection.items(): - terminalreporter.ensure_newline() - terminalreporter.section(section, sep=".") - terminalreporter.line(os.linesep.join(artifact_lines)) - - -## HELPERS -async def persist_report( - request: SubRequest, - output_directory: Path, - execution_inputs: ExecutionInputs, - execution_result: ExecutionResult, - session_start_timestamp: int, -) -> ExecutionReport: - test_name = request.node.name - test_output_directory = Path(output_directory / test_name) - test_output_directory.mkdir(parents=True, exist_ok=True) - report = ExecutionReport(execution_inputs, execution_result, created_at=session_start_timestamp) - await report.save_to_disk(test_output_directory) - LOGGER.info(f"Execution report saved to {test_output_directory}") - return report - - -def get_option_or_fail(request: SubRequest, option: str) -> str: - if option_value := request.config.getoption(option): - return option_value - pytest.fail(f"Missing required option: {option}") - - -def ask_for_confirmation(message: str, always_deny: bool = False) -> None: - if always_deny: - pytest.skip("Skipped by user.") - if not os.environ.get("CI"): - if not input(f"{message}. Do you want to continue? 
[y/N]: ").lower().strip() == "y": - pytest.skip("Skipped by user.") - - -## FIXTURES - - -@pytest.fixture(scope="session") -def anyio_backend() -> str: - return "asyncio" - - -@pytest.fixture(scope="session") -def session_start_timestamp(request: SubRequest) -> int: - return request.config.stash[SESSION_START_TIMESTAMP] - - -@pytest.fixture(scope="session") -def test_artifacts_directory(request: SubRequest) -> Path: - return request.config.stash[TEST_ARTIFACT_DIRECTORY] - - -@pytest.fixture(scope="session") -def deny_confirmation(request: SubRequest) -> bool: - return bool(request.config.getoption("--deny-confirmation")) - - -@pytest.fixture(scope="session") -def connector_image(request: SubRequest) -> str: - return get_option_or_fail(request, "--connector-image") - - -@pytest.fixture(scope="session") -def control_version(request: SubRequest) -> str: - return get_option_or_fail(request, "--control-version") - - -@pytest.fixture(scope="session") -def target_version(control_version: str, request: SubRequest) -> str: - target_version = get_option_or_fail(request, "--target-version") - if target_version == control_version: - pytest.fail(f"Control and target versions are the same: {control_version}. Please provide different versions.") - return target_version - - -@pytest.fixture(scope="session") -def connection_id(request: SubRequest) -> Optional[str]: - return request.config.getoption("--connection-id") - - -@pytest.fixture(scope="session") -def custom_source_config_path(request: SubRequest) -> Optional[Path]: - if config_path := request.config.getoption("--config-path"): - return Path(config_path) - return None - - -@pytest.fixture(scope="session") -def custom_catalog_path(request: SubRequest) -> Optional[Path]: - if catalog_path := request.config.getoption("--catalog-path"): - return Path(catalog_path) - return None - - -@pytest.fixture(scope="session") -def custom_state_path(request: SubRequest) -> Optional[Path]: - if state_path := request.config.getoption("--state-path"): - return Path(state_path) - return None - - -@pytest.fixture(scope="session") -def retrieval_reason( - connection_id: Optional[str], - connector_image: str, - control_version: str, - target_version: str, -) -> Optional[str]: - if connection_id: - return f"Running regression tests on connection {connection_id} for connector {connector_image} on the control ({control_version}) and target versions ({target_version})." 
- return None - - -@pytest.fixture(scope="session") -def connection_objects( - connection_id: Optional[str], - custom_source_config_path: Optional[Path], - custom_catalog_path: Optional[Path], - custom_state_path: Optional[Path], - retrieval_reason: Optional[str], -) -> ConnectionObjects: - return get_connection_objects( - { - ConnectionObject.SOURCE_CONFIG, - ConnectionObject.CONFIGURED_CATALOG, - ConnectionObject.STATE, - }, - connection_id, - custom_source_config_path, - custom_catalog_path, - custom_state_path, - retrieval_reason, - ) - - -@pytest.fixture(scope="session") -def connector_config(connection_objects: ConnectionObjects) -> Optional[SecretDict]: - return connection_objects.source_config - - -@pytest.fixture(scope="session") -def catalog( - connection_objects: ConnectionObjects, -) -> Optional[ConfiguredAirbyteCatalog]: - return connection_objects.catalog - - -@pytest.fixture(scope="session") -def state(connection_objects: ConnectionObjects) -> Optional[Dict]: - return connection_objects.state - - -@pytest.fixture(scope="session") -def dagger_connection(request: SubRequest) -> dagger.Connection: - return dagger.Connection(dagger.Config(log_output=request.config.stash[DAGGER_LOG_PATH].open("w"))) - - -@pytest.fixture(scope="session") -async def dagger_client( - dagger_connection: dagger.Connection, -) -> AsyncIterable[dagger.Client]: - async with dagger_connection as client: - yield client - - -@pytest.fixture(scope="session") -async def control_connector(dagger_client: dagger.Client, connector_image: str, control_version: str) -> ConnectorUnderTest: - return await get_connector_under_test(dagger_client, f"{connector_image}:{control_version}") - - -@pytest.fixture(scope="session") -async def target_connector(dagger_client: dagger.Client, connector_image: str, target_version: str) -> ConnectorUnderTest: - return await get_connector_under_test(dagger_client, f"{connector_image}:{target_version}") - - -@pytest.fixture -def spec_control_execution_inputs( - control_connector: ConnectorUnderTest, -) -> ExecutionInputs: - return ExecutionInputs(connector_under_test=control_connector, command=Command.SPEC) - - -@pytest.fixture -def spec_control_connector_runner(dagger_client: dagger.Client, spec_control_execution_inputs: ExecutionInputs) -> ConnectorRunner: - return ConnectorRunner(dagger_client, **spec_control_execution_inputs.to_dict()) - - -@pytest.fixture -async def spec_control_execution_result( - record_property: Callable, - request: SubRequest, - test_artifacts_directory: Path, - spec_control_execution_inputs: ExecutionInputs, - spec_control_connector_runner: ConnectorRunner, - session_start_timestamp: int, -) -> ExecutionResult: - logging.info(f"Running spec for control connector {spec_control_execution_inputs.connector_under_test.name}") - execution_result = await spec_control_connector_runner.run() - execution_report = await persist_report( - request, - test_artifacts_directory, - spec_control_execution_inputs, - execution_result, - session_start_timestamp, - ) - record_property("control_execution_report", execution_report) - return execution_result - - -@pytest.fixture -def spec_target_execution_inputs( - target_connector: ConnectorUnderTest, -) -> ExecutionInputs: - return ExecutionInputs(connector_under_test=target_connector, command=Command.SPEC) - - -@pytest.fixture -def spec_target_connector_runner(dagger_client: dagger.Client, spec_target_execution_inputs: ExecutionInputs) -> ConnectorRunner: - return ConnectorRunner(dagger_client, 
**spec_target_execution_inputs.to_dict()) - - -@pytest.fixture -async def spec_target_execution_result( - record_property: Callable, - request: SubRequest, - test_artifacts_directory: Path, - spec_control_execution_result: ExecutionResult, - spec_target_execution_inputs: ExecutionInputs, - spec_target_connector_runner: ConnectorRunner, - session_start_timestamp: int, -) -> ExecutionResult: - logging.info(f"Running spec for target connector {spec_target_execution_inputs.connector_under_test.name}") - execution_result = await spec_target_connector_runner.run() - execution_report = await persist_report( - request, - test_artifacts_directory, - spec_target_execution_inputs, - execution_result, - session_start_timestamp, - ) - record_property("target_execution_report", execution_report) - return execution_result - - -@pytest.fixture -def check_control_execution_inputs(control_connector: ConnectorUnderTest, connector_config: SecretDict) -> ExecutionInputs: - return ExecutionInputs( - connector_under_test=control_connector, - command=Command.CHECK, - config=connector_config, - ) - - -@pytest.fixture -def check_control_connector_runner(dagger_client: dagger.Client, check_control_execution_inputs: ExecutionInputs) -> ConnectorRunner: - return ConnectorRunner(dagger_client, **check_control_execution_inputs.to_dict()) - - -@pytest.fixture -async def check_control_execution_result( - record_property: Callable, - request: SubRequest, - test_artifacts_directory: Path, - check_control_execution_inputs: ExecutionInputs, - check_control_connector_runner: ConnectorRunner, - session_start_timestamp: int, -) -> ExecutionResult: - logging.info(f"Running check for control connector {check_control_execution_inputs.connector_under_test.name}") - execution_result = await check_control_connector_runner.run() - execution_report = await persist_report( - request, - test_artifacts_directory, - check_control_execution_inputs, - execution_result, - session_start_timestamp, - ) - record_property("control_execution_report", execution_report) - return execution_result - - -@pytest.fixture -def check_target_execution_inputs(target_connector: ConnectorUnderTest, connector_config: SecretDict) -> ExecutionInputs: - return ExecutionInputs( - connector_under_test=target_connector, - command=Command.CHECK, - config=connector_config, - ) - - -@pytest.fixture -def check_target_connector_runner(dagger_client: dagger.Client, check_target_execution_inputs: ExecutionInputs) -> ConnectorRunner: - return ConnectorRunner(dagger_client, **check_target_execution_inputs.to_dict()) - - -@pytest.fixture -async def check_target_execution_result( - record_property: Callable, - request: SubRequest, - test_artifacts_directory: Path, - check_control_execution_result: ExecutionResult, - check_target_execution_inputs: ExecutionInputs, - check_target_connector_runner: ConnectorRunner, - session_start_timestamp: int, -) -> ExecutionResult: - logging.info(f"Running check for target connector {check_target_execution_inputs.connector_under_test.name}") - execution_result = await check_target_connector_runner.run() - execution_report = await persist_report( - request, - test_artifacts_directory, - check_target_execution_inputs, - execution_result, - session_start_timestamp, - ) - record_property("target_execution_report", execution_report) - return execution_result - - -@pytest.fixture -def discover_control_execution_inputs(control_connector: ConnectorUnderTest, connector_config: SecretDict) -> ExecutionInputs: - return ExecutionInputs( - 
connector_under_test=control_connector, - command=Command.DISCOVER, - config=connector_config, - ) - - -@pytest.fixture -async def discover_control_execution_result( - record_property: Callable, - request: SubRequest, - test_artifacts_directory: Path, - discover_control_execution_inputs: ExecutionInputs, - discover_control_connector_runner: ConnectorRunner, - session_start_timestamp: int, -) -> ExecutionResult: - logging.info(f"Running discover for control connector {discover_control_execution_inputs.connector_under_test.name}") - execution_result = await discover_control_connector_runner.run() - execution_report = await persist_report( - request, - test_artifacts_directory, - discover_control_execution_inputs, - execution_result, - session_start_timestamp, - ) - record_property("control_execution_report", execution_report) - return execution_result - - -@pytest.fixture -def discover_target_execution_inputs(target_connector: ConnectorUnderTest, connector_config: SecretDict) -> ExecutionInputs: - return ExecutionInputs( - connector_under_test=target_connector, - command=Command.DISCOVER, - config=connector_config, - ) - - -@pytest.fixture -def discover_control_connector_runner(dagger_client: dagger.Client, discover_control_execution_inputs: ExecutionInputs) -> ConnectorRunner: - return ConnectorRunner(dagger_client, **discover_control_execution_inputs.to_dict()) - - -@pytest.fixture -def discover_target_connector_runner(dagger_client: dagger.Client, discover_target_execution_inputs: ExecutionInputs) -> ConnectorRunner: - return ConnectorRunner(dagger_client, **discover_target_execution_inputs.to_dict()) - - -@pytest.fixture -async def discover_target_execution_result( - record_property: Callable, - request: SubRequest, - test_artifacts_directory: Path, - discover_control_execution_result: ExecutionResult, - discover_target_execution_inputs: ExecutionInputs, - discover_target_connector_runner: ConnectorRunner, - session_start_timestamp: int, -) -> ExecutionResult: - logging.info(f"Running discover for target connector {discover_target_execution_inputs.connector_under_test.name}") - execution_result = await discover_target_connector_runner.run() - execution_report = await persist_report( - request, - test_artifacts_directory, - discover_target_execution_inputs, - execution_result, - session_start_timestamp, - ) - record_property("target_execution_report", execution_report) - return execution_result - - -@pytest.fixture -def read_control_execution_inputs( - control_connector: ConnectorUnderTest, - connector_config: SecretDict, - catalog: ConfiguredAirbyteCatalog, -) -> ExecutionInputs: - return ExecutionInputs( - connector_under_test=control_connector, - command=Command.READ, - catalog=catalog, - config=connector_config, - ) - - -@pytest.fixture -def read_target_execution_inputs( - target_connector: ConnectorUnderTest, - connector_config: SecretDict, - catalog: ConfiguredAirbyteCatalog, -) -> ExecutionInputs: - return ExecutionInputs( - connector_under_test=target_connector, - command=Command.READ, - catalog=catalog, - config=connector_config, - ) - - -@pytest.fixture -def read_control_connector_runner(dagger_client: dagger.Client, read_control_execution_inputs: ExecutionInputs) -> ConnectorRunner: - return ConnectorRunner(dagger_client, **read_control_execution_inputs.to_dict()) - - -@pytest.fixture -async def read_control_execution_result( - record_property: Callable, - request: SubRequest, - test_artifacts_directory: Path, - read_control_execution_inputs: ExecutionInputs, - 
read_control_connector_runner: ConnectorRunner, - session_start_timestamp: int, - deny_confirmation: bool, -) -> ExecutionResult: - ask_for_confirmation( - f"{request.node.name} will run a full refresh read on control connector. It might induce rate limits or costs on source", - deny_confirmation, - ) - logging.info(f"Running read for control connector {read_control_execution_inputs.connector_under_test.name}") - execution_result = await read_control_connector_runner.run() - execution_report = await persist_report( - request, - test_artifacts_directory, - read_control_execution_inputs, - execution_result, - session_start_timestamp, - ) - record_property("control_execution_report", execution_report) - return execution_result - - -@pytest.fixture -def read_target_connector_runner(dagger_client: dagger.Client, read_target_execution_inputs: ExecutionInputs) -> ConnectorRunner: - return ConnectorRunner(dagger_client, **read_target_execution_inputs.to_dict()) - - -@pytest.fixture -async def read_target_execution_result( - record_property: Callable, - request: SubRequest, - test_artifacts_directory: Path, - read_control_execution_result: ExecutionResult, - read_target_execution_inputs: ExecutionInputs, - read_target_connector_runner: ConnectorRunner, - session_start_timestamp: int, - deny_confirmation: bool, -) -> ExecutionResult: - ask_for_confirmation( - f"{request.node.name} will run a full refresh read on target connector. It might induce rate limits or costs on source", - deny_confirmation, - ) - logging.info(f"Running read for target connector {read_target_execution_inputs.connector_under_test.name}") - execution_result = await read_target_connector_runner.run() - execution_report = await persist_report( - request, - test_artifacts_directory, - read_target_execution_inputs, - execution_result, - session_start_timestamp, - ) - record_property("target_execution_report", execution_report) - return execution_result - - -@pytest.fixture -def read_with_state_control_execution_inputs( - control_connector: ConnectorUnderTest, - connector_config: SecretDict, - catalog: ConfiguredAirbyteCatalog, - state: dict, -) -> ExecutionInputs: - return ExecutionInputs( - connector_under_test=control_connector, - command=Command.READ_WITH_STATE, - catalog=catalog, - config=connector_config, - state=state, - ) - - -@pytest.fixture -def read_with_state_target_execution_inputs( - target_connector: ConnectorUnderTest, - connector_config: SecretDict, - catalog: ConfiguredAirbyteCatalog, - state: dict, -) -> ExecutionInputs: - return ExecutionInputs( - connector_under_test=target_connector, - command=Command.READ_WITH_STATE, - catalog=catalog, - config=connector_config, - state=state, - ) - - -@pytest.fixture -def read_with_state_control_connector_runner( - dagger_client: dagger.Client, - read_with_state_control_execution_inputs: ExecutionInputs, -) -> ConnectorRunner: - return ConnectorRunner(dagger_client, **read_with_state_control_execution_inputs.to_dict()) - - -@pytest.fixture -async def read_with_state_control_execution_result( - record_property: Callable, - request: SubRequest, - test_artifacts_directory: Path, - read_with_state_control_execution_inputs: ExecutionInputs, - read_with_state_control_connector_runner: ConnectorRunner, - session_start_timestamp: int, - deny_confirmation: bool, -) -> ExecutionResult: - ask_for_confirmation( - f"{request.node.name} will run an incremental read on control connector. 
It might induce rate limits or costs on source", - deny_confirmation, - ) - logging.info(f"Running read with state for control connector {read_with_state_control_execution_inputs.connector_under_test.name}") - execution_result = await read_with_state_control_connector_runner.run() - execution_report = await persist_report( - request, - test_artifacts_directory, - read_with_state_control_execution_inputs, - execution_result, - session_start_timestamp, - ) - record_property("control_execution_report", execution_report) - return execution_result - - -@pytest.fixture -def read_with_state_target_connector_runner( - dagger_client: dagger.Client, - read_with_state_target_execution_inputs: ExecutionInputs, -) -> ConnectorRunner: - return ConnectorRunner(dagger_client, **read_with_state_target_execution_inputs.to_dict()) - - -@pytest.fixture -async def read_with_state_target_execution_result( - record_property: Callable, - request: SubRequest, - test_artifacts_directory: Path, - read_with_state_control_execution_result: ExecutionResult, - read_with_state_target_execution_inputs: ExecutionInputs, - read_with_state_target_connector_runner: ConnectorRunner, - session_start_timestamp: int, - deny_confirmation: bool, -) -> ExecutionResult: - ask_for_confirmation( - f"{request.node.name} will run an incremental read on target connector. It might induce rate limits or costs on source", - deny_confirmation, - ) - logging.info(f"Running read with state for target connector {read_with_state_target_execution_inputs.connector_under_test.name}") - execution_result = await read_with_state_target_connector_runner.run() - execution_report = await persist_report( - request, - test_artifacts_directory, - read_with_state_target_execution_inputs, - execution_result, - session_start_timestamp, - ) - record_property("target_execution_report", execution_report) - return execution_result diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/pytest.ini b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/pytest.ini deleted file mode 100644 index 92e77339fe7d..000000000000 --- a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/pytest.ini +++ /dev/null @@ -1,5 +0,0 @@ -[pytest] -addopts = --capture=no -console_output_style = progress -log_cli = True -log_cli_level= INFO diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/test_expected_records.py b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/test_expected_records.py deleted file mode 100644 index 5d16dbf3b727..000000000000 --- a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/test_expected_records.py +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
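A note on the fixture design in the deleted conftest.py above: every `*_target_execution_result` fixture declares the corresponding `*_control_execution_result` as a parameter purely to force execution order, so the control connector always runs before the target one even though the control value is not otherwise used. A minimal, self-contained sketch of that pattern, with hypothetical fixture names standing in for the real ones:

```python
# Ordering-by-dependency pattern: pytest must resolve (run) control_execution_result
# before it can build target_execution_result. Names here are illustrative only.
import pytest


@pytest.fixture
def control_execution_result() -> str:
    # Stand-in for e.g. read_with_state_control_execution_result.
    return "control-output"


@pytest.fixture
def target_execution_result(control_execution_result: str) -> str:
    # Requesting the control fixture guarantees control-before-target ordering,
    # even though its value is not used to compute the target result.
    return "target-output"


def test_target_runs_after_control(target_execution_result: str) -> None:
    assert target_execution_result == "target-output"
```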
- -import pytest -from live_tests.commons.models import ExecutionResult - -from .utils import filter_records, make_comparable_records - -pytestmark = [ - pytest.mark.anyio, -] - - -# This test is very basic and just used as a demonstration before porting the "real" expected records tests from VA -async def test_all_records_are_produced_in_target_version( - read_with_state_control_execution_result: ExecutionResult, - read_with_state_target_execution_result: ExecutionResult, -) -> None: - control_records = list(make_comparable_records(filter_records(read_with_state_control_execution_result.airbyte_messages))) - target_records = list(make_comparable_records(filter_records(read_with_state_target_execution_result.airbyte_messages))) - assert target_records == control_records diff --git a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/utils.py b/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/utils.py deleted file mode 100644 index e8b26038b0d9..000000000000 --- a/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/utils.py +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. - -from typing import Iterable - -from airbyte_protocol.models import AirbyteMessage, Type # type: ignore - - -def filter_records(messages: Iterable[AirbyteMessage]) -> Iterable[AirbyteMessage]: - for message in messages: - if message.type is Type.RECORD: - yield message - - -def make_comparable_records( - record_messages: Iterable[AirbyteMessage], -) -> Iterable[AirbyteMessage]: - for message in record_messages: - message.record.emitted_at = 0 - yield message diff --git a/airbyte-ci/connectors/live-tests/tests/__init__.py b/airbyte-ci/connectors/live-tests/tests/__init__.py deleted file mode 100644 index f70ecfc3a89e..000000000000 --- a/airbyte-ci/connectors/live-tests/tests/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. diff --git a/airbyte-ci/connectors/live-tests/tests/backends/__init__.py b/airbyte-ci/connectors/live-tests/tests/backends/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/airbyte-ci/connectors/live-tests/tests/backends/test_file_backend.py b/airbyte-ci/connectors/live-tests/tests/backends/test_file_backend.py deleted file mode 100644 index 1cc4526b99e6..000000000000 --- a/airbyte-ci/connectors/live-tests/tests/backends/test_file_backend.py +++ /dev/null @@ -1,71 +0,0 @@ -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- -from pathlib import Path - -import pytest -from airbyte_protocol.models import ( - AirbyteCatalog, - AirbyteConnectionStatus, - AirbyteMessage, - AirbyteRecordMessage, - AirbyteStateMessage, - ConnectorSpecification, - Status, -) -from airbyte_protocol.models import Type as AirbyteMessageType -from live_tests.commons.backends import FileBackend - - -@pytest.mark.parametrize( - "messages, expected_writes", - [ - ( - [ - AirbyteMessage(type=AirbyteMessageType.CATALOG, catalog=AirbyteCatalog(streams=[])), - AirbyteMessage( - type=AirbyteMessageType.CONNECTION_STATUS, - connectionStatus=AirbyteConnectionStatus(status=Status.SUCCEEDED), - ), - AirbyteMessage( - type=AirbyteMessageType.RECORD, - record=AirbyteRecordMessage(stream="test_stream", data={}, emitted_at=123456789), - ), - AirbyteMessage( - type=AirbyteMessageType.SPEC, - spec=ConnectorSpecification(connectionSpecification={}), - ), - AirbyteMessage( - type=AirbyteMessageType.STATE, - state=AirbyteStateMessage(data={"test": "value"}), - ), - ], - [ - ("catalog.jsonl", '{"streams": []}\n'), - ( - "connection_status.jsonl", - '{"status": "SUCCEEDED", "message": null}\n', - ), - ( - "records.jsonl", - '{"namespace": null, "stream": "test_stream", "data": {}, "meta": null}\n', - ), - ( - "spec.jsonl", - '{"documentationUrl": null, "changelogUrl": null, "connectionSpecification": {}, "supportsIncremental": null, "supportsNormalization": false, "supportsDBT": false, "supported_destination_sync_modes": null, "advanced_auth": null, "protocol_version": null}\n', - ), - ( - "_global_states.jsonl", - '{"type": null, "stream": null, "global_": null, "data": {"test": "value"}, "sourceStats": null, "destinationStats": null}\n', - ), - ], - ), - ], -) -def test_write(tmp_path, messages, expected_writes): - backend = FileBackend(tmp_path) - backend.write(messages) - for expected_file, expected_content in expected_writes: - expected_path = Path(tmp_path / expected_file) - assert expected_path.exists() - content = expected_path.read_text() - assert content == expected_content From 311e9f73b4729570c373d39d3aa4e0d60f5459cb Mon Sep 17 00:00:00 2001 From: Anton Karpets Date: Tue, 12 Mar 2024 09:51:40 +0200 Subject: [PATCH 167/172] =?UTF-8?q?=E2=9C=A8Source=20Amazon=20Seller=20Par?= =?UTF-8?q?tner:=20add=20`GET=5FVENDOR=5FFORECASTING=5FREPORT`=20streams?= =?UTF-8?q?=20(#35954)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../acceptance-test-config.yml | 4 + .../metadata.yaml | 2 +- .../pyproject.toml | 2 +- .../GET_VENDOR_FORECASTING_FRESH_REPORT.json | 7 + .../GET_VENDOR_FORECASTING_RETAIL_REPORT.json | 7 + .../shared/GET_VENDOR_FORECASTING_REPORT.json | 33 ++ .../source_amazon_seller_partner/source.py | 5 +- .../source_amazon_seller_partner/streams.py | 56 +++- .../integration/test_report_based_streams.py | 313 +++++++++++++++++- .../GET_VENDOR_FORECASTING_FRESH_REPORT.json | 31 ++ .../GET_VENDOR_FORECASTING_RETAIL_REPORT.json | 31 ++ .../sources/amazon-seller-partner.md | 7 +- 12 files changed, 487 insertions(+), 11 deletions(-) create mode 100644 airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/schemas/GET_VENDOR_FORECASTING_FRESH_REPORT.json create mode 100644 airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/schemas/GET_VENDOR_FORECASTING_RETAIL_REPORT.json create mode 100644 airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/schemas/shared/GET_VENDOR_FORECASTING_REPORT.json create 
mode 100644 airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/resource/http/response/GET_VENDOR_FORECASTING_FRESH_REPORT.json create mode 100644 airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/resource/http/response/GET_VENDOR_FORECASTING_RETAIL_REPORT.json diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/acceptance-test-config.yml b/airbyte-integrations/connectors/source-amazon-seller-partner/acceptance-test-config.yml index 2d84d9683a7f..4aea83c726f6 100644 --- a/airbyte-integrations/connectors/source-amazon-seller-partner/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/acceptance-test-config.yml @@ -99,6 +99,10 @@ acceptance_tests: bypass_reason: "Data cannot be seeded in the test account, integration tests added for the stream instead" - name: VendorOrders bypass_reason: "Data cannot be seeded in the test account, integration tests added for the stream instead" + - name: GET_VENDOR_FORECASTING_FRESH_REPORT + bypass_reason: "Data cannot be seeded in the test account, integration tests added for the stream instead" + - name: GET_VENDOR_FORECASTING_RETAIL_REPORT + bypass_reason: "Data cannot be seeded in the test account, integration tests added for the stream instead" incremental: tests: - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/metadata.yaml b/airbyte-integrations/connectors/source-amazon-seller-partner/metadata.yaml index 59fa239a7adc..aa0e71de7625 100644 --- a/airbyte-integrations/connectors/source-amazon-seller-partner/metadata.yaml +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/metadata.yaml @@ -15,7 +15,7 @@ data: connectorSubtype: api connectorType: source definitionId: e55879a8-0ef8-4557-abcf-ab34c53ec460 - dockerImageTag: 4.0.0 + dockerImageTag: 4.1.0 dockerRepository: airbyte/source-amazon-seller-partner documentationUrl: https://docs.airbyte.com/integrations/sources/amazon-seller-partner githubIssueLabel: source-amazon-seller-partner diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/pyproject.toml b/airbyte-integrations/connectors/source-amazon-seller-partner/pyproject.toml index 467e46d1c195..2e2726eb2f53 100644 --- a/airbyte-integrations/connectors/source-amazon-seller-partner/pyproject.toml +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/pyproject.toml @@ -3,7 +3,7 @@ requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "4.0.0" +version = "4.1.0" name = "source-amazon-seller-partner" description = "Source implementation for Amazon Seller Partner." authors = ["Airbyte "] diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/schemas/GET_VENDOR_FORECASTING_FRESH_REPORT.json b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/schemas/GET_VENDOR_FORECASTING_FRESH_REPORT.json new file mode 100644 index 000000000000..3a1cd743901e --- /dev/null +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/schemas/GET_VENDOR_FORECASTING_FRESH_REPORT.json @@ -0,0 +1,7 @@ +{ + "title": "Vendor Forecasting Fresh Report", + "description": "A report with forward looking mean, P70, P80, and P90 weekly customer demand forecasts. 
Data is reported at the ASIN level for the most recent weekly forecast generation date.", + "type": "object", + "$schema": "http://json-schema.org/draft-07/schema#", + "properties": { "$ref": "GET_VENDOR_FORECASTING_REPORT.json" } +} diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/schemas/GET_VENDOR_FORECASTING_RETAIL_REPORT.json b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/schemas/GET_VENDOR_FORECASTING_RETAIL_REPORT.json new file mode 100644 index 000000000000..37002421ae3a --- /dev/null +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/schemas/GET_VENDOR_FORECASTING_RETAIL_REPORT.json @@ -0,0 +1,7 @@ +{ + "title": "Vendor Forecasting Retail Report", + "description": "A report with forward looking mean, P70, P80, and P90 weekly customer demand forecasts. Data is reported at the ASIN level for the most recent weekly forecast generation date.", + "type": "object", + "$schema": "http://json-schema.org/draft-07/schema#", + "properties": { "$ref": "GET_VENDOR_FORECASTING_REPORT.json" } +} diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/schemas/shared/GET_VENDOR_FORECASTING_REPORT.json b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/schemas/shared/GET_VENDOR_FORECASTING_REPORT.json new file mode 100644 index 000000000000..9890c14ef486 --- /dev/null +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/schemas/shared/GET_VENDOR_FORECASTING_REPORT.json @@ -0,0 +1,33 @@ +{ + "forecastGenerationDate": { + "type": ["null", "string"], + "format": "date" + }, + "asin": { + "type": ["null", "string"] + }, + "startDate": { + "type": ["null", "string"], + "format": "date" + }, + "endDate": { + "type": ["null", "string"], + "format": "date" + }, + "meanForecastUnits": { + "type": ["null", "number"] + }, + "p70ForecastUnits": { + "type": ["null", "number"] + }, + "p80ForecastUnits": { + "type": ["null", "number"] + }, + "p90ForecastUnits": { + "type": ["null", "number"] + }, + "dataEndTime": { + "type": ["null", "string"], + "format": "date" + } +} diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/source.py b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/source.py index 91c2e1bd80ec..71a866be6dc7 100644 --- a/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/source.py +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/source.py @@ -3,7 +3,6 @@ # -import traceback from os import getenv from typing import Any, List, Mapping, Optional, Tuple @@ -63,6 +62,8 @@ SellerFeedbackReports, StrandedInventoryUiReport, VendorDirectFulfillmentShipping, + VendorForecastingFreshReport, + VendorForecastingRetailReport, VendorInventoryReports, VendorOrders, VendorSalesReports, @@ -184,6 +185,8 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: LedgerSummaryViewReport, FbaReimbursementsReports, VendorOrders, + VendorForecastingFreshReport, + VendorForecastingRetailReport, ] # TODO: Remove after Brand Analytics will be enabled in CLOUD: https://github.com/airbytehq/airbyte/issues/32353 diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/streams.py 
b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/streams.py index 83cd4b4663f4..7ac46dd100ca 100644 --- a/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/streams.py +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/source_amazon_seller_partner/streams.py @@ -409,13 +409,13 @@ def read_records( logging.error(f"Failed to retrieve the report result document for stream '{self.name}'. Exception: {e}") error_response = "Failed to retrieve the report result document." - raise AirbyteTracedException( - internal_message=( - f"Failed to retrieve the report '{self.name}' for period " - f"{stream_slice['dataStartTime']}-{stream_slice['dataEndTime']}. " + exception_message = f"Failed to retrieve the report '{self.name}'" + if stream_slice and "dataStartTime" in stream_slice: + exception_message += ( + f" for period {stream_slice['dataStartTime']}-{stream_slice['dataEndTime']}. " f"This will be read during the next sync. Error: {error_response}" ) - ) + raise AirbyteTracedException(internal_message=exception_message) elif processing_status == ReportProcessingStatus.CANCELLED: logger.warning(f"The report for stream '{self.name}' was cancelled or there is no data to return.") else: @@ -891,6 +891,52 @@ class VendorSalesReports(IncrementalAnalyticsStream): availability_sla_days = 4 # Data is only available after 4 days +class VendorForecastingReport(AnalyticsStream, ABC): + """ + Field definitions: + https://github.com/amzn/selling-partner-api-models/blob/main/schemas/reports/vendorForecastingReport.json + Docs: https://developer-docs.amazon.com/sp-api/docs/report-type-values-analytics#vendor-retail-analytics-reports + """ + + result_key = "forecastByAsin" + + @property + @abstractmethod + def selling_program(self) -> str: + pass + + @property + def name(self) -> str: + return f"GET_VENDOR_FORECASTING_{self.selling_program}_REPORT" + + def stream_slices( + self, sync_mode: SyncMode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None + ) -> Iterable[Optional[Mapping[str, Any]]]: + return [None] + + def _report_data( + self, + sync_mode: SyncMode, + cursor_field: List[str] = None, + stream_slice: Mapping[str, Any] = None, + stream_state: Mapping[str, Any] = None, + ) -> Mapping[str, Any]: + # This report supports the `sellingProgram` parameter only + return { + "reportType": "GET_VENDOR_FORECASTING_REPORT", + "marketplaceIds": [self.marketplace_id], + "reportOptions": {"sellingProgram": self.selling_program}, + } + + +class VendorForecastingFreshReport(VendorForecastingReport): + selling_program = "FRESH" + + +class VendorForecastingRetailReport(VendorForecastingReport): + selling_program = "RETAIL" + + class SellerFeedbackReports(IncrementalReportsAmazonSPStream): """ Field definitions: https://sellercentral.amazon.com/help/hub/reference/G202125660 diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/integration/test_report_based_streams.py b/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/integration/test_report_based_streams.py index bfdbfa241e73..167c324aa343 100644 --- a/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/integration/test_report_based_streams.py +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/integration/test_report_based_streams.py @@ -4,6 +4,7 @@ import gzip +import json from http import HTTPStatus from typing import List, Optional @@ -170,7 +171,6 @@ def 
_read(stream_name: str, config_: ConfigBuilder, expecting_exception: bool = @HttpMocker() def test_given_report_when_read_then_return_records(self, stream_name: str, data_format: str, http_mocker: HttpMocker) -> None: mock_auth(http_mocker) - http_mocker.post(_create_report_request(stream_name).build(), _create_report_response(_REPORT_ID)) http_mocker.get( _check_report_status_request(_REPORT_ID).build(), @@ -194,7 +194,6 @@ def test_given_compressed_report_when_read_then_return_records( self, stream_name: str, data_format: str, http_mocker: HttpMocker ) -> None: mock_auth(http_mocker) - http_mocker.post(_create_report_request(stream_name).build(), _create_report_response(_REPORT_ID)) http_mocker.get( _check_report_status_request(_REPORT_ID).build(), @@ -513,3 +512,313 @@ def test_given_report_when_read_then_state_message_produced_and_state_match_late cursor_value_from_state_message = output.most_recent_state.get(stream_name, {}).get(cursor_field) cursor_value_from_latest_record = output.records[-1].record.data.get(cursor_field) assert cursor_value_from_state_message == cursor_value_from_latest_record + + +@freezegun.freeze_time(NOW.isoformat()) +class TestVendorSalesReportsFullRefresh: + data_format = "json" + selling_program = ("RETAIL", "FRESH") + + @staticmethod + def _read(stream_name: str, config_: ConfigBuilder, expecting_exception: bool = False) -> EntrypointOutput: + return read_output( + config_builder=config_, + stream_name=stream_name, + sync_mode=SyncMode.full_refresh, + expecting_exception=expecting_exception, + ) + + @staticmethod + def _get_stream_name(selling_program: str) -> str: + return f"GET_VENDOR_FORECASTING_{selling_program}_REPORT" + + @staticmethod + def _get_report_request_body(selling_program: str) -> str: + return json.dumps( + { + "reportType": "GET_VENDOR_FORECASTING_REPORT", + "marketplaceIds": [MARKETPLACE_ID], + "reportOptions": {"sellingProgram": selling_program}, + } + ) + + @pytest.mark.parametrize("selling_program", selling_program) + @HttpMocker() + def test_given_report_when_read_then_return_records(self, selling_program: str, http_mocker: HttpMocker) -> None: + mock_auth(http_mocker) + stream_name = self._get_stream_name(selling_program) + create_report_request_body = self._get_report_request_body(selling_program) + http_mocker.post( + _create_report_request(stream_name).with_body(create_report_request_body).build(), + _create_report_response(_REPORT_ID), + ) + http_mocker.get( + _check_report_status_request(_REPORT_ID).build(), + _check_report_status_response(stream_name, report_document_id=_REPORT_DOCUMENT_ID), + ) + http_mocker.get( + _get_document_download_url_request(_REPORT_DOCUMENT_ID).build(), + _get_document_download_url_response(_DOCUMENT_DOWNLOAD_URL, _REPORT_DOCUMENT_ID), + ) + http_mocker.get( + _download_document_request(_DOCUMENT_DOWNLOAD_URL).build(), + _download_document_response(stream_name, data_format=self.data_format), + ) + + output = self._read(stream_name, config()) + assert len(output.records) == DEFAULT_EXPECTED_NUMBER_OF_RECORDS + + @pytest.mark.parametrize("selling_program", selling_program) + @HttpMocker() + def test_given_compressed_report_when_read_then_return_records( + self, selling_program: str, http_mocker: HttpMocker + ) -> None: + mock_auth(http_mocker) + stream_name = self._get_stream_name(selling_program) + create_report_request_body = self._get_report_request_body(selling_program) + http_mocker.post( + _create_report_request(stream_name).with_body(create_report_request_body).build(), + 
_create_report_response(_REPORT_ID), + ) + http_mocker.get( + _check_report_status_request(_REPORT_ID).build(), + _check_report_status_response(stream_name, report_document_id=_REPORT_DOCUMENT_ID), + ) + http_mocker.get( + _get_document_download_url_request(_REPORT_DOCUMENT_ID).build(), + _get_document_download_url_response(_DOCUMENT_DOWNLOAD_URL, _REPORT_DOCUMENT_ID, compressed=True), + ) + + # a workaround to pass compressed document to the mocked response + document_request = _download_document_request(_DOCUMENT_DOWNLOAD_URL).build() + document_response = _download_document_response(stream_name, data_format=self.data_format, compressed=True) + document_request_matcher = HttpRequestMatcher(document_request, minimum_number_of_expected_match=1) + http_mocker._matchers.append(document_request_matcher) + + http_mocker._mocker.get( + requests_mock.ANY, + additional_matcher=http_mocker._matches_wrapper(document_request_matcher), + response_list=[{"content": document_response.body, "status_code": document_response.status_code}], + ) + + output = self._read(stream_name, config()) + assert len(output.records) == DEFAULT_EXPECTED_NUMBER_OF_RECORDS + + @pytest.mark.parametrize("selling_program", selling_program) + @HttpMocker() + def test_given_http_status_500_then_200_when_create_report_then_retry_and_return_records( + self, selling_program: str, http_mocker: HttpMocker + ) -> None: + mock_auth(http_mocker) + stream_name = self._get_stream_name(selling_program) + create_report_request_body = self._get_report_request_body(selling_program) + http_mocker.post( + _create_report_request(stream_name).with_body(create_report_request_body).build(), + [response_with_status(status_code=HTTPStatus.INTERNAL_SERVER_ERROR), _create_report_response(_REPORT_ID)], + ) + http_mocker.get( + _check_report_status_request(_REPORT_ID).build(), + _check_report_status_response(stream_name, report_document_id=_REPORT_DOCUMENT_ID), + ) + http_mocker.get( + _get_document_download_url_request(_REPORT_DOCUMENT_ID).build(), + _get_document_download_url_response(_DOCUMENT_DOWNLOAD_URL, _REPORT_DOCUMENT_ID), + ) + http_mocker.get( + _download_document_request(_DOCUMENT_DOWNLOAD_URL).build(), + _download_document_response(stream_name, data_format=self.data_format), + ) + + output = self._read(stream_name, config()) + assert len(output.records) == DEFAULT_EXPECTED_NUMBER_OF_RECORDS + + @pytest.mark.parametrize("selling_program", selling_program) + @HttpMocker() + def test_given_http_status_500_then_200_when_retrieve_report_then_retry_and_return_records( + self, selling_program: str, http_mocker: HttpMocker + ) -> None: + mock_auth(http_mocker) + stream_name = self._get_stream_name(selling_program) + create_report_request_body = self._get_report_request_body(selling_program) + http_mocker.post( + _create_report_request(stream_name).with_body(create_report_request_body).build(), + _create_report_response(_REPORT_ID), + ) + http_mocker.get( + _check_report_status_request(_REPORT_ID).build(), + [ + response_with_status(status_code=HTTPStatus.INTERNAL_SERVER_ERROR), + _check_report_status_response(stream_name, report_document_id=_REPORT_DOCUMENT_ID), + ], + ) + http_mocker.get( + _get_document_download_url_request(_REPORT_DOCUMENT_ID).build(), + _get_document_download_url_response(_DOCUMENT_DOWNLOAD_URL, _REPORT_DOCUMENT_ID), + ) + http_mocker.get( + _download_document_request(_DOCUMENT_DOWNLOAD_URL).build(), + _download_document_response(stream_name, data_format=self.data_format), + ) + + output = self._read(stream_name, config()) + 
assert len(output.records) == DEFAULT_EXPECTED_NUMBER_OF_RECORDS + + @pytest.mark.parametrize("selling_program", selling_program) + @HttpMocker() + def test_given_http_status_500_then_200_when_get_document_url_then_retry_and_return_records( + self, selling_program: str, http_mocker: HttpMocker + ) -> None: + mock_auth(http_mocker) + stream_name = self._get_stream_name(selling_program) + create_report_request_body = self._get_report_request_body(selling_program) + http_mocker.post( + _create_report_request(stream_name).with_body(create_report_request_body).build(), + _create_report_response(_REPORT_ID), + ) + http_mocker.get( + _check_report_status_request(_REPORT_ID).build(), + _check_report_status_response(stream_name, report_document_id=_REPORT_DOCUMENT_ID), + ) + http_mocker.get( + _get_document_download_url_request(_REPORT_DOCUMENT_ID).build(), + [ + response_with_status(status_code=HTTPStatus.INTERNAL_SERVER_ERROR), + _get_document_download_url_response(_DOCUMENT_DOWNLOAD_URL, _REPORT_DOCUMENT_ID), + ], + ) + http_mocker.get( + _download_document_request(_DOCUMENT_DOWNLOAD_URL).build(), + _download_document_response(stream_name, data_format=self.data_format), + ) + + output = self._read(stream_name, config()) + assert len(output.records) == DEFAULT_EXPECTED_NUMBER_OF_RECORDS + + @pytest.mark.parametrize("selling_program", selling_program) + @HttpMocker() + def test_given_http_status_500_then_200_when_download_document_then_retry_and_return_records( + self, selling_program: str, http_mocker: HttpMocker + ) -> None: + mock_auth(http_mocker) + stream_name = self._get_stream_name(selling_program) + create_report_request_body = self._get_report_request_body(selling_program) + http_mocker.post( + _create_report_request(stream_name).with_body(create_report_request_body).build(), + _create_report_response(_REPORT_ID), + ) + http_mocker.get( + _check_report_status_request(_REPORT_ID).build(), + _check_report_status_response(stream_name, report_document_id=_REPORT_DOCUMENT_ID), + ) + http_mocker.get( + _get_document_download_url_request(_REPORT_DOCUMENT_ID).build(), + _get_document_download_url_response(_DOCUMENT_DOWNLOAD_URL, _REPORT_DOCUMENT_ID), + ) + http_mocker.get( + _download_document_request(_DOCUMENT_DOWNLOAD_URL).build(), + [ + response_with_status(status_code=HTTPStatus.INTERNAL_SERVER_ERROR), + _download_document_response(stream_name, data_format=self.data_format), + ], + ) + + output = self._read(stream_name, config()) + assert len(output.records) == DEFAULT_EXPECTED_NUMBER_OF_RECORDS + + @pytest.mark.parametrize("selling_program", selling_program) + @HttpMocker() + def test_given_report_access_forbidden_when_read_then_no_records_and_error_logged( + self, selling_program: str, http_mocker: HttpMocker + ) -> None: + mock_auth(http_mocker) + stream_name = self._get_stream_name(selling_program) + create_report_request_body = self._get_report_request_body(selling_program) + http_mocker.post( + _create_report_request(stream_name).with_body(create_report_request_body).build(), + response_with_status(status_code=HTTPStatus.FORBIDDEN), + ) + + output = self._read(stream_name, config()) + message_on_access_forbidden = ( + "This is most likely due to insufficient permissions on the credentials in use. " + "Try to grant required permissions/scopes or re-authenticate." 
+ ) + assert_message_in_log_output(message_on_access_forbidden, output) + assert len(output.records) == 0 + + @pytest.mark.parametrize("selling_program", selling_program) + @HttpMocker() + def test_given_report_status_cancelled_when_read_then_stream_completed_successfully_and_warn_about_cancellation( + self, selling_program: str, http_mocker: HttpMocker + ) -> None: + mock_auth(http_mocker) + stream_name = self._get_stream_name(selling_program) + create_report_request_body = self._get_report_request_body(selling_program) + http_mocker.post( + _create_report_request(stream_name).with_body(create_report_request_body).build(), + _create_report_response(_REPORT_ID), + ) + http_mocker.get( + _check_report_status_request(_REPORT_ID).build(), + _check_report_status_response(stream_name, processing_status=ReportProcessingStatus.CANCELLED), + ) + + message_on_report_cancelled = f"The report for stream '{stream_name}' was cancelled or there is no data to return." + + output = self._read(stream_name, config()) + assert_message_in_log_output(message_on_report_cancelled, output) + assert len(output.records) == 0 + + @pytest.mark.parametrize("selling_program", selling_program) + @HttpMocker() + def test_given_report_status_fatal_when_read_then_exception_raised( + self, selling_program: str, http_mocker: HttpMocker + ) -> None: + mock_auth(http_mocker) + stream_name = self._get_stream_name(selling_program) + create_report_request_body = self._get_report_request_body(selling_program) + http_mocker.post( + _create_report_request(stream_name).with_body(create_report_request_body).build(), + _create_report_response(_REPORT_ID), + ) + http_mocker.get( + _check_report_status_request(_REPORT_ID).build(), + _check_report_status_response( + stream_name, processing_status=ReportProcessingStatus.FATAL, report_document_id=_REPORT_DOCUMENT_ID + ), + ) + + http_mocker.get( + _get_document_download_url_request(_REPORT_DOCUMENT_ID).build(), + _get_document_download_url_response(_DOCUMENT_DOWNLOAD_URL, _REPORT_DOCUMENT_ID), + ) + http_mocker.get( + _download_document_request(_DOCUMENT_DOWNLOAD_URL).build(), + [ + response_with_status(status_code=HTTPStatus.INTERNAL_SERVER_ERROR), + _download_document_error_response(), + ], + ) + + output = self._read(stream_name, config(), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + assert f"Failed to retrieve the report '{stream_name}'" in output.errors[-1].trace.error.message + + @pytest.mark.parametrize("selling_program", selling_program) + @HttpMocker() + def test_given_http_error_500_on_create_report_when_read_then_no_records_and_error_logged( + self, selling_program: str, http_mocker: HttpMocker + ) -> None: + mock_auth(http_mocker) + stream_name = self._get_stream_name(selling_program) + create_report_request_body = self._get_report_request_body(selling_program) + http_mocker.post( + _create_report_request(stream_name).with_body(create_report_request_body).build(), + response_with_status(status_code=HTTPStatus.INTERNAL_SERVER_ERROR), + ) + + message_on_backoff_exception = f"The report for stream '{stream_name}' was cancelled due to several failed retry attempts." 
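For reference, the request body these tests pin down (built by `_get_report_request_body` above, mirroring `_report_data` in streams.py) differs between the two new streams only in the `sellingProgram` report option. A standalone sketch; the marketplace ID is the US value used in the mocked responses:

```python
import json

MARKETPLACE_ID = "ATVPDKIKX0DER"  # US marketplace, as in the test fixtures


def report_request_body(selling_program: str) -> str:
    # Both streams request the same GET_VENDOR_FORECASTING_REPORT type;
    # only the sellingProgram option distinguishes FRESH from RETAIL.
    return json.dumps(
        {
            "reportType": "GET_VENDOR_FORECASTING_REPORT",
            "marketplaceIds": [MARKETPLACE_ID],
            "reportOptions": {"sellingProgram": selling_program},
        }
    )


assert "FRESH" in report_request_body("FRESH")
assert "RETAIL" in report_request_body("RETAIL")
```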
+ + output = self._read(stream_name, config()) + assert_message_in_log_output(message_on_backoff_exception, output) + assert len(output.records) == 0 diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/resource/http/response/GET_VENDOR_FORECASTING_FRESH_REPORT.json b/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/resource/http/response/GET_VENDOR_FORECASTING_FRESH_REPORT.json new file mode 100644 index 000000000000..5cbf2cdff169 --- /dev/null +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/resource/http/response/GET_VENDOR_FORECASTING_FRESH_REPORT.json @@ -0,0 +1,31 @@ +{ + "reportSpecification": { + "reportType": "GET_VENDOR_FORECASTING_REPORT", + "reportOptions": { + "sellingProgram": "FRESH" + }, + "marketplaceIds": ["ATVPDKIKX0DER"] + }, + "forecastByAsin": [ + { + "forecastGenerationDate": "2021-06-06", + "asin": "B123456789", + "startDate": "2021-06-06", + "endDate": "2021-06-12", + "meanForecastUnits": 3.1, + "p70ForecastUnits": 3.9, + "p80ForecastUnits": 30.3, + "p90ForecastUnits": 300.7 + }, + { + "forecastGenerationDate": "2021-06-06", + "asin": "B123456789", + "startDate": "2021-06-13", + "endDate": "2021-06-19", + "meanForecastUnits": 3.1, + "p70ForecastUnits": 3.9, + "p80ForecastUnits": 30.3, + "p90ForecastUnits": 300.7 + } + ] +} diff --git a/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/resource/http/response/GET_VENDOR_FORECASTING_RETAIL_REPORT.json b/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/resource/http/response/GET_VENDOR_FORECASTING_RETAIL_REPORT.json new file mode 100644 index 000000000000..059c5301d8d6 --- /dev/null +++ b/airbyte-integrations/connectors/source-amazon-seller-partner/unit_tests/resource/http/response/GET_VENDOR_FORECASTING_RETAIL_REPORT.json @@ -0,0 +1,31 @@ +{ + "reportSpecification": { + "reportType": "GET_VENDOR_FORECASTING_REPORT", + "reportOptions": { + "sellingProgram": "RETAIL" + }, + "marketplaceIds": ["ATVPDKIKX0DER"] + }, + "forecastByAsin": [ + { + "forecastGenerationDate": "2021-06-06", + "asin": "B123456789", + "startDate": "2021-06-06", + "endDate": "2021-06-12", + "meanForecastUnits": 3.1, + "p70ForecastUnits": 3.9, + "p80ForecastUnits": 30.3, + "p90ForecastUnits": 300.7 + }, + { + "forecastGenerationDate": "2021-06-06", + "asin": "B123456789", + "startDate": "2021-06-13", + "endDate": "2021-06-19", + "meanForecastUnits": 3.1, + "p70ForecastUnits": 3.9, + "p80ForecastUnits": 30.3, + "p90ForecastUnits": 300.7 + } + ] +} diff --git a/docs/integrations/sources/amazon-seller-partner.md b/docs/integrations/sources/amazon-seller-partner.md index 40f11a5e7d12..ff9fbe60600c 100644 --- a/docs/integrations/sources/amazon-seller-partner.md +++ b/docs/integrations/sources/amazon-seller-partner.md @@ -135,10 +135,11 @@ The Amazon Seller Partner source connector supports the following [sync modes](h - [Unshipped Orders Report](https://developer-docs.amazon.com/sp-api/docs/report-type-values-order#order-reports) \(incremental\) - [Vendor Direct Fulfillment Shipping](https://developer-docs.amazon.com/sp-api/docs/vendor-direct-fulfillment-shipping-api-v1-reference) \(incremental\) - [Vendor Inventory Report](https://developer-docs.amazon.com/sp-api/docs/report-type-values-analytics#vendor-retail-analytics-reports) \(incremental\) +- [Vendor Forecasting Report](https://developer-docs.amazon.com/sp-api/docs/report-type-values-analytics#vendor-retail-analytics-reports) \(full-refresh\) +- [Vendor 
Orders](https://developer-docs.amazon.com/sp-api/docs/vendor-orders-api-v1-reference#get-vendorordersv1purchaseorders) \(incremental\) - [Vendor Sales Report](https://developer-docs.amazon.com/sp-api/docs/report-type-values-analytics#vendor-retail-analytics-reports) \(incremental\) - [Vendor Traffic Report](https://developer-docs.amazon.com/sp-api/docs/report-type-values-analytics#vendor-retail-analytics-reports) \(incremental\) - [XML Orders By Order Date Report](https://developer-docs.amazon.com/sp-api/docs/report-type-values-order#order-tracking-reports) \(incremental\) -- [Vendor Orders](https://developer-docs.amazon.com/sp-api/docs/vendor-orders-api-v1-reference#get-vendorordersv1purchaseorders) \(incremental\) ## Report options @@ -149,6 +150,9 @@ Certain report types have required parameters that must be defined. For the `GET_AMAZON_FULFILLED_SHIPMENTS_DATA_GENERAL` and `GET_FLAT_FILE_RETURNS_DATA_BY_RETURN_DATE` streams, the maximum value for `period_in_days` is 30 days and 60 days, respectively. So, for any value that exceeds the limit, `period_in_days` will be automatically reduced to the limit for the stream. +For the Vendor Forecasting Report, we have two streams - `GET_VENDOR_FORECASTING_FRESH_REPORT` and `GET_VENDOR_FORECASTING_RETAIL_REPORT` - which use the same Amazon report type, `GET_VENDOR_FORECASTING_REPORT`, +but with different values for the `sellingProgram` report option - `FRESH` and `RETAIL`, respectively. + ## Performance considerations You can find information about rate limits [here](https://developer-docs.amazon.com/sp-api/docs/usage-plans-and-rate-limits-in-the-sp-api). @@ -168,6 +172,7 @@ You can find information about rate limits [here](https://developer-docs.amazon. | Version | Date | Pull Request | Subject | |:---------|:-----------|:------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `4.1.0` | 2024-03-12 | [\#35954](https://github.com/airbytehq/airbyte/pull/35954) | Add `GET_VENDOR_FORECASTING_FRESH_REPORT` and `GET_VENDOR_FORECASTING_RETAIL_REPORT` streams | | `4.0.0` | 2024-02-23 | [\#35439](https://github.com/airbytehq/airbyte/pull/35439) | Update schema for the `GET_FBA_STORAGE_FEE_CHARGES_DATA` stream | | `3.5.0` | 2024-02-09 | [\#35331](https://github.com/airbytehq/airbyte/pull/35331) | Fix check for Vendor accounts. 
## Performance considerations

Information about rate limits you may find [here](https://developer-docs.amazon.com/sp-api/docs/usage-plans-and-rate-limits-in-the-sp-api).

@@ -168,6 +172,7 @@ Information about rate limits you may find [here](https://developer-docs.amazon.

| Version  | Date       | Pull Request                                                 | Subject |
|:---------|:-----------|:-------------------------------------------------------------|:---------------------------------------------------------------------------------------------|
+| `4.1.0`  | 2024-03-12 | [\#35954](https://github.com/airbytehq/airbyte/pull/35954)   | Add `GET_VENDOR_FORECASTING_FRESH_REPORT` and `GET_VENDOR_FORECASTING_RETAIL_REPORT` streams |
| `4.0.0`  | 2024-02-23 | [\#35439](https://github.com/airbytehq/airbyte/pull/35439)   | Update schema for the `GET_FBA_STORAGE_FEE_CHARGES_DATA` stream |
| `3.5.0`  | 2024-02-09 | [\#35331](https://github.com/airbytehq/airbyte/pull/35331)   | Fix check for Vendor accounts. Add failed report result message |
| `3.4.0`  | 2024-02-15 | [\#35273](https://github.com/airbytehq/airbyte/pull/35273)   | Add `VendorOrders` stream |

From 36e9edd4892bff213ed324c42a971c642d8b527e Mon Sep 17 00:00:00 2001
From: Anatolii Yatsuk <35109939+tolik0@users.noreply.github.com>
Date: Tue, 12 Mar 2024 11:46:35 +0200
Subject: [PATCH 168/172] =?UTF-8?q?=F0=9F=8F=A5Source=20Mixpanel:=20fix=20?=
 =?UTF-8?q?expected=20records=20(#35964)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../integration_tests/expected_records.jsonl | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/airbyte-integrations/connectors/source-mixpanel/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-mixpanel/integration_tests/expected_records.jsonl
index 3b576d1a580f..f699bfcb4c1b 100644
--- a/airbyte-integrations/connectors/source-mixpanel/integration_tests/expected_records.jsonl
+++ b/airbyte-integrations/connectors/source-mixpanel/integration_tests/expected_records.jsonl
@@ -2,11 +2,11 @@
{"stream": "engage", "data": {"distinct_id": "123@gmail.com", "email": "123@gmail.com", "name": "123", "123": "123456", "last_seen": "2023-01-01T00:00:00", "how are you": "just fine"}, "emitted_at": 1695642956746}
{"stream": "engage", "data": {"distinct_id": "integration-test@airbyte.io", "name": "Integration Test1", "test": "test", "email": "integration-test@airbyte.io", "last_seen": "2023-01-01T00:00:00"}, "emitted_at": 1695642956748}
{"stream": "engage", "data": {"distinct_id": "integration-test.db4415.mp-service-account", "name": "test", "test": "test", "last_seen": "2023-01-01T00:00:00"}, "emitted_at": 1695642956749}
-{"stream": "funnels", "data": {"funnel_id": 36152117, "name": "test", "date": "2024-02-26", "steps": [{"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "Purchase", "goal": "Purchase", "step_label": "Purchase", "overall_conv_ratio": 1, "step_conv_ratio": 1}, {"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "$custom_event:1305068", "goal": "$custom_event:1305068", "step_label": "111", "custom_event": true, "custom_event_id": 1305068, "overall_conv_ratio": 0, "step_conv_ratio": 0}], "analysis": {"completion": 0, "starting_amount": 0, "steps": 2, "worst": 1}}, "emitted_at": 1709117161812}
-{"stream": "funnels", "data": {"funnel_id": 36152117, "name": "test", "date": "2024-02-27", "steps": [{"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "Purchase", "goal": "Purchase", "step_label": "Purchase", "overall_conv_ratio": 1, "step_conv_ratio": 1}, {"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "$custom_event:1305068", "goal": "$custom_event:1305068", "step_label": "111", "custom_event": true, "custom_event_id": 1305068, "overall_conv_ratio": 0, "step_conv_ratio": 0}], "analysis": {"completion": 0, "starting_amount": 0, "steps": 2, "worst": 1}}, "emitted_at": 1709117161812}
-{"stream": "funnels", "data": {"funnel_id": 36152117, "name": "test", "date": "2024-02-28", "steps": [{"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "Purchase", "goal": "Purchase", "step_label": "Purchase", "overall_conv_ratio": 1, "step_conv_ratio": 1}, {"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "$custom_event:1305068", "goal": "$custom_event:1305068", "step_label": "111", "custom_event": true, "custom_event_id": 1305068, "overall_conv_ratio": 0, "step_conv_ratio": 0}], "analysis": {"completion": 0,
"starting_amount": 0, "steps": 2, "worst": 1}}, "emitted_at": 1709117161812} -{"stream": "revenue", "data": {"date": "2024-02-26", "amount": 0.0, "count": 3, "paid_count": 0}, "emitted_at": 1695644343316} -{"stream": "revenue", "data": {"date": "2024-02-27", "amount": 0.0, "count": 3, "paid_count": 0}, "emitted_at": 1695644343317} -{"stream": "revenue", "data": {"date": "2024-02-28", "amount": 0.0, "count": 3, "paid_count": 0}, "emitted_at": 1695644343317} +{"stream": "funnels", "data": {"funnel_id": 36152117, "name": "test", "date": "2024-03-09", "steps": [{"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "Purchase", "goal": "Purchase", "step_label": "Purchase", "overall_conv_ratio": 1, "step_conv_ratio": 1}, {"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "$custom_event:1305068", "goal": "$custom_event:1305068", "step_label": "111", "custom_event": true, "custom_event_id": 1305068, "overall_conv_ratio": 0, "step_conv_ratio": 0}], "analysis": {"completion": 0, "starting_amount": 0, "steps": 2, "worst": 1}}, "emitted_at": 1709117161812} +{"stream": "funnels", "data": {"funnel_id": 36152117, "name": "test", "date": "2024-03-10", "steps": [{"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "Purchase", "goal": "Purchase", "step_label": "Purchase", "overall_conv_ratio": 1, "step_conv_ratio": 1}, {"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "$custom_event:1305068", "goal": "$custom_event:1305068", "step_label": "111", "custom_event": true, "custom_event_id": 1305068, "overall_conv_ratio": 0, "step_conv_ratio": 0}], "analysis": {"completion": 0, "starting_amount": 0, "steps": 2, "worst": 1}}, "emitted_at": 1709117161812} +{"stream": "funnels", "data": {"funnel_id": 36152117, "name": "test", "date": "2024-03-11", "steps": [{"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "Purchase", "goal": "Purchase", "step_label": "Purchase", "overall_conv_ratio": 1, "step_conv_ratio": 1}, {"count": 0, "avg_time": null, "avg_time_from_start": null, "event": "$custom_event:1305068", "goal": "$custom_event:1305068", "step_label": "111", "custom_event": true, "custom_event_id": 1305068, "overall_conv_ratio": 0, "step_conv_ratio": 0}], "analysis": {"completion": 0, "starting_amount": 0, "steps": 2, "worst": 1}}, "emitted_at": 1709117161812} +{"stream": "revenue", "data": {"date": "2024-03-09", "amount": 0.0, "count": 3, "paid_count": 0}, "emitted_at": 1695644343316} +{"stream": "revenue", "data": {"date": "2024-03-10", "amount": 0.0, "count": 3, "paid_count": 0}, "emitted_at": 1695644343317} +{"stream": "revenue", "data": {"date": "2024-03-11", "amount": 0.0, "count": 3, "paid_count": 0}, "emitted_at": 1695644343317} {"stream": "cohort_members", "data": {"distinct_id": "integration-test@airbyte.io", "name": "Integration Test1", "test": "test", "email": "integration-test@airbyte.io", "last_seen": "2023-01-01T00:00:00", "cohort_id": 1478097}, "emitted_at": 1695644214153} {"stream": "cohort_members", "data": {"distinct_id": "integration-test.db4415.mp-service-account", "name": "test", "test": "test", "last_seen": "2023-01-01T00:00:00", "cohort_id": 1478097}, "emitted_at": 1695644214154} From 0113c525548080f128258042d8663a415897ef8c Mon Sep 17 00:00:00 2001 From: Baz Date: Tue, 12 Mar 2024 14:05:22 +0200 Subject: [PATCH 169/172] =?UTF-8?q?=F0=9F=90=9B=20Source=20Recharge:=20Add?= =?UTF-8?q?=20`sort=5Fby`=20to=20guarantee=20the=20records=20are=20in=20`A?= =?UTF-8?q?SC`=20order=20(#35982)?= MIME-Version: 1.0 
Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../connectors/source-recharge/README.md | 2 +- .../connectors/source-recharge/metadata.yaml | 2 +- .../connectors/source-recharge/poetry.lock | 56 +++++++++---------- .../connectors/source-recharge/pyproject.toml | 2 +- .../source-recharge/source_recharge/api.py | 2 + .../source-recharge/unit_tests/test_api.py | 16 +++--- docs/integrations/sources/recharge.md | 1 + 7 files changed, 42 insertions(+), 39 deletions(-) diff --git a/airbyte-integrations/connectors/source-recharge/README.md b/airbyte-integrations/connectors/source-recharge/README.md index 03ee5b5b276e..c09b9cde6f48 100644 --- a/airbyte-integrations/connectors/source-recharge/README.md +++ b/airbyte-integrations/connectors/source-recharge/README.md @@ -30,7 +30,7 @@ See `sample_files/sample_config.json` for a sample config file. poetry run source-recharge spec poetry run source-recharge check --config secrets/config.json poetry run source-recharge discover --config secrets/config.json -poetry run source-recharge read --config secrets/config.json --catalog sample_files/configured_catalog.json +poetry run source-recharge read --config secrets/config.json --catalog integration_tests/configured_catalog.json ``` ### Running unit tests diff --git a/airbyte-integrations/connectors/source-recharge/metadata.yaml b/airbyte-integrations/connectors/source-recharge/metadata.yaml index eddf63305e54..a35102066609 100644 --- a/airbyte-integrations/connectors/source-recharge/metadata.yaml +++ b/airbyte-integrations/connectors/source-recharge/metadata.yaml @@ -7,7 +7,7 @@ data: connectorBuildOptions: baseImage: docker.io/airbyte/python-connector-base:1.1.0@sha256:bd98f6505c6764b1b5f99d3aedc23dfc9e9af631a62533f60eb32b1d3dbab20c definitionId: 45d2e135-2ede-49e1-939f-3e3ec357a65e - dockerImageTag: 1.1.5 + dockerImageTag: 1.1.6 dockerRepository: airbyte/source-recharge githubIssueLabel: source-recharge icon: recharge.svg diff --git a/airbyte-integrations/connectors/source-recharge/poetry.lock b/airbyte-integrations/connectors/source-recharge/poetry.lock index 4135398b392f..262bc4e419c0 100644 --- a/airbyte-integrations/connectors/source-recharge/poetry.lock +++ b/airbyte-integrations/connectors/source-recharge/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. 
[[package]] name = "airbyte-cdk" @@ -104,13 +104,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -467,13 +467,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -702,13 +702,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -808,13 +808,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -826,15 +826,15 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = 
["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" @@ -857,19 +857,19 @@ test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "tes [[package]] name = "setuptools" -version = "69.1.0" +version = "69.1.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56"}, + {file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -895,13 +895,13 @@ files = [ [[package]] name = "typing-extensions" -version 
= "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -920,13 +920,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] diff --git a/airbyte-integrations/connectors/source-recharge/pyproject.toml b/airbyte-integrations/connectors/source-recharge/pyproject.toml index 15ca7a7471c8..27fdeeb8a39a 100644 --- a/airbyte-integrations/connectors/source-recharge/pyproject.toml +++ b/airbyte-integrations/connectors/source-recharge/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "1.1.5" +version = "1.1.6" name = "source-recharge" description = "Source implementation for Recharge." 
authors = [ "Airbyte ",]

diff --git a/airbyte-integrations/connectors/source-recharge/source_recharge/api.py b/airbyte-integrations/connectors/source-recharge/source_recharge/api.py
index aaf2cb91cf2e..85d01981cf01 100644
--- a/airbyte-integrations/connectors/source-recharge/source_recharge/api.py
+++ b/airbyte-integrations/connectors/source-recharge/source_recharge/api.py
@@ -113,6 +113,7 @@ def request_params(
         else:
             params.update(
                 {
+                    "sort_by": "updated_at-asc",
                     "updated_at_min": (stream_slice or {}).get("start_date", self._start_date),
                     "updated_at_max": (stream_slice or {}).get("end_date", self._start_date),
                 }
             )
@@ -134,6 +135,7 @@ def request_params(
     ) -> MutableMapping[str, Any]:
         params = {
             "limit": self.limit,
+            "sort_by": "updated_at-asc",
             "updated_at_min": (stream_slice or {}).get("start_date", self._start_date),
             "updated_at_max": (stream_slice or {}).get("end_date", self._start_date),
         }
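In practice, every date-windowed request now pins an explicit sort order. The sketch below is a rough illustration of the resulting HTTP request, not connector code: the connector builds these params inside its stream classes rather than calling `requests` directly, and the URL path and token header value are placeholders; only the params dict mirrors the change above.

```python
import requests

# Rough illustration of the request an incremental Recharge stream now issues.
# URL path and token are placeholders; the params mirror the api.py diff above.
response = requests.get(
    "https://api.rechargeapps.com/orders",
    headers={"X-Recharge-Access-Token": "YOUR_API_TOKEN"},
    params={
        "limit": 250,
        "sort_by": "updated_at-asc",  # new: pins ascending updated_at order
        "updated_at_min": "2020-01-01T00:00:00Z",
        "updated_at_max": "2020-02-01T00:00:00Z",
    },
)
```

Without a guaranteed ascending order, a page-by-page incremental sync can advance its cursor past records that arrive out of order; pinning `sort_by` keeps the cursor monotonic.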
diff --git a/airbyte-integrations/connectors/source-recharge/unit_tests/test_api.py b/airbyte-integrations/connectors/source-recharge/unit_tests/test_api.py
index 3981d725c047..473cd753e209 100644
--- a/airbyte-integrations/connectors/source-recharge/unit_tests/test_api.py
+++ b/airbyte-integrations/connectors/source-recharge/unit_tests/test_api.py
@@ -161,7 +161,7 @@ def test_next_page_token(self, config, stream_cls, cursor_response, requests_moc
             None,
             {},
             {"start_date": "2020-01-01T00:00:00Z", "end_date": "2020-02-01T00:00:00Z"},
-            {"limit": 250, "updated_at_min": "2020-01-01T00:00:00Z", "updated_at_max": "2020-02-01T00:00:00Z"},
+            {"limit": 250, "sort_by": "updated_at-asc", "updated_at_min": "2020-01-01T00:00:00Z", "updated_at_max": "2020-02-01T00:00:00Z"},
         ),
         (Metafields, {"cursor": "12353"}, {"updated_at": "2030-01-01"}, {}, {"limit": 250, "owner_resource": None, "cursor": "12353"}),
         (
@@ -169,7 +169,7 @@ def test_next_page_token(self, config, stream_cls, cursor_response, requests_moc
             None,
             {},
             {"start_date": "2020-01-01T00:00:00Z", "end_date": "2020-02-01T00:00:00Z"},
-            {"limit": 250, "updated_at_min": "2020-01-01T00:00:00Z", "updated_at_max": "2020-02-01T00:00:00Z"},
+            {"limit": 250, "sort_by": "updated_at-asc", "updated_at_min": "2020-01-01T00:00:00Z", "updated_at_max": "2020-02-01T00:00:00Z"},
         ),
         (Shop, None, {}, {}, {}),
     ],
@@ -275,7 +275,7 @@ def test_next_page_token(self, config, stream_cls, cursor_response, requests_moc
             None,
             {},
             {"start_date": "2020-01-01T00:00:00Z", "end_date": "2020-02-01T00:00:00Z"},
-            {"limit": 250, "updated_at_min": "2020-01-01T00:00:00Z", "updated_at_max": "2020-02-01T00:00:00Z"},
+            {"limit": 250, "sort_by": "updated_at-asc", "updated_at_min": "2020-01-01T00:00:00Z", "updated_at_max": "2020-02-01T00:00:00Z"},
         ),
         (
             Charges,
@@ -289,14 +289,14 @@ def test_next_page_token(self, config, stream_cls, cursor_response, requests_moc
             None,
             {},
             {"start_date": "2020-01-01T00:00:00Z", "end_date": "2020-02-01T00:00:00Z"},
-            {"limit": 250, "updated_at_min": "2020-01-01T00:00:00Z", "updated_at_max": "2020-02-01T00:00:00Z"},
+            {"limit": 250, "sort_by": "updated_at-asc", "updated_at_min": "2020-01-01T00:00:00Z", "updated_at_max": "2020-02-01T00:00:00Z"},
         ),
         (
             Discounts,
             None,
             {},
             {"start_date": "2020-01-01T00:00:00Z", "end_date": "2020-02-01T00:00:00Z"},
-            {"limit": 250, "updated_at_min": "2020-01-01T00:00:00Z", "updated_at_max": "2020-02-01T00:00:00Z"},
+            {"limit": 250, "sort_by": "updated_at-asc", "updated_at_min": "2020-01-01T00:00:00Z", "updated_at_max": "2020-02-01T00:00:00Z"},
         ),
         (
             Onetimes,
@@ -310,21 +310,21 @@ def test_next_page_token(self, config, stream_cls, cursor_response, requests_moc
             None,
             {},
             {"start_date": "2020-01-01T00:00:00Z", "end_date": "2020-02-01T00:00:00Z"},
-            {"limit": 250, "updated_at_min": "2020-01-01T00:00:00Z", "updated_at_max": "2020-02-01T00:00:00Z"},
+            {"limit": 250, "sort_by": "updated_at-asc", "updated_at_min": "2020-01-01T00:00:00Z", "updated_at_max": "2020-02-01T00:00:00Z"},
         ),
         (
             OrdersModernApi,
             None,
             {},
             {"start_date": "2020-01-01T00:00:00Z", "end_date": "2020-02-01T00:00:00Z"},
-            {"limit": 250, "updated_at_min": "2020-01-01T00:00:00Z", "updated_at_max": "2020-02-01T00:00:00Z"},
+            {"limit": 250, "sort_by": "updated_at-asc", "updated_at_min": "2020-01-01T00:00:00Z", "updated_at_max": "2020-02-01T00:00:00Z"},
         ),
         (
             Subscriptions,
             None,
             {},
             {"start_date": "2020-01-01T00:00:00Z", "end_date": "2020-02-01T00:00:00Z"},
-            {"limit": 250, "updated_at_min": "2020-01-01T00:00:00Z", "updated_at_max": "2020-02-01T00:00:00Z"},
+            {"limit": 250, "sort_by": "updated_at-asc", "updated_at_min": "2020-01-01T00:00:00Z", "updated_at_max": "2020-02-01T00:00:00Z"},
         ),
     ],
 )

diff --git a/docs/integrations/sources/recharge.md b/docs/integrations/sources/recharge.md
index fa784da249da..0b768640e0ce 100644
--- a/docs/integrations/sources/recharge.md
+++ b/docs/integrations/sources/recharge.md
@@ -76,6 +76,7 @@ The Recharge connector should gracefully handle Recharge API limitations under n

| Version | Date       | Pull Request                                              | Subject |
|:--------|:-----------|:----------------------------------------------------------|:------------------------------------------------------------------------------------------|
+| 1.1.6   | 2024-03-12 | [35982](https://github.com/airbytehq/airbyte/pull/35982)  | Added `sort_by` query param to guarantee the records are returned in `asc` order |
| 1.1.5   | 2024-02-12 | [35182](https://github.com/airbytehq/airbyte/pull/35182)  | Manage dependencies with Poetry. 
| | 1.1.4 | 2024-02-02 | [34772](https://github.com/airbytehq/airbyte/pull/34772) | Fix airbyte-lib distribution | | 1.1.3 | 2024-01-31 | [34707](https://github.com/airbytehq/airbyte/pull/34707) | Added the UI toggle `Use 'Orders' Deprecated API` to switch between `deprecated` and `modern` api versions for `Orders` stream | From 5da29ac7f6938e1f39bf143c82fcbc4f673d6634 Mon Sep 17 00:00:00 2001 From: Christo Grabowski <108154848+ChristoGrab@users.noreply.github.com> Date: Tue, 12 Mar 2024 15:11:23 -0400 Subject: [PATCH 170/172] Source Amplitude: unpin CDK (#35987) --- .../integration_tests/integration_test.py | 4 +- .../connectors/source-amplitude/metadata.yaml | 2 +- .../connectors/source-amplitude/poetry.lock | 231 +++++++----------- .../source-amplitude/pyproject.toml | 4 +- docs/integrations/sources/amplitude.md | 5 +- 5 files changed, 100 insertions(+), 146 deletions(-) mode change 100644 => 100755 airbyte-integrations/connectors/source-amplitude/integration_tests/integration_test.py diff --git a/airbyte-integrations/connectors/source-amplitude/integration_tests/integration_test.py b/airbyte-integrations/connectors/source-amplitude/integration_tests/integration_test.py old mode 100644 new mode 100755 index 63ad0a9063ec..cb2230f94ead --- a/airbyte-integrations/connectors/source-amplitude/integration_tests/integration_test.py +++ b/airbyte-integrations/connectors/source-amplitude/integration_tests/integration_test.py @@ -8,6 +8,7 @@ import pytest from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources.declarative.types import StreamSlice from source_amplitude.source import SourceAmplitude @@ -85,7 +86,8 @@ def test_empty_streams(stream_fixture_name, url, expected_records, request, requ due to free subscription plan for the sandbox """ stream = request.getfixturevalue(stream_fixture_name) - records_reader = stream.read_records(sync_mode=SyncMode.full_refresh, cursor_field=None, stream_slice={}) + empty_stream_slice = StreamSlice(partition={}, cursor_slice={}) + records_reader = stream.read_records(sync_mode=SyncMode.full_refresh, cursor_field=None, stream_slice=empty_stream_slice) requests_mock.get(url, status_code=200, json={"data": expected_records}) # Sort actual and expected records by ID. diff --git a/airbyte-integrations/connectors/source-amplitude/metadata.yaml b/airbyte-integrations/connectors/source-amplitude/metadata.yaml index 59a49c0f5fc0..5cc66f4cf1a5 100644 --- a/airbyte-integrations/connectors/source-amplitude/metadata.yaml +++ b/airbyte-integrations/connectors/source-amplitude/metadata.yaml @@ -11,7 +11,7 @@ data: connectorSubtype: api connectorType: source definitionId: fa9f58c6-2d03-4237-aaa4-07d75e0c1396 - dockerImageTag: 0.3.7 + dockerImageTag: 0.3.8 dockerRepository: airbyte/source-amplitude documentationUrl: https://docs.airbyte.com/integrations/sources/amplitude githubIssueLabel: source-amplitude diff --git a/airbyte-integrations/connectors/source-amplitude/poetry.lock b/airbyte-integrations/connectors/source-amplitude/poetry.lock index 647f6526bac0..47a96853cf9f 100644 --- a/airbyte-integrations/connectors/source-amplitude/poetry.lock +++ b/airbyte-integrations/connectors/source-amplitude/poetry.lock @@ -1,18 +1,18 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "airbyte-cdk" -version = "0.52.0" +version = "0.69.1" description = "A framework for writing Airbyte Connectors." 
optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.52.0.tar.gz", hash = "sha256:760b5bb279e5b06455bc33c9744dd9facbc0b203ccc4ac48e1e2877807e3c845"}, - {file = "airbyte_cdk-0.52.0-py3-none-any.whl", hash = "sha256:bf7c82b2a7ec3cc4ddedd17cd6cd6e2385991af965729f23ffbdb0515388a8e2"}, + {file = "airbyte-cdk-0.69.1.tar.gz", hash = "sha256:f30fc6d3756b43b5fc1e50f076861de42f032efde803df07083d1e17b94ca0d8"}, + {file = "airbyte_cdk-0.69.1-py3-none-any.whl", hash = "sha256:dfb3008cbf609c907f8a03c4625de3540812734d8570dec83eae8940929ead4e"}, ] [package.dependencies] -airbyte-protocol-models = "0.4.2" +airbyte-protocol-models = "0.5.1" backoff = "*" cachetools = "*" Deprecated = ">=1.2,<2.0" @@ -22,8 +22,9 @@ isodate = ">=0.6.1,<0.7.0" Jinja2 = ">=3.1.2,<3.2.0" jsonref = ">=0.2,<1.0" jsonschema = ">=3.2.0,<3.3.0" -pendulum = "*" +pendulum = "<3.0.0" pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" python-dateutil = "*" PyYAML = ">=6.0.1" requests = "*" @@ -31,20 +32,20 @@ requests-cache = "*" wcmatch = "8.4" [package.extras] -dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pyarrow (==12.0.1)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)"] -file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "pyarrow (==12.0.1)"] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] name = "airbyte-protocol-models" -version = "0.4.2" +version = "0.5.1" description = "Declares the Airbyte Protocol." 
optional = false python-versions = ">=3.8" files = [ - {file = "airbyte_protocol_models-0.4.2-py3-none-any.whl", hash = "sha256:d3bbb14d4af9483bd7b08f5eb06f87e7113553bf4baed3998af95be873a0d821"}, - {file = "airbyte_protocol_models-0.4.2.tar.gz", hash = "sha256:67b149d4812f8fdb88396b161274aa73cf0e16f22e35ce44f2bfc4d47e51915c"}, + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, ] [package.dependencies] @@ -103,13 +104,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -466,113 +467,48 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] name = "pendulum" -version = "3.0.0" +version = "2.1.2" description = "Python datetimes made easy" optional = false -python-versions = ">=3.8" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ - {file = "pendulum-3.0.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2cf9e53ef11668e07f73190c805dbdf07a1939c3298b78d5a9203a86775d1bfd"}, - {file = "pendulum-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fb551b9b5e6059377889d2d878d940fd0bbb80ae4810543db18e6f77b02c5ef6"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c58227ac260d5b01fc1025176d7b31858c9f62595737f350d22124a9a3ad82d"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60fb6f415fea93a11c52578eaa10594568a6716602be8430b167eb0d730f3332"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b69f6b4dbcb86f2c2fe696ba991e67347bcf87fe601362a1aba6431454b46bde"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:138afa9c373ee450ede206db5a5e9004fd3011b3c6bbe1e57015395cd076a09f"}, - {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:83d9031f39c6da9677164241fd0d37fbfc9dc8ade7043b5d6d62f56e81af8ad2"}, - {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:0c2308af4033fa534f089595bcd40a95a39988ce4059ccd3dc6acb9ef14ca44a"}, - {file = "pendulum-3.0.0-cp310-none-win_amd64.whl", hash = "sha256:9a59637cdb8462bdf2dbcb9d389518c0263799189d773ad5c11db6b13064fa79"}, - {file = "pendulum-3.0.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3725245c0352c95d6ca297193192020d1b0c0f83d5ee6bb09964edc2b5a2d508"}, - {file = "pendulum-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6c035f03a3e565ed132927e2c1b691de0dbf4eb53b02a5a3c5a97e1a64e17bec"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597e66e63cbd68dd6d58ac46cb7a92363d2088d37ccde2dae4332ef23e95cd00"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99a0f8172e19f3f0c0e4ace0ad1595134d5243cf75985dc2233e8f9e8de263ca"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:77d8839e20f54706aed425bec82a83b4aec74db07f26acd039905d1237a5e1d4"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afde30e8146292b059020fbc8b6f8fd4a60ae7c5e6f0afef937bbb24880bdf01"}, - {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:660434a6fcf6303c4efd36713ca9212c753140107ee169a3fc6c49c4711c2a05"}, - {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dee9e5a48c6999dc1106eb7eea3e3a50e98a50651b72c08a87ee2154e544b33e"}, - {file = "pendulum-3.0.0-cp311-none-win_amd64.whl", hash = "sha256:d4cdecde90aec2d67cebe4042fd2a87a4441cc02152ed7ed8fb3ebb110b94ec4"}, - {file = "pendulum-3.0.0-cp311-none-win_arm64.whl", hash = "sha256:773c3bc4ddda2dda9f1b9d51fe06762f9200f3293d75c4660c19b2614b991d83"}, - {file = "pendulum-3.0.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:409e64e41418c49f973d43a28afe5df1df4f1dd87c41c7c90f1a63f61ae0f1f7"}, - {file = "pendulum-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a38ad2121c5ec7c4c190c7334e789c3b4624798859156b138fcc4d92295835dc"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fde4d0b2024b9785f66b7f30ed59281bd60d63d9213cda0eb0910ead777f6d37"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b2c5675769fb6d4c11238132962939b960fcb365436b6d623c5864287faa319"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8af95e03e066826f0f4c65811cbee1b3123d4a45a1c3a2b4fc23c4b0dff893b5"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2165a8f33cb15e06c67070b8afc87a62b85c5a273e3aaa6bc9d15c93a4920d6f"}, - {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ad5e65b874b5e56bd942546ea7ba9dd1d6a25121db1c517700f1c9de91b28518"}, - {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17fe4b2c844bbf5f0ece69cfd959fa02957c61317b2161763950d88fed8e13b9"}, - {file = "pendulum-3.0.0-cp312-none-win_amd64.whl", hash = "sha256:78f8f4e7efe5066aca24a7a57511b9c2119f5c2b5eb81c46ff9222ce11e0a7a5"}, - {file = "pendulum-3.0.0-cp312-none-win_arm64.whl", hash = "sha256:28f49d8d1e32aae9c284a90b6bb3873eee15ec6e1d9042edd611b22a94ac462f"}, - {file = "pendulum-3.0.0-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d4e2512f4e1a4670284a153b214db9719eb5d14ac55ada5b76cbdb8c5c00399d"}, - {file = "pendulum-3.0.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = 
"sha256:3d897eb50883cc58d9b92f6405245f84b9286cd2de6e8694cb9ea5cb15195a32"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e169cc2ca419517f397811bbe4589cf3cd13fca6dc38bb352ba15ea90739ebb"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f17c3084a4524ebefd9255513692f7e7360e23c8853dc6f10c64cc184e1217ab"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:826d6e258052715f64d05ae0fc9040c0151e6a87aae7c109ba9a0ed930ce4000"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2aae97087872ef152a0c40e06100b3665d8cb86b59bc8471ca7c26132fccd0f"}, - {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ac65eeec2250d03106b5e81284ad47f0d417ca299a45e89ccc69e36130ca8bc7"}, - {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a5346d08f3f4a6e9e672187faa179c7bf9227897081d7121866358af369f44f9"}, - {file = "pendulum-3.0.0-cp37-none-win_amd64.whl", hash = "sha256:235d64e87946d8f95c796af34818c76e0f88c94d624c268693c85b723b698aa9"}, - {file = "pendulum-3.0.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:6a881d9c2a7f85bc9adafcfe671df5207f51f5715ae61f5d838b77a1356e8b7b"}, - {file = "pendulum-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d7762d2076b9b1cb718a6631ad6c16c23fc3fac76cbb8c454e81e80be98daa34"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e8e36a8130819d97a479a0e7bf379b66b3b1b520e5dc46bd7eb14634338df8c"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7dc843253ac373358ffc0711960e2dd5b94ab67530a3e204d85c6e8cb2c5fa10"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a78ad3635d609ceb1e97d6aedef6a6a6f93433ddb2312888e668365908c7120"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b30a137e9e0d1f751e60e67d11fc67781a572db76b2296f7b4d44554761049d6"}, - {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c95984037987f4a457bb760455d9ca80467be792236b69d0084f228a8ada0162"}, - {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d29c6e578fe0f893766c0d286adbf0b3c726a4e2341eba0917ec79c50274ec16"}, - {file = "pendulum-3.0.0-cp38-none-win_amd64.whl", hash = "sha256:deaba8e16dbfcb3d7a6b5fabdd5a38b7c982809567479987b9c89572df62e027"}, - {file = "pendulum-3.0.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b11aceea5b20b4b5382962b321dbc354af0defe35daa84e9ff3aae3c230df694"}, - {file = "pendulum-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a90d4d504e82ad236afac9adca4d6a19e4865f717034fc69bafb112c320dcc8f"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:825799c6b66e3734227756fa746cc34b3549c48693325b8b9f823cb7d21b19ac"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad769e98dc07972e24afe0cff8d365cb6f0ebc7e65620aa1976fcfbcadc4c6f3"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6fc26907eb5fb8cc6188cc620bc2075a6c534d981a2f045daa5f79dfe50d512"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c717eab1b6d898c00a3e0fa7781d615b5c5136bbd40abe82be100bb06df7a56"}, - {file = 
"pendulum-3.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3ddd1d66d1a714ce43acfe337190be055cdc221d911fc886d5a3aae28e14b76d"}, - {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:822172853d7a9cf6da95d7b66a16c7160cb99ae6df55d44373888181d7a06edc"}, - {file = "pendulum-3.0.0-cp39-none-win_amd64.whl", hash = "sha256:840de1b49cf1ec54c225a2a6f4f0784d50bd47f68e41dc005b7f67c7d5b5f3ae"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b1f74d1e6ffe5d01d6023870e2ce5c2191486928823196f8575dcc786e107b1"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:729e9f93756a2cdfa77d0fc82068346e9731c7e884097160603872686e570f07"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e586acc0b450cd21cbf0db6bae386237011b75260a3adceddc4be15334689a9a"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22e7944ffc1f0099a79ff468ee9630c73f8c7835cd76fdb57ef7320e6a409df4"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fa30af36bd8e50686846bdace37cf6707bdd044e5cb6e1109acbad3277232e04"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:440215347b11914ae707981b9a57ab9c7b6983ab0babde07063c6ee75c0dc6e7"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:314c4038dc5e6a52991570f50edb2f08c339debdf8cea68ac355b32c4174e820"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5acb1d386337415f74f4d1955c4ce8d0201978c162927d07df8eb0692b2d8533"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a789e12fbdefaffb7b8ac67f9d8f22ba17a3050ceaaa635cd1cc4645773a4b1e"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:860aa9b8a888e5913bd70d819306749e5eb488e6b99cd6c47beb701b22bdecf5"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:5ebc65ea033ef0281368217fbf59f5cb05b338ac4dd23d60959c7afcd79a60a0"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d9fef18ab0386ef6a9ac7bad7e43ded42c83ff7ad412f950633854f90d59afa8"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1c134ba2f0571d0b68b83f6972e2307a55a5a849e7dac8505c715c531d2a8795"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:385680812e7e18af200bb9b4a49777418c32422d05ad5a8eb85144c4a285907b"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eec91cd87c59fb32ec49eb722f375bd58f4be790cae11c1b70fac3ee4f00da0"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4386bffeca23c4b69ad50a36211f75b35a4deb6210bdca112ac3043deb7e494a"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dfbcf1661d7146d7698da4b86e7f04814221081e9fe154183e34f4c5f5fa3bf8"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:04a1094a5aa1daa34a6b57c865b25f691848c61583fb22722a4df5699f6bf74c"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5b0ec85b9045bd49dd3a3493a5e7ddfd31c36a2a60da387c419fa04abcaecb23"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:0a15b90129765b705eb2039062a6daf4d22c4e28d1a54fa260892e8c3ae6e157"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:bb8f6d7acd67a67d6fedd361ad2958ff0539445ef51cbe8cd288db4306503cd0"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd69b15374bef7e4b4440612915315cc42e8575fcda2a3d7586a0d88192d0c88"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc00f8110db6898360c53c812872662e077eaf9c75515d53ecc65d886eec209a"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:83a44e8b40655d0ba565a5c3d1365d27e3e6778ae2a05b69124db9e471255c4a"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1a3604e9fbc06b788041b2a8b78f75c243021e0f512447806a6d37ee5214905d"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:92c307ae7accebd06cbae4729f0ba9fa724df5f7d91a0964b1b972a22baa482b"}, - {file = "pendulum-3.0.0.tar.gz", hash = "sha256:5d034998dea404ec31fae27af6b22cff1708f830a1ed7353be4d1019bb9f584e"}, + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = 
"sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, ] [package.dependencies] -python-dateutil = ">=2.6" -tzdata = ">=2020.1" - -[package.extras] -test = ["time-machine (>=2.6.0)"] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" [[package]] name = "platformdirs" @@ -667,6 +603,21 @@ typing-extensions = ">=4.2.0" dotenv = ["python-dotenv (>=0.10.4)"] email = ["email-validator (>=1.0.3)"] +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + [[package]] name = "pyrsistent" version = "0.20.0" @@ -751,18 +702,29 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] six = ">=1.5" +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + [[package]] name = "pyyaml" version = "6.0.1" @@ -846,13 +808,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] [package.dependencies] @@ -864,15 +826,15 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" @@ -895,19 +857,19 @@ test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "tes [[package]] name = "setuptools" -version = "69.1.0" +version = "69.1.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56"}, + {file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", 
"sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -933,24 +895,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, -] - -[[package]] -name = "tzdata" -version = "2024.1" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -969,13 +920,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1080,4 +1031,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "71149e8c9b376cbd538e039f53fb1be4ceb6562766a6221a6a95d15a2dab08e3" +content-hash = "a7a96e2b3330d2b39e398d386ac5724f0ddb92f7862e5029789b59942d9ba36d" diff --git a/airbyte-integrations/connectors/source-amplitude/pyproject.toml b/airbyte-integrations/connectors/source-amplitude/pyproject.toml index e610b3f4642f..65d9582863d7 100644 --- a/airbyte-integrations/connectors/source-amplitude/pyproject.toml +++ b/airbyte-integrations/connectors/source-amplitude/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "0.3.7" +version = "0.3.8" name = "source-amplitude" description = "Source implementation for Amplitude." authors = [ "Airbyte ",] @@ -17,7 +17,7 @@ include = "source_amplitude" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.52.0" +airbyte-cdk = "^0" [tool.poetry.scripts] source-amplitude = "source_amplitude.run:run" diff --git a/docs/integrations/sources/amplitude.md b/docs/integrations/sources/amplitude.md index 0972f2dcd549..d47212f08ab4 100644 --- a/docs/integrations/sources/amplitude.md +++ b/docs/integrations/sources/amplitude.md @@ -52,8 +52,9 @@ The Amplitude connector ideally should gracefully handle Amplitude API limitatio | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:----------------------------------------------------------------------------------------------------------| -| 0.3.7 | 2024-02-12 | [35162](https://github.com/airbytehq/airbyte/pull/35162) | Manage dependencies with Poetry. | -| 0.3.6 | 2023-10-23 | [31702](https://github.com/airbytehq/airbyte/pull/31702) | Base image migration: remove Dockerfile and use the python-connector-base image | +| 0.3.8 | 2024-03-12 | [35987](https://github.com/airbytehq/airbyte/pull/35987) | Unpin CDK version | +| 0.3.7 | 2024-02-12 | [35162](https://github.com/airbytehq/airbyte/pull/35162) | Manage dependencies with Poetry. 
| +| 0.3.6 | 2023-10-23 | [31702](https://github.com/airbytehq/airbyte/pull/31702) | Base image migration: remove Dockerfile and use the python-connector-base image | | 0.3.5 | 2023-09-28 | [30846](https://github.com/airbytehq/airbyte/pull/30846) | Add support of multiple cursor date formats | | 0.3.4 | 2023-09-28 | [30831](https://github.com/airbytehq/airbyte/pull/30831) | Add user friendly error description on 403 error | | 0.3.3 | 2023-09-21 | [30652](https://github.com/airbytehq/airbyte/pull/30652) | Update spec: declare `start_date` type as `date-time` | From 15b954546f3fcaaacfa2c5bd76108e275ae1a19d Mon Sep 17 00:00:00 2001 From: Alexandre Girard Date: Tue, 12 Mar 2024 12:18:38 -0700 Subject: [PATCH 171/172] =?UTF-8?q?raise=20exception=20with=20the=20full?= =?UTF-8?q?=20class=20name=20if=20a=20class=20for=20a=20custom=20comp?= =?UTF-8?q?=E2=80=A6=20(#35868)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../parsers/model_to_component_factory.py | 11 +++++++---- .../parsers/test_model_to_component_factory.py | 14 ++++++++++++++ 2 files changed, 21 insertions(+), 4 deletions(-) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py index 8f60500d012b..5797fb879130 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py @@ -319,7 +319,7 @@ def create_session_token_authenticator( ) if model.request_authentication.type == "Bearer": return ModelToComponentFactory.create_bearer_authenticator( - BearerAuthenticatorModel(type="BearerAuthenticator", api_token=""), + BearerAuthenticatorModel(type="BearerAuthenticator", api_token=""), # type: ignore # $parameters has a default value config, token_provider=token_provider, # type: ignore # $parameters defaults to None ) @@ -431,11 +431,14 @@ def create_custom_component(self, model: Any, config: Config, **kwargs: Any) -> return custom_component_class(**kwargs) @staticmethod - def _get_class_from_fully_qualified_class_name(class_name: str) -> Any: - split = class_name.split(".") + def _get_class_from_fully_qualified_class_name(full_qualified_class_name: str) -> Any: + split = full_qualified_class_name.split(".") module = ".".join(split[:-1]) class_name = split[-1] - return getattr(importlib.import_module(module), class_name) + try: + return getattr(importlib.import_module(module), class_name) + except AttributeError: + raise ValueError(f"Could not load class {full_qualified_class_name}.") @staticmethod def _derive_component_type_from_type_hints(field_type: Any) -> Optional[str]: diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py b/airbyte-cdk/python/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py index 0a5a796566c9..4e07cad644db 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py @@ -1239,6 +1239,20 @@ def test_create_default_paginator(): ValueError, id="test_create_custom_component_missing_required_field_emits_error", ), + pytest.param( + { + "type": "CustomErrorHandler", + "class_name": "unit_tests.sources.declarative.parsers.testing_components.NonExistingClass", + "paginator": { + "type": 
"DefaultPaginator", + "pagination_strategy": {"type": "OffsetIncrement", "page_size": 10}, + }, + }, + "paginator", + None, + ValueError, + id="test_create_custom_component_non_existing_class_raises_value_error", + ), ], ) def test_create_custom_components(manifest, field_name, expected_value, expected_error): From 75385c71d76fd1d548efa9b29c9e4523523cee01 Mon Sep 17 00:00:00 2001 From: girarda Date: Tue, 12 Mar 2024 19:26:10 +0000 Subject: [PATCH 172/172] =?UTF-8?q?=F0=9F=A4=96=20Bump=20patch=20version?= =?UTF-8?q?=20of=20Python=20CDK?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- airbyte-cdk/python/.bumpversion.cfg | 2 +- airbyte-cdk/python/CHANGELOG.md | 3 +++ airbyte-cdk/python/Dockerfile | 4 ++-- airbyte-cdk/python/setup.py | 2 +- 4 files changed, 7 insertions(+), 4 deletions(-) diff --git a/airbyte-cdk/python/.bumpversion.cfg b/airbyte-cdk/python/.bumpversion.cfg index 365441db2f7d..32d45cc34c59 100644 --- a/airbyte-cdk/python/.bumpversion.cfg +++ b/airbyte-cdk/python/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.69.1 +current_version = 0.69.2 commit = False [bumpversion:file:setup.py] diff --git a/airbyte-cdk/python/CHANGELOG.md b/airbyte-cdk/python/CHANGELOG.md index 6a9b7f6979c1..a144afe87128 100644 --- a/airbyte-cdk/python/CHANGELOG.md +++ b/airbyte-cdk/python/CHANGELOG.md @@ -1,5 +1,8 @@ # Changelog +## 0.69.2 +low-code: improve error message when a custom component cannot be found + ## 0.69.1 Update mock server test entrypoint wrapper to use per-stream state diff --git a/airbyte-cdk/python/Dockerfile b/airbyte-cdk/python/Dockerfile index c151e421317c..ba24eae844bc 100644 --- a/airbyte-cdk/python/Dockerfile +++ b/airbyte-cdk/python/Dockerfile @@ -10,7 +10,7 @@ RUN apk --no-cache upgrade \ && apk --no-cache add tzdata build-base # install airbyte-cdk -RUN pip install --prefix=/install airbyte-cdk==0.69.1 +RUN pip install --prefix=/install airbyte-cdk==0.69.2 # build a clean environment FROM base @@ -32,5 +32,5 @@ ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] # needs to be the same as CDK -LABEL io.airbyte.version=0.69.1 +LABEL io.airbyte.version=0.69.2 LABEL io.airbyte.name=airbyte/source-declarative-manifest diff --git a/airbyte-cdk/python/setup.py b/airbyte-cdk/python/setup.py index 89cf0bd2cb85..de915d056fef 100644 --- a/airbyte-cdk/python/setup.py +++ b/airbyte-cdk/python/setup.py @@ -36,7 +36,7 @@ name="airbyte-cdk", # The version of the airbyte-cdk package is used at runtime to validate manifests. That validation must be # updated if our semver format changes such as using release candidate versions. - version="0.69.1", + version="0.69.2", description="A framework for writing Airbyte Connectors.", long_description=README, long_description_content_type="text/markdown",