Skip to content

Commit

Permalink
Adopt connector best practices for File Source (providers) (airbytehq#1584, airbytehq#1738)
Browse files Browse the repository at this point in the history


Co-authored-by: Sherif Nada <[email protected]>
  • Loading branch information
eugene-kulak and sherifnada authored Jan 28, 2021
1 parent 143f880 commit a54dd89
Show file tree
Hide file tree
Showing 27 changed files with 798 additions and 629 deletions.
1 change: 1 addition & 0 deletions .github/workflows/test-command.yml
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@ jobs:
GH_INTEGRATION_TEST_CREDS: ${{ secrets.GH_INTEGRATION_TEST_CREDS }}
GOOGLE_ANALYTICS_TEST_CREDS: ${{ secrets.GOOGLE_ANALYTICS_TEST_CREDS }}
GOOGLE_ANALYTICS_TEST_TRACKING_ID: ${{ secrets.GOOGLE_ANALYTICS_TEST_TRACKING_ID }}
GOOGLE_CLOUD_STORAGE_TEST_CREDS: ${{ secrets.GOOGLE_CLOUD_STORAGE_TEST_CREDS }}
GREENHOUSE_TEST_CREDS: ${{ secrets.GREENHOUSE_TEST_CREDS }}
GSHEETS_INTEGRATION_TESTS_CREDS: ${{ secrets.GSHEETS_INTEGRATION_TESTS_CREDS }}
HUBSPOT_INTEGRATION_TESTS_CREDS: ${{ secrets.HUBSPOT_INTEGRATION_TESTS_CREDS }}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,6 @@
"sourceDefinitionId": "778daa7c-feaf-4db6-96f3-70fd645acc77",
"name": "File",
"dockerRepository": "airbyte/source-file",
"dockerImageTag": "0.1.7",
"dockerImageTag": "0.1.8",
"documentationUrl": "https://hub.docker.com/r/airbyte/source-file"
}
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
- sourceDefinitionId: 778daa7c-feaf-4db6-96f3-70fd645acc77
name: File
dockerRepository: airbyte/source-file
dockerImageTag: 0.1.7
dockerImageTag: 0.1.8
documentationUrl: https://hub.docker.com/r/airbyte/source-file
- sourceDefinitionId: fdc8b827-3257-4b33-83cc-106d234c34d4
name: Google Adwords
Expand Down
2 changes: 1 addition & 1 deletion airbyte-integrations/connectors/source-file/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -11,5 +11,5 @@ COPY $CODE_PATH ./$CODE_PATH
COPY setup.py ./
RUN pip install ".[main]"

LABEL io.airbyte.version=0.1.7
LABEL io.airbyte.version=0.1.8
LABEL io.airbyte.name=airbyte/source-file
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,6 @@ COPY $CODE_PATH $CODE_PATH
COPY source_file/*.json $CODE_PATH
COPY setup.py ./

RUN pip install ".[integration_tests]"
RUN pip install ".[tests]"

WORKDIR /airbyte
9 changes: 9 additions & 0 deletions airbyte-integrations/connectors/source-file/build.gradle
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
import ru.vyarus.gradle.plugin.python.task.PythonTask

plugins {
id 'airbyte-python'
id 'airbyte-docker'
Expand All @@ -8,6 +10,13 @@ airbytePython {
moduleDirectory 'source_file'
}

task("customIntegrationTestPython", type: PythonTask, dependsOn: installTestReqs){
module = "pytest"
command = "-s integration_tests"
}

integrationTest.dependsOn("customIntegrationTestPython")

dependencies {
implementation files(project(':airbyte-integrations:bases:base-python').airbyteDocker.outputs)
}
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@
SOFTWARE.
"""

from .integration_source_test import TestSourceFile
from .standard_source_test import SourceFileStandardTest

__all__ = ["SourceFileStandardTest", "TestSourceFile"]
__all__ = ["SourceFileStandardTest"]
Original file line number Diff line number Diff line change
@@ -0,0 +1,145 @@
"""
MIT License
Copyright (c) 2020 Airbyte
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""

import json
from pathlib import Path

import pytest
from source_file.client import Client

HERE = Path(__file__).parent.absolute()


def check_read(config, expected_columns=10, expected_rows=42):
    """Build a Client from *config*, read all rows, and assert the row and
    column counts match the expected values.

    Shared helper for the provider integration tests below.
    """
    reader = Client(**config)
    records = [record for record in reader.read()]
    assert len(records) == expected_rows
    # Column count is checked on the first record only.
    assert len(records[0]) == expected_columns


@pytest.mark.parametrize(
    "provider_name,file_path,file_format",
    [
        ("ssh", "files/test.csv", "csv"),
        ("scp", "files/test.csv", "csv"),
        ("sftp", "files/test.csv", "csv"),
        ("ssh", "files/test.csv.gz", "csv"),  # text file inside gzip
        ("ssh", "files/test.pkl", "pickle"),  # binary format
        ("sftp", "files/test.pkl.gz", "pickle"),  # binary format inside gzip
    ],
)
def test__read_from_private_ssh(provider_config, provider_name, file_path, file_format):
    """The first record read over each SSH-family provider matches the known
    fixture row (same file content served via ssh/scp/sftp)."""
    reader = Client(
        dataset_name="output",
        format=file_format,
        url=file_path,
        provider=provider_config(provider_name),
    )
    first_record = next(reader.read())
    assert first_record == {"header1": "text", "header2": 1, "header3": 0.2}


@pytest.mark.parametrize(
    "provider_name,file_path,file_format",
    [
        ("ssh", "files/file_does_not_exist.csv", "csv"),
        ("gcs", "gs://gcp-public-data-landsat/file_does_not_exist.csv", "csv"),
    ],
)
def test__read_file_not_found(provider_config, provider_name, file_path, file_format):
    """Reading a nonexistent remote file surfaces FileNotFoundError to the caller."""
    reader = Client(
        dataset_name="output",
        format=file_format,
        url=file_path,
        provider=provider_config(provider_name),
    )
    # The error is raised lazily, on the first read, not at construction time.
    with pytest.raises(FileNotFoundError):
        next(reader.read())


@pytest.mark.parametrize(
    "provider_name, file_path, file_format",
    [
        ("ssh", "files/test.csv", "csv"),
        ("ssh", "files/test.pkl", "pickle"),
        ("sftp", "files/test.pkl.gz", "pickle"),
    ],
)
def test__streams_from_ssh_providers(provider_config, provider_name, file_path, file_format):
    """Discovery over SSH-family providers yields exactly one stream whose JSON
    schema matches the fixture file's three columns."""
    reader = Client(
        dataset_name="output",
        format=file_format,
        url=file_path,
        provider=provider_config(provider_name),
    )
    discovered = list(reader.streams)
    assert len(discovered) == 1
    expected_properties = {
        "header1": {"type": "string"},
        "header2": {"type": "number"},
        "header3": {"type": "number"},
    }
    assert discovered[0].json_schema["properties"] == expected_properties


@pytest.mark.parametrize(
    "storage_provider, url, columns_nb, separator, has_header",
    [
        # epidemiology csv — same object twice, with and without an explicit scheme
        ("HTTPS", "https://storage.googleapis.com/covid19-open-data/v2/latest/epidemiology.csv", 10, ",", True),
        ("HTTPS", "storage.googleapis.com/covid19-open-data/v2/latest/epidemiology.csv", 10, ",", True),
        ("local", "injected by tests", 10, ",", True),
        # landsat compressed csv
        ("GCS", "gs://gcp-public-data-landsat/index.csv.gz", 18, ",", True),
        # GDELT csv
        ("S3", "s3://gdelt-open-data/events/20190914.export.csv", 58, "\\t", False),
    ],
)
def test__read_from_public_provider(download_gcs_public_data, storage_provider, url, columns_nb, separator, has_header):
    """Read the first 42 rows of a public object via each storage provider and
    verify the expected column count.

    The GCS and S3 parametrize rows were previously duplicated verbatim, running
    two identical (and slow, network-bound) test cases each; the duplicates are
    removed. The HTTPS pair is intentionally kept — those rows differ,
    exercising URLs with and without an explicit scheme.
    """
    # NOTE(review): has_header is parametrized but never used by the body —
    # confirm whether it was meant to be forwarded via reader_options.
    # For the "local" provider the fixture supplies a pre-downloaded temp file
    # path instead of a URL.
    url = download_gcs_public_data if storage_provider == "local" else url
    config = {
        "format": "csv",
        "dataset_name": "output",
        "reader_options": json.dumps({"sep": separator, "nrows": 42}),
        "provider": {"storage": storage_provider},
        "url": url,
    }

    check_read(config, expected_columns=columns_nb)


def test__read_from_private_gcs(google_cloud_service_credentials, private_google_cloud_file):
    """A private GCS object is readable when service-account credentials are
    passed through the provider config."""
    provider = {
        "storage": "GCS",
        # The client expects the service-account key as a JSON string.
        "service_account_json": json.dumps(google_cloud_service_credentials),
    }
    check_read(
        {
            "dataset_name": "output",
            "format": "csv",
            "url": private_google_cloud_file,
            "reader_options": json.dumps({"sep": ",", "nrows": 42}),
            "provider": provider,
        }
    )


def test__read_from_private_aws(aws_credentials, private_aws_file):
    """A private S3 object is readable when an access key pair is passed
    through the provider config."""
    provider = {
        "storage": "S3",
        "aws_access_key_id": aws_credentials["aws_access_key_id"],
        "aws_secret_access_key": aws_credentials["aws_secret_access_key"],
    }
    check_read(
        {
            "dataset_name": "output",
            "format": "csv",
            "url": private_aws_file,
            "reader_options": json.dumps({"sep": ",", "nrows": 42}),
            "provider": provider,
        }
    )
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
{
"filename": "integrationTestFile",
"dataset_name": "integrationTestFile",
"format": "csv",
"reader_options": "{\"sep\": \",\", \"nrows\": 20}",
"url": "https://storage.googleapis.com/covid19-open-data/v2/latest/epidemiology.csv",
Expand Down
Loading

0 comments on commit a54dd89

Please sign in to comment.