TST: Refactor S3 tests #61703

Status: Open · wants to merge 10 commits into main

3 changes: 0 additions & 3 deletions .github/workflows/unit-tests.yml
@@ -140,9 +140,6 @@ jobs:

       moto:
         image: motoserver/moto:5.0.27
-        env:
-          AWS_ACCESS_KEY_ID: foobar_key
-          AWS_SECRET_ACCESS_KEY: foobar_secret
         ports:
           - 5000:5000
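The service container can drop its hard-coded env block because the session-scoped aws_credentials fixture added in pandas/tests/io/conftest.py below exports credentials to every test process, and moto accepts any non-empty values. A minimal sketch of the mechanism, assuming boto3 is installed; the values are arbitrary:

    import os

    # moto does not validate credentials; boto3 only requires that some exist
    os.environ["AWS_ACCESS_KEY_ID"] = "testing"
    os.environ["AWS_SECRET_ACCESS_KEY"] = "testing"

    import boto3

    creds = boto3.session.Session().get_credentials()
    assert creds.access_key == "testing"  # resolved from the environment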

6 changes: 6 additions & 0 deletions pandas/conftest.py
@@ -2116,3 +2116,9 @@ def temp_file(tmp_path):
     file_path = tmp_path / str(uuid.uuid4())
     file_path.touch()
     return file_path

+
+
+@pytest.fixture(scope="session")
+def monkeysession():
+    with pytest.MonkeyPatch.context() as mp:
+        yield mp
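pytest.MonkeyPatch.context() is the supported way to obtain a monkeypatch whose lifetime is wider than the built-in function-scoped fixture. A hypothetical consumer, not part of this diff, showing the session-wide behavior (fake_home and test_home_is_patched are illustrative names):

    import os

    import pytest


    @pytest.fixture(scope="session")
    def fake_home(monkeysession, tmp_path_factory):
        # patched once per session, undone automatically at session end
        home = tmp_path_factory.mktemp("home")
        monkeysession.setenv("HOME", str(home))
        return home


    def test_home_is_patched(fake_home):
        assert os.environ["HOME"] == str(fake_home)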
148 changes: 60 additions & 88 deletions pandas/tests/io/conftest.py
@@ -1,6 +1,3 @@
-import shlex
-import subprocess
-import time
 import uuid
 
 import pytest
@@ -50,93 +47,77 @@ def xml_file(datapath):
     return datapath("io", "data", "xml", "books.xml")
 
 
-@pytest.fixture
-def s3_base(worker_id, monkeypatch):
-    """
-    Fixture for mocking S3 interaction.
+@pytest.fixture(scope="session")
+def aws_credentials(monkeysession):
+    """Mocked AWS Credentials for moto."""
+    monkeysession.setenv("AWS_ACCESS_KEY_ID", "testing")
+    monkeysession.setenv("AWS_SECRET_ACCESS_KEY", "testing")
+    monkeysession.setenv("AWS_SECURITY_TOKEN", "testing")
+    monkeysession.setenv("AWS_SESSION_TOKEN", "testing")
+    monkeysession.setenv("AWS_DEFAULT_REGION", "us-east-1")

-    Sets up moto server in separate process locally
-    Return url for motoserver/moto CI service
-    """
-    pytest.importorskip("s3fs")
-    pytest.importorskip("boto3")
-
-    # temporary workaround as moto fails for botocore >= 1.11 otherwise,
-    # see https://github.com/spulec/moto/issues/1924 & 1952
-    monkeypatch.setenv("AWS_ACCESS_KEY_ID", "foobar_key")
-    monkeypatch.setenv("AWS_SECRET_ACCESS_KEY", "foobar_secret")
-    if is_ci_environment():
-        if is_platform_arm() or is_platform_mac() or is_platform_windows():
-            # NOT RUN on Windows/macOS, only Ubuntu
-            # - subprocess in CI can cause timeouts
-            # - GitHub Actions do not support
-            #   container services for the above OSs
-            pytest.skip(
-                "S3 tests do not have a corresponding service on "
-                "Windows or macOS platforms"
-            )
-        else:
-            # set in .github/workflows/unit-tests.yml
-            yield "http://localhost:5000"

+@pytest.fixture(scope="session")
+def moto_server(aws_credentials):
+    # use service container for Linux on GitHub Actions
+    if is_ci_environment() and not (
+        is_platform_mac() or is_platform_arm() or is_platform_windows()
+    ):
+        yield "http://localhost:5000"
     else:
-        requests = pytest.importorskip("requests")
-        pytest.importorskip("moto")
-        pytest.importorskip("flask")  # server mode needs flask too
-
-        # Launching moto in server mode, i.e., as a separate process
-        # with an S3 endpoint on localhost
-
-        worker_id = "5" if worker_id == "master" else worker_id.lstrip("gw")
-        endpoint_port = f"555{worker_id}"
-        endpoint_uri = f"http://127.0.0.1:{endpoint_port}/"
-
-        # pipe to null to avoid logging in terminal
-        with subprocess.Popen(
-            shlex.split(f"moto_server s3 -p {endpoint_port}"),
-            stdout=subprocess.DEVNULL,
-            stderr=subprocess.DEVNULL,
-        ) as proc:
-            timeout = 5
-            while timeout > 0:
-                try:
-                    # OK to go once server is accepting connections
-                    r = requests.get(endpoint_uri)
-                    if r.ok:
-                        break
-                except Exception:
-                    pass
-                timeout -= 0.1
-                time.sleep(0.1)
-            yield endpoint_uri
-
-            proc.terminate()
+        moto_server = pytest.importorskip("moto.server")
+        server = moto_server.ThreadedMotoServer(port=0)
+        server.start()
+        host, port = server.get_host_and_port()
+        yield f"http://{host}:{port}"
+        server.stop()
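ThreadedMotoServer(port=0) lets the OS assign a free port, so parallel pytest-xdist workers no longer need the old 555{worker_id} port arithmetic. A standalone sketch of the server lifecycle, assuming moto is installed with its server extra:

    from moto.server import ThreadedMotoServer

    server = ThreadedMotoServer(port=0)  # port=0 -> OS picks a free port
    server.start()
    host, port = server.get_host_and_port()  # recover the actual address
    print(f"mock S3 endpoint: http://{host}:{port}")
    server.stop()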


 @pytest.fixture
-def s3so(s3_base):
-    return {"client_kwargs": {"endpoint_url": s3_base}}
+def moto_s3_resource(moto_server):
+    boto3 = pytest.importorskip("boto3")
+    s3 = boto3.resource("s3", endpoint_url=moto_server)
+    return s3
 
 
-@pytest.fixture
-def s3_resource(s3_base):
-    import boto3
+@pytest.fixture(scope="session")
+def s3so(moto_server):
+    return {
+        "client_kwargs": {
+            "endpoint_url": moto_server,
+        }
+    }
-
-    s3 = boto3.resource("s3", endpoint_url=s3_base)
-    return s3
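s3so is handed to pandas as storage_options, which s3fs forwards to its S3 client, pointing all reads and writes at the moto endpoint. A hypothetical round-trip test (test_csv_round_trip is an illustrative name, not part of this diff):

    import pandas as pd
    import pandas._testing as tm


    def test_csv_round_trip(s3_bucket_public, s3so):
        df = pd.DataFrame({"a": [1, 2, 3]})
        path = f"s3://{s3_bucket_public.name}/demo.csv"
        df.to_csv(path, index=False, storage_options=s3so)
        result = pd.read_csv(path, storage_options=s3so)
        tm.assert_frame_equal(result, df)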

 @pytest.fixture
+def s3_bucket_public(moto_s3_resource):
+    """
+    Create a public S3 bucket using moto.
+    """
+    bucket_name = f"pandas-test-{uuid.uuid4()}"
+    bucket = moto_s3_resource.Bucket(bucket_name)
+    bucket.create(ACL="public-read")
+    yield bucket
+    bucket.objects.delete()
+    bucket.delete()
 
 
 @pytest.fixture
-def s3_public_bucket(s3_resource):
-    bucket = s3_resource.Bucket(f"pandas-test-{uuid.uuid4()}")
-    bucket.create()
+def s3_bucket_private(moto_s3_resource):
+    """
+    Create a private S3 bucket using moto.
+    """
+    bucket_name = f"cant_get_it-{uuid.uuid4()}"
+    bucket = moto_s3_resource.Bucket(bucket_name)
+    bucket.create(ACL="private")
     yield bucket
     bucket.objects.delete()
     bucket.delete()


 @pytest.fixture
-def s3_public_bucket_with_data(
-    s3_public_bucket, tips_file, jsonl_file, feather_file, xml_file
+def s3_bucket_public_with_data(
+    s3_bucket_public, tips_file, jsonl_file, feather_file, xml_file
 ):
     """
     The following datasets
@@ -158,22 +139,13 @@ def s3_public_bucket_with_data(
     ]
     for s3_key, file_name in test_s3_files:
         with open(file_name, "rb") as f:
-            s3_public_bucket.put_object(Key=s3_key, Body=f)
-    return s3_public_bucket
-
-
-@pytest.fixture
-def s3_private_bucket(s3_resource):
-    bucket = s3_resource.Bucket(f"cant_get_it-{uuid.uuid4()}")
-    bucket.create(ACL="private")
-    yield bucket
-    bucket.objects.delete()
-    bucket.delete()
+            s3_bucket_public.put_object(Key=s3_key, Body=f)
+    return s3_bucket_public


 @pytest.fixture
-def s3_private_bucket_with_data(
-    s3_private_bucket, tips_file, jsonl_file, feather_file, xml_file
+def s3_bucket_private_with_data(
+    s3_bucket_private, tips_file, jsonl_file, feather_file, xml_file
 ):
     """
     The following datasets
@@ -195,8 +167,8 @@ def s3_private_bucket_with_data(
     ]
     for s3_key, file_name in test_s3_files:
         with open(file_name, "rb") as f:
-            s3_private_bucket.put_object(Key=s3_key, Body=f)
-    return s3_private_bucket
+            s3_bucket_private.put_object(Key=s3_key, Body=f)
+    return s3_bucket_private
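Read-only tests depend on the *_with_data variants and address the uploaded objects by key. A hedged sketch mirroring test_read_s3_jsonl in pandas/tests/io/json/test_pandas.py below; items.jsonl is one of the keys the fixture uploads:

    import pandas as pd


    def test_jsonl_from_public_bucket(s3_bucket_public_with_data, s3so):
        url = f"s3://{s3_bucket_public_with_data.name}/items.jsonl"
        df = pd.read_json(url, lines=True, storage_options=s3so)
        assert not df.empty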


_compression_formats_params = [
14 changes: 6 additions & 8 deletions pandas/tests/io/excel/test_readers.py
@@ -934,29 +934,27 @@ def test_read_from_http_url(self, httpserver, read_ext):

     @td.skip_if_not_us_locale
     @pytest.mark.single_cpu
-    def test_read_from_s3_url(self, read_ext, s3_public_bucket, s3so):
-        # Bucket created in tests/io/conftest.py
+    def test_read_from_s3_url(self, read_ext, s3_bucket_public, s3so):
         with open("test1" + read_ext, "rb") as f:
-            s3_public_bucket.put_object(Key="test1" + read_ext, Body=f)
+            s3_bucket_public.put_object(Key="test1" + read_ext, Body=f)
 
-        url = f"s3://{s3_public_bucket.name}/test1" + read_ext
+        url = f"s3://{s3_bucket_public.name}/test1" + read_ext
 
         url_table = pd.read_excel(url, storage_options=s3so)
         local_table = pd.read_excel("test1" + read_ext)
         tm.assert_frame_equal(url_table, local_table)
 
     @pytest.mark.single_cpu
-    def test_read_from_s3_object(self, read_ext, s3_public_bucket, s3so):
+    def test_read_from_s3_object(self, read_ext, s3_bucket_public, s3so):
         # GH 38788
-        # Bucket created in tests/io/conftest.py
         with open("test1" + read_ext, "rb") as f:
-            s3_public_bucket.put_object(Key="test1" + read_ext, Body=f)
+            s3_bucket_public.put_object(Key="test1" + read_ext, Body=f)
 
         import s3fs
 
         s3 = s3fs.S3FileSystem(**s3so)
 
-        with s3.open(f"s3://{s3_public_bucket.name}/test1" + read_ext) as f:
+        with s3.open(f"s3://{s3_bucket_public.name}/test1" + read_ext) as f:
             url_table = pd.read_excel(f)
 
         local_table = pd.read_excel("test1" + read_ext)
8 changes: 4 additions & 4 deletions pandas/tests/io/excel/test_style.py
@@ -318,16 +318,16 @@ def custom_converter(css):

 @pytest.mark.single_cpu
 @td.skip_if_not_us_locale
-def test_styler_to_s3(s3_public_bucket, s3so):
+def test_styler_to_s3(s3_bucket_public, s3so):
     # GH#46381
 
-    mock_bucket_name, target_file = s3_public_bucket.name, "test.xlsx"
+    mock_bucket_name = s3_bucket_public.name
+    target_file = f"{uuid.uuid4()}.xlsx"
     df = DataFrame({"x": [1, 2, 3], "y": [2, 4, 6]})
     styler = df.style.set_sticky(axis="index")
     styler.to_excel(f"s3://{mock_bucket_name}/{target_file}", storage_options=s3so)
     timeout = 5
     while True:
-        if target_file in (obj.key for obj in s3_public_bucket.objects.all()):
+        if target_file in (obj.key for obj in s3_bucket_public.objects.all()):
             break
         time.sleep(0.1)
         timeout -= 0.1
8 changes: 5 additions & 3 deletions pandas/tests/io/json/test_compression.py
@@ -2,6 +2,7 @@
     BytesIO,
     StringIO,
 )
+import uuid
 
 import pytest

@@ -42,17 +43,18 @@ def test_read_zipped_json(datapath):
 @td.skip_if_not_us_locale
 @pytest.mark.single_cpu
 @pytest.mark.network
-def test_with_s3_url(compression, s3_public_bucket, s3so):
+def test_with_s3_url(compression, s3_bucket_public, s3so):
     # Bucket created in tests/io/conftest.py
     df = pd.read_json(StringIO('{"a": [1, 2, 3], "b": [4, 5, 6]}'))
 
+    key = f"{uuid.uuid4()}.json"
     with tm.ensure_clean() as path:
         df.to_json(path, compression=compression)
         with open(path, "rb") as f:
-            s3_public_bucket.put_object(Key="test-1", Body=f)
+            s3_bucket_public.put_object(Key=key, Body=f)
 
     roundtripped_df = pd.read_json(
-        f"s3://{s3_public_bucket.name}/test-1",
+        f"s3://{s3_bucket_public.name}/{key}",
         compression=compression,
         storage_options=s3so,
     )
13 changes: 7 additions & 6 deletions pandas/tests/io/json/test_pandas.py
@@ -8,6 +8,7 @@
 import os
 import sys
 import time
+import uuid
 
 import numpy as np
 import pytest
@@ -1411,11 +1412,10 @@ def test_read_inline_jsonl(self):
     @pytest.mark.single_cpu
     @pytest.mark.network
     @td.skip_if_not_us_locale
-    def test_read_s3_jsonl(self, s3_public_bucket_with_data, s3so):
+    def test_read_s3_jsonl(self, s3_bucket_public_with_data, s3so):
         # GH17200
-
         result = read_json(
-            f"s3n://{s3_public_bucket_with_data.name}/items.jsonl",
+            f"s3n://{s3_bucket_public_with_data.name}/items.jsonl",
             lines=True,
             storage_options=s3so,
         )
@@ -2011,14 +2011,15 @@ def test_json_multiindex(self):

     @pytest.mark.single_cpu
     @pytest.mark.network
-    def test_to_s3(self, s3_public_bucket, s3so):
+    def test_to_s3(self, s3_bucket_public, s3so):
         # GH 28375
-        mock_bucket_name, target_file = s3_public_bucket.name, "test.json"
+        mock_bucket_name = s3_bucket_public.name
+        target_file = f"{uuid.uuid4()}.json"
         df = DataFrame({"x": [1, 2, 3], "y": [2, 4, 6]})
         df.to_json(f"s3://{mock_bucket_name}/{target_file}", storage_options=s3so)
         timeout = 5
         while True:
-            if target_file in (obj.key for obj in s3_public_bucket.objects.all()):
+            if target_file in (obj.key for obj in s3_bucket_public.objects.all()):
                 break
             time.sleep(0.1)
             timeout -= 0.1