-
Notifications
You must be signed in to change notification settings - Fork 40
/
Copy pathtest_boto3_storage.py
172 lines (142 loc) · 6.31 KB
/
test_boto3_storage.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
# -*- coding: utf-8 -*-
# Integration tests for depot's boto3-based S3 storage backend.
# NOTE(review): `mock` here is the standalone rolling backport of
# unittest.mock (kept for PY2 compatibility, see depot._compat import below).
import os
import uuid
import unittest
import mock
import requests
from flaky import flaky
from unittest import SkipTest
from depot._compat import PY2, unicode_text
# Rebound inside TestS3FileStorage.setUpClass via `global` once the boto3
# backend proves importable; stays None when boto is not installed.
S3Storage = None
# Payload used by every upload/download round-trip assertion below.
FILE_CONTENT = b'HELLO WORLD'
@flaky
class TestS3FileStorage(unittest.TestCase):
    """Integration tests for the boto3-backed ``S3Storage``.

    Requires real AWS credentials in ``AWS_ACCESS_KEY_ID`` /
    ``AWS_SECRET_ACCESS_KEY``; skips otherwise.  Each run uses a uniquely
    named bucket that is emptied and deleted in ``tearDownClass``.
    """

    @classmethod
    def setUpClass(cls):
        # Travis runs multiple tests concurrently on fake machines that might
        # collide on pid and hostid, so use an uuid1 which should be fairly random
        # thanks to clock_seq
        cls.run_id = '%s-%s' % (uuid.uuid1().hex, os.getpid())

        try:
            # Rebind the module-level S3Storage so every test can use it.
            global S3Storage
            from depot.io.boto3 import S3Storage
        except ImportError:
            raise SkipTest('Boto not installed')

        env = os.environ
        access_key_id = env.get('AWS_ACCESS_KEY_ID')
        secret_access_key = env.get('AWS_SECRET_ACCESS_KEY')
        if access_key_id is None or secret_access_key is None:
            raise SkipTest('Amazon S3 credentials not available')

        # Bucket name S3Storage falls back to when none is given explicitly
        # (exercised by test_creates_bucket_when_missing).
        cls.default_bucket_name = 'filedepot-%s' % (access_key_id.lower(), )
        cls.cred = (access_key_id, secret_access_key)
        cls.bucket = 'filedepot-testfs-%s' % cls.run_id
        cls.fs = S3Storage(*cls.cred, bucket=cls.bucket)

    @classmethod
    def tearDownClass(cls):
        """Empty and delete the per-run bucket, tolerating partial setup."""
        buckets = set(
            b['Name'] for b in cls.fs._bucket_driver.s3.meta.client.list_buckets()['Buckets']
        )
        if cls.fs._bucket_driver.bucket.name not in buckets:
            # Bucket wasn't created, probably due to monkey patching, just skip.
            return

        # S3 refuses to delete non-empty buckets, so drop the objects first.
        for obj in cls.fs._bucket_driver.bucket.objects.all():
            obj.delete()

        try:
            cls.fs._bucket_driver.bucket.delete()
        except Exception:
            # Best-effort cleanup: a failed delete must not fail the suite.
            pass
        else:
            cls.fs._bucket_driver.bucket.wait_until_not_exists()

    def test_fileoutside_depot(self):
        """Objects written to the bucket outside depot are still readable."""
        fid = str(uuid.uuid1())
        key = self.fs._bucket_driver.new_key(fid)
        key.put(Body=FILE_CONTENT)

        f = self.fs.get(fid)
        assert f.read() == FILE_CONTENT

    def test_creates_bucket_when_missing(self):
        """S3Storage creates its default bucket when it does not exist yet."""
        created_buckets = []

        def mock_make_api_call(_, operation_name, kwarg):
            if operation_name == 'ListBuckets':
                return {'Buckets': []}
            elif operation_name == 'CreateBucket':
                created_buckets.append(kwarg['Bucket'])
                return None
            elif operation_name == 'HeadBucket':
                # Only buckets we pretended to create respond with 200.
                if kwarg['Bucket'] in created_buckets:
                    return {'ResponseMetadata': {'HTTPStatusCode': 200}}
                else:
                    return {}
            else:
                assert False, 'Unexpected Call'

        from depot.io.boto3 import CANNED_ACL_PRIVATE
        with mock.patch('botocore.client.BaseClient._make_api_call', new=mock_make_api_call):
            S3Storage(*self.cred, policy=CANNED_ACL_PRIVATE)
        assert created_buckets == [self.default_bucket_name]

    def test_bucket_failure(self):
        """Errors while listing buckets propagate out of the constructor."""
        from botocore.exceptions import ClientError

        def mock_make_api_call(_, operation_name, kwarg):
            if operation_name == 'ListBuckets':
                raise ClientError(error_response={'Error': {'Code': 500}},
                                  operation_name=operation_name)

        try:
            with mock.patch('botocore.client.BaseClient._make_api_call', new=mock_make_api_call):
                S3Storage(*self.cred)
        except ClientError:
            pass
        else:
            assert False, 'Should have reraised ClientError'

    def test_client_receives_extra_args(self):
        """endpoint_url / region_name are forwarded to boto3's resource()."""
        with mock.patch('boto3.session.Session.client'), mock.patch('boto3.session.Session.resource') as mockresource:
            S3Storage(*self.cred, endpoint_url='http://somehwere.it', region_name='worlwide')
            mockresource.assert_called_once_with('s3', endpoint_url='http://somehwere.it',
                                                 region_name='worlwide')

    def test_get_key_failure(self):
        """Non-404 HeadObject errors are re-raised by Storage.get()."""
        from botocore.exceptions import ClientError
        from botocore.client import BaseClient

        # Keep the real implementation so all other API calls work normally.
        make_api_call = BaseClient._make_api_call

        def mock_make_api_call(cli, operation_name, kwarg):
            if operation_name == 'HeadObject':
                raise ClientError(error_response={'Error': {'Code': 500}},
                                  operation_name=operation_name)
            return make_api_call(cli, operation_name, kwarg)

        fs = S3Storage(*self.cred)
        try:
            with mock.patch('botocore.client.BaseClient._make_api_call', new=mock_make_api_call):
                fs.get(uuid.uuid1())
        except ClientError:
            pass
        else:
            assert False, 'Should have reraised ClientError'

    def test_invalid_modified(self):
        """An unparsable x-depot-modified metadata yields last_modified=None."""
        fid = str(uuid.uuid1())
        key = self.fs._bucket_driver.new_key(fid)
        key.put(Body=FILE_CONTENT, Metadata={'x-depot-modified': 'INVALID'})

        f = self.fs.get(fid)
        assert f.last_modified is None, f.last_modified

    def test_public_url(self):
        """public_url points at the amazonaws.com endpoint for the file id."""
        fid = str(uuid.uuid1())
        key = self.fs._bucket_driver.new_key(fid)
        key.put(Body=FILE_CONTENT)

        f = self.fs.get(fid)
        assert '.s3.amazonaws.com' in f.public_url, f.public_url
        assert f.public_url.endswith('/%s' % fid), f.public_url

    def test_content_disposition(self):
        """Uploaded files are served inline with an RFC 5987 filename."""
        file_id = self.fs.create(b'content', unicode_text('test.txt'), 'text/plain')
        test_file = self.fs.get(file_id)
        response = requests.get(test_file.public_url)
        assert response.headers['Content-Disposition'] == "inline;filename=\"test.txt\";filename*=utf-8''test.txt"

    def test_storage_class(self):
        """The storage_class option is applied to created objects."""
        fs_ia = S3Storage(*self.cred, bucket=self.bucket, storage_class='STANDARD_IA')
        fid = fs_ia.create(FILE_CONTENT)

        key = self.fs._bucket_driver.get_key(fid)
        assert key.storage_class == 'STANDARD_IA'

    def test_storage_non_ascii_filenames(self):
        """Files with non-ASCII names can be created without errors."""
        filename = u'些公.pdf'
        storage = S3Storage(*self.cred, bucket=self.bucket, storage_class='STANDARD_IA')
        new_file_id = storage.create(
            FILE_CONTENT,
            filename=filename,
            content_type='application/pdf'
        )

        assert new_file_id is not None