-
Notifications
You must be signed in to change notification settings - Fork 6.5k
/
Copy pathaws_request.py
executable file
·126 lines (103 loc) · 3.83 KB
/
aws_request.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
#!/usr/bin/env python
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Command-line sample that creates a one-time transfer from Amazon S3 to
Google Cloud Storage.
"""
import argparse

# [START storagetransfer_transfer_from_aws]
from datetime import datetime, timezone

from google.cloud import storage_transfer
def create_one_time_aws_transfer(
    project_id: str,
    description: str,
    source_bucket: str,
    aws_access_key_id: str,
    aws_secret_access_key: str,
    sink_bucket: str,
):
    """Creates a one-time transfer job from Amazon S3 to Google Cloud
    Storage.

    Args:
        project_id: The ID of the Google Cloud Platform Project that owns
            the job, e.g. 'my-project-id'.
        description: A useful description for your transfer job,
            e.g. 'My transfer job'.
        source_bucket: AWS S3 source bucket name,
            e.g. 'my-s3-source-bucket'.
        aws_access_key_id: AWS Access Key ID, e.g. 'AKIA...'.
        aws_secret_access_key: AWS Secret Access Key,
            e.g. 'HEAoMK2.../...ku8'.
        sink_bucket: Google Cloud Storage destination bucket name,
            e.g. 'my-gcs-destination-bucket'.
    """

    client = storage_transfer.StorageTransferServiceClient()

    # datetime.utcnow() is deprecated since Python 3.12 and returns a naive
    # datetime; use an explicitly UTC-aware "now" instead. Only the calendar
    # fields (day/month/year) are read below, which are identical either way.
    now = datetime.now(timezone.utc)

    # Setting the start date and the end date as
    # the same time creates a one-time transfer
    one_time_schedule = {"day": now.day, "month": now.month, "year": now.year}

    transfer_job_request = storage_transfer.CreateTransferJobRequest(
        {
            "transfer_job": {
                "project_id": project_id,
                "description": description,
                "status": storage_transfer.TransferJob.Status.ENABLED,
                "schedule": {
                    "schedule_start_date": one_time_schedule,
                    "schedule_end_date": one_time_schedule,
                },
                "transfer_spec": {
                    "aws_s3_data_source": {
                        "bucket_name": source_bucket,
                        "aws_access_key": {
                            "access_key_id": aws_access_key_id,
                            "secret_access_key": aws_secret_access_key,
                        },
                    },
                    "gcs_data_sink": {
                        "bucket_name": sink_bucket,
                    },
                },
            }
        }
    )

    result = client.create_transfer_job(transfer_job_request)
    print(f"Created transferJob: {result.name}")
# [END storagetransfer_transfer_from_aws]
if __name__ == "__main__":
    # Build the command-line interface; the module docstring doubles as the
    # program description shown in --help output.
    cli = argparse.ArgumentParser(description=__doc__)
    cli.add_argument(
        "--project-id",
        required=True,
        help="The ID of the Google Cloud Platform Project that owns the job",
    )
    cli.add_argument(
        "--description",
        default="My transfer job",
        help="A useful description for your transfer job",
    )
    cli.add_argument(
        "--source-bucket",
        required=True,
        help="AWS S3 source bucket name",
    )
    cli.add_argument(
        "--aws-access-key-id",
        required=True,
        help="AWS access key ID",
    )
    cli.add_argument(
        "--aws-secret-access-key",
        required=True,
        help="AWS secret access key",
    )
    cli.add_argument(
        "--sink-bucket",
        required=True,
        help="Google Cloud Storage destination bucket name",
    )

    parsed = cli.parse_args()

    # argparse maps each "--flag-name" to an attribute "flag_name", so the
    # attributes line up one-to-one with the function's parameters.
    create_one_time_aws_transfer(
        project_id=parsed.project_id,
        description=parsed.description,
        source_bucket=parsed.source_bucket,
        aws_access_key_id=parsed.aws_access_key_id,
        aws_secret_access_key=parsed.aws_secret_access_key,
        sink_bucket=parsed.sink_bucket,
    )