s3handler.py
# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import logging
import os
from s3transfer.manager import TransferManager
from awscli.customizations.s3.utils import (
    human_readable_size, MAX_UPLOAD_SIZE, find_bucket_key, relative_path,
    create_warning, NonSeekableStream)
from awscli.customizations.s3.transferconfig import \
    create_transfer_config_from_runtime_config
from awscli.customizations.s3.results import UploadResultSubscriber
from awscli.customizations.s3.results import DownloadResultSubscriber
from awscli.customizations.s3.results import CopyResultSubscriber
from awscli.customizations.s3.results import UploadStreamResultSubscriber
from awscli.customizations.s3.results import DownloadStreamResultSubscriber
from awscli.customizations.s3.results import DeleteResultSubscriber
from awscli.customizations.s3.results import QueuedResult
from awscli.customizations.s3.results import SuccessResult
from awscli.customizations.s3.results import FailureResult
from awscli.customizations.s3.results import DryRunResult
from awscli.customizations.s3.results import ResultRecorder
from awscli.customizations.s3.results import ResultPrinter
from awscli.customizations.s3.results import OnlyShowErrorsResultPrinter
from awscli.customizations.s3.results import NoProgressResultPrinter
from awscli.customizations.s3.results import ResultProcessor
from awscli.customizations.s3.results import CommandResultRecorder
from awscli.customizations.s3.utils import RequestParamsMapper
from awscli.customizations.s3.utils import StdoutBytesWriter
from awscli.customizations.s3.utils import ProvideSizeSubscriber
from awscli.customizations.s3.utils import ProvideUploadContentTypeSubscriber
from awscli.customizations.s3.utils import ProvideCopyContentTypeSubscriber
from awscli.customizations.s3.utils import ProvideLastModifiedTimeSubscriber
from awscli.customizations.s3.utils import DirectoryCreatorSubscriber
from awscli.customizations.s3.utils import DeleteSourceFileSubscriber
from awscli.customizations.s3.utils import DeleteSourceObjectSubscriber
from awscli.customizations.s3.utils import DeleteCopySourceObjectSubscriber
from awscli.compat import get_binary_stdin

LOGGER = logging.getLogger(__name__)


class S3TransferHandlerFactory(object):
    MAX_IN_MEMORY_CHUNKS = 6

    def __init__(self, cli_params, runtime_config):
        """Factory for S3TransferHandlers

        :type cli_params: dict
        :param cli_params: The parameters provided to the CLI command

        :type runtime_config: RuntimeConfig
        :param runtime_config: The runtime config for the CLI command
            being run
        """
        self._cli_params = cli_params
        self._runtime_config = runtime_config

    def __call__(self, client, result_queue):
        """Creates an S3TransferHandler instance

        :type client: botocore.client.Client
        :param client: The client to power the S3TransferHandler

        :type result_queue: queue.Queue
        :param result_queue: The result queue to be used to process results
            for the S3TransferHandler

        :returns: An S3TransferHandler instance
        """
        transfer_config = create_transfer_config_from_runtime_config(
            self._runtime_config)
        transfer_config.max_in_memory_upload_chunks = self.MAX_IN_MEMORY_CHUNKS
        transfer_config.max_in_memory_download_chunks = \
            self.MAX_IN_MEMORY_CHUNKS
        transfer_manager = TransferManager(client, transfer_config)
        LOGGER.debug(
            "Using a multipart threshold of %s and a part size of %s",
            transfer_config.multipart_threshold,
            transfer_config.multipart_chunksize
        )
        result_recorder = ResultRecorder()
        result_processor_handlers = [result_recorder]
        self._add_result_printer(result_recorder, result_processor_handlers)
        result_processor = ResultProcessor(
            result_queue, result_processor_handlers)
        command_result_recorder = CommandResultRecorder(
            result_queue, result_recorder, result_processor)
        return S3TransferHandler(
            transfer_manager, self._cli_params, command_result_recorder)

    def _add_result_printer(self, result_recorder, result_processor_handlers):
        if self._cli_params.get('quiet'):
            return
        elif self._cli_params.get('only_show_errors'):
            result_printer = OnlyShowErrorsResultPrinter(result_recorder)
        elif self._cli_params.get('is_stream'):
            result_printer = OnlyShowErrorsResultPrinter(result_recorder)
        elif not self._cli_params.get('progress'):
            result_printer = NoProgressResultPrinter(result_recorder)
        else:
            result_printer = ResultPrinter(result_recorder)
        result_processor_handlers.append(result_printer)
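

# Illustrative usage sketch (not part of the original module): the s3
# commands construct the factory with the parsed CLI parameters and then
# call it once a client and result queue are available. `client`,
# `runtime_config`, `result_queue`, and `fileinfos` below are hypothetical
# stand-ins for objects the calling command would provide.
#
#   factory = S3TransferHandlerFactory(cli_params, runtime_config)
#   handler = factory(client, result_queue)
#   command_result = handler.call(fileinfos)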


class S3TransferHandler(object):
    def __init__(self, transfer_manager, cli_params, result_command_recorder):
        """Backend for performing S3 transfers

        :type transfer_manager: s3transfer.manager.TransferManager
        :param transfer_manager: Transfer manager to use for transfers

        :type cli_params: dict
        :param cli_params: The parameters passed to the CLI command in the
            form of a dictionary

        :type result_command_recorder: CommandResultRecorder
        :param result_command_recorder: The result command recorder to be
            used to get the final result of the transfer
        """
        self._transfer_manager = transfer_manager
        # TODO: Ideally the s3 transfer handler should not need to know
        # about the result command recorder. It really only needs an interface
        # for adding results to the queue. When all of the commands have
        # converted to use this transfer handler, an effort should be made
        # to replace the passing of a result command recorder with an
        # abstraction to enqueue results.
        self._result_command_recorder = result_command_recorder

        submitter_args = (
            self._transfer_manager, self._result_command_recorder.result_queue,
            cli_params
        )
        self._submitters = [
            UploadStreamRequestSubmitter(*submitter_args),
            DownloadStreamRequestSubmitter(*submitter_args),
            UploadRequestSubmitter(*submitter_args),
            DownloadRequestSubmitter(*submitter_args),
            CopyRequestSubmitter(*submitter_args),
            DeleteRequestSubmitter(*submitter_args),
            LocalDeleteRequestSubmitter(*submitter_args)
        ]

    def call(self, fileinfos):
        """Process iterable of FileInfos for transfer

        :type fileinfos: iterable of FileInfos
        :param fileinfos: Set of FileInfos to submit to underlying transfer
            request submitters to make transfer API calls to S3

        :rtype: CommandResult
        :returns: The result of the command that specifies the number of
            failures and warnings encountered.
        """
        with self._result_command_recorder:
            with self._transfer_manager:
                total_submissions = 0
                for fileinfo in fileinfos:
                    for submitter in self._submitters:
                        if submitter.can_submit(fileinfo):
                            if submitter.submit(fileinfo):
                                total_submissions += 1
                            break
                self._result_command_recorder.notify_total_submissions(
                    total_submissions)
        return self._result_command_recorder.get_command_result()
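

# Note on dispatch order: S3TransferHandler.call() hands each FileInfo to the
# first submitter whose can_submit() returns True, so the ordering of
# self._submitters matters. The stream submitters must come before their
# non-stream counterparts: UploadRequestSubmitter.can_submit() checks only
# operation_name == 'upload', so it would otherwise also claim streamed
# uploads, which need stdin handling instead of a filename.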


class BaseTransferRequestSubmitter(object):
    REQUEST_MAPPER_METHOD = None
    RESULT_SUBSCRIBER_CLASS = None

    def __init__(self, transfer_manager, result_queue, cli_params):
        """Submits transfer requests to the TransferManager

        Given a FileInfo object and provided CLI parameters, it will add the
        necessary extra arguments and subscribers in making a call to the
        TransferManager.

        :type transfer_manager: s3transfer.manager.TransferManager
        :param transfer_manager: The underlying transfer manager

        :type result_queue: queue.Queue
        :param result_queue: The result queue to use

        :type cli_params: dict
        :param cli_params: The associated CLI parameters passed in to the
            command as a dictionary.
        """
        self._transfer_manager = transfer_manager
        self._result_queue = result_queue
        self._cli_params = cli_params

    def submit(self, fileinfo):
        """Submits a transfer request based on the FileInfo provided

        There is no guarantee that the transfer request will be made on
        behalf of the fileinfo, as a fileinfo may be skipped based on
        circumstances in which the transfer is not possible.

        :type fileinfo: awscli.customizations.s3.fileinfo.FileInfo
        :param fileinfo: The FileInfo to be used to submit a transfer
            request to the underlying transfer manager.

        :rtype: s3transfer.futures.TransferFuture
        :returns: A TransferFuture representing the transfer if the
            transfer was submitted. If it was not submitted, nothing
            is returned.
        """
        should_skip = self._warn_and_signal_if_skip(fileinfo)
        if not should_skip:
            return self._do_submit(fileinfo)

    def can_submit(self, fileinfo):
        """Checks whether it can submit a particular FileInfo

        :type fileinfo: awscli.customizations.s3.fileinfo.FileInfo
        :param fileinfo: The FileInfo to check if the transfer request
            submitter can handle.

        :returns: True if it can use the provided FileInfo to make a transfer
            request to the underlying transfer manager. False otherwise.
        """
        raise NotImplementedError('can_submit()')

    def _do_submit(self, fileinfo):
        extra_args = {}
        if self.REQUEST_MAPPER_METHOD:
            self.REQUEST_MAPPER_METHOD(extra_args, self._cli_params)
        subscribers = []
        self._add_additional_subscribers(subscribers, fileinfo)
        # The result subscriber class should always be the last registered
        # subscriber to ensure it is not missing any information that
        # may have been added in a different subscriber such as size.
        if self.RESULT_SUBSCRIBER_CLASS:
            result_kwargs = {'result_queue': self._result_queue}
            if self._cli_params.get('is_move', False):
                result_kwargs['transfer_type'] = 'move'
            subscribers.append(self.RESULT_SUBSCRIBER_CLASS(**result_kwargs))
        if not self._cli_params.get('dryrun'):
            return self._submit_transfer_request(
                fileinfo, extra_args, subscribers)
        else:
            self._submit_dryrun(fileinfo)

    def _submit_dryrun(self, fileinfo):
        transfer_type = fileinfo.operation_name
        if self._cli_params.get('is_move', False):
            transfer_type = 'move'
        src, dest = self._format_src_dest(fileinfo)
        self._result_queue.put(DryRunResult(
            transfer_type=transfer_type, src=src, dest=dest))

    def _add_additional_subscribers(self, subscribers, fileinfo):
        pass

    def _submit_transfer_request(self, fileinfo, extra_args, subscribers):
        raise NotImplementedError('_submit_transfer_request()')

    def _warn_and_signal_if_skip(self, fileinfo):
        for warning_handler in self._get_warning_handlers():
            if warning_handler(fileinfo):
                # On the first warning handler that signals a skip,
                # immediately propagate the signal and do not check the
                # remaining warning handlers, as the file will be skipped
                # no matter what.
                return True

    def _get_warning_handlers(self):
        # Returns a list of warning handlers, which are callables that
        # take in a single parameter representing a FileInfo. It will then
        # add a warning to result_queue if needed and return True if
        # that FileInfo should be skipped.
        return []

    def _should_inject_content_type(self):
        return (
            self._cli_params.get('guess_mime_type') and
            not self._cli_params.get('content_type')
        )

    def _warn_glacier(self, fileinfo):
        if not self._cli_params.get('force_glacier_transfer'):
            if not fileinfo.is_glacier_compatible():
                LOGGER.debug(
                    'Encountered glacier object s3://%s. Not performing '
                    '%s on object.', fileinfo.src, fileinfo.operation_name)
                if not self._cli_params.get('ignore_glacier_warnings'):
                    warning = create_warning(
                        's3://' + fileinfo.src,
                        'Object is of storage class GLACIER. Unable to '
                        'perform %s operations on GLACIER objects. You must '
                        'restore the object to be able to perform the '
                        'operation. See aws s3 %s help for additional '
                        'parameter options to ignore or force these '
                        'transfers.' %
                        (fileinfo.operation_name, fileinfo.operation_name)
                    )
                    self._result_queue.put(warning)
                return True
        return False

    def _warn_parent_reference(self, fileinfo):
        # normpath() will use the OS path separator, so we
        # need to take that into account when checking for a parent prefix.
        parent_prefix = '..' + os.path.sep
        escapes_cwd = os.path.normpath(fileinfo.compare_key).startswith(
            parent_prefix)
        if escapes_cwd:
            warning = create_warning(
                fileinfo.compare_key, "File references a parent directory.")
            self._result_queue.put(warning)
            return True
        return False

    def _format_src_dest(self, fileinfo):
        """Returns formatted versions of a fileinfo's source and destination."""
        raise NotImplementedError('_format_src_dest')

    def _format_local_path(self, path):
        return relative_path(path)

    def _format_s3_path(self, path):
        if path.startswith('s3://'):
            return path
        return 's3://' + path
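

# Illustrative sketch (not part of the original module): a new submitter only
# needs can_submit() and _submit_transfer_request(), plus _format_src_dest()
# if --dryrun should work; warning handling, dry runs, and result subscribers
# are inherited from the base class. The 'restore' operation below is
# hypothetical and for explanation only.
#
#   class RestoreRequestSubmitter(BaseTransferRequestSubmitter):
#       def can_submit(self, fileinfo):
#           return fileinfo.operation_name == 'restore'
#
#       def _submit_transfer_request(self, fileinfo, extra_args, subscribers):
#           bucket, key = find_bucket_key(fileinfo.src)
#           ...  # submit the request via the transfer manager or client
#
#       def _format_src_dest(self, fileinfo):
#           return self._format_s3_path(fileinfo.src), None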


class UploadRequestSubmitter(BaseTransferRequestSubmitter):
    REQUEST_MAPPER_METHOD = RequestParamsMapper.map_put_object_params
    RESULT_SUBSCRIBER_CLASS = UploadResultSubscriber

    def can_submit(self, fileinfo):
        return fileinfo.operation_name == 'upload'

    def _add_additional_subscribers(self, subscribers, fileinfo):
        subscribers.append(ProvideSizeSubscriber(fileinfo.size))
        if self._should_inject_content_type():
            subscribers.append(ProvideUploadContentTypeSubscriber())
        if self._cli_params.get('is_move', False):
            subscribers.append(DeleteSourceFileSubscriber())

    def _submit_transfer_request(self, fileinfo, extra_args, subscribers):
        bucket, key = find_bucket_key(fileinfo.dest)
        filein = self._get_filein(fileinfo)
        return self._transfer_manager.upload(
            fileobj=filein, bucket=bucket, key=key,
            extra_args=extra_args, subscribers=subscribers
        )

    def _get_filein(self, fileinfo):
        return fileinfo.src

    def _get_warning_handlers(self):
        return [self._warn_if_too_large]

    def _warn_if_too_large(self, fileinfo):
        if getattr(fileinfo, 'size') and fileinfo.size > MAX_UPLOAD_SIZE:
            file_path = relative_path(fileinfo.src)
            warning_message = (
                "File %s exceeds s3 upload limit of %s." % (
                    file_path, human_readable_size(MAX_UPLOAD_SIZE)))
            warning = create_warning(
                file_path, warning_message, skip_file=False)
            self._result_queue.put(warning)

    def _format_src_dest(self, fileinfo):
        src = self._format_local_path(fileinfo.src)
        dest = self._format_s3_path(fileinfo.dest)
        return src, dest
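

# Note: _warn_if_too_large() never returns True, and the warning it emits is
# created with skip_file=False, so an oversized file is reported but the
# upload is still attempted rather than skipped. MAX_UPLOAD_SIZE corresponds
# to S3's 5 TB maximum object size.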


class DownloadRequestSubmitter(BaseTransferRequestSubmitter):
    REQUEST_MAPPER_METHOD = RequestParamsMapper.map_get_object_params
    RESULT_SUBSCRIBER_CLASS = DownloadResultSubscriber

    def can_submit(self, fileinfo):
        return fileinfo.operation_name == 'download'

    def _add_additional_subscribers(self, subscribers, fileinfo):
        subscribers.append(ProvideSizeSubscriber(fileinfo.size))
        subscribers.append(DirectoryCreatorSubscriber())
        subscribers.append(ProvideLastModifiedTimeSubscriber(
            fileinfo.last_update, self._result_queue))
        if self._cli_params.get('is_move', False):
            subscribers.append(DeleteSourceObjectSubscriber(
                fileinfo.source_client))

    def _submit_transfer_request(self, fileinfo, extra_args, subscribers):
        bucket, key = find_bucket_key(fileinfo.src)
        fileout = self._get_fileout(fileinfo)
        return self._transfer_manager.download(
            fileobj=fileout, bucket=bucket, key=key,
            extra_args=extra_args, subscribers=subscribers
        )

    def _get_fileout(self, fileinfo):
        return fileinfo.dest

    def _get_warning_handlers(self):
        return [self._warn_glacier, self._warn_parent_reference]

    def _format_src_dest(self, fileinfo):
        src = self._format_s3_path(fileinfo.src)
        dest = self._format_local_path(fileinfo.dest)
        return src, dest


class CopyRequestSubmitter(BaseTransferRequestSubmitter):
    REQUEST_MAPPER_METHOD = RequestParamsMapper.map_copy_object_params
    RESULT_SUBSCRIBER_CLASS = CopyResultSubscriber

    def can_submit(self, fileinfo):
        return fileinfo.operation_name == 'copy'

    def _add_additional_subscribers(self, subscribers, fileinfo):
        subscribers.append(ProvideSizeSubscriber(fileinfo.size))
        if self._should_inject_content_type():
            subscribers.append(ProvideCopyContentTypeSubscriber())
        if self._cli_params.get('is_move', False):
            subscribers.append(DeleteCopySourceObjectSubscriber(
                fileinfo.source_client))

    def _submit_transfer_request(self, fileinfo, extra_args, subscribers):
        bucket, key = find_bucket_key(fileinfo.dest)
        source_bucket, source_key = find_bucket_key(fileinfo.src)
        copy_source = {'Bucket': source_bucket, 'Key': source_key}
        return self._transfer_manager.copy(
            bucket=bucket, key=key, copy_source=copy_source,
            extra_args=extra_args, subscribers=subscribers,
            source_client=fileinfo.source_client
        )

    def _get_warning_handlers(self):
        return [self._warn_glacier]

    def _format_src_dest(self, fileinfo):
        src = self._format_s3_path(fileinfo.src)
        dest = self._format_s3_path(fileinfo.dest)
        return src, dest


class UploadStreamRequestSubmitter(UploadRequestSubmitter):
    RESULT_SUBSCRIBER_CLASS = UploadStreamResultSubscriber

    def can_submit(self, fileinfo):
        return (
            fileinfo.operation_name == 'upload' and
            self._cli_params.get('is_stream')
        )

    def _add_additional_subscribers(self, subscribers, fileinfo):
        expected_size = self._cli_params.get('expected_size', None)
        if expected_size is not None:
            subscribers.append(ProvideSizeSubscriber(int(expected_size)))

    def _get_filein(self, fileinfo):
        binary_stdin = get_binary_stdin()
        return NonSeekableStream(binary_stdin)

    def _format_local_path(self, path):
        return '-'


class DownloadStreamRequestSubmitter(DownloadRequestSubmitter):
    RESULT_SUBSCRIBER_CLASS = DownloadStreamResultSubscriber

    def can_submit(self, fileinfo):
        return (
            fileinfo.operation_name == 'download' and
            self._cli_params.get('is_stream')
        )

    def _add_additional_subscribers(self, subscribers, fileinfo):
        pass

    def _get_fileout(self, fileinfo):
        return StdoutBytesWriter()

    def _format_local_path(self, path):
        return '-'
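

# Illustrative usage (not part of the original module): the stream submitters
# back the CLI's '-' pseudo-path, which is also why _format_local_path()
# renders the local side as '-'. For example:
#
#   $ tar czf - mydir | aws s3 cp - s3://mybucket/archive.tar.gz
#   $ aws s3 cp s3://mybucket/archive.tar.gz - | tar xzf -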


class DeleteRequestSubmitter(BaseTransferRequestSubmitter):
    REQUEST_MAPPER_METHOD = RequestParamsMapper.map_delete_object_params
    RESULT_SUBSCRIBER_CLASS = DeleteResultSubscriber

    def can_submit(self, fileinfo):
        return fileinfo.operation_name == 'delete' and \
            fileinfo.src_type == 's3'

    def _submit_transfer_request(self, fileinfo, extra_args, subscribers):
        bucket, key = find_bucket_key(fileinfo.src)
        return self._transfer_manager.delete(
            bucket=bucket, key=key, extra_args=extra_args,
            subscribers=subscribers)

    def _format_src_dest(self, fileinfo):
        return self._format_s3_path(fileinfo.src), None


class LocalDeleteRequestSubmitter(BaseTransferRequestSubmitter):
    REQUEST_MAPPER_METHOD = None
    RESULT_SUBSCRIBER_CLASS = None

    def can_submit(self, fileinfo):
        return fileinfo.operation_name == 'delete' and \
            fileinfo.src_type == 'local'

    def _submit_transfer_request(self, fileinfo, extra_args, subscribers):
        # This is quirky, but instead of relying on a built-in method of
        # s3transfer, the logic lives directly in the submitter. The reason
        # an explicit local-file delete does not live in s3transfer is that
        # it is outside of s3transfer's scope; s3transfer should only have
        # interfaces for interacting with S3, so the burden of this
        # functionality falls on the CLI. The main downside of doing this is
        # that the delete and the result creation happen in the main thread,
        # as opposed to a separate thread in s3transfer. However, this is not
        # too big of a downside because deleting a local file only happens
        # for sync --delete downloads and is very fast compared to all of
        # the other types of transfers.
        src, dest = self._format_src_dest(fileinfo)
        result_kwargs = {
            'transfer_type': 'delete',
            'src': src,
            'dest': dest
        }
        try:
            self._result_queue.put(QueuedResult(
                total_transfer_size=0, **result_kwargs))
            os.remove(fileinfo.src)
            self._result_queue.put(SuccessResult(**result_kwargs))
        except Exception as e:
            self._result_queue.put(
                FailureResult(exception=e, **result_kwargs))
        finally:
            # Return True to indicate that the transfer was submitted
            return True

    def _format_src_dest(self, fileinfo):
        return self._format_local_path(fileinfo.src), None