[Python-modules-commits] [awscli] 01/04: New upstream version 1.11.7
Takaki Taniguchi
takaki at moszumanska.debian.org
Thu Oct 20 07:30:23 UTC 2016
This is an automated email from the git hooks/post-receive script.
takaki pushed a commit to branch master
in repository awscli.
commit 5e6f11812af7dc72a2d7ae2f80989c9ec4fa3477
Author: TANIGUCHI Takaki <takaki at asis.media-as.org>
Date: Thu Oct 20 16:23:58 2016 +0900
New upstream version 1.11.7
---
PKG-INFO | 2 +-
awscli.egg-info/PKG-INFO | 2 +-
awscli.egg-info/requires.txt | 2 +-
awscli/__init__.py | 2 +-
awscli/customizations/paginate.py | 21 ++++---
awscli/customizations/s3/results.py | 24 +++++++-
awscli/customizations/s3/s3handler.py | 102 +++++++++++++++++++++++++++++++-
awscli/customizations/s3/subcommands.py | 4 +-
awscli/customizations/s3/utils.py | 1 -
setup.cfg | 2 +-
setup.py | 2 +-
11 files changed, 139 insertions(+), 25 deletions(-)
diff --git a/PKG-INFO b/PKG-INFO
index 9f894e3..aa414d7 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: awscli
-Version: 1.11.2
+Version: 1.11.7
Summary: Universal Command Line Environment for AWS.
Home-page: http://aws.amazon.com/cli/
Author: Amazon Web Services
diff --git a/awscli.egg-info/PKG-INFO b/awscli.egg-info/PKG-INFO
index 9f894e3..aa414d7 100644
--- a/awscli.egg-info/PKG-INFO
+++ b/awscli.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: awscli
-Version: 1.11.2
+Version: 1.11.7
Summary: Universal Command Line Environment for AWS.
Home-page: http://aws.amazon.com/cli/
Author: Amazon Web Services
diff --git a/awscli.egg-info/requires.txt b/awscli.egg-info/requires.txt
index 88e8639..56ca809 100644
--- a/awscli.egg-info/requires.txt
+++ b/awscli.egg-info/requires.txt
@@ -1,4 +1,4 @@
-botocore==1.4.60
+botocore==1.4.64
colorama>=0.2.5,<=0.3.7
docutils>=0.10
rsa>=3.1.2,<=3.5.0
diff --git a/awscli/__init__.py b/awscli/__init__.py
index 741dfcc..7e21cbe 100644
--- a/awscli/__init__.py
+++ b/awscli/__init__.py
@@ -17,7 +17,7 @@ A Universal Command Line Environment for Amazon Web Services.
"""
import os
-__version__ = '1.11.2'
+__version__ = '1.11.7'
#
# Get our data path to be added to botocore's search path
diff --git a/awscli/customizations/paginate.py b/awscli/customizations/paginate.py
index 4e65081..7eab614 100644
--- a/awscli/customizations/paginate.py
+++ b/awscli/customizations/paginate.py
@@ -174,19 +174,18 @@ def check_should_enable_pagination(input_tokens, shadowed_args, argument_table,
"Automatically setting --no-paginate.")
parsed_globals.paginate = False
- # Because pagination is now disabled, there's a chance that
- # we were shadowing arguments. For example, we inject a
- # --max-items argument in unify_paging_params(). If the
- # operation also provides its own MaxItems (which we
- # expose as --max-items) then our custom pagination arg
- # was shadowing the customer's arg. When we turn pagination
- # off we need to put back the original argument which is
- # what we're doing here.
- for key, value in shadowed_args.items():
- argument_table[key] = value
-
if not parsed_globals.paginate:
ensure_paging_params_not_set(parsed_args, shadowed_args)
+ # Because pagination is now disabled, there's a chance that
+ # we were shadowing arguments. For example, we inject a
+ # --max-items argument in unify_paging_params(). If the
+ # operation also provides its own MaxItems (which we
+ # expose as --max-items) then our custom pagination arg
+ # was shadowing the customer's arg. When we turn pagination
+ # off we need to put back the original argument which is
+ # what we're doing here.
+ for key, value in shadowed_args.items():
+ argument_table[key] = value
def ensure_paging_params_not_set(parsed_args, shadowed_args):
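The hunk above moves the shadowed-argument restoration inside the branch that only runs when pagination is disabled. A minimal, self-contained sketch of that restoration pattern, assuming plain dicts and strings in place of awscli's real argument-table objects (the names mirror the diff, the values are made up):

    # Stand-ins for the argument table and the arguments shadowed by
    # unify_paging_params(); real awscli uses argument objects here.
    argument_table = {'max-items': 'custom pagination --max-items'}
    shadowed_args = {'max-items': 'operation-provided MaxItems'}
    paginate = False  # e.g. auto-disabled by check_should_enable_pagination()

    if not paginate:
        # Pagination is off, so put back the operation's own arguments
        # that the injected paging arguments had been shadowing.
        for key, value in shadowed_args.items():
            argument_table[key] = value

    assert argument_table['max-items'] == 'operation-provided MaxItems'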
diff --git a/awscli/customizations/s3/results.py b/awscli/customizations/s3/results.py
index 38b6041..f8f62a6 100644
--- a/awscli/customizations/s3/results.py
+++ b/awscli/customizations/s3/results.py
@@ -57,6 +57,8 @@ SuccessResult = _create_new_result_cls('SuccessResult')
FailureResult = _create_new_result_cls('FailureResult', ['exception'])
+DryRunResult = _create_new_result_cls('DryRunResult')
+
ErrorResult = namedtuple('ErrorResult', ['exception'])
CtrlCResult = _create_new_result_cls('CtrlCResult', base_cls=ErrorResult)
@@ -328,6 +330,7 @@ class ResultPrinter(BaseResultHandler):
SUCCESS_FORMAT = (
'{transfer_type}: {transfer_location}'
)
+ DRY_RUN_FORMAT = '(dryrun) ' + SUCCESS_FORMAT
FAILURE_FORMAT = (
'{transfer_type} failed: {transfer_location} {exception}'
)
@@ -373,6 +376,9 @@ class ResultPrinter(BaseResultHandler):
WarningResult: self._print_warning,
ErrorResult: self._print_error,
CtrlCResult: self._print_ctrl_c,
+ DryRunResult: self._print_dry_run,
+ FinalTotalSubmissionsResult:
+ self._clear_progress_if_no_more_expected_transfers,
}
def __call__(self, result):
@@ -384,6 +390,14 @@ class ResultPrinter(BaseResultHandler):
# If the result does not have a handler, then do nothing with it.
pass
+ def _print_dry_run(self, result, **kwargs):
+ statement = self.DRY_RUN_FORMAT.format(
+ transfer_type=result.transfer_type,
+ transfer_location=self._get_transfer_location(result)
+ )
+ statement = self._adjust_statement_padding(statement)
+ self._print_to_out_file(statement)
+
def _print_success(self, result, **kwargs):
success_statement = self.SUCCESS_FORMAT.format(
transfer_type=result.transfer_type,
@@ -434,7 +448,7 @@ class ResultPrinter(BaseResultHandler):
self._add_progress_if_needed()
def _add_progress_if_needed(self):
- if not self._has_remaining_progress():
+ if self._has_remaining_progress():
self._print_progress()
def _print_progress(self, **kwargs):
@@ -492,9 +506,11 @@ class ResultPrinter(BaseResultHandler):
return print_statement + ending_char
def _has_remaining_progress(self):
+ if not self._result_recorder.expected_totals_are_final():
+ return True
actual = self._result_recorder.files_transferred
expected = self._result_recorder.expected_files_transferred
- return actual == expected
+ return actual != expected
def _print_to_out_file(self, statement):
uni_print(statement, self._out_file)
@@ -502,6 +518,10 @@ class ResultPrinter(BaseResultHandler):
def _print_to_error_file(self, statement):
uni_print(statement, self._error_file)
+ def _clear_progress_if_no_more_expected_transfers(self, **kwargs):
+ if self._progress_length and not self._has_remaining_progress():
+ uni_print(self._adjust_statement_padding(''), self._out_file)
+
class OnlyShowErrorsResultPrinter(ResultPrinter):
"""A result printer that only prints out errors"""
diff --git a/awscli/customizations/s3/s3handler.py b/awscli/customizations/s3/s3handler.py
index e9e0c03..e1513f6 100644
--- a/awscli/customizations/s3/s3handler.py
+++ b/awscli/customizations/s3/s3handler.py
@@ -30,7 +30,11 @@ from awscli.customizations.s3.results import CopyResultSubscriber
from awscli.customizations.s3.results import UploadStreamResultSubscriber
from awscli.customizations.s3.results import DownloadStreamResultSubscriber
from awscli.customizations.s3.results import DeleteResultSubscriber
+from awscli.customizations.s3.results import QueuedResult
+from awscli.customizations.s3.results import SuccessResult
+from awscli.customizations.s3.results import FailureResult
from awscli.customizations.s3.results import CommandResult
+from awscli.customizations.s3.results import DryRunResult
from awscli.customizations.s3.results import ResultRecorder
from awscli.customizations.s3.results import ResultPrinter
from awscli.customizations.s3.results import OnlyShowErrorsResultPrinter
@@ -492,6 +496,7 @@ class S3TransferHandler(object):
DownloadRequestSubmitter(*submitter_args),
CopyRequestSubmitter(*submitter_args),
DeleteRequestSubmitter(*submitter_args),
+ LocalDeleteRequestSubmitter(*submitter_args)
]
def call(self, fileinfos):
@@ -585,8 +590,19 @@ class BaseTransferRequestSubmitter(object):
# The result subscriber class should always be the last registered
# subscriber to ensure it is not missing any information that
# may have been added in a different subscriber such as size.
- subscribers.append(self.RESULT_SUBSCRIBER_CLASS(self._result_queue))
- return self._submit_transfer_request(fileinfo, extra_args, subscribers)
+ if self.RESULT_SUBSCRIBER_CLASS:
+ subscribers.append(
+ self.RESULT_SUBSCRIBER_CLASS(self._result_queue))
+ if not self._cli_params.get('dryrun'):
+ return self._submit_transfer_request(
+ fileinfo, extra_args, subscribers)
+ else:
+ self._submit_dryrun(fileinfo)
+
+ def _submit_dryrun(self, fileinfo):
+ src, dest = self._format_src_dest(fileinfo)
+ self._result_queue.put(DryRunResult(
+ transfer_type=fileinfo.operation_name, src=src, dest=dest))
def _add_additional_subscribers(self, subscribers, fileinfo):
pass
@@ -635,6 +651,18 @@ class BaseTransferRequestSubmitter(object):
return True
return False
+ def _format_src_dest(self, fileinfo):
+ """Returns formatted versions of a fileinfos source and destination."""
+ raise NotImplementedError('_format_src_dest')
+
+ def _format_local_path(self, path):
+ return relative_path(path)
+
+ def _format_s3_path(self, path):
+ if path.startswith('s3://'):
+ return path
+ return 's3://' + path
+
class UploadRequestSubmitter(BaseTransferRequestSubmitter):
REQUEST_MAPPER_METHOD = RequestParamsMapper.map_put_object_params
@@ -672,6 +700,11 @@ class UploadRequestSubmitter(BaseTransferRequestSubmitter):
file_path, warning_message, skip_file=False)
self._result_queue.put(warning)
+ def _format_src_dest(self, fileinfo):
+ src = self._format_local_path(fileinfo.src)
+ dest = self._format_s3_path(fileinfo.dest)
+ return src, dest
+
class DownloadRequestSubmitter(BaseTransferRequestSubmitter):
REQUEST_MAPPER_METHOD = RequestParamsMapper.map_get_object_params
@@ -700,6 +733,11 @@ class DownloadRequestSubmitter(BaseTransferRequestSubmitter):
def _get_warning_handlers(self):
return [self._warn_glacier]
+ def _format_src_dest(self, fileinfo):
+ src = self._format_s3_path(fileinfo.src)
+ dest = self._format_local_path(fileinfo.dest)
+ return src, dest
+
class CopyRequestSubmitter(BaseTransferRequestSubmitter):
REQUEST_MAPPER_METHOD = RequestParamsMapper.map_copy_object_params
@@ -726,6 +764,11 @@ class CopyRequestSubmitter(BaseTransferRequestSubmitter):
def _get_warning_handlers(self):
return [self._warn_glacier]
+ def _format_src_dest(self, fileinfo):
+ src = self._format_s3_path(fileinfo.src)
+ dest = self._format_s3_path(fileinfo.dest)
+ return src, dest
+
class UploadStreamRequestSubmitter(UploadRequestSubmitter):
RESULT_SUBSCRIBER_CLASS = UploadStreamResultSubscriber
@@ -744,6 +787,9 @@ class UploadStreamRequestSubmitter(UploadRequestSubmitter):
def _get_filein(self, fileinfo):
return NonSeekableStream(binary_stdin)
+ def _format_local_path(self, path):
+ return '-'
+
class DownloadStreamRequestSubmitter(DownloadRequestSubmitter):
RESULT_SUBSCRIBER_CLASS = DownloadStreamResultSubscriber
@@ -760,16 +806,66 @@ class DownloadStreamRequestSubmitter(DownloadRequestSubmitter):
def _get_fileout(self, fileinfo):
return StdoutBytesWriter()
+ def _format_local_path(self, path):
+ return '-'
+
class DeleteRequestSubmitter(BaseTransferRequestSubmitter):
REQUEST_MAPPER_METHOD = None
RESULT_SUBSCRIBER_CLASS = DeleteResultSubscriber
def can_submit(self, fileinfo):
- return fileinfo.operation_name == 'delete'
+ return fileinfo.operation_name == 'delete' and \
+ fileinfo.src_type == 's3'
def _submit_transfer_request(self, fileinfo, extra_args, subscribers):
bucket, key = find_bucket_key(fileinfo.src)
return self._transfer_manager.delete(
bucket=bucket, key=key, extra_args=extra_args,
subscribers=subscribers)
+
+ def _format_src_dest(self, fileinfo):
+ return self._format_s3_path(fileinfo.src), None
+
+
+class LocalDeleteRequestSubmitter(BaseTransferRequestSubmitter):
+ REQUEST_MAPPER_METHOD = None
+ RESULT_SUBSCRIBER_CLASS = None
+
+ def can_submit(self, fileinfo):
+ return fileinfo.operation_name == 'delete' and \
+ fileinfo.src_type == 'local'
+
+ def _submit_transfer_request(self, fileinfo, extra_args, subscribers):
+ # This is quirky, but instead of relying on a built-in
+ # method of s3transfer, the logic lives directly in the submitter.
+ # The reason an explicit local file delete does not
+ # live in s3transfer is that it is outside the scope of s3transfer;
+ # it should only have interfaces for interacting with S3. Therefore,
+ # the burden of this functionality should live in the CLI.
+
+ # The main downside of doing this is that the delete and the result
+ # creation happen in the main thread as opposed to a separate thread
+ # in s3transfer. However, this is not too big of a downside because
+ # deleting a local file only happens for sync --delete downloads and
+ # is very fast compared to all of the other types of transfers.
+ src, dest = self._format_src_dest(fileinfo)
+ result_kwargs = {
+ 'transfer_type': 'delete',
+ 'src': src,
+ 'dest': dest
+ }
+ try:
+ self._result_queue.put(QueuedResult(
+ total_transfer_size=0, **result_kwargs))
+ os.remove(fileinfo.src)
+ self._result_queue.put(SuccessResult(**result_kwargs))
+ except Exception as e:
+ self._result_queue.put(
+ FailureResult(exception=e, **result_kwargs))
+ finally:
+ # Return True to indicate that the transfer was submitted
+ return True
+
+ def _format_src_dest(self, fileinfo):
+ return self._format_local_path(fileinfo.src), None
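The new LocalDeleteRequestSubmitter performs the delete in the main thread and reports it through the result queue. A minimal sketch of that queued -> success/failure flow, assuming plain namedtuples and a standard queue as stand-ins for awscli's result classes:

    import os
    from collections import namedtuple
    from queue import Queue

    # Hypothetical stand-ins for awscli's QueuedResult/SuccessResult/FailureResult.
    QueuedResult = namedtuple('QueuedResult',
                              ['transfer_type', 'src', 'dest', 'total_transfer_size'])
    SuccessResult = namedtuple('SuccessResult', ['transfer_type', 'src', 'dest'])
    FailureResult = namedtuple('FailureResult',
                               ['transfer_type', 'src', 'dest', 'exception'])

    def delete_local_file(path, result_queue):
        kwargs = {'transfer_type': 'delete', 'src': path, 'dest': None}
        try:
            result_queue.put(QueuedResult(total_transfer_size=0, **kwargs))
            os.remove(path)
            result_queue.put(SuccessResult(**kwargs))
        except Exception as e:
            result_queue.put(FailureResult(exception=e, **kwargs))
        # Mirror the submitter's contract: the request counts as submitted.
        return True

    results = Queue()
    delete_local_file('/tmp/stale-local-copy.txt', results)  # hypothetical path

The delete stays in the calling thread, which matches the comment above: local deletes only happen for sync --delete downloads, so skipping s3transfer's worker threads is an acceptable trade-off.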
diff --git a/awscli/customizations/s3/subcommands.py b/awscli/customizations/s3/subcommands.py
index 10825f3..ad4775d 100644
--- a/awscli/customizations/s3/subcommands.py
+++ b/awscli/customizations/s3/subcommands.py
@@ -1013,7 +1013,7 @@ class CommandArchitecture(object):
result_queue=result_queue)
s3_transfer_handler = s3handler
- if self.cmd in ['cp', 'rm'] and not self.parameters.get('dryrun'):
+ if self.cmd in ['cp', 'rm', 'sync']:
s3_transfer_handler = S3TransferHandlerFactory(
self.parameters, self._runtime_config)(
self._client, result_queue)
@@ -1029,7 +1029,7 @@ class CommandArchitecture(object):
create_filter(self.parameters)],
'comparator': [Comparator(**sync_strategies)],
'file_info_builder': [file_info_builder],
- 's3_handler': [s3handler]}
+ 's3_handler': [s3_transfer_handler]}
elif self.cmd == 'cp' and self.parameters['is_stream']:
command_dict = {'setup': [stream_file_info],
's3_handler': [s3_transfer_handler]}
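The change above widens which commands are routed to the new S3TransferHandler: sync now uses it, and cp/rm no longer fall back to the legacy handler under --dryrun (dry runs are emitted as DryRunResult by the submitters instead). A simplified sketch of that selection, with hypothetical handler names:

    def choose_s3_handler(cmd, legacy_handler, transfer_handler):
        # cp, rm and sync all use the new transfer handler, regardless of
        # --dryrun; other subcommands keep the legacy handler.
        if cmd in ['cp', 'rm', 'sync']:
            return transfer_handler
        return legacy_handler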
diff --git a/awscli/customizations/s3/utils.py b/awscli/customizations/s3/utils.py
index 3c0c05e..ef250b5 100644
--- a/awscli/customizations/s3/utils.py
+++ b/awscli/customizations/s3/utils.py
@@ -55,7 +55,6 @@ SIZE_SUFFIX = {
}
-
def human_readable_size(value):
"""Convert an size in bytes into a human readable format.
diff --git a/setup.cfg b/setup.cfg
index 3f2c4aa..1138f18 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -3,7 +3,7 @@ universal = 1
[metadata]
requires-dist =
- botocore==1.4.60
+ botocore==1.4.64
colorama>=0.2.5,<=0.3.7
docutils>=0.10
rsa>=3.1.2,<=3.5.0
diff --git a/setup.py b/setup.py
index bb3b798..c1b3cd2 100644
--- a/setup.py
+++ b/setup.py
@@ -6,7 +6,7 @@ from setuptools import setup, find_packages
import awscli
-requires = ['botocore==1.4.60',
+requires = ['botocore==1.4.64',
'colorama>=0.2.5,<=0.3.7',
'docutils>=0.10',
'rsa>=3.1.2,<=3.5.0',
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/python-modules/packages/awscli.git