[Python-modules-commits] [python-s3transfer] 01/02: Import Upstream version 0.1.8

Takaki Taniguchi takaki at moszumanska.debian.org
Fri Oct 14 10:59:59 UTC 2016


This is an automated email from the git hooks/post-receive script.

takaki pushed a commit to branch master
in repository python-s3transfer.

commit e0885e3212bb47f42f4549ade9a356cafb7b9d07
Author: TANIGUCHI Takaki <takaki at asis.media-as.org>
Date:   Fri Oct 14 19:51:54 2016 +0900

    Import Upstream version 0.1.8
---
 LICENSE.txt                              | 203 +++++++
 MANIFEST.in                              |   4 +
 PKG-INFO                                 |  31 ++
 README.rst                               |  10 +
 requirements-test.txt                    |   8 +
 s3transfer.egg-info/PKG-INFO             |  31 ++
 s3transfer.egg-info/SOURCES.txt          |  49 ++
 s3transfer.egg-info/dependency_links.txt |   1 +
 s3transfer.egg-info/requires.txt         |   5 +
 s3transfer.egg-info/top_level.txt        |   1 +
 s3transfer/__init__.py                   | 739 ++++++++++++++++++++++++
 s3transfer/compat.py                     |  91 +++
 s3transfer/copies.py                     | 316 +++++++++++
 s3transfer/delete.py                     |  72 +++
 s3transfer/download.py                   | 707 +++++++++++++++++++++++
 s3transfer/exceptions.py                 |  32 ++
 s3transfer/futures.py                    | 462 +++++++++++++++
 s3transfer/manager.py                    | 624 +++++++++++++++++++++
 s3transfer/subscribers.py                |  95 ++++
 s3transfer/tasks.py                      | 355 ++++++++++++
 s3transfer/upload.py                     | 711 ++++++++++++++++++++++++
 s3transfer/utils.py                      | 684 +++++++++++++++++++++++
 setup.cfg                                |  13 +
 setup.py                                 |  58 ++
 tests/__init__.py                        | 483 ++++++++++++++++
 tests/functional/__init__.py             |  12 +
 tests/functional/test_copy.py            | 511 +++++++++++++++++
 tests/functional/test_delete.py          |  65 +++
 tests/functional/test_download.py        | 450 +++++++++++++++
 tests/functional/test_manager.py         | 127 +++++
 tests/functional/test_upload.py          | 468 ++++++++++++++++
 tests/functional/test_utils.py           |  42 ++
 tests/integration/__init__.py            |  75 +++
 tests/integration/test_copy.py           |  83 +++
 tests/integration/test_delete.py         |  31 ++
 tests/integration/test_download.py       | 236 ++++++++
 tests/integration/test_s3transfer.py     | 352 ++++++++++++
 tests/integration/test_upload.py         | 163 ++++++
 tests/unit/__init__.py                   |  12 +
 tests/unit/test_compat.py                |  76 +++
 tests/unit/test_copies.py                | 168 ++++++
 tests/unit/test_delete.py                |  61 ++
 tests/unit/test_download.py              | 925 +++++++++++++++++++++++++++++++
 tests/unit/test_futures.py               | 528 ++++++++++++++++++
 tests/unit/test_manager.py               | 137 +++++
 tests/unit/test_s3transfer.py            | 731 ++++++++++++++++++++++++
 tests/unit/test_subscribers.py           |  88 +++
 tests/unit/test_tasks.py                 | 729 ++++++++++++++++++++++++
 tests/unit/test_upload.py                | 654 ++++++++++++++++++++++
 tests/unit/test_utils.py                 | 825 +++++++++++++++++++++++++++
 50 files changed, 13334 insertions(+)

diff --git a/LICENSE.txt b/LICENSE.txt
new file mode 100644
index 0000000..6b0b127
--- /dev/null
+++ b/LICENSE.txt
@@ -0,0 +1,203 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..1f78cba
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,4 @@
+include README.rst
+include LICENSE.txt
+include requirements-test.txt
+graft tests
diff --git a/PKG-INFO b/PKG-INFO
new file mode 100644
index 0000000..79cedc3
--- /dev/null
+++ b/PKG-INFO
@@ -0,0 +1,31 @@
+Metadata-Version: 1.1
+Name: s3transfer
+Version: 0.1.8
+Summary: An Amazon S3 Transfer Manager
+Home-page: https://github.com/boto/s3transfer
+Author: Amazon Web Services
+Author-email: kyknapp1 at gmail.com
+License: Apache License 2.0
+Description: =====================================================
+        s3transfer - An Amazon S3 Transfer Manager for Python
+        =====================================================
+        
+        .. warning::
+        
+          This project is currently a work in progress. Please do not rely on
+          this functionality in production as the interfaces may change over time.
+        
+        S3transfer is a Python library for managing Amazon S3 transfers.
+        
+Platform: UNKNOWN
+Classifier: Development Status :: 1 - Planning
+Classifier: Intended Audience :: Developers
+Classifier: Natural Language :: English
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
diff --git a/README.rst b/README.rst
new file mode 100644
index 0000000..ad476bd
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,10 @@
+=====================================================
+s3transfer - An Amazon S3 Transfer Manager for Python
+=====================================================
+
+.. warning::
+
+  This project is currently a work in progress. Please do not rely on
+  this functionality in production as the interfaces may change over time.
+
+S3transfer is a Python library for managing Amazon S3 transfers.
diff --git a/requirements-test.txt b/requirements-test.txt
new file mode 100644
index 0000000..a837301
--- /dev/null
+++ b/requirements-test.txt
@@ -0,0 +1,8 @@
+-e git://github.com/boto/botocore.git@develop#egg=botocore
+nose==1.3.3
+mock==1.3.0
+coverage==4.0.1
+wheel==0.24.0
+# Note you need at least pip --version of 6.0 or
+# higher to be able to pick up on these version specifiers.
+unittest2==0.5.1; python_version == '2.6'
diff --git a/s3transfer.egg-info/PKG-INFO b/s3transfer.egg-info/PKG-INFO
new file mode 100644
index 0000000..79cedc3
--- /dev/null
+++ b/s3transfer.egg-info/PKG-INFO
@@ -0,0 +1,31 @@
+Metadata-Version: 1.1
+Name: s3transfer
+Version: 0.1.8
+Summary: An Amazon S3 Transfer Manager
+Home-page: https://github.com/boto/s3transfer
+Author: Amazon Web Services
+Author-email: kyknapp1 at gmail.com
+License: Apache License 2.0
+Description: =====================================================
+        s3transfer - An Amazon S3 Transfer Manager for Python
+        =====================================================
+        
+        .. warning::
+        
+          This project is currently a work in progress. Please do not rely on
+          this functionality in production as the interfaces may change over time.
+        
+        S3transfer is a Python library for managing Amazon S3 transfers.
+        
+Platform: UNKNOWN
+Classifier: Development Status :: 1 - Planning
+Classifier: Intended Audience :: Developers
+Classifier: Natural Language :: English
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
diff --git a/s3transfer.egg-info/SOURCES.txt b/s3transfer.egg-info/SOURCES.txt
new file mode 100644
index 0000000..8651c39
--- /dev/null
+++ b/s3transfer.egg-info/SOURCES.txt
@@ -0,0 +1,49 @@
+LICENSE.txt
+MANIFEST.in
+README.rst
+requirements-test.txt
+setup.cfg
+setup.py
+s3transfer/__init__.py
+s3transfer/compat.py
+s3transfer/copies.py
+s3transfer/delete.py
+s3transfer/download.py
+s3transfer/exceptions.py
+s3transfer/futures.py
+s3transfer/manager.py
+s3transfer/subscribers.py
+s3transfer/tasks.py
+s3transfer/upload.py
+s3transfer/utils.py
+s3transfer.egg-info/PKG-INFO
+s3transfer.egg-info/SOURCES.txt
+s3transfer.egg-info/dependency_links.txt
+s3transfer.egg-info/requires.txt
+s3transfer.egg-info/top_level.txt
+tests/__init__.py
+tests/functional/__init__.py
+tests/functional/test_copy.py
+tests/functional/test_delete.py
+tests/functional/test_download.py
+tests/functional/test_manager.py
+tests/functional/test_upload.py
+tests/functional/test_utils.py
+tests/integration/__init__.py
+tests/integration/test_copy.py
+tests/integration/test_delete.py
+tests/integration/test_download.py
+tests/integration/test_s3transfer.py
+tests/integration/test_upload.py
+tests/unit/__init__.py
+tests/unit/test_compat.py
+tests/unit/test_copies.py
+tests/unit/test_delete.py
+tests/unit/test_download.py
+tests/unit/test_futures.py
+tests/unit/test_manager.py
+tests/unit/test_s3transfer.py
+tests/unit/test_subscribers.py
+tests/unit/test_tasks.py
+tests/unit/test_upload.py
+tests/unit/test_utils.py
\ No newline at end of file
diff --git a/s3transfer.egg-info/dependency_links.txt b/s3transfer.egg-info/dependency_links.txt
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/s3transfer.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/s3transfer.egg-info/requires.txt b/s3transfer.egg-info/requires.txt
new file mode 100644
index 0000000..f81d58e
--- /dev/null
+++ b/s3transfer.egg-info/requires.txt
@@ -0,0 +1,5 @@
+botocore>=1.3.0,<2.0.0
+futures>=2.2.0,<4.0.0
+
+[:python_version=="2.6" or python_version=="2.7"]
+futures>=2.2.0,<4.0.0
\ No newline at end of file
diff --git a/s3transfer.egg-info/top_level.txt b/s3transfer.egg-info/top_level.txt
new file mode 100644
index 0000000..572c6a9
--- /dev/null
+++ b/s3transfer.egg-info/top_level.txt
@@ -0,0 +1 @@
+s3transfer
diff --git a/s3transfer/__init__.py b/s3transfer/__init__.py
new file mode 100644
index 0000000..dcd3a0a
--- /dev/null
+++ b/s3transfer/__init__.py
@@ -0,0 +1,739 @@
+# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+"""Abstractions over S3's upload/download operations.
+
+This module provides high level abstractions for efficient
+uploads/downloads.  It handles several things for the user:
+
+* Automatically switching to multipart transfers when
+  a file is over a specific size threshold
+* Uploading/downloading a file in parallel
+* Throttling based on max bandwidth
+* Progress callbacks to monitor transfers
+* Retries.  While botocore handles retries for streaming uploads,
+  it is not possible for it to handle retries for streaming
+  downloads.  This module handles retries for both cases so
+  you don't need to implement any retry logic yourself.
+
+This module has a reasonable set of defaults.  It also allows you
+to configure many aspects of the transfer process including:
+
+* Multipart threshold size
+* Max parallel downloads
+* Max bandwidth
+* Socket timeouts
+* Retry amounts
+
+There is no support for s3->s3 multipart copies at this
+time.
+
+
+.. _ref_s3transfer_usage:
+
+Usage
+=====
+
+The simplest way to use this module is:
+
+.. code-block:: python
+
+    client = boto3.client('s3', 'us-west-2')
+    transfer = S3Transfer(client)
+    # Upload /tmp/myfile to s3://bucket/key
+    transfer.upload_file('/tmp/myfile', 'bucket', 'key')
+
+    # Download s3://bucket/key to /tmp/myfile
+    transfer.download_file('bucket', 'key', '/tmp/myfile')
+
+The ``upload_file`` and ``download_file`` methods also accept
+``**kwargs``, which will be forwarded through to the corresponding
+client operation.  Here are a few examples using ``upload_file``::
+
+    # Making the object public
+    transfer.upload_file('/tmp/myfile', 'bucket', 'key',
+                         extra_args={'ACL': 'public-read'})
+
+    # Setting metadata
+    transfer.upload_file('/tmp/myfile', 'bucket', 'key',
+                         extra_args={'Metadata': {'a': 'b', 'c': 'd'}})
+
+    # Setting content type
+    transfer.upload_file('/tmp/myfile.json', 'bucket', 'key',
+                         extra_args={'ContentType': "application/json"})
+
+
+The ``S3Transfer`` class also supports progress callbacks so you can
+provide transfer progress to users.  Both the ``upload_file`` and
+``download_file`` methods take an optional ``callback`` parameter.
+Here's an example of how to print a simple progress percentage
+to the user:
+
+.. code-block:: python
+
+    class ProgressPercentage(object):
+        def __init__(self, filename):
+            self._filename = filename
+            self._size = float(os.path.getsize(filename))
+            self._seen_so_far = 0
+            self._lock = threading.Lock()
+
+        def __call__(self, bytes_amount):
+            # To simplify we'll assume this is hooked up
+            # to a single filename.
+            with self._lock:
+                self._seen_so_far += bytes_amount
+                percentage = (self._seen_so_far / self._size) * 100
+                sys.stdout.write(
+                    "\r%s  %s / %s  (%.2f%%)" % (self._filename, self._seen_so_far,
+                                                 self._size, percentage))
+                sys.stdout.flush()
+
+
+    transfer = S3Transfer(boto3.client('s3', 'us-west-2'))
+    # Upload /tmp/myfile to s3://bucket/key and print upload progress.
+    transfer.upload_file('/tmp/myfile', 'bucket', 'key',
+                         callback=ProgressPercentage('/tmp/myfile'))
+
+
+
+You can also provide a TransferConfig object to the S3Transfer
+object that gives you more fine grained control over the
+transfer.  For example:
+
+.. code-block:: python
+
+    client = boto3.client('s3', 'us-west-2')
+    config = TransferConfig(
+        multipart_threshold=8 * 1024 * 1024,
+        max_concurrency=10,
+        num_download_attempts=10,
+    )
+    transfer = S3Transfer(client, config)
+    transfer.upload_file('/tmp/foo', 'bucket', 'key')
+
+
+"""
+import os
+import math
+import functools
+import logging
+import socket
+import threading
+import random
+import string
+import concurrent.futures
+
+from botocore.compat import six
+from botocore.vendored.requests.packages.urllib3.exceptions import \
+    ReadTimeoutError
+from botocore.exceptions import IncompleteReadError
+
+import s3transfer.compat
+from s3transfer.exceptions import RetriesExceededError, S3UploadFailedError
+
+
+__author__ = 'Amazon Web Services'
+__version__ = '0.1.8'
+
+
+class NullHandler(logging.Handler):
+    def emit(self, record):
+        pass
+
+
+logger = logging.getLogger(__name__)
+logger.addHandler(NullHandler())
+
+queue = six.moves.queue
+
+MB = 1024 * 1024
+SHUTDOWN_SENTINEL = object()
+
+
+def random_file_extension(num_digits=8):
+    return ''.join(random.choice(string.hexdigits) for _ in range(num_digits))
+
+
+def disable_upload_callbacks(request, operation_name, **kwargs):
+    if operation_name in ['PutObject', 'UploadPart'] and \
+            hasattr(request.body, 'disable_callback'):
+        request.body.disable_callback()
+
+
+def enable_upload_callbacks(request, operation_name, **kwargs):
+    if operation_name in ['PutObject', 'UploadPart'] and \
+            hasattr(request.body, 'enable_callback'):
+        request.body.enable_callback()
+
+
+class QueueShutdownError(Exception):
+    pass
+
+
+class ReadFileChunk(object):
+    def __init__(self, fileobj, start_byte, chunk_size, full_file_size,
+                 callback=None, enable_callback=True):
+        """
+
+        Given a file object shown below:
+
+            |___________________________________________________|
+            0          |                 |                 full_file_size
+                       |----chunk_size---|
+                 start_byte
+
+        :type fileobj: file
+        :param fileobj: File like object
+
+        :type start_byte: int
+        :param start_byte: The first byte from which to start reading.
+
+        :type chunk_size: int
+        :param chunk_size: The max chunk size to read.  Trying to read
+            past the end of the chunk size will behave like you've
+            reached the end of the file.
+
+        :type full_file_size: int
+        :param full_file_size: The entire content length associated
+            with ``fileobj``.
+
+        :type callback: function(amount_read)
+        :param callback: Called whenever data is read from this object.
+
+        """
+        self._fileobj = fileobj
+        self._start_byte = start_byte
+        self._size = self._calculate_file_size(
+            self._fileobj, requested_size=chunk_size,
+            start_byte=start_byte, actual_file_size=full_file_size)
+        self._fileobj.seek(self._start_byte)
+        self._amount_read = 0
+        self._callback = callback
+        self._callback_enabled = enable_callback
+
+    @classmethod
+    def from_filename(cls, filename, start_byte, chunk_size, callback=None,
+                      enable_callback=True):
+        """Convenience factory function to create from a filename.
+
+        :type start_byte: int
+        :param start_byte: The first byte from which to start reading.
+
+        :type chunk_size: int
+        :param chunk_size: The max chunk size to read.  Trying to read
+            past the end of the chunk size will behave like you've
+            reached the end of the file.
+
+        :type full_file_size: int
+        :param full_file_size: The entire content length associated
+            with ``fileobj``.
+
+        :type callback: function(amount_read)
+        :param callback: Called whenever data is read from this object.
+
+        :type enable_callback: bool
+        :param enable_callback: Indicate whether to invoke callback
+            during read() calls.
+
+        :rtype: ``ReadFileChunk``
+        :return: A new instance of ``ReadFileChunk``
+
+        """
+        f = open(filename, 'rb')
+        file_size = os.fstat(f.fileno()).st_size
+        return cls(f, start_byte, chunk_size, file_size, callback,
+                   enable_callback)
+
+    def _calculate_file_size(self, fileobj, requested_size, start_byte,
+                             actual_file_size):
+        max_chunk_size = actual_file_size - start_byte
+        return min(max_chunk_size, requested_size)
+
+    def read(self, amount=None):
+        if amount is None:
+            amount_to_read = self._size - self._amount_read
+        else:
+            amount_to_read = min(self._size - self._amount_read, amount)
+        data = self._fileobj.read(amount_to_read)
+        self._amount_read += len(data)
+        if self._callback is not None and self._callback_enabled:
+            self._callback(len(data))
+        return data
+
+    def enable_callback(self):
+        self._callback_enabled = True
+
+    def disable_callback(self):
+        self._callback_enabled = False
+
+    def seek(self, where):
+        self._fileobj.seek(self._start_byte + where)
+        if self._callback is not None and self._callback_enabled:
+            # To also rewind the callback() for an accurate progress report
+            self._callback(where - self._amount_read)
+        self._amount_read = where
+
+    def close(self):
+        self._fileobj.close()
+
+    def tell(self):
+        return self._amount_read
+
+    def __len__(self):
+        # __len__ is defined because requests will try to determine the length
+        # of the stream to set a content length.  In the normal case
+        # of the file it will just stat the file, but we need to change that
+        # behavior.  By providing a __len__, requests will use that instead
+        # of stat'ing the file.
+        return self._size
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, *args, **kwargs):
+        self.close()
+
+    def __iter__(self):
+        # This is a workaround for http://bugs.python.org/issue17575
+        # Basically httplib will try to iterate over the contents, even
+        # if its a file like object.  This wasn't noticed because we've
+        # already exhausted the stream so iterating over the file immediately
+        # stops, which is what we're simulating here.
+        return iter([])
+
+
class StreamReaderProgress(object):
    """Transparent wrapper that reports read progress on a stream.

    Every ``read`` call is forwarded to the wrapped stream; if a callback
    was supplied it is then invoked with the number of bytes returned.
    """
    def __init__(self, stream, callback=None):
        self._stream = stream
        self._callback = callback

    def read(self, *args, **kwargs):
        chunk = self._stream.read(*args, **kwargs)
        if self._callback is not None:
            self._callback(len(chunk))
        return chunk
+
+
class OSUtils(object):
    """Thin abstraction over filesystem operations used during transfers.

    Keeping these behind one object also makes them easy to stub in tests.
    """

    def get_file_size(self, filename):
        """Return the size of ``filename`` in bytes."""
        return os.path.getsize(filename)

    def open_file_chunk_reader(self, filename, start_byte, size, callback):
        """Return a ``ReadFileChunk`` over a byte range of ``filename``.

        The progress callback starts out disabled; callers enable it once
        the transfer actually begins.
        """
        return ReadFileChunk.from_filename(
            filename, start_byte, size, callback, enable_callback=False)

    def open(self, filename, mode):
        """Open ``filename`` with the given ``mode``."""
        return open(filename, mode)

    def remove_file(self, filename):
        """Best-effort removal of ``filename``; a missing file is a no-op."""
        try:
            os.remove(filename)
        except OSError:
            # Unlike os.remove, swallow the error when the file is absent
            # (or otherwise cannot be removed) -- callers treat this as
            # opportunistic cleanup.
            pass

    def rename_file(self, current_filename, new_filename):
        """Rename via the platform-appropriate compat shim."""
        s3transfer.compat.rename_file(current_filename, new_filename)
+
+
class MultipartUploader(object):
    """Upload a single file to S3 via the multipart-upload API.

    Parts are uploaded concurrently through an executor; on any failure
    the multipart upload is aborted and a single descriptive error is
    raised instead.
    """

    # Subset of the caller-supplied extra_args that must also be
    # forwarded to every individual upload_part request.
    UPLOAD_PART_ARGS = [
        'SSECustomerKey',
        'SSECustomerAlgorithm',
        'SSECustomerKeyMD5',
        'RequestPayer',
    ]

    def __init__(self, client, config, osutil,
                 executor_cls=concurrent.futures.ThreadPoolExecutor):
        self._client = client
        self._config = config
        self._os = osutil
        self._executor_cls = executor_cls

    def _extra_upload_part_args(self, extra_args):
        # upload_part accepts only a subset of the args accepted by
        # create_multipart_upload; filter down to that subset.
        return dict((key, value) for key, value in extra_args.items()
                    if key in self.UPLOAD_PART_ARGS)

    def upload_file(self, filename, bucket, key, callback, extra_args):
        """Upload ``filename`` to ``bucket``/``key`` in multiple parts.

        :raises S3UploadFailedError: if any part fails to upload; the
            in-progress multipart upload is aborted first so it does not
            keep accruing storage charges.
        """
        response = self._client.create_multipart_upload(Bucket=bucket,
                                                        Key=key, **extra_args)
        upload_id = response['UploadId']
        try:
            parts = self._upload_parts(upload_id, filename, bucket, key,
                                       callback, extra_args)
        except Exception as e:
            logger.debug("Exception raised while uploading parts, "
                         "aborting multipart upload.", exc_info=True)
            self._client.abort_multipart_upload(
                Bucket=bucket, Key=key, UploadId=upload_id)
            raise S3UploadFailedError(
                "Failed to upload %s to %s: %s" % (
                    filename, '/'.join([bucket, key]), e))
        self._client.complete_multipart_upload(
            Bucket=bucket, Key=key, UploadId=upload_id,
            MultipartUpload={'Parts': parts})

    def _upload_parts(self, upload_id, filename, bucket, key, callback,
                      extra_args):
        # Fan the part uploads out over a thread pool; executor.map
        # preserves part order, which complete_multipart_upload requires.
        part_args = self._extra_upload_part_args(extra_args)
        part_size = self._config.multipart_chunksize
        num_parts = int(
            math.ceil(self._os.get_file_size(filename) / float(part_size)))
        upload_partial = functools.partial(
            self._upload_one_part, filename, bucket, key, upload_id,
            part_size, part_args, callback)
        with self._executor_cls(
                max_workers=self._config.max_concurrency) as executor:
            return list(executor.map(upload_partial, range(1, num_parts + 1)))

    def _upload_one_part(self, filename, bucket, key,
                         upload_id, part_size, extra_args,
                         callback, part_number):
        # Part numbers are 1-based; the byte offset is not.
        start_byte = part_size * (part_number - 1)
        with self._os.open_file_chunk_reader(
                filename, start_byte, part_size, callback) as body:
            response = self._client.upload_part(
                Bucket=bucket, Key=key,
                UploadId=upload_id, PartNumber=part_number, Body=body,
                **extra_args)
            return {'ETag': response['ETag'], 'PartNumber': part_number}
+
+
class ShutdownQueue(queue.Queue):
    """A ``queue.Queue`` whose ``put`` side can be switched off.

    After ``trigger_shutdown`` has been called, every subsequent call to
    ``put`` raises ``QueueShutdownError``.  This intentionally deviates
    from the stdlib Queue interface and is *not* a drop-in replacement
    for ``queue.Queue``.
    """
    def _init(self, maxsize):
        # Hook invoked by queue.Queue.__init__; queue.Queue is an
        # old-style class on Python 2, so super() is avoided on purpose.
        self._shutdown = False
        self._shutdown_lock = threading.Lock()
        return queue.Queue._init(self, maxsize)

    def trigger_shutdown(self):
        """Cause every future put() to fail with QueueShutdownError."""
        with self._shutdown_lock:
            self._shutdown = True
            logger.debug("The IO queue is now shutdown.")

    def put(self, item):
        # NOTE: not fully race-free -- a put() already blocked inside the
        # base class can still deadlock; fixing that would require hooking
        # the condition variables queue.Queue uses internally.
        with self._shutdown_lock:
            if self._shutdown:
                raise QueueShutdownError("Cannot put item to queue when "
                                         "queue has been shutdown.")
        return queue.Queue.put(self, item)
+
+
class MultipartDownloader(object):
    """Download a single S3 object using concurrent ranged GETs."""

    def __init__(self, client, config, osutil,
                 executor_cls=concurrent.futures.ThreadPoolExecutor):
        self._client = client
        self._config = config
        self._os = osutil
        self._executor_cls = executor_cls
        # Bounded hand-off queue between the download threads and the
        # single IO-writer thread.
        self._ioqueue = ShutdownQueue(self._config.max_io_queue)
+
+    def download_file(self, bucket, key, filename, object_size,
+                      extra_args, callback=None):
+        with self._executor_cls(max_workers=2) as controller:
+            # 1 thread for the future that manages the uploading of files
+            # 1 thread for the future that manages IO writes.
+            download_parts_handler = functools.partial(
+                self._download_file_as_future,
+                bucket, key, filename, object_size, callback)
+            parts_future = controller.submit(download_parts_handler)
+
+            io_writes_handler = functools.partial(
+                self._perform_io_writes, filename)
+            io_future = controller.submit(io_writes_handler)
+            results = concurrent.futures.wait(
+                [parts_future, io_future],
+                return_when=concurrent.futures.FIRST_EXCEPTION)
+            self._process_future_results(results)
+
+    def _process_future_results(self, futures):
+        finished, unfinished = futures
+        for future in finished:
+            future.result()
+
+    def _download_file_as_future(self, bucket, key, filename, object_size,
+                                 callback):
+        part_size = self._config.multipart_chunksize
+        num_parts = int(math.ceil(object_size / float(part_size)))
+        max_workers = self._config.max_concurrency
+        download_partial = functools.partial(
+            self._download_range, bucket, key, filename,
+            part_size, num_parts, callback)
+        try:
+            with self._executor_cls(max_workers=max_workers) as executor:
+                list(executor.map(download_partial, range(num_parts)))
+        finally:
+            self._ioqueue.put(SHUTDOWN_SENTINEL)
+
+    def _calculate_range_param(self, part_size, part_index, num_parts):
+        start_range = part_index * part_size
+        if part_index == num_parts - 1:
+            end_range = ''
+        else:
+            end_range = start_range + part_size - 1
+        range_param = 'bytes=%s-%s' % (start_range, end_range)
+        return range_param
+
+    def _download_range(self, bucket, key, filename,
+                        part_size, num_parts, callback, part_index):
+        try:
+            range_param = self._calculate_range_param(
+                part_size, part_index, num_parts)
+
+            max_attempts = self._config.num_download_attempts
+            last_exception = None
+            for i in range(max_attempts):
+                try:
+                    logger.debug("Making get_object call.")
+                    response = self._client.get_object(
+                        Bucket=bucket, Key=key, Range=range_param)
+                    streaming_body = StreamReaderProgress(
+                        response['Body'], callback)
+                    buffer_size = 1024 * 16
+                    current_index = part_size * part_index
... 12694 lines suppressed ...

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/python-modules/packages/python-s3transfer.git



More information about the Python-modules-commits mailing list