This repository has been archived by the owner on Jul 18, 2024. It is now read-only.

Tag for 1.0.0a3 release
- Rename some options
- Make thread join more robust on Python2
alfpark committed Jun 2, 2017
1 parent b778261 commit e1d97fa
Showing 12 changed files with 129 additions and 61 deletions.

CHANGELOG.md (6 changes: 3 additions & 3 deletions)
@@ -2,7 +2,7 @@

## [Unreleased]

## [1.0.0a2] - 2017-06-02
## [1.0.0a3] - 2017-06-02
### Changed
- From scratch rewrite providing a consistent CLI experience and a vast
array of new and advanced features. Please see the
@@ -201,8 +201,8 @@ usage documentation carefully when upgrading from 0.12.1.
`--no-skiponmatch`.
- 0.8.2: performance regression fixes

[Unreleased]: https://github.com/Azure/blobxfer/compare/1.0.0a2...HEAD
[1.0.0a2]: https://github.com/Azure/blobxfer/compare/0.12.1...1.0.0a2
[Unreleased]: https://github.com/Azure/blobxfer/compare/1.0.0a3...HEAD
[1.0.0a3]: https://github.com/Azure/blobxfer/compare/0.12.1...1.0.0a3
[0.12.1]: https://github.com/Azure/blobxfer/compare/0.12.0...0.12.1
[0.12.0]: https://github.com/Azure/blobxfer/compare/0.11.5...0.12.0
[0.11.5]: https://github.com/Azure/blobxfer/compare/0.11.4...0.11.5

blobxfer/models/options.py (6 changes: 5 additions & 1 deletion)
@@ -105,13 +105,14 @@ class Concurrency(object):
"""Concurrency Options"""
def __init__(
self, crypto_processes, md5_processes, disk_threads,
transfer_threads):
transfer_threads, is_download=None):
"""Ctor for Concurrency Options
:param Concurrency self: this
:param int crypto_processes: number of crypto procs
:param int md5_processes: number of md5 procs
:param int disk_threads: number of disk threads
:param int transfer_threads: number of transfer threads
:param bool is_download: download hint
"""
self.crypto_processes = crypto_processes
self.md5_processes = md5_processes
@@ -131,6 +132,9 @@ def __init__(
# cap maximum number of disk threads from cpu count to 64
if self.disk_threads > 64:
self.disk_threads = 64
# for downloads, cap disk threads to lower value
if is_download and self.disk_threads > 16:
self.disk_threads = 16
auto_disk = True
if self.transfer_threads is None or self.transfer_threads < 1:
if auto_disk:
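For context, a minimal standalone sketch of the auto-sizing behavior the hunk above adjusts. The `cpu_count() * 4` seed and the `auto_disk_threads` name are assumptions made purely for illustration; only the 64-thread ceiling and the new download-specific 16-thread ceiling come from the diff.

```python
# Illustrative only -- not blobxfer's actual Concurrency class.
# The cpu_count() * 4 starting point is an assumed placeholder; the
# 64 and 16 caps mirror the logic added in this commit.
import multiprocessing


def auto_disk_threads(requested, is_download=False):
    threads = requested
    if threads is None or threads < 1:
        threads = multiprocessing.cpu_count() * 4
        # cap maximum number of disk threads from cpu count to 64
        if threads > 64:
            threads = 64
        # for downloads, cap disk threads to a lower value
        if is_download and threads > 16:
            threads = 16
    return threads


print(auto_disk_threads(None, is_download=True))   # at most 16
print(auto_disk_threads(None, is_download=False))  # at most 64
```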

blobxfer/operations/download.py (8 changes: 4 additions & 4 deletions)
@@ -430,7 +430,7 @@ def _wait_for_disk_threads(self, terminate):
if terminate:
self._download_terminate = terminate
for thr in self._disk_threads:
thr.join()
blobxfer.util.join_thread(thr)

def _wait_for_transfer_threads(self, terminate):
# type: (Downloader, bool) -> None
@@ -441,7 +441,7 @@ def _wait_for_transfer_threads(self, terminate):
if terminate:
self._download_terminate = terminate
for thr in self._transfer_threads:
thr.join()
blobxfer.util.join_thread(thr)

def _worker_thread_transfer(self):
# type: (Downloader) -> None
@@ -452,7 +452,7 @@ def _worker_thread_transfer(self):
while not self.termination_check:
try:
if len(self._disk_set) > max_set_len:
time.sleep(0.2)
time.sleep(0.1)
continue
else:
dd = self._transfer_queue.get(block=False, timeout=0.1)
@@ -792,8 +792,8 @@ def start(self):
'KeyboardInterrupt detected, force terminating '
'processes and threads (this may take a while)...')
try:
self._wait_for_transfer_threads(terminate=True)
self._wait_for_disk_threads(terminate=True)
self._wait_for_transfer_threads(terminate=True)
finally:
self._cleanup_temporary_files()
raise
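A hedged sketch of the polling pattern these worker-thread hunks tune (non-blocking queue reads plus a back-off that this commit shortens from 0.2 s to 0.1 s). The `worker_loop` and `handle` names are hypothetical and not part of the blobxfer API.

```python
# Illustrative worker loop, not blobxfer code: poll the transfer queue
# without blocking so the termination flag is re-checked frequently, and
# back off briefly while too many items are already in flight to disk.
import queue  # named 'Queue' on Python 2
import threading
import time


def worker_loop(transfer_queue, disk_set, max_set_len, stop, handle):
    while not stop.is_set():
        try:
            if len(disk_set) > max_set_len:
                time.sleep(0.1)  # back-off shortened from 0.2 in this commit
                continue
            item = transfer_queue.get(block=False)
        except queue.Empty:
            continue
        handle(item)


# usage sketch
work = queue.Queue()
for i in range(3):
    work.put(i)
stop = threading.Event()


def handle(item):
    print('handled', item)
    if work.empty():
        stop.set()


worker_loop(work, set(), 4, stop, handle)
```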

blobxfer/operations/upload.py (4 changes: 2 additions & 2 deletions)
@@ -447,10 +447,10 @@ def _worker_thread_upload(self):
while not self.termination_check:
try:
if len(self._transfer_set) > max_set_len:
time.sleep(0.2)
time.sleep(0.1)
continue
else:
ud = self._upload_queue.get(False, 0.1)
ud = self._upload_queue.get(block=False, timeout=0.1)
except queue.Empty:
continue
try:

blobxfer/util.py (14 changes: 14 additions & 0 deletions)
@@ -124,6 +124,20 @@ def is_not_empty(obj):
return obj is not None and len(obj) > 0


def join_thread(thr):
# type: (threading.Thread) -> None
"""Join a thread
:type threading.Thread thr: thread to join
"""
if on_python2():
while True:
thr.join(timeout=1)
if not thr.isAlive():
break
else:
thr.join()


def merge_dict(dict1, dict2):
# type: (dict, dict) -> dict
"""Recursively merge dictionaries: dict2 on to dict1. This differs
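For background on the new `join_thread` helper: on Python 2, `Thread.join()` with no timeout blocks in a way that cannot be interrupted by signals, so a `KeyboardInterrupt` is not delivered until the worker finishes; joining in one-second slices keeps Ctrl-C responsive. A minimal sketch of the same pattern outside blobxfer:

```python
# Minimal illustration, not blobxfer code: join in short slices so the
# main thread regains control between joins and can honor Ctrl-C on
# Python 2. isAlive() is the Python 2 spelling (is_alive() on Python 3).
import threading
import time


def long_task():
    time.sleep(5)  # stand-in for a long-running transfer


thr = threading.Thread(target=long_task)
thr.start()
while True:
    thr.join(timeout=1)
    if not thr.isAlive():
        break
print('worker finished; Ctrl-C would have been honored between joins')
```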

blobxfer/version.py (2 changes: 1 addition & 1 deletion)
@@ -22,4 +22,4 @@
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.

__version__ = '1.0.0a2'
__version__ = '1.0.0a3'

cli/cli.py (42 changes: 23 additions & 19 deletions)
@@ -62,13 +62,15 @@ def __init__(self):
self.credentials = None
self.general_options = None

def initialize(self):
# type: (CliContext) -> None
def initialize(self, action):
# type: (CliContext, settings.TransferAction) -> None
"""Initialize context
:param CliContext self: this
:param settings.TransferAction action: transfer action
"""
self._init_config()
self.general_options = settings.create_general_options(self.config)
self.general_options = settings.create_general_options(
self.config, action)
self.credentials = settings.create_azure_storage_credentials(
self.config, self.general_options)

@@ -164,7 +166,8 @@ def callback(ctx, param, value):
'--log-file',
expose_value=False,
default=None,
help='Log to file specified',
help='Log to file specified; this must be specified for progress '
'bar to show',
callback=callback)(f)


@@ -191,7 +194,8 @@ def callback(ctx, param, value):
'--progress-bar/--no-progress-bar',
expose_value=False,
default=True,
help='Display progress bar instead of console logs [True]',
help='Display progress bar instead of console logs; log file must '
'be specified [True]',
callback=callback)(f)


@@ -254,22 +258,22 @@ def callback(ctx, param, value):
clictx.cli_options['local_resource'] = value
return value
return click.option(
'--local-resource',
'--local-path',
expose_value=False,
help='Local resource; use - for stdin',
help='Local path; use - for stdin',
callback=callback)(f)


def _storage_account_name_option(f):
def _storage_account_option(f):
def callback(ctx, param, value):
clictx = ctx.ensure_object(CliContext)
clictx.cli_options['storage_account'] = value
return value
return click.option(
'--storage-account-name',
'--storage-account',
expose_value=False,
help='Storage account name',
envvar='BLOBXFER_STORAGE_ACCOUNT_NAME',
envvar='BLOBXFER_STORAGE_ACCOUNT',
callback=callback)(f)


@@ -301,7 +305,7 @@ def common_options(f):

def upload_download_options(f):
f = _remote_path_option(f)
f = _storage_account_name_option(f)
f = _storage_account_option(f)
f = _local_resource_option(f)
return f

@@ -633,16 +637,16 @@ def callback(ctx, param, value):
callback=callback)(f)


def _sync_copy_dest_storage_account_name_option(f):
def _sync_copy_dest_storage_account_option(f):
def callback(ctx, param, value):
clictx = ctx.ensure_object(CliContext)
clictx.cli_options['sync_copy_dest_storage_account'] = value
return value
return click.option(
'--sync-copy-dest-storage-account-name',
'--sync-copy-dest-storage-account',
expose_value=False,
help='Storage account name for synccopy destination',
envvar='BLOBXFER_SYNC_COPY_DEST_STORAGE_ACCOUNT_NAME',
envvar='BLOBXFER_SYNC_COPY_DEST_STORAGE_ACCOUNT',
callback=callback)(f)


@@ -721,11 +725,11 @@ def download_options(f):


def sync_copy_options(f):
f = _sync_copy_dest_storage_account_name_option(f)
f = _sync_copy_dest_storage_account_option(f)
f = _sync_copy_dest_sas_option(f)
f = _sync_copy_dest_remote_path_option(f)
f = _sync_copy_dest_access_key_option(f)
f = _storage_account_name_option(f)
f = _storage_account_option(f)
f = _skip_on_md5_match_option(f)
f = _skip_on_lmt_ge_option(f)
f = _skip_on_filesize_match_option(f)
@@ -757,7 +761,7 @@ def cli(ctx):
def download(ctx):
"""Download blobs or files from Azure Storage"""
settings.add_cli_options(ctx.cli_options, settings.TransferAction.Download)
ctx.initialize()
ctx.initialize(settings.TransferAction.Download)
specs = settings.create_download_specifications(ctx.config)
for spec in specs:
blobxfer.api.Downloader(
@@ -773,7 +777,7 @@ def synccopy(ctx):
"""Synchronously copy blobs between Azure Storage accounts"""
raise NotImplementedError()
settings.add_cli_options(ctx.cli_options, settings.TransferAction.Synccopy)
ctx.initialize()
ctx.initialize(settings.TransferAction.Synccopy)


@cli.command('upload')
@@ -784,7 +788,7 @@ def synccopy(ctx):
def upload(ctx):
"""Upload files to Azure Storage"""
settings.add_cli_options(ctx.cli_options, settings.TransferAction.Upload)
ctx.initialize()
ctx.initialize(settings.TransferAction.Upload)
specs = settings.create_upload_specifications(ctx.config)
for spec in specs:
blobxfer.api.Uploader(

cli/settings.py (12 changes: 7 additions & 5 deletions)
@@ -61,13 +61,13 @@ def add_cli_options(cli_options, action):
if blobxfer.util.is_none_or_empty(local_resource):
raise KeyError()
except KeyError:
raise ValueError('--local-resource must be specified')
raise ValueError('--local-path must be specified')
try:
storage_account = cli_options['storage_account']
if blobxfer.util.is_none_or_empty(storage_account):
raise KeyError()
except KeyError:
raise ValueError('--storage-account-name must be specified')
raise ValueError('--storage-account must be specified')
try:
remote_path = cli_options['remote_path']
if blobxfer.util.is_none_or_empty(remote_path):
@@ -167,7 +167,7 @@ def add_cli_options(cli_options, action):
raise KeyError()
except KeyError:
raise ValueError(
'--sync-copy-dest-storage-account-name must be specified')
'--sync-copy-dest-storage-account must be specified')
try:
sync_copy_dest_remote_path = \
cli_options['sync_copy_dest_remote_path']
@@ -278,10 +278,11 @@ def create_azure_storage_credentials(config, general_options):
return creds


def create_general_options(config):
# type: (dict) -> blobxfer.models.options.General
def create_general_options(config, action):
# type: (dict, TransferAction) -> blobxfer.models.options.General
"""Create a General Options object from configuration
:param dict config: config dict
:param TransferAction action: transfer action
:rtype: blobxfer.models.options.General
:return: general options object
"""
@@ -292,6 +293,7 @@ def create_general_options(config):
disk_threads=conc.get('disk_threads', 0),
md5_processes=conc.get('md5_processes', 0),
transfer_threads=conc.get('transfer_threads', 0),
is_download=action == TransferAction.Download,
),
log_file=config['options'].get('log_file', None),
progress_bar=config['options'].get('progress_bar', True),

docs/01-installation.md (7 changes: 4 additions & 3 deletions)
@@ -72,9 +72,10 @@ docker pull alfpark/blobxfer

## Troubleshooting
#### `azure.storage` dependency not found
If you get an error that `azure.storage` cannot be found or loaded, then
most likely there was a conflict with this package with other `azure` packages
that share the same base namespace. You can correct this by issuing:
If you get an error such as `ImportError: No module named storage` or that
`azure.storage` cannot be found or loaded, then most likely there was a
conflict with this package with other `azure` packages that share the same
base namespace. You can correct this by issuing:
```shell
# for Python3
pip3 install --upgrade --force-reinstall azure-storage
```
