#!/usr/bin/python -u
# Copyright (c) 2010-2012 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import getpass
import io
import json
import logging
import signal
import socket
import warnings

from os import environ, walk, _exit as os_exit
from os.path import isfile, isdir, join
from urllib.parse import unquote, urlparse
from sys import argv as sys_argv, exit, stderr, stdin
from time import gmtime, strftime

from swiftclient import RequestException
from swiftclient.utils import config_true_value, generate_temp_url, \
    prt_bytes, parse_timeout, JSONableIterable
from swiftclient.multithreading import OutputManager
from swiftclient.exceptions import ClientException
from swiftclient import __version__ as client_version
from swiftclient.client import logger_settings as client_logger_settings, \
    parse_header_string
from swiftclient.service import SwiftService, SwiftError, \
    SwiftUploadObject, get_conn, process_options
from swiftclient.command_helpers import print_account_stats, \
    print_container_stats, print_object_stats

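# NOTE: shlex.quote is available on all supported Pythons; the pipes.quote
# fallback below appears to be kept only for very old interpreters.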
try:
    from shlex import quote as sh_quote
except ImportError:
    from pipes import quote as sh_quote

BASENAME = 'swift'
commands = ('delete', 'download', 'list', 'post', 'copy', 'stat', 'upload',
            'capabilities', 'info', 'tempurl', 'auth', 'bash_completion')


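# Presumably installed as the SIGINT handler by the CLI entry point so that
# Ctrl-C aborts immediately; os._exit() skips interpreter cleanup and does
# not wait for worker threads.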
def immediate_exit(signum, frame):
    stderr.write(" Aborted\n")
    os_exit(2)


st_delete_options = '''[--all] [--leave-segments]
                       [--object-threads <threads>]
                       [--container-threads <threads>]
                       [--header <header:value>]
                       [--prefix <prefix>]
                       [--versions]
                       [<container> [<object>] [--version-id <version_id>] [...]]
'''

st_delete_help = '''
Delete a container or objects within a container.

Positional arguments:
  [<container>]         Name of container to delete from.
  [<object>]            Name of object to delete. Specify multiple times
                        for multiple objects.

Optional arguments:
  -a, --all             Delete all containers and objects. Implies --versions.
  --versions            Delete all versions.
  --leave-segments      Do not delete segments of manifest objects.
  -H, --header <header:value>
                        Adds a custom request header to use for deleting
                        objects or an entire container.
  --object-threads <threads>
                        Number of threads to use for deleting objects.
                        Default is 10.
  --container-threads <threads>
                        Number of threads to use for deleting containers.
                        Default is 10.
  --prefix <prefix>     Only delete objects beginning with <prefix>.
  --version-id <version-id>
                        Delete specific version of a versioned object.
'''.strip("\n")


def st_delete(parser, args, output_manager, return_parser=False):
    parser.add_argument(
        '-a', '--all', action='store_true', dest='yes_all',
        default=False, help='Delete all containers and objects.')
    parser.add_argument('--versions', action='store_true',
                        help='delete all versions')
    parser.add_argument(
        '-p', '--prefix', dest='prefix',
        help='Only delete items beginning with <prefix>.')
    parser.add_argument(
        '--version-id', action='store', default=None,
        help='Delete a specific version of a versioned object')
    parser.add_argument(
        '-H', '--header', action='append', dest='header',
        default=[],
        help='Adds a custom request header to use for deleting objects '
             'or an entire container.')
    parser.add_argument(
        '--leave-segments', action='store_true',
        dest='leave_segments', default=False,
        help='Do not delete segments of manifest objects.')
    parser.add_argument(
        '--object-threads', type=int,
        default=10, help='Number of threads to use for deleting objects. '
        'Its value must be a positive integer. Default is 10.')
    parser.add_argument(
        '--container-threads', type=int,
        default=10, help='Number of threads to use for deleting containers. '
        'Its value must be a positive integer. Default is 10.')

    # We return the parser to build up the bash_completion
    if return_parser:
        return parser

    (options, args) = parse_args(parser, args)
    args = args[1:]
    if options['yes_all']:
        options['versions'] = True
    if (not args and not options['yes_all']) or (args and options['yes_all']):
        output_manager.error('Usage: %s delete %s\n%s',
                             BASENAME, st_delete_options,
                             st_delete_help)
        return
    if options['versions'] and len(args) >= 2:
        exit('--versions option not allowed for object deletes')
    if options['version_id'] and len(args) < 2:
        exit('--version-id option only allowed for object deletes')

    if options['object_threads'] <= 0:
        output_manager.error(
            'ERROR: option --object-threads should be a positive integer.'
            '\n\nUsage: %s delete %s\n%s',
            BASENAME, st_delete_options,
            st_delete_help)
        return

    if options['container_threads'] <= 0:
        output_manager.error(
            'ERROR: option --container-threads should be a positive integer.'
            '\n\nUsage: %s delete %s\n%s',
            BASENAME, st_delete_options,
            st_delete_help)
        return

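    # SwiftService appears to use a separate 'object_dd_threads' option for
    # its delete/download pools, so mirror --object-threads into it here.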
    options['object_dd_threads'] = options['object_threads']
    with SwiftService(options=options) as swift:
        try:
            if not args:
                del_iter = swift.delete()
            else:
                container = args[0]
                if '/' in container:
                    output_manager.error(
                        'WARNING: / in container name; you '
                        "might have meant '%s' instead of '%s'." %
                        (container.replace('/', ' ', 1), container)
                    )
                    return
                objects = args[1:]
                if objects:
                    del_iter = swift.delete(container=container,
                                            objects=objects)
                else:
                    del_iter = swift.delete(container=container)

            for r in del_iter:
                c = r.get('container', '')
                o = r.get('object', '')
                a = (' [after {0} attempts]'.format(r.get('attempts'))
                     if r.get('attempts', 1) > 1 else '')

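                # A single bulk_delete result can cover many objects; any
                # failures come back quoted as "/<container>/<object>", e.g.
                # unquote('/pics/a%20b') -> '/pics/a b', so o[1:] drops the
                # leading slash and o[len(c) + 2:] strips "/<container>/".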
                if r['action'] == 'bulk_delete':
                    if r['success']:
                        objs = r.get('objects', [])
                        for o, err in r.get('result', {}).get('Errors', []):
                            # o will be of the form quote("/<cont>/<obj>")
                            o = unquote(o)
                            output_manager.error('Error Deleting: {0}: {1}'
                                                 .format(o[1:], err))
                            try:
                                objs.remove(o[len(c) + 2:])
                            except ValueError:
                                # shouldn't happen, but ignoring it won't hurt
                                pass

                        for o in objs:
                            if options['yes_all']:
                                p = '{0}/{1}'.format(c, o)
                            else:
                                p = o
                            output_manager.print_msg('{0}{1}'.format(p, a))
                    else:
                        for o in r.get('objects', []):
                            output_manager.error('Error Deleting: {0}/{1}: {2}'
                                                 .format(c, o, r['error']))
                else:
                    if r['success']:
                        if options['verbose']:
                            if r['action'] == 'delete_object':
                                if options['yes_all']:
                                    p = '{0}/{1}'.format(c, o)
                                else:
                                    p = o
                            elif r['action'] == 'delete_segment':
                                p = '{0}/{1}'.format(c, o)
                            elif r['action'] == 'delete_container':
                                p = c

                            output_manager.print_msg('{0}{1}'.format(p, a))
                    else:
                        p = '{0}/{1}'.format(c, o) if o else c
                        output_manager.error('Error Deleting: {0}: {1}'
                                             .format(p, r['error']))
        except SwiftError as err:
            output_manager.error(err.value)


st_download_options = '''[--all] [--marker <marker>] [--prefix <prefix>]
                         [--output <out_file>] [--output-dir <out_directory>]
                         [--object-threads <threads>] [--ignore-checksum]
                         [--container-threads <threads>] [--no-download]
                         [--skip-identical] [--remove-prefix]
                         [--version-id <version_id>]
                         [--header <header:value>] [--no-shuffle]
                         [<container> [<object>] [...]]
'''

st_download_help = '''
Download objects from containers.

Positional arguments:
  [<container>]         Name of container to download from. To download a
                        whole account, omit this and specify --all.
  [<object>]            Name of object to download. Specify multiple times
                        for multiple objects. Omit this to download all
                        objects from the container.

Optional arguments:
  -a, --all             Indicates that you really want to download
                        everything in the account.
  -m, --marker <marker> Marker to use when starting a container or account
                        download.
  -p, --prefix <prefix> Only download items beginning with <prefix>
  -r, --remove-prefix   An optional flag for --prefix <prefix>, use this
                        option to download items without <prefix>
  -o, --output <out_file>
                        For a single file download, stream the output to
                        <out_file>. Specifying "-" as <out_file> will
                        redirect to stdout.
  -D, --output-dir <out_directory>
                        An optional directory to which to store objects.
                        By default, all objects are recreated in the current
                        directory.
  --object-threads <threads>
                        Number of threads to use for downloading objects.
                        Default is 10.
  --container-threads <threads>
                        Number of threads to use for downloading containers.
                        Default is 10.
  --no-download         Perform download(s), but don't actually write anything
                        to disk.
  -H, --header <header:value>
                        Adds a customized request header to the query, like
                        "Range" or "If-Match". This option may be repeated.
                        Example: --header "content-type:text/plain"
  --skip-identical      Skip downloading files that are identical on both
                        sides.
  --version-id <version-id>
                        Download specific version of a versioned object.
  --ignore-checksum     Turn off checksum validation for downloads.
  --no-shuffle          By default, when downloading a complete account or
                        container, download order is randomised in order to
                        reduce the load on individual drives when multiple
                        clients are executed simultaneously to download the
                        same set of objects (e.g. a nightly automated download
                        script to multiple servers). Enable this option to
                        submit download jobs to the thread pool in the order
                        they are listed in the object store.
  --ignore-mtime        Ignore the 'X-Object-Meta-Mtime' header when
                        downloading an object. Instead, create atime and mtime
                        with fresh timestamps.
'''.strip("\n")


def st_download(parser, args, output_manager, return_parser=False):
    parser.add_argument(
        '-a', '--all', action='store_true', dest='yes_all',
        default=False, help='Indicates that you really want to download '
        'everything in the account.')
    parser.add_argument(
        '-m', '--marker', dest='marker',
        default='', help='Marker to use when starting a container or '
        'account download.')
    parser.add_argument(
        '-p', '--prefix', dest='prefix',
        help='Only download items beginning with the <prefix>.')
    parser.add_argument(
        '-o', '--output', dest='out_file', help='For a single '
        'download, stream the output to <out_file>. '
        'Specifying "-" as <out_file> will redirect to stdout.')
    parser.add_argument(
        '-D', '--output-dir', dest='out_directory',
        help='An optional directory to which to store objects. '
        'By default, all objects are recreated in the current directory.')
    parser.add_argument(
        '-r', '--remove-prefix', action='store_true', dest='remove_prefix',
        default=False, help='An optional flag for --prefix <prefix>, '
        'use this option to download items without <prefix>.')
    parser.add_argument(
        '--object-threads', type=int,
        default=10, help='Number of threads to use for downloading objects. '
        'Its value must be a positive integer. Default is 10.')
    parser.add_argument(
        '--container-threads', type=int, default=10,
        help='Number of threads to use for downloading containers. '
        'Its value must be a positive integer. Default is 10.')
    parser.add_argument(
        '--no-download', action='store_true',
        default=False,
        help="Perform download(s), but don't actually write anything to disk.")
    parser.add_argument(
        '-H', '--header', action='append', dest='header',
        default=[],
        help='Adds a customized request header to the query, like "Range" or '
        '"If-Match". This option may be repeated. '
        'Example: --header "content-type:text/plain"')
    parser.add_argument(
        '--skip-identical', action='store_true', dest='skip_identical',
        default=False, help='Skip downloading files that are identical on '
        'both sides.')
    parser.add_argument(
        '--version-id', action='store', default=None,
        help='Download a specific version of a versioned object')
    parser.add_argument(
        '--ignore-checksum', action='store_false', dest='checksum',
        default=True, help='Turn off checksum validation for downloads.')
    parser.add_argument(
        '--no-shuffle', action='store_false', dest='shuffle',
        default=True, help='By default, download order is randomised in order '
        'to reduce the load on individual drives when multiple clients are '
        'executed simultaneously to download the same set of objects (e.g. a '
        'nightly automated download script to multiple servers). Enable this '
        'option to submit download jobs to the thread pool in the order they '
        'are listed in the object store.')
    parser.add_argument(
        '--ignore-mtime', action='store_true', dest='ignore_mtime',
        default=False, help='By default, the object-meta-mtime header is used '
        'to store the access and modified timestamp for the downloaded file. '
        'With this option, the header is ignored and the timestamps are '
        'created freshly.')

    # We return the parser to build up the bash_completion
    if return_parser:
        return parser

    (options, args) = parse_args(parser, args)
    args = args[1:]
    if options['out_file'] == '-':
        options['verbose'] = 0

    if options['out_file'] and len(args) != 2:
        exit('-o option only allowed for single file downloads')

    if not options['prefix']:
        options['remove_prefix'] = False

    if options['out_directory'] and len(args) == 2:
        exit('Please use -o option for single file downloads and renames')

    if (not args and not options['yes_all']) or (args and options['yes_all']):
        output_manager.error('Usage: %s download %s\n%s', BASENAME,
                             st_download_options, st_download_help)
        return
    if options['version_id'] and len(args) < 2:
        exit('--version-id option only allowed for object downloads')

    if options['object_threads'] <= 0:
        output_manager.error(
            'ERROR: option --object-threads should be a positive integer.\n\n'
            'Usage: %s download %s\n%s', BASENAME,
            st_download_options, st_download_help)
        return

    if options['container_threads'] <= 0:
        output_manager.error(
            'ERROR: option --container-threads should be a positive integer.'
            '\n\nUsage: %s download %s\n%s', BASENAME,
            st_download_options, st_download_help)
        return

    options['object_dd_threads'] = options['object_threads']
    with SwiftService(options=options) as swift:
        try:
            if not args:
                down_iter = swift.download()
            else:
                container = args[0]
                if '/' in container:
                    output_manager.error(
                        'WARNING: / in container name; you '
                        "might have meant '%s' instead of '%s'." %
                        (container.replace('/', ' ', 1), container)
                    )
                    return
                objects = args[1:]
                if not objects:
                    down_iter = swift.download(container)
                else:
                    down_iter = swift.download(container, objects)

            for down in down_iter:
                if options['out_file'] == '-' and 'contents' in down:
                    contents = down['contents']
                    for chunk in contents:
                        output_manager.print_raw(chunk)
                else:
                    if down['success']:
                        if options['verbose']:
                            start_time = down['start_time']
                            headers_receipt = \
                                down['headers_receipt'] - start_time
                            auth_time = down['auth_end_time'] - start_time
                            finish_time = down['finish_time']
                            read_length = down['read_length']
                            attempts = down['attempts']
                            total_time = finish_time - start_time
                            down_time = total_time - auth_time
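                            # Decimal MB/s: e.g. 8,000,000 bytes over 2s of
                            # download time reports 4.000 MB/s.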
                            _mega = 1000000
                            if down['pseudodir']:
                                time_str = (
                                    'auth %.3fs, headers %.3fs, total %.3fs, '
                                    'pseudo' % (
                                        auth_time, headers_receipt,
                                        total_time
                                    )
                                )
                            else:
                                speed = float(read_length) / down_time / _mega
                                time_str = (
                                    'auth %.3fs, headers %.3fs, total %.3fs, '
                                    '%.3f MB/s' % (
                                        auth_time, headers_receipt,
                                        total_time, speed
                                    )
                                )
                            path = down['path']
                            if attempts > 1:
                                output_manager.print_msg(
                                    '%s [%s after %d attempts]',
                                    path, time_str, attempts
                                )
                            else:
                                output_manager.print_msg(
                                    '%s [%s]', path, time_str
                                )
                    else:
                        error = down['error']
                        path = down['path']
                        container = down['container']
                        obj = down['object']
                        if isinstance(error, ClientException):
                            if error.http_status == 304 and \
                                    options['skip_identical']:
                                output_manager.print_msg(
                                    "Skipped identical file '%s'", path)
                                continue
                            if error.http_status == 404:
                                output_manager.error(
                                    "Object '%s/%s' not found", container, obj)
                                continue
                        output_manager.error(
                            "Error downloading object '%s/%s': %s",
                            container, obj, error)

        except SwiftError as e:
            output_manager.error(e.value)
        except Exception as e:
            output_manager.error(e)


st_list_options = '''[--long] [--lh] [--totals] [--prefix <prefix>]
                     [--delimiter <delimiter>] [--header <header:value>]
                     [--versions] [<container>]
'''

st_list_help = '''
Lists the containers for the account or the objects for a container.

Positional arguments:
  [<container>]         Name of container to list objects in.

Optional arguments:
  -l, --long            Long listing format, similar to ls -l.
  --lh                  Report sizes in human readable format similar to
                        ls -lh.
  -t, --totals          Used with -l or --lh, only report totals.
  -p <prefix>, --prefix <prefix>
                        Only list items beginning with the prefix.
  -d <delim>, --delimiter <delim>
                        Roll up items with the given delimiter. For containers
                        only. See OpenStack Swift API documentation for what
                        this means.
  -j, --json            Display listing information in json
  --versions            Display listing information for all versions
  -H, --header <header:value>
                        Adds a custom request header to use for listing.
'''.strip('\n')


def st_list(parser, args, output_manager, return_parser=False):

    def _print_stats(options, stats, human, totals):
        container = stats.get("container", None)
        for item in stats["listing"]:
            item_name = item.get('name')
            if not options['long'] and not human and not options['versions']:
                output_manager.print_msg(item.get('name', item.get('subdir')))
            else:
                if not container:  # listing containers
                    item_bytes = item.get('bytes')
                    byte_str = prt_bytes(item_bytes, human)
                    count = item.get('count')
                    totals['count'] += count
                    try:
                        meta = item.get('meta')
                        utc = gmtime(float(meta.get('x-timestamp')))
                        datestamp = strftime('%Y-%m-%d %H:%M:%S', utc)
                    except TypeError:
                        datestamp = '????-??-?? ??:??:??'
                    storage_policy = meta.get('x-storage-policy', '???')
                    if not options['totals']:
                        output_manager.print_msg(
                            "%12s %s %s %-15s %s", count, byte_str,
                            datestamp, storage_policy, item_name)
                else:  # list container contents
                    subdir = item.get('subdir')
                    content_type = item.get('content_type')
                    if subdir is None:
                        item_bytes = item.get('bytes')
                        byte_str = prt_bytes(item_bytes, human)
                        date, xtime = item.get('last_modified').split('T')
                        xtime = xtime.split('.')[0]
                    else:
                        item_bytes = 0
                        byte_str = prt_bytes(item_bytes, human)
                        date = xtime = ''
                        item_name = subdir
                    if not options['totals']:
                        if options['versions']:
                            output_manager.print_msg(
                                "%s %10s %8s %16s %24s %s",
                                byte_str, date, xtime,
                                item.get('version_id', 'null'),
                                content_type, item_name)
                        else:
                            output_manager.print_msg(
                                "%s %10s %8s %24s %s",
                                byte_str, date, xtime, content_type, item_name)
                totals['bytes'] += item_bytes

    parser.add_argument(
        '-l', '--long', dest='long', action='store_true', default=False,
        help='Long listing format, similar to ls -l.')
    parser.add_argument(
        '--lh', dest='human', action='store_true',
        default=False, help='Report sizes in human readable format, '
        "similar to ls -lh.")
    parser.add_argument(
        '-t', '--totals', dest='totals',
        help='used with -l or --lh, only report totals.',
        action='store_true', default=False)
    parser.add_argument(
        '-p', '--prefix', dest='prefix',
        help='Only list items beginning with the prefix.')
    parser.add_argument(
        '-d', '--delimiter', dest='delimiter',
        help='Roll up items with the given delimiter. For containers '
             'only. See OpenStack Swift API documentation for '
             'what this means.')
    parser.add_argument('-j', '--json', action='store_true',
                        help='print listing information in json')
    parser.add_argument('--versions', action='store_true',
                        help='display all versions')
    parser.add_argument(
        '-H', '--header', action='append', dest='header',
        default=[],
        help='Adds a custom request header to use for listing.')

    # We return the parser to build up the bash_completion
    if return_parser:
        return parser

    options, args = parse_args(parser, args)
    args = args[1:]
    if options['delimiter'] and not args:
        exit('-d option only allowed for container listings')
    if options['versions'] and not args:
        exit('--versions option only allowed for container listings')

    human = options.pop('human')
    if human:
        options['long'] = True

    if options['totals'] and not options['long']:
        output_manager.error(
            "Listing totals only works with -l or --lh.")
        return

    with SwiftService(options=options) as swift:
        try:
            if not args:
                stats_parts_gen = swift.list()
                container = None
            else:
                container = args[0]
                args = args[1:]
                if "/" in container or args:
                    output_manager.error(
                        'Usage: %s list %s\n%s', BASENAME,
                        st_list_options, st_list_help)
                    return
                else:
                    stats_parts_gen = swift.list(container=container)

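            # JSONableIterable lets json.dump stream the listing without
            # buffering it all in memory; the trailing print_msg('') just
            # terminates the output with a newline.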
            if options.get('json', False):
                def listing(stats_parts_gen=stats_parts_gen):
                    for stats in stats_parts_gen:
                        if stats["success"]:
                            for item in stats['listing']:
                                yield item
                        else:
                            raise stats["error"]

                json.dump(
                    JSONableIterable(listing()), output_manager.print_stream,
                    sort_keys=True, indent=2)
                output_manager.print_msg('')
                return

            totals = {'count': 0, 'bytes': 0}
            for stats in stats_parts_gen:
                if stats["success"]:
                    _print_stats(options, stats, human, totals)
                else:
                    raise stats["error"]

            # report totals
            if options['long'] or human:
                if container is None:
                    output_manager.print_msg(
                        "%12s %s", prt_bytes(totals['count'], True),
                        prt_bytes(totals['bytes'], human))
                else:
                    output_manager.print_msg(
                        prt_bytes(totals['bytes'], human))

        except SwiftError as e:
            output_manager.error(e.value)


st_stat_options = '''[--lh] [--header <header:value>]
                     [--version-id <version_id>]
                     [<container> [<object>]]
'''

st_stat_help = '''
Displays information for the account, container, or object.

Positional arguments:
  [<container>]         Name of container to stat from.
  [<object>]            Name of object to stat.

Optional arguments:
  --lh                  Report sizes in human readable format similar to
                        ls -lh.
  --version-id <version-id>
                        Report stat of specific version of a versioned object.
  -H, --header <header:value>
                        Adds a custom request header to use for stat.
'''.strip('\n')


def st_stat(parser, args, output_manager, return_parser=False):
    parser.add_argument(
        '--lh', dest='human', action='store_true', default=False,
        help='Report sizes in human readable format similar to ls -lh.')
    parser.add_argument(
        '--version-id', action='store', default=None,
        help='Report stat of a specific version of a versioned object')
    parser.add_argument(
        '-H', '--header', action='append', dest='header',
        default=[],
        help='Adds a custom request header to use for stat.')

    # We return the parser to build up the bash_completion
    if return_parser:
        return parser

    options, args = parse_args(parser, args)
    args = args[1:]
    if options['version_id'] and len(args) < 2:
        exit('--version-id option only allowed for object stats')

    with SwiftService(options=options) as swift:
        try:
            if not args:
                stat_result = swift.stat()
                if not stat_result['success']:
                    raise stat_result['error']
                items = stat_result['items']
                headers = stat_result['headers']
                print_account_stats(items, headers, output_manager)
            else:
                container = args[0]
                if '/' in container:
                    output_manager.error(
                        'WARNING: / in container name; you might have '
                        "meant '%s' instead of '%s'." %
                        (container.replace('/', ' ', 1), container))
                    return
                args = args[1:]
                if not args:
                    stat_result = swift.stat(container=container)
                    if not stat_result['success']:
                        raise stat_result['error']
                    items = stat_result['items']
                    headers = stat_result['headers']
                    print_container_stats(items, headers, output_manager)
                else:
                    if len(args) == 1:
                        objects = [args[0]]
                        stat_results = swift.stat(
                            container=container, objects=objects)
                        for stat_result in stat_results:  # only 1 result
                            if stat_result["success"]:
                                items = stat_result['items']
                                headers = stat_result['headers']
                                print_object_stats(
                                    items, headers, output_manager
                                )
                            else:
                                raise stat_result["error"]
                    else:
                        output_manager.error(
                            'Usage: %s stat %s\n%s', BASENAME,
                            st_stat_options, st_stat_help)

        except SwiftError as e:
            output_manager.error(e.value)


st_post_options = '''[--read-acl <acl>] [--write-acl <acl>] [--sync-to <sync-to>]
                     [--sync-key <sync-key>] [--meta <name:value>]
                     [--header <header>]
                     [<container> [<object>]]
'''  # noqa

st_post_help = '''
Updates meta information for the account, container, or object.
If the container is not found, it will be created automatically.

Positional arguments:
  [<container>]         Name of container to post to.
  [<object>]            Name of object to post.

Optional arguments:
  -r, --read-acl <acl>  Read ACL for containers. Quick summary of ACL syntax:
                        .r:*, .r:-.example.com, .r:www.example.com,
                        account1 (v1.0 identity API only),
                        account1:*, account2:user2 (v2.0+ identity API).
  -w, --write-acl <acl> Write ACL for containers. Quick summary of ACL syntax:
                        account1 (v1.0 identity API only),
                        account1:*, account2:user2 (v2.0+ identity API).
  -t, --sync-to <sync-to>
                        Sync To for containers, for multi-cluster replication.
  -k, --sync-key <sync-key>
                        Sync Key for containers, for multi-cluster replication.
  -m, --meta <name:value>
                        Sets a meta data item. This option may be repeated.
                        Example: -m Color:Blue -m Size:Large
  -H, --header <header:value>
                        Adds a customized request header.
                        This option may be repeated. Example
                        -H "content-type:text/plain" -H "Content-Length: 4000"
'''.strip('\n')


def st_post(parser, args, output_manager, return_parser=False):
    parser.add_argument(
        '-r', '--read-acl', dest='read_acl', help='Read ACL for containers. '
        'Quick summary of ACL syntax: .r:*, .r:-.example.com, '
        '.r:www.example.com, account1, account2:user2')
    parser.add_argument(
        '-w', '--write-acl', dest='write_acl', help='Write ACL for '
        'containers. Quick summary of ACL syntax: account1, '
        'account2:user2')
    parser.add_argument(
        '-t', '--sync-to', dest='sync_to', help='Sets the '
        'Sync To for containers, for multi-cluster replication.')
    parser.add_argument(
        '-k', '--sync-key', dest='sync_key', help='Sets the '
        'Sync Key for containers, for multi-cluster replication.')
    parser.add_argument(
        '-m', '--meta', action='append', dest='meta', default=[],
        help='Sets a meta data item. This option may be repeated. '
        'Example: -m Color:Blue -m Size:Large')
    parser.add_argument(
        '-H', '--header', action='append', dest='header',
        default=[], help='Adds a customized request header. '
        'This option may be repeated. '
        'Example: -H "content-type:text/plain" '
        '-H "Content-Length: 4000"')

    # We return the parser to build up the bash_completion
    if return_parser:
        return parser

    (options, args) = parse_args(parser, args)
    args = args[1:]
    if (options['read_acl'] or options['write_acl'] or options['sync_to'] or
            options['sync_key']) and not args:
        exit('-r, -w, -t, and -k options only allowed for containers')

    with SwiftService(options=options) as swift:
        try:
            if not args:
                result = swift.post()
            else:
                container = args[0]
                if '/' in container:
                    output_manager.error(
                        'WARNING: / in container name; you might have '
                        "meant '%s' instead of '%s'." %
                        (args[0].replace('/', ' ', 1), args[0]))
                    return
                args = args[1:]
                if args:
                    if len(args) == 1:
                        objects = [args[0]]
                        results_iterator = swift.post(
                            container=container, objects=objects
                        )
                        result = next(results_iterator)
                    else:
                        output_manager.error(
                            'Usage: %s post %s\n%s', BASENAME,
                            st_post_options, st_post_help)
                        return
                else:
                    result = swift.post(container=container)
            if not result["success"]:
                raise result["error"]

        except SwiftError as e:
            output_manager.error(e.value)


st_copy_options = '''[--destination </container/object>] [--fresh-metadata]
                     [--meta <name:value>] [--header <header>] <container>
                     <object> [<object>] [...]
'''

st_copy_help = '''
Copies an object to a new destination, optionally updating its metadata.
If the destination is not set, the object's metadata will be updated.

Positional arguments:
  <container>           Name of container to copy from.
  <object>              Name of object to copy. Specify multiple times
                        for multiple objects.

Optional arguments:
  -d, --destination </container[/object]>
                        The container and name of the destination object. Name
                        of destination object can be omitted, then will be
                        same as name of source object. Supplying multiple
                        objects and destination with object name is invalid.
  -M, --fresh-metadata  Copy the object without any existing metadata.
                        If not set, metadata will be preserved or appended
  -m, --meta <name:value>
                        Sets a meta data item. This option may be repeated.
                        Example: -m Color:Blue -m Size:Large
  -H, --header <header:value>
                        Adds a customized request header.
                        This option may be repeated. Example
                        -H "content-type:text/plain" -H "Content-Length: 4000"
'''.strip('\n')


def st_copy(parser, args, output_manager, return_parser=False):
    parser.add_argument(
        '-d', '--destination', help='The container and name of the '
        'destination object')
    parser.add_argument(
        '-M', '--fresh-metadata', action='store_true',
        help='Copy the object without any existing metadata', default=False)
    parser.add_argument(
        '-m', '--meta', action='append', dest='meta', default=[],
        help='Sets a meta data item. This option may be repeated. '
        'Example: -m Color:Blue -m Size:Large')
    parser.add_argument(
        '-H', '--header', action='append', dest='header',
        default=[], help='Adds a customized request header. '
        'This option may be repeated. '
        'Example: -H "content-type:text/plain" '
        '-H "Content-Length: 4000"')

    # We return the parser to build up the bash_completion
    if return_parser:
        return parser

    (options, args) = parse_args(parser, args)
    args = args[1:]

    with SwiftService(options=options) as swift:
        try:
            if len(args) >= 2:
                container = args[0]
                if '/' in container:
                    output_manager.error(
                        'WARNING: / in container name; you might have '
                        "meant '%s' instead of '%s'." %
                        (args[0].replace('/', ' ', 1), args[0]))
                    return
                objects = [arg for arg in args[1:]]

                for r in swift.copy(
                        container=container, objects=objects,
                        options=options):
                    if r['success']:
                        if options['verbose']:
                            if r['action'] == 'copy_object':
                                output_manager.print_msg(
                                    '%s/%s copied to %s' % (
                                        r['container'],
                                        r['object'],
                                        r['destination'] or '<self>'))
                            if r['action'] == 'create_container':
                                output_manager.print_msg(
                                    'created container %s' % r['container']
                                )
                    else:
                        error = r['error']
                        if 'action' in r and r['action'] == 'create_container':
                            # it is not an error to be unable to create the
                            # container so print a warning and carry on
                            output_manager.warning(
                                'Warning: failed to create container '
                                "'%s': %s", container, error
                            )
                        else:
                            output_manager.error("%s" % error)
            else:
                output_manager.error(
                    'Usage: %s copy %s\n%s', BASENAME,
                    st_copy_options, st_copy_help)
                return

        except SwiftError as e:
            output_manager.error(e.value)


st_upload_options = '''[--changed] [--skip-identical] [--segment-size <size>]
                       [--segment-container <container>] [--leave-segments]
                       [--object-threads <thread>] [--segment-threads <threads>]
                       [--meta <name:value>] [--header <header>] [--use-slo]
                       [--use-dlo] [--ignore-checksum] [--skip-container-put]
                       [--object-name <object-name>]
                       <container> <file_or_directory> [<file_or_directory>] [...]
'''

st_upload_help = '''
Uploads specified files and directories to the given container.

Positional arguments:
  <container>           Name of container to upload to.
  <file_or_directory>   Name of file or directory to upload. Specify multiple
                        times for multiple uploads. If "-" is specified, reads
                        content from standard input (--object-name is required
                        in this case).

Optional arguments:
  -c, --changed         Only upload files that have changed since the last
                        upload.
  --skip-identical      Skip uploading files that are identical on both sides.
  -S, --segment-size <size>
                        Upload files in segments no larger than <size> (in
                        Bytes) and then create a "manifest" file that will
                        download all the segments as if it were the original
                        file.
  --segment-container <container>
                        Upload the segments into the specified container. If
                        not specified, the segments will be uploaded to a
                        <container>_segments container to not pollute the
                        main <container> listings.
  --leave-segments      Indicates that you want the older segments of manifest
                        objects left alone (in the case of overwrites).
  --object-threads <threads>
                        Number of threads to use for uploading full objects.
                        Default is 10.
  --segment-threads <threads>
                        Number of threads to use for uploading object segments.
                        Default is 10.
  -m, --meta <name:value>
                        Sets a meta data item. This option may be repeated.
                        Example: -m Color:Blue -m Size:Large
  -H, --header <header:value>
                        Adds a customized request header. This option may be
                        repeated. Example: -H "content-type:text/plain"
                        -H "Content-Length: 4000".
  --use-slo             When used in conjunction with --segment-size it will
                        create a Static Large Object. Deprecated; this is now
                        the default behavior when the cluster supports it.
  --use-dlo             When used in conjunction with --segment-size it will
                        create a Dynamic Large Object. May be useful with old
                        swift clusters.
  --ignore-checksum     Turn off checksum validation for uploads.
  --skip-container-put  Assume all necessary containers already exist; don't
                        automatically try to create them.
  --object-name <object-name>
                        Upload file and name object to <object-name> or upload
                        dir and use <object-name> as object prefix instead of
                        folder name.
'''.strip('\n')


def st_upload(parser, args, output_manager, return_parser=False):
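    # 10 MiB; presumably used later in this function as the default segment
    # size when uploading from standard input.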
DEFAULT_STDIN_SEGMENT = 10 * 1024 * 1024
|
|
|
|
|
2016-03-02 16:02:28 +00:00
|
|
|
parser.add_argument(
|
2013-04-28 19:16:38 -07:00
|
|
|
'-c', '--changed', action='store_true', dest='changed',
|
2014-02-26 14:27:03 -05:00
|
|
|
default=False, help='Only upload files that have changed since '
|
|
|
|
'the last upload.')
|
2016-03-02 16:02:28 +00:00
|
|
|
parser.add_argument(
|
2014-02-11 09:13:26 +00:00
|
|
|
'--skip-identical', action='store_true', dest='skip_identical',
|
|
|
|
default=False, help='Skip uploading files that are identical on '
|
2014-02-26 15:52:07 -05:00
|
|
|
'both sides.')
|
2022-01-11 16:05:39 -08:00
|
|
|
parser.add_argument(
|
|
|
|
'--skip-container-put', action='store_true', dest='skip_container_put',
|
|
|
|
default=False, help='Assume all necessary containers already exist; '
|
|
|
|
"don't automatically try to create them.")
|
2016-03-02 16:02:28 +00:00
|
|
|
parser.add_argument(
|
2014-02-26 14:27:03 -05:00
|
|
|
'-S', '--segment-size', dest='segment_size', help='Upload files '
|
2014-04-04 13:04:54 +00:00
|
|
|
'in segments no larger than <size> (in Bytes) and then create a '
|
|
|
|
'"manifest" file that will download all the segments as if it were '
|
2014-10-06 15:24:19 +01:00
|
|
|
'the original file. Sizes may also be expressed as bytes with the '
|
|
|
|
'B suffix, kilobytes with the K suffix, megabytes with the M suffix '
|
|
|
|
'or gigabytes with the G suffix.')
|
2016-03-02 16:02:28 +00:00
|
|
|
parser.add_argument(
|
2013-04-28 19:16:38 -07:00
|
|
|
'-C', '--segment-container', dest='segment_container',
|
2014-02-26 14:27:03 -05:00
|
|
|
help='Upload the segments into the specified container. '
|
|
|
|
'If not specified, the segments will be uploaded to a '
|
2014-05-02 18:39:14 +02:00
|
|
|
'<container>_segments container to not pollute the main '
|
2012-05-08 11:17:04 +01:00
|
|
|
'<container> listings.')
|
2016-03-02 16:02:28 +00:00
|
|
|
parser.add_argument(
|
|
|
|
'--leave-segments', action='store_true',
|
2012-05-08 11:17:04 +01:00
|
|
|
dest='leave_segments', default=False, help='Indicates that you want '
|
|
|
|
'the older segments of manifest objects left alone (in the case of '
|
2014-02-26 15:52:07 -05:00
|
|
|
'overwrites).')
|
2016-03-02 16:02:28 +00:00
|
|
|
parser.add_argument(
|
|
|
|
'--object-threads', type=int, default=10,
|
2014-02-26 14:27:03 -05:00
|
|
|
help='Number of threads to use for uploading full objects. '
|
2016-02-24 19:38:30 +08:00
|
|
|
'Its value must be a positive integer. Default is 10.')
|
2016-03-02 16:02:28 +00:00
|
|
|
parser.add_argument(
|
|
|
|
'--segment-threads', type=int, default=10,
|
2014-02-26 14:27:03 -05:00
|
|
|
help='Number of threads to use for uploading object segments. '
|
2016-02-24 19:38:30 +08:00
|
|
|
'Its value must be a positive integer. Default is 10.')
|
2017-07-06 12:43:11 -07:00
|
|
|
parser.add_argument(
|
|
|
|
'-m', '--meta', action='append', dest='meta', default=[],
|
|
|
|
help='Sets a meta data item. This option may be repeated. '
|
|
|
|
'Example: -m Color:Blue -m Size:Large')
|
2016-03-02 16:02:28 +00:00
|
|
|
parser.add_argument(
|
2013-04-28 19:16:38 -07:00
|
|
|
'-H', '--header', action='append', dest='header',
|
2013-03-18 09:47:59 -04:00
|
|
|
default=[], help='Set request headers with the syntax header:value. '
|
2016-02-04 10:25:15 -08:00
|
|
|
' This option may be repeated. Example: -H "content-type:text/plain" '
|
2013-03-18 09:47:59 -04:00
|
|
|
'-H "Content-Length: 4000"')
|
2016-03-02 16:02:28 +00:00
|
|
|
parser.add_argument(
|
2022-11-14 13:17:47 -08:00
|
|
|
'--use-slo', action='store_true', default=None,
|
2014-03-08 13:56:17 +01:00
|
|
|
help='When used in conjunction with --segment-size, it will '
|
2022-11-14 13:17:47 -08:00
|
|
|
'create a Static Large Object.')
|
|
|
|
parser.add_argument(
|
|
|
|
'--use-dlo', action='store_false', dest="use_slo", default=None,
|
|
|
|
help='When used in conjunction with --segment-size, it will '
|
|
|
|
'create a Dynamic Large Object.')
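# Editor's note (illustrative): use_slo is effectively tri-state -- it stays
# None unless --use-slo (True) or --use-dlo (False) is passed; the stdin
# handling further down forces it to True when it is still None.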
|
2016-03-02 16:02:28 +00:00
|
|
|
parser.add_argument(
|
|
|
|
'--object-name', dest='object_name',
|
2014-02-26 14:27:03 -05:00
|
|
|
help='Upload file and name object to <object-name> or upload dir and '
|
|
|
|
'use <object-name> as object prefix instead of folder name.')
|
2016-03-02 16:02:28 +00:00
|
|
|
parser.add_argument(
|
|
|
|
'--ignore-checksum', dest='checksum', default=True,
|
2015-03-04 14:01:55 +00:00
|
|
|
action='store_false', help='Turn off checksum validation for uploads.')
|
2018-06-29 11:07:00 +10:00
|
|
|
|
|
|
|
# We return the parser to build up the bash_completion
|
|
|
|
if return_parser:
|
|
|
|
return parser
|
|
|
|
|
2016-03-02 16:02:28 +00:00
|
|
|
options, args = parse_args(parser, args)
|
2012-05-08 11:17:04 +01:00
|
|
|
args = args[1:]
|
|
|
|
if len(args) < 2:
|
2014-04-04 21:13:01 +02:00
|
|
|
output_manager.error(
|
2014-02-26 09:52:38 +00:00
|
|
|
'Usage: %s upload %s\n%s', BASENAME, st_upload_options,
|
2013-08-17 22:43:09 +10:00
|
|
|
st_upload_help)
|
2012-05-08 11:17:04 +01:00
|
|
|
return
|
2014-04-04 21:13:01 +02:00
|
|
|
else:
|
|
|
|
container = args[0]
|
|
|
|
files = args[1:]
|
2017-06-15 20:53:04 -07:00
|
|
|
from_stdin = '-' in files
|
|
|
|
if from_stdin and len(files) > 1:
|
|
|
|
output_manager.error(
|
|
|
|
'upload from stdin cannot be used along with other files')
|
|
|
|
return
|
2012-05-08 11:17:04 +01:00
|
|
|
|
2016-05-03 11:34:02 -07:00
|
|
|
if options['object_name'] is not None:
|
2014-04-04 21:13:01 +02:00
|
|
|
if len(files) > 1:
|
|
|
|
output_manager.error('object-name can only be used with 1 file or dir')
|
|
|
|
return
|
2012-05-08 11:17:04 +01:00
|
|
|
else:
|
2014-04-04 21:13:01 +02:00
|
|
|
orig_path = files[0]
|
2017-06-15 20:53:04 -07:00
|
|
|
elif from_stdin:
|
|
|
|
output_manager.error(
|
|
|
|
'object-name must be specified with uploads from stdin')
|
|
|
|
return
|
2014-04-04 21:13:01 +02:00
|
|
|
|
2016-05-03 11:34:02 -07:00
|
|
|
if options['segment_size']:
|
2014-10-06 15:24:19 +01:00
|
|
|
try:
|
|
|
|
# If segment size only has digits assume it is bytes
|
2016-05-03 11:34:02 -07:00
|
|
|
int(options['segment_size'])
|
2014-10-06 15:24:19 +01:00
|
|
|
except ValueError:
|
|
|
|
try:
|
2016-05-03 11:34:02 -07:00
|
|
|
size_mod = "BKMG".index(options['segment_size'][-1].upper())
|
|
|
|
multiplier = int(options['segment_size'][:-1])
|
2014-10-06 15:24:19 +01:00
|
|
|
except ValueError:
|
|
|
|
output_manager.error("Invalid segment size")
|
|
|
|
return
|
|
|
|
|
2016-05-03 11:34:02 -07:00
|
|
|
options['segment_size'] = str((1024 ** size_mod) * multiplier)
|
|
|
|
if int(options['segment_size']) <= 0:
|
2015-06-23 10:48:50 +09:00
|
|
|
output_manager.error("segment-size should be positive")
|
|
|
|
return
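# Worked example of the conversion above (editor's sketch): "-S 100M" gives
# "BKMG".index('M') == 2, so segment_size becomes str((1024 ** 2) * 100),
# i.e. '104857600' bytes; a bare "-S 1048576" is already all digits and is
# taken as bytes directly.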
|
2014-10-06 15:24:19 +01:00
|
|
|
|
2016-05-03 11:34:02 -07:00
|
|
|
if options['object_threads'] <= 0:
|
2016-02-24 19:38:30 +08:00
|
|
|
output_manager.error(
|
|
|
|
'ERROR: option --object-threads should be a positive integer.'
|
|
|
|
'\n\nUsage: %s upload %s\n%s', BASENAME, st_upload_options,
|
|
|
|
st_upload_help)
|
|
|
|
return
|
|
|
|
|
2016-05-03 11:34:02 -07:00
|
|
|
if options['segment_threads'] <= 0:
|
2016-02-24 19:38:30 +08:00
|
|
|
output_manager.error(
|
|
|
|
'ERROR: option --segment-threads should be a positive integer.'
|
|
|
|
'\n\nUsage: %s upload %s\n%s', BASENAME, st_upload_options,
|
|
|
|
st_upload_help)
|
|
|
|
return
|
|
|
|
|
2017-06-28 12:02:21 -07:00
|
|
|
if from_stdin:
|
2023-03-23 09:03:05 -07:00
|
|
|
if options['use_slo'] is None:
|
2017-06-28 12:02:21 -07:00
|
|
|
options['use_slo'] = True
|
|
|
|
if not options['segment_size']:
|
|
|
|
options['segment_size'] = DEFAULT_STDIN_SEGMENT
|
|
|
|
|
2016-05-03 11:34:02 -07:00
|
|
|
options['object_uu_threads'] = options['object_threads']
|
|
|
|
with SwiftService(options=options) as swift:
|
2012-05-08 11:17:04 +01:00
|
|
|
try:
|
2014-04-04 21:13:01 +02:00
|
|
|
objs = []
|
|
|
|
dir_markers = []
|
|
|
|
for f in files:
|
2017-06-15 20:53:04 -07:00
|
|
|
if f == '-':
|
|
|
|
fd = io.open(stdin.fileno(), mode='rb')
|
|
|
|
objs.append(SwiftUploadObject(
|
|
|
|
fd, object_name=options['object_name']))
|
|
|
|
# We ensure that there is exactly one "file" to upload in
|
|
|
|
# this case -- stdin
|
|
|
|
break
|
|
|
|
|
2014-04-04 21:13:01 +02:00
|
|
|
if isfile(f):
|
|
|
|
objs.append(f)
|
|
|
|
elif isdir(f):
|
|
|
|
for (_dir, _ds, _fs) in walk(f):
|
|
|
|
if not (_ds + _fs):
|
|
|
|
dir_markers.append(_dir)
|
2014-02-11 09:13:26 +00:00
|
|
|
else:
|
2014-04-04 21:13:01 +02:00
|
|
|
objs.extend([join(_dir, _f) for _f in _fs])
|
2012-05-08 11:17:04 +01:00
|
|
|
else:
|
2014-04-04 21:13:01 +02:00
|
|
|
output_manager.error("Local file '%s' not found" % f)
|
|
|
|
|
|
|
|
# Now that we've collected all the required files and dir markers
|
|
|
|
# build the tuples for the call to upload
|
2017-06-15 20:53:04 -07:00
|
|
|
if options['object_name'] is not None and not from_stdin:
|
2014-04-04 21:13:01 +02:00
|
|
|
objs = [
|
|
|
|
SwiftUploadObject(
|
|
|
|
o, object_name=o.replace(
|
2016-05-03 11:34:02 -07:00
|
|
|
orig_path, options['object_name'], 1
|
2014-04-04 21:13:01 +02:00
|
|
|
)
|
|
|
|
) for o in objs
|
|
|
|
]
|
|
|
|
dir_markers = [
|
|
|
|
SwiftUploadObject(
|
|
|
|
None, object_name=d.replace(
|
2016-05-03 11:34:02 -07:00
|
|
|
orig_path, options['object_name'], 1
|
2014-04-04 21:13:01 +02:00
|
|
|
), options={'dir_marker': True}
|
|
|
|
) for d in dir_markers
|
|
|
|
]
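# Editor's note (illustrative, hypothetical names): with "--object-name img"
# and a directory argument "photos", a collected file "photos/a.jpg" is
# renamed to "img/a.jpg" by the single prefix replacement above; directory
# markers get the same renaming with a None source.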
|
|
|
|
|
|
|
|
for r in swift.upload(container, objs + dir_markers):
|
|
|
|
if r['success']:
|
2016-05-03 11:34:02 -07:00
|
|
|
if options['verbose']:
|
2014-04-04 21:13:01 +02:00
|
|
|
if 'attempts' in r and r['attempts'] > 1:
|
|
|
|
if 'object' in r:
|
|
|
|
output_manager.print_msg(
|
|
|
|
'%s [after %d attempts]' %
|
|
|
|
(r['object'],
|
|
|
|
r['attempts'])
|
|
|
|
)
|
|
|
|
else:
|
|
|
|
if 'object' in r:
|
|
|
|
output_manager.print_msg(r['object'])
|
|
|
|
elif 'for_object' in r:
|
|
|
|
output_manager.print_msg(
|
|
|
|
'%s segment %s' % (r['for_object'],
|
|
|
|
r['segment_index'])
|
|
|
|
)
|
2012-05-08 11:17:04 +01:00
|
|
|
else:
|
2014-04-04 21:13:01 +02:00
|
|
|
error = r['error']
|
2014-09-29 18:26:33 +01:00
|
|
|
if 'action' in r and r['action'] == "create_container":
|
|
|
|
# it is not an error to be unable to create the
|
|
|
|
# container so print a warning and carry on
|
|
|
|
if isinstance(error, ClientException):
|
|
|
|
if (r['headers'] and
|
|
|
|
'X-Storage-Policy' in r['headers']):
|
|
|
|
msg = ' with Storage Policy %s' % \
|
|
|
|
r['headers']['X-Storage-Policy'].strip()
|
2014-04-04 21:13:01 +02:00
|
|
|
else:
|
|
|
|
msg = ' '.join(str(x) for x in (
|
|
|
|
error.http_status, error.http_reason)
|
|
|
|
)
|
|
|
|
if error.http_response_content:
|
|
|
|
if msg:
|
|
|
|
msg += ': '
|
2015-08-17 17:06:44 +08:00
|
|
|
msg += (error.http_response_content
|
|
|
|
.decode('utf8')[:60])
|
2014-09-29 18:26:33 +01:00
|
|
|
msg = ': %s' % msg
|
2014-04-04 21:13:01 +02:00
|
|
|
else:
|
2014-09-29 18:26:33 +01:00
|
|
|
msg = ': %s' % error
|
|
|
|
output_manager.warning(
|
|
|
|
'Warning: failed to create container '
|
2016-01-18 14:54:10 -08:00
|
|
|
"'%s'%s", r['container'], msg
|
2014-09-29 18:26:33 +01:00
|
|
|
)
|
2014-04-04 21:13:01 +02:00
|
|
|
else:
|
2014-09-29 18:26:33 +01:00
|
|
|
output_manager.error("%s" % error)
|
2015-03-05 15:28:42 -08:00
|
|
|
too_large = (isinstance(error, ClientException) and
|
|
|
|
error.http_status == 413)
|
2016-05-03 11:34:02 -07:00
|
|
|
if too_large and options['verbose'] > 0:
|
2015-03-05 15:28:42 -08:00
|
|
|
output_manager.error(
|
|
|
|
"Consider using the --segment-size option "
|
|
|
|
"to chunk the object")
|
2013-12-13 10:57:23 +08:00
|
|
|
|
2014-04-04 21:13:01 +02:00
|
|
|
except SwiftError as e:
|
2015-03-04 14:01:55 +00:00
|
|
|
output_manager.error(e.value)
|
2012-05-08 11:17:04 +01:00
|
|
|
|
|
|
|
|
2016-10-23 19:59:11 +00:00
|
|
|
st_capabilities_options = '''[--json] [<proxy_url>]
|
|
|
|
'''
|
2014-03-06 10:51:59 -08:00
|
|
|
st_info_options = st_capabilities_options
|
2014-01-13 22:39:28 +01:00
|
|
|
st_capabilities_help = '''
|
2014-05-01 08:37:39 +02:00
|
|
|
Retrieve the capabilities of the proxy.
|
2014-01-13 22:39:28 +01:00
|
|
|
|
|
|
|
Optional positional arguments:
|
2014-05-01 08:37:39 +02:00
|
|
|
<proxy_url> Proxy URL of the cluster to retrieve capabilities.
|
2016-02-04 10:25:15 -08:00
|
|
|
|
|
|
|
Optional arguments:
|
|
|
|
--json Print the cluster capabilities in JSON format.
|
2014-07-10 11:29:42 +01:00
|
|
|
'''.strip('\n')
|
2014-03-06 10:51:59 -08:00
|
|
|
st_info_help = st_capabilities_help
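# Editor's note (illustrative usage; the host below is a placeholder):
#   swift capabilities                      # query the authenticated cluster
#   swift capabilities http://saio:8080 --json
# The second form queries the given proxy URL and prints the raw JSON
# capabilities document.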
|
2014-01-13 22:39:28 +01:00
|
|
|
|
|
|
|
|
2018-06-29 11:07:00 +10:00
|
|
|
def st_capabilities(parser, args, output_manager, return_parser=False):
|
2014-01-13 22:39:28 +01:00
|
|
|
def _print_compo_cap(name, capabilities):
|
|
|
|
for feature, options in sorted(capabilities.items(),
|
|
|
|
key=lambda x: x[0]):
|
2014-04-04 21:13:01 +02:00
|
|
|
output_manager.print_msg("%s: %s" % (name, feature))
|
2014-01-13 22:39:28 +01:00
|
|
|
if options:
|
2014-04-04 21:13:01 +02:00
|
|
|
output_manager.print_msg(" Options:")
|
2014-01-13 22:39:28 +01:00
|
|
|
for key, value in sorted(options.items(),
|
|
|
|
key=lambda x: x[0]):
|
2014-04-04 21:13:01 +02:00
|
|
|
output_manager.print_msg(" %s: %s" % (key, value))
|
|
|
|
|
2016-02-04 10:25:15 -08:00
|
|
|
parser.add_argument('--json', action='store_true',
|
|
|
|
help='print capability information in json')
|
2018-06-29 11:07:00 +10:00
|
|
|
|
|
|
|
# We return the parser to build up the bash_completion
|
|
|
|
if return_parser:
|
|
|
|
return parser
|
|
|
|
|
2014-01-13 22:39:28 +01:00
|
|
|
(options, args) = parse_args(parser, args)
|
2014-04-04 21:13:01 +02:00
|
|
|
if args and len(args) > 2:
|
|
|
|
output_manager.error('Usage: %s capabilities %s\n%s',
|
2014-02-26 09:52:38 +00:00
|
|
|
BASENAME,
|
2014-01-13 22:39:28 +01:00
|
|
|
st_capabilities_options, st_capabilities_help)
|
|
|
|
return
|
2014-04-04 21:13:01 +02:00
|
|
|
|
2016-05-03 11:34:02 -07:00
|
|
|
with SwiftService(options=options) as swift:
|
2014-04-04 21:13:01 +02:00
|
|
|
try:
|
|
|
|
if len(args) == 2:
|
|
|
|
url = args[1]
|
|
|
|
capabilities_result = swift.capabilities(url)
|
|
|
|
capabilities = capabilities_result['capabilities']
|
|
|
|
else:
|
|
|
|
capabilities_result = swift.capabilities()
|
|
|
|
capabilities = capabilities_result['capabilities']
|
|
|
|
|
2016-02-04 10:25:15 -08:00
|
|
|
if options['json']:
|
|
|
|
output_manager.print_msg(
|
|
|
|
json.dumps(capabilities, sort_keys=True, indent=2))
|
|
|
|
else:
|
|
|
|
capabilities = dict(capabilities)
|
|
|
|
_print_compo_cap('Core', {'swift': capabilities['swift']})
|
|
|
|
del capabilities['swift']
|
|
|
|
_print_compo_cap('Additional middleware', capabilities)
|
2014-04-04 21:13:01 +02:00
|
|
|
except SwiftError as e:
|
|
|
|
output_manager.error(e.value)
|
|
|
|
|
2014-01-13 22:39:28 +01:00
|
|
|
|
2014-03-06 10:51:59 -08:00
|
|
|
st_info = st_capabilities
|
|
|
|
|
2014-04-08 21:14:13 -07:00
|
|
|
st_auth_help = '''
|
|
|
|
Display auth-related environment variables in a shell-friendly format.
|
|
|
|
|
|
|
|
Commands to run to export storage url and auth token into
|
|
|
|
OS_STORAGE_URL and OS_AUTH_TOKEN:
|
|
|
|
|
|
|
|
swift auth
|
|
|
|
|
|
|
|
Commands to append to a runcom file (e.g. ~/.bashrc, /etc/profile) for
|
|
|
|
automatic authentication:
|
|
|
|
|
|
|
|
swift auth -v -U test:tester -K testing \
|
|
|
|
-A http://localhost:8080/auth/v1.0
|
|
|
|
|
|
|
|
'''.strip('\n')
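# Editor's note (illustrative): the printed export statements are intended to
# be evaluated by the calling shell, e.g.
#   eval "$(swift auth -A http://localhost:8080/auth/v1.0 -U test:tester -K testing)"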
|
|
|
|
|
|
|
|
|
2018-06-29 11:07:00 +10:00
|
|
|
def st_auth(parser, args, thread_manager, return_parser=False):
|
|
|
|
|
|
|
|
# We return the parser to build up the bash_completion
|
|
|
|
if return_parser:
|
|
|
|
return parser
|
|
|
|
|
2014-04-08 21:14:13 -07:00
|
|
|
(options, args) = parse_args(parser, args)
|
2016-05-03 11:34:02 -07:00
|
|
|
if options['verbose'] > 1:
|
|
|
|
if options['auth_version'] in ('1', '1.0'):
|
|
|
|
print('export ST_AUTH=%s' % sh_quote(options['auth']))
|
|
|
|
print('export ST_USER=%s' % sh_quote(options['user']))
|
|
|
|
print('export ST_KEY=%s' % sh_quote(options['key']))
|
2014-04-08 21:14:13 -07:00
|
|
|
else:
|
|
|
|
print('export OS_IDENTITY_API_VERSION=%s' % sh_quote(
|
2016-05-03 11:34:02 -07:00
|
|
|
options['auth_version']))
|
|
|
|
print('export OS_AUTH_VERSION=%s' % sh_quote(
|
|
|
|
options['auth_version']))
|
|
|
|
print('export OS_AUTH_URL=%s' % sh_quote(options['auth']))
|
|
|
|
for k, v in sorted(options.items()):
|
2014-04-08 21:14:13 -07:00
|
|
|
if v and k.startswith('os_') and \
|
|
|
|
k not in ('os_auth_url', 'os_options'):
|
|
|
|
print('export %s=%s' % (k.upper(), sh_quote(v)))
|
|
|
|
else:
|
2016-05-03 11:34:02 -07:00
|
|
|
conn = get_conn(options)
|
2014-04-08 21:14:13 -07:00
|
|
|
url, token = conn.get_auth()
|
|
|
|
print('export OS_STORAGE_URL=%s' % sh_quote(url))
|
|
|
|
print('export OS_AUTH_TOKEN=%s' % sh_quote(token))
|
|
|
|
|
2014-01-13 22:39:28 +01:00
|
|
|
|
2017-01-20 18:04:31 +01:00
|
|
|
st_tempurl_options = '''[--absolute] [--prefix-based] [--iso8601]
|
|
|
|
<method> <time> <path> <key>'''
|
2014-06-25 13:28:42 -07:00
|
|
|
|
2014-04-04 21:13:01 +02:00
|
|
|
|
2014-06-25 13:28:42 -07:00
|
|
|
st_tempurl_help = '''
|
|
|
|
Generates a temporary URL for a Swift object.
|
|
|
|
|
2015-06-08 20:20:21 +02:00
|
|
|
Positional arguments:
|
2015-06-30 15:05:40 +08:00
|
|
|
<method> An HTTP method to allow for this temporary URL.
|
2014-06-25 13:28:42 -07:00
|
|
|
Usually 'GET' or 'PUT'.
|
2017-01-20 18:04:31 +01:00
|
|
|
<time> The amount of time the temporary URL will be
|
2020-10-15 14:05:28 -07:00
|
|
|
valid. The time can be specified in three ways:
|
|
|
|
an integer representing the time in seconds;
|
|
|
|
a number with an 's', 'm', 'h', or 'd' suffix to specify
|
|
|
|
the time in seconds, minutes, hours, or days; or
|
|
|
|
an ISO 8601 timestamp in a specific format.
|
|
|
|
If --absolute is passed and time is an integer, the
|
|
|
|
seconds are interpreted as the Unix timestamp when the
|
|
|
|
temporary URL will expire.
|
|
|
|
The ISO 8601 timestamp can be specified in one of
|
|
|
|
the following formats:
|
2017-01-20 18:04:31 +01:00
|
|
|
|
|
|
|
i) Complete date: YYYY-MM-DD (eg 1997-07-16)
|
|
|
|
|
|
|
|
ii) Complete date plus hours, minutes and seconds:
|
|
|
|
|
|
|
|
YYYY-MM-DDThh:mm:ss
|
|
|
|
|
|
|
|
(eg 1997-07-16T19:20:30)
|
|
|
|
|
|
|
|
iii) Complete date plus hours, minutes and seconds with
|
|
|
|
UTC designator:
|
|
|
|
|
|
|
|
YYYY-MM-DDThh:mm:ssZ
|
|
|
|
|
|
|
|
(eg 1997-07-16T19:20:30Z)
|
|
|
|
|
|
|
|
Please be aware that if you don't provide the UTC
|
|
|
|
designator (i.e., Z), the timestamp is generated using
|
|
|
|
your local timezone. If only a date is specified,
|
|
|
|
the time part used will be 00:00:00.
|
2016-08-18 09:44:03 +00:00
|
|
|
<path> The full path or storage URL to the Swift object.
|
|
|
|
Example: /v1/AUTH_account/c/o
|
|
|
|
or: http://saio:8080/v1/AUTH_account/c/o
|
2015-06-30 15:05:40 +08:00
|
|
|
<key> The secret temporary URL key set on the Swift cluster.
|
2014-06-25 13:28:42 -07:00
|
|
|
To set a key, run \'swift post -m
|
|
|
|
"Temp-URL-Key:b3968d0207b54ece87cccc06515a89d4"\'
|
2015-09-04 14:57:30 -07:00
|
|
|
|
|
|
|
Optional arguments:
|
2017-01-20 18:04:31 +01:00
|
|
|
--absolute Interpret the <time> positional argument as a Unix
|
2015-09-04 14:57:30 -07:00
|
|
|
timestamp rather than a number of seconds in the
|
2017-01-20 18:04:31 +01:00
|
|
|
future. If an ISO 8601 timestamp is passed for <time>,
|
|
|
|
this argument is ignored.
|
|
|
|
--prefix-based If present, a prefix-based temporary URL will be
|
|
|
|
generated.
|
|
|
|
--iso8601 If present, the generated temporary URL will contain an
|
|
|
|
ISO 8601 UTC timestamp instead of a Unix timestamp.
|
2018-07-10 14:45:32 +01:00
|
|
|
--ip-range If present, the temporary URL will be restricted to the
|
|
|
|
given ip or ip range.
|
2022-06-08 09:30:17 -07:00
|
|
|
--digest The digest algorithm to use. Defaults to sha256, but
|
|
|
|
older clusters may only support sha1.
|
2014-06-25 13:28:42 -07:00
|
|
|
'''.strip('\n')
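# Editor's note (illustrative usage; account, container, object and key are
# placeholders):
#   swift tempurl GET 3600 /v1/AUTH_account/c/o my_secret_key
# prints the path with temp_url_sig and temp_url_expires query parameters
# appended; per the help above, "3600" could also be written as "1h".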
|
|
|
|
|
|
|
|
|
2018-06-29 11:07:00 +10:00
|
|
|
def st_tempurl(parser, args, thread_manager, return_parser=False):
|
2016-03-02 16:02:28 +00:00
|
|
|
parser.add_argument(
|
2015-09-04 14:57:30 -07:00
|
|
|
'--absolute', action='store_true',
|
|
|
|
dest='absolute_expiry', default=False,
|
2017-01-20 18:04:31 +01:00
|
|
|
help=("If present, and time argument is an integer, "
|
|
|
|
"time argument will be interpreted as a Unix "
|
|
|
|
"timestamp representing when the temporary URL should expire, "
|
|
|
|
"rather than an offset from the current time."),
|
2015-09-04 14:57:30 -07:00
|
|
|
)
|
2016-12-08 13:42:35 +01:00
|
|
|
parser.add_argument(
|
|
|
|
'--prefix-based', action='store_true',
|
|
|
|
default=False,
|
2017-01-20 18:04:31 +01:00
|
|
|
help=("If present, a prefix-based temporary URL will be generated."),
|
|
|
|
)
|
|
|
|
parser.add_argument(
|
|
|
|
'--iso8601', action='store_true',
|
|
|
|
default=False,
|
|
|
|
help=("If present, the temporary URL will contain an ISO 8601 UTC "
|
|
|
|
"timestamp instead of a Unix timestamp."),
|
2016-12-08 13:42:35 +01:00
|
|
|
)
|
2018-07-10 14:45:32 +01:00
|
|
|
parser.add_argument(
|
|
|
|
'--ip-range', action='store',
|
|
|
|
default=None,
|
|
|
|
help=("If present, the temporary URL will be restricted to the "
|
|
|
|
"given ip or ip range."),
|
|
|
|
)
|
2022-06-08 09:30:17 -07:00
|
|
|
parser.add_argument(
|
|
|
|
'--digest', choices=('sha1', 'sha256', 'sha512'),
|
|
|
|
default='sha256',
|
|
|
|
help=("The digest algorithm to use. Defaults to sha256, but "
|
|
|
|
"older clusters may only support sha1."),
|
|
|
|
)
|
2016-12-08 13:42:35 +01:00
|
|
|
|
2018-06-29 11:07:00 +10:00
|
|
|
# We return the parser to build up the bash_completion
|
|
|
|
if return_parser:
|
|
|
|
return parser
|
|
|
|
|
2014-06-25 13:28:42 -07:00
|
|
|
(options, args) = parse_args(parser, args)
|
|
|
|
args = args[1:]
|
|
|
|
if len(args) < 4:
|
|
|
|
thread_manager.error('Usage: %s tempurl %s\n%s', BASENAME,
|
|
|
|
st_tempurl_options, st_tempurl_help)
|
|
|
|
return
|
2017-01-20 18:04:31 +01:00
|
|
|
method, timestamp, path, key = args[:4]
|
2016-08-18 09:44:03 +00:00
|
|
|
|
|
|
|
parsed = urlparse(path)
|
|
|
|
|
2014-06-25 13:28:42 -07:00
|
|
|
if method.upper() not in ['GET', 'PUT', 'HEAD', 'POST', 'DELETE']:
|
|
|
|
thread_manager.print_msg('WARNING: Non-default HTTP method %s for '
|
|
|
|
'tempurl specified, possibly an error' %
|
|
|
|
method.upper())
|
2016-09-08 14:15:40 +01:00
|
|
|
try:
|
2017-01-20 18:04:31 +01:00
|
|
|
path = generate_temp_url(parsed.path, timestamp, key, method,
|
2016-12-08 13:42:35 +01:00
|
|
|
absolute=options['absolute_expiry'],
|
2017-01-20 18:04:31 +01:00
|
|
|
iso8601=options['iso8601'],
|
2018-07-10 14:45:32 +01:00
|
|
|
prefix=options['prefix_based'],
|
2022-06-08 09:30:17 -07:00
|
|
|
ip_range=options['ip_range'],
|
|
|
|
digest=options['digest'])
|
2016-09-08 14:15:40 +01:00
|
|
|
except ValueError as err:
|
|
|
|
thread_manager.error(err)
|
|
|
|
return
|
|
|
|
|
2016-08-18 09:44:03 +00:00
|
|
|
if parsed.scheme and parsed.netloc:
|
|
|
|
url = "%s://%s%s" % (parsed.scheme, parsed.netloc, path)
|
|
|
|
else:
|
|
|
|
url = path
|
2014-06-25 13:28:42 -07:00
|
|
|
thread_manager.print_msg(url)
|
|
|
|
|
|
|
|
|
2018-06-29 11:07:00 +10:00
|
|
|
st_bash_completion_help = '''Retrieve command-specific flags used by bash_completion.
|
|
|
|
|
|
|
|
Optional positional arguments:
|
|
|
|
<command> Swift client command to filter the flags by.
|
2024-02-20 16:21:50 +11:00
|
|
|
'''.strip('\n') # noqa
|
2018-06-29 11:07:00 +10:00
|
|
|
|
|
|
|
|
|
|
|
st_bash_completion_options = '''[command]
|
|
|
|
'''
|
|
|
|
|
|
|
|
|
|
|
|
def st_bash_completion(parser, args, thread_manager, return_parser=False):
|
|
|
|
if return_parser:
|
|
|
|
return parser
|
|
|
|
|
|
|
|
global commands
|
|
|
|
com = args[1] if len(args) > 1 else None
|
|
|
|
|
|
|
|
if com:
|
|
|
|
if com in commands:
|
|
|
|
fn_commands = ["st_%s" % com]
|
|
|
|
else:
|
|
|
|
print("")
|
|
|
|
return
|
|
|
|
else:
|
|
|
|
fn_commands = [fn for fn in globals().keys()
|
2018-12-28 23:04:37 +08:00
|
|
|
if fn.startswith('st_') and
|
|
|
|
not fn.endswith('_options') and
|
|
|
|
not fn.endswith('_help')]
|
2018-06-29 11:07:00 +10:00
|
|
|
|
|
|
|
subparsers = parser.add_subparsers()
|
|
|
|
subcommands = {}
|
|
|
|
if not com:
|
|
|
|
subcommands['base'] = parser
|
|
|
|
for command in fn_commands:
|
|
|
|
cmd = command[3:]
|
|
|
|
if com:
|
|
|
|
subparser = subparsers.add_parser(
|
|
|
|
cmd, help=globals()['%s_help' % command])
|
|
|
|
add_default_args(subparser)
|
|
|
|
subparser = globals()[command](
|
|
|
|
subparser, args, thread_manager, True)
|
|
|
|
subcommands[cmd] = subparser
|
|
|
|
else:
|
|
|
|
subcommands[cmd] = None
|
|
|
|
|
|
|
|
cmds = set()
|
|
|
|
opts = set()
|
|
|
|
for sc_str, sc in list(subcommands.items()):
|
|
|
|
cmds.add(sc_str)
|
|
|
|
if sc:
|
|
|
|
for option in sc._optionals._option_string_actions:
|
|
|
|
opts.add(option)
|
|
|
|
|
|
|
|
for cmd_to_remove in (com, 'bash_completion', 'base'):
|
|
|
|
if cmd_to_remove in cmds:
|
|
|
|
cmds.remove(cmd_to_remove)
|
|
|
|
print(' '.join(cmds | opts))
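# Editor's note (illustrative): "swift bash_completion" prints the known
# command names plus the global flags, while "swift bash_completion upload"
# prints the flags accepted by the upload subcommand -- all on one
# space-separated line for the completion script to split into candidates.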
|
|
|
|
|
|
|
|
|
2016-03-02 16:02:28 +00:00
|
|
|
class HelpFormatter(argparse.HelpFormatter):
|
|
|
|
def _format_action_invocation(self, action):
|
|
|
|
if not action.option_strings:
|
|
|
|
default = self._get_default_metavar_for_positional(action)
|
|
|
|
metavar, = self._metavar_formatter(action, default)(1)
|
|
|
|
return metavar
|
|
|
|
|
|
|
|
else:
|
|
|
|
parts = []
|
|
|
|
|
|
|
|
# if the Optional doesn't take a value, format is:
|
|
|
|
# -s, --long
|
|
|
|
if action.nargs == 0:
|
|
|
|
parts.extend(action.option_strings)
|
|
|
|
|
|
|
|
# if the Optional takes a value, format is:
|
|
|
|
# -s=ARGS, --long=ARGS
|
|
|
|
else:
|
|
|
|
default = self._get_default_metavar_for_optional(action)
|
|
|
|
args_string = self._format_args(action, default)
|
|
|
|
for option_string in action.option_strings:
|
|
|
|
parts.append('%s=%s' % (option_string, args_string))
|
|
|
|
|
|
|
|
return ', '.join(parts)
|
|
|
|
|
|
|
|
# Back-port py3 methods
|
|
|
|
def _get_default_metavar_for_optional(self, action):
|
|
|
|
return action.dest.upper()
|
|
|
|
|
|
|
|
def _get_default_metavar_for_positional(self, action):
|
|
|
|
return action.dest
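# Editor's note (illustrative): with this formatter, an option that takes a
# value is rendered in --help output as
#   -S=SEGMENT_SIZE, --segment-size=SEGMENT_SIZE
# rather than argparse's default "-S SEGMENT_SIZE, --segment-size SEGMENT_SIZE".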
|
|
|
|
|
|
|
|
|
2018-06-11 13:19:05 +01:00
|
|
|
def prompt_for_password():
|
|
|
|
"""
|
|
|
|
Prompt the user for a password.
|
|
|
|
|
|
|
|
:raise SystemExit: if a password cannot be entered without it being echoed
|
|
|
|
to the terminal.
|
|
|
|
:return: the entered password.
|
|
|
|
"""
|
|
|
|
with warnings.catch_warnings():
|
|
|
|
warnings.filterwarnings('error', category=getpass.GetPassWarning,
|
|
|
|
append=True)
|
|
|
|
try:
|
|
|
|
# temporarily set signal handling back to default to avoid user
|
|
|
|
# Ctrl-c leaving terminal in weird state
|
|
|
|
signal.signal(signal.SIGINT, signal.SIG_DFL)
|
|
|
|
return getpass.getpass()
|
|
|
|
except EOFError:
|
|
|
|
return None
|
|
|
|
except getpass.GetPassWarning:
|
|
|
|
exit('Input stream incompatible with --prompt option')
|
|
|
|
finally:
|
|
|
|
signal.signal(signal.SIGINT, immediate_exit)
|
|
|
|
|
|
|
|
|
2012-05-08 11:17:04 +01:00
|
|
|
def parse_args(parser, args, enforce_requires=True):
|
2016-03-02 16:02:28 +00:00
|
|
|
options, args = parser.parse_known_args(args or ['-h'])
|
2016-05-03 11:34:02 -07:00
|
|
|
options = vars(options)
|
2016-08-18 09:44:03 +00:00
|
|
|
if enforce_requires and (options.get('debug') or options.get('info')):
|
2015-12-02 09:49:50 +00:00
|
|
|
logging.getLogger("swiftclient")
|
2016-08-18 09:44:03 +00:00
|
|
|
if options.get('debug'):
|
2015-12-02 09:49:50 +00:00
|
|
|
logging.basicConfig(level=logging.DEBUG)
|
|
|
|
logging.getLogger('iso8601').setLevel(logging.WARNING)
|
2016-02-24 16:56:55 -08:00
|
|
|
client_logger_settings['redact_sensitive_headers'] = False
|
2016-08-18 09:44:03 +00:00
|
|
|
elif options.get('info'):
|
2015-12-02 09:49:50 +00:00
|
|
|
logging.basicConfig(level=logging.INFO)
|
2012-05-08 11:17:04 +01:00
|
|
|
|
2016-08-18 09:44:03 +00:00
|
|
|
if args and options.get('help'):
|
2016-10-23 19:59:11 +00:00
|
|
|
_help = globals().get('st_%s_help' % args[0])
|
|
|
|
_options = globals().get('st_%s_options' % args[0], "\n")
|
|
|
|
if _help:
|
|
|
|
print("Usage: %s %s %s\n%s" % (BASENAME, args[0], _options, _help))
|
|
|
|
else:
|
|
|
|
print("no such command: %s" % args[0])
|
2014-07-10 11:29:42 +01:00
|
|
|
exit()
|
|
|
|
|
2014-06-25 13:28:42 -07:00
|
|
|
# Short circuit for tempurl, which doesn't need auth
|
2016-03-02 16:02:28 +00:00
|
|
|
if args and args[0] == 'tempurl':
|
2014-06-25 13:28:42 -07:00
|
|
|
return options, args
|
|
|
|
|
2018-06-11 13:19:05 +01:00
|
|
|
# do this before process_options sets default auth version
|
|
|
|
if enforce_requires and options['prompt']:
|
|
|
|
options['key'] = options['os_password'] = prompt_for_password()
|
|
|
|
|
2016-05-03 14:18:34 -07:00
|
|
|
# Massage auth version; build out os_options subdict
|
|
|
|
process_options(options)
|
2012-07-04 21:46:02 +02:00
|
|
|
|
2014-01-13 22:39:28 +01:00
|
|
|
if len(args) > 1 and args[0] == "capabilities":
|
|
|
|
return options, args
|
|
|
|
|
2016-05-03 11:34:02 -07:00
|
|
|
if (options['os_options']['object_storage_url'] and
|
2016-04-27 16:45:15 -05:00
|
|
|
options['os_options']['auth_token']):
|
2013-02-27 13:52:24 +01:00
|
|
|
return options, args
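# Editor's note (illustrative; token and URL are placeholders): supplying both
# values, e.g.
#   swift --os-auth-token <token> \
#         --os-storage-url https://proxy.example.com/v1/AUTH_acct list
# returns here and skips the credential checks below; the bypass only applies
# when both are present.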
|
|
|
|
|
2014-03-25 08:21:21 +00:00
|
|
|
if enforce_requires:
|
2020-05-13 10:30:30 -07:00
|
|
|
if options['os_auth_type'] and options['os_auth_type'] not in (
|
|
|
|
'password', 'v1password', 'v2password', 'v3password',
|
|
|
|
'v3applicationcredential'):
|
|
|
|
exit('Only "v3applicationcredential" is supported for '
|
|
|
|
'are supported for --os-auth-type')
|
|
|
|
elif options['os_auth_type'] == 'v3applicationcredential':
|
2019-12-18 00:32:36 +08:00
|
|
|
if not (options['os_application_credential_id'] and
|
|
|
|
options['os_application_credential_secret']):
|
|
|
|
exit('Auth version 3 (application credential) requires '
|
|
|
|
'OS_APPLICATION_CREDENTIAL_ID and '
|
|
|
|
'OS_APPLICATION_CREDENTIAL_SECRET to be set or '
|
|
|
|
'overridden with --os-application-credential-id and '
|
|
|
|
'--os-application-credential-secret respectively.')
|
|
|
|
elif options['auth_version'] == '3':
|
2016-05-03 11:34:02 -07:00
|
|
|
if not options['auth']:
|
2019-12-18 00:32:36 +08:00
|
|
|
exit('Auth version 3 requires OS_AUTH_URL to be set or '
|
2014-03-25 08:21:21 +00:00
|
|
|
'overridden with --os-auth-url')
|
2016-05-03 11:34:02 -07:00
|
|
|
if not (options['user'] or options['os_user_id']):
|
2019-12-18 00:32:36 +08:00
|
|
|
exit('Auth version 3 requires either OS_USERNAME or '
|
|
|
|
'OS_USER_ID to be set or overridden with '
|
2014-03-25 08:21:21 +00:00
|
|
|
'--os-username or --os-user-id respectively.')
|
2016-05-03 11:34:02 -07:00
|
|
|
if not options['key']:
|
2019-12-18 00:32:36 +08:00
|
|
|
exit('Auth version 3 requires OS_PASSWORD to be set or '
|
2014-03-25 08:21:21 +00:00
|
|
|
'overridden with --os-password')
|
2016-05-03 11:34:02 -07:00
|
|
|
elif not (options['auth'] and options['user'] and options['key']):
|
2014-03-25 08:21:21 +00:00
|
|
|
exit('''
|
2012-06-22 11:12:21 -04:00
|
|
|
Auth version 1.0 requires ST_AUTH, ST_USER, and ST_KEY environment variables
|
|
|
|
to be set or overridden with -A, -U, or -K.
|
|
|
|
|
|
|
|
Auth version 2.0 requires OS_AUTH_URL, OS_USERNAME, OS_PASSWORD, and
|
2012-08-23 14:09:56 -05:00
|
|
|
OS_TENANT_NAME or OS_TENANT_ID to be set or overridden with --os-auth-url,
|
2013-07-07 16:17:28 +08:00
|
|
|
--os-username, --os-password, --os-tenant-name or --os-tenant-id. Note:
|
|
|
|
adding "-V 2" is necessary for this.'''.strip('\n'))
|
2012-05-08 11:17:04 +01:00
|
|
|
return options, args
|
|
|
|
|
|
|
|
|
2018-06-29 11:07:00 +10:00
|
|
|
def add_default_args(parser):
|
2016-02-03 13:44:53 +00:00
|
|
|
default_auth_version = '1.0'
|
|
|
|
for k in ('ST_AUTH_VERSION', 'OS_AUTH_VERSION', 'OS_IDENTITY_API_VERSION'):
|
|
|
|
try:
|
|
|
|
default_auth_version = environ[k]
|
|
|
|
break
|
|
|
|
except KeyError:
|
|
|
|
pass
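# Editor's note (illustrative): the loop keeps the first variable found, so
# with ST_AUTH_VERSION unset and OS_AUTH_VERSION=3 exported,
# default_auth_version becomes '3' even if OS_IDENTITY_API_VERSION is also
# set.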
|
|
|
|
|
2016-03-02 16:02:28 +00:00
|
|
|
parser.add_argument('--os-help', action='store_true', dest='os_help',
|
|
|
|
help='Show OpenStack authentication options.')
|
|
|
|
parser.add_argument('--os_help', action='store_true',
|
|
|
|
help=argparse.SUPPRESS)
|
|
|
|
parser.add_argument('-s', '--snet', action='store_true', dest='snet',
|
|
|
|
default=False, help='Use SERVICENET internal network.')
|
|
|
|
parser.add_argument('-v', '--verbose', action='count', dest='verbose',
|
|
|
|
default=1, help='Print more info.')
|
|
|
|
parser.add_argument('--debug', action='store_true', dest='debug',
|
|
|
|
default=False, help='Show the curl commands and '
|
|
|
|
'results of all http queries regardless of result '
|
|
|
|
'status.')
|
|
|
|
parser.add_argument('--info', action='store_true', dest='info',
|
|
|
|
default=False, help='Show the curl commands and '
|
|
|
|
'results of all http queries which return an error.')
|
|
|
|
parser.add_argument('-q', '--quiet', action='store_const', dest='verbose',
|
|
|
|
const=0, default=1, help='Suppress status output.')
|
|
|
|
parser.add_argument('-A', '--auth', dest='auth',
|
|
|
|
default=environ.get('ST_AUTH'),
|
|
|
|
help='URL for obtaining an auth token.')
|
|
|
|
parser.add_argument('-V', '--auth-version', '--os-identity-api-version',
|
|
|
|
dest='auth_version',
|
|
|
|
default=default_auth_version,
|
|
|
|
type=str,
|
|
|
|
help='Specify a version for authentication. '
|
|
|
|
'Defaults to env[ST_AUTH_VERSION], '
|
|
|
|
'env[OS_AUTH_VERSION], '
|
|
|
|
'env[OS_IDENTITY_API_VERSION] or 1.0.')
|
|
|
|
parser.add_argument('-U', '--user', dest='user',
|
|
|
|
default=environ.get('ST_USER'),
|
|
|
|
help='User name for obtaining an auth token.')
|
|
|
|
parser.add_argument('-K', '--key', dest='key',
|
|
|
|
default=environ.get('ST_KEY'),
|
|
|
|
help='Key for obtaining an auth token.')
|
2023-03-20 11:27:50 -07:00
|
|
|
parser.add_argument('-T', '--timeout', type=parse_timeout, dest='timeout',
|
2022-04-21 17:08:17 +03:00
|
|
|
default=None,
|
|
|
|
help='Timeout in seconds to wait for response.')
|
2016-03-02 16:02:28 +00:00
|
|
|
parser.add_argument('-R', '--retries', type=int, default=5, dest='retries',
|
|
|
|
help='The number of times to retry a failed '
|
|
|
|
'connection.')
|
2014-03-25 08:21:21 +00:00
|
|
|
default_val = config_true_value(environ.get('SWIFTCLIENT_INSECURE'))
|
2016-03-02 16:02:28 +00:00
|
|
|
parser.add_argument('--insecure',
|
|
|
|
action="store_true", dest="insecure",
|
|
|
|
default=default_val,
|
|
|
|
help='Allow swiftclient to access servers without '
|
|
|
|
'having to verify the SSL certificate. '
|
|
|
|
'Defaults to env[SWIFTCLIENT_INSECURE] '
|
|
|
|
'(set to \'true\' to enable).')
|
|
|
|
parser.add_argument('--no-ssl-compression',
|
|
|
|
action='store_false', dest='ssl_compression',
|
|
|
|
default=True,
|
|
|
|
help='This option is deprecated and not used anymore. '
|
|
|
|
'SSL compression should be disabled by default '
|
|
|
|
'by the system SSL library.')
|
2018-03-12 17:54:17 +09:00
|
|
|
parser.add_argument('--force-auth-retry',
|
|
|
|
action='store_true', dest='force_auth_retry',
|
|
|
|
default=False,
|
|
|
|
help='Force a re-auth attempt on '
|
|
|
|
'any error other than 401 unauthorized')
|
2018-06-11 13:19:05 +01:00
|
|
|
parser.add_argument('--prompt',
|
|
|
|
action='store_true', dest='prompt',
|
|
|
|
default=False,
|
|
|
|
help='Prompt user to enter a password which overrides '
|
|
|
|
'any password supplied via --key, --os-password '
|
|
|
|
'or environment variables.')
|
2016-03-02 16:02:28 +00:00
|
|
|
|
|
|
|
os_grp = parser.add_argument_group("OpenStack authentication options")
|
|
|
|
os_grp.add_argument('--os-username',
|
|
|
|
metavar='<auth-user-name>',
|
|
|
|
default=environ.get('OS_USERNAME'),
|
|
|
|
help='OpenStack username. Defaults to '
|
|
|
|
'env[OS_USERNAME].')
|
|
|
|
os_grp.add_argument('--os_username',
|
|
|
|
help=argparse.SUPPRESS)
|
|
|
|
os_grp.add_argument('--os-user-id',
|
|
|
|
metavar='<auth-user-id>',
|
|
|
|
default=environ.get('OS_USER_ID'),
|
|
|
|
help='OpenStack user ID. '
|
|
|
|
'Defaults to env[OS_USER_ID].')
|
|
|
|
os_grp.add_argument('--os_user_id',
|
|
|
|
help=argparse.SUPPRESS)
|
|
|
|
os_grp.add_argument('--os-user-domain-id',
|
|
|
|
metavar='<auth-user-domain-id>',
|
|
|
|
default=environ.get('OS_USER_DOMAIN_ID'),
|
|
|
|
help='OpenStack user domain ID. '
|
|
|
|
'Defaults to env[OS_USER_DOMAIN_ID].')
|
|
|
|
os_grp.add_argument('--os_user_domain_id',
|
|
|
|
help=argparse.SUPPRESS)
|
|
|
|
os_grp.add_argument('--os-user-domain-name',
|
|
|
|
metavar='<auth-user-domain-name>',
|
|
|
|
default=environ.get('OS_USER_DOMAIN_NAME'),
|
|
|
|
help='OpenStack user domain name. '
|
|
|
|
'Defaults to env[OS_USER_DOMAIN_NAME].')
|
|
|
|
os_grp.add_argument('--os_user_domain_name',
|
|
|
|
help=argparse.SUPPRESS)
|
|
|
|
os_grp.add_argument('--os-password',
|
|
|
|
metavar='<auth-password>',
|
|
|
|
default=environ.get('OS_PASSWORD'),
|
|
|
|
help='OpenStack password. Defaults to '
|
|
|
|
'env[OS_PASSWORD].')
|
|
|
|
os_grp.add_argument('--os_password',
|
|
|
|
help=argparse.SUPPRESS)
|
|
|
|
os_grp.add_argument('--os-tenant-id',
|
|
|
|
metavar='<auth-tenant-id>',
|
|
|
|
default=environ.get('OS_TENANT_ID'),
|
|
|
|
help='OpenStack tenant ID. '
|
|
|
|
'Defaults to env[OS_TENANT_ID].')
|
|
|
|
os_grp.add_argument('--os_tenant_id',
|
|
|
|
help=argparse.SUPPRESS)
|
|
|
|
os_grp.add_argument('--os-tenant-name',
|
|
|
|
metavar='<auth-tenant-name>',
|
|
|
|
default=environ.get('OS_TENANT_NAME'),
|
|
|
|
help='OpenStack tenant name. '
|
|
|
|
'Defaults to env[OS_TENANT_NAME].')
|
|
|
|
os_grp.add_argument('--os_tenant_name',
|
|
|
|
help=argparse.SUPPRESS)
|
|
|
|
os_grp.add_argument('--os-project-id',
|
|
|
|
metavar='<auth-project-id>',
|
|
|
|
default=environ.get('OS_PROJECT_ID'),
|
|
|
|
help='OpenStack project ID. '
|
|
|
|
'Defaults to env[OS_PROJECT_ID].')
|
|
|
|
os_grp.add_argument('--os_project_id',
|
|
|
|
help=argparse.SUPPRESS)
|
|
|
|
os_grp.add_argument('--os-project-name',
|
|
|
|
metavar='<auth-project-name>',
|
|
|
|
default=environ.get('OS_PROJECT_NAME'),
|
|
|
|
help='OpenStack project name. '
|
|
|
|
'Defaults to env[OS_PROJECT_NAME].')
|
|
|
|
os_grp.add_argument('--os_project_name',
|
|
|
|
help=argparse.SUPPRESS)
|
|
|
|
os_grp.add_argument('--os-project-domain-id',
|
|
|
|
metavar='<auth-project-domain-id>',
|
|
|
|
default=environ.get('OS_PROJECT_DOMAIN_ID'),
|
|
|
|
help='OpenStack project domain ID. '
|
|
|
|
'Defaults to env[OS_PROJECT_DOMAIN_ID].')
|
|
|
|
os_grp.add_argument('--os_project_domain_id',
|
|
|
|
help=argparse.SUPPRESS)
|
|
|
|
os_grp.add_argument('--os-project-domain-name',
|
|
|
|
metavar='<auth-project-domain-name>',
|
|
|
|
default=environ.get('OS_PROJECT_DOMAIN_NAME'),
|
|
|
|
help='OpenStack project domain name. '
|
|
|
|
'Defaults to env[OS_PROJECT_DOMAIN_NAME].')
|
|
|
|
os_grp.add_argument('--os_project_domain_name',
|
|
|
|
help=argparse.SUPPRESS)
|
|
|
|
os_grp.add_argument('--os-auth-url',
|
|
|
|
metavar='<auth-url>',
|
|
|
|
default=environ.get('OS_AUTH_URL'),
|
|
|
|
help='OpenStack auth URL. Defaults to '
|
|
|
|
'env[OS_AUTH_URL].')
|
|
|
|
os_grp.add_argument('--os_auth_url',
|
|
|
|
help=argparse.SUPPRESS)
|
2019-12-18 00:32:36 +08:00
|
|
|
os_grp.add_argument('--os-auth-type',
|
|
|
|
metavar='<auth-type>',
|
|
|
|
default=environ.get('OS_AUTH_TYPE'),
|
|
|
|
help='OpenStack auth type for v3. Defaults to '
|
|
|
|
'env[OS_AUTH_TYPE].')
|
|
|
|
os_grp.add_argument('--os_auth_type',
|
|
|
|
help=argparse.SUPPRESS)
|
|
|
|
os_grp.add_argument('--os-application-credential-id',
|
|
|
|
metavar='<auth-application-credential-id>',
|
|
|
|
default=environ.get('OS_APPLICATION_CREDENTIAL_ID'),
|
|
|
|
help='OpenStack application credential ID. '
|
|
|
|
'Defaults to env[OS_APPLICATION_CREDENTIAL_ID].')
|
|
|
|
os_grp.add_argument('--os_application_credential_id',
|
|
|
|
help=argparse.SUPPRESS)
|
|
|
|
os_grp.add_argument('--os-application-credential-secret',
|
|
|
|
metavar='<auth-application-credential-secret>',
|
|
|
|
default=environ.get(
|
|
|
|
'OS_APPLICATION_CREDENTIAL_SECRET'),
|
|
|
|
help='OpenStack application credential secret. '
|
|
|
|
'Defaults to '
|
|
|
|
'env[OS_APPLICATION_CREDENTIAL_SECRET].')
|
|
|
|
os_grp.add_argument('--os_application_credential_secret',
|
|
|
|
help=argparse.SUPPRESS)
|
2016-03-02 16:02:28 +00:00
|
|
|
os_grp.add_argument('--os-auth-token',
|
|
|
|
metavar='<auth-token>',
|
|
|
|
default=environ.get('OS_AUTH_TOKEN'),
|
|
|
|
help='OpenStack token. Defaults to '
|
|
|
|
'env[OS_AUTH_TOKEN]. Used with --os-storage-url '
|
|
|
|
'to bypass the usual username/password '
|
|
|
|
'authentication.')
|
|
|
|
os_grp.add_argument('--os_auth_token',
|
|
|
|
help=argparse.SUPPRESS)
|
|
|
|
os_grp.add_argument('--os-storage-url',
|
|
|
|
metavar='<storage-url>',
|
|
|
|
default=environ.get('OS_STORAGE_URL'),
|
|
|
|
help='OpenStack storage URL. '
|
|
|
|
'Defaults to env[OS_STORAGE_URL]. '
|
|
|
|
'Overrides the storage url returned during auth. '
|
|
|
|
'Will bypass authentication when used with '
|
|
|
|
'--os-auth-token.')
|
|
|
|
os_grp.add_argument('--os_storage_url',
|
|
|
|
help=argparse.SUPPRESS)
|
|
|
|
os_grp.add_argument('--os-region-name',
|
|
|
|
metavar='<region-name>',
|
|
|
|
default=environ.get('OS_REGION_NAME'),
|
|
|
|
help='OpenStack region name. '
|
|
|
|
'Defaults to env[OS_REGION_NAME].')
|
|
|
|
os_grp.add_argument('--os_region_name',
|
|
|
|
help=argparse.SUPPRESS)
|
|
|
|
os_grp.add_argument('--os-service-type',
|
|
|
|
metavar='<service-type>',
|
|
|
|
default=environ.get('OS_SERVICE_TYPE'),
|
|
|
|
help='OpenStack Service type. '
|
|
|
|
'Defaults to env[OS_SERVICE_TYPE].')
|
|
|
|
os_grp.add_argument('--os_service_type',
|
|
|
|
help=argparse.SUPPRESS)
|
|
|
|
os_grp.add_argument('--os-endpoint-type',
|
|
|
|
metavar='<endpoint-type>',
|
|
|
|
default=environ.get('OS_ENDPOINT_TYPE'),
|
|
|
|
help='OpenStack Endpoint type. '
|
|
|
|
'Defaults to env[OS_ENDPOINT_TYPE].')
|
|
|
|
os_grp.add_argument('--os_endpoint_type',
|
|
|
|
help=argparse.SUPPRESS)
|
|
|
|
os_grp.add_argument('--os-cacert',
|
|
|
|
metavar='<ca-certificate>',
|
|
|
|
default=environ.get('OS_CACERT'),
|
|
|
|
help='Specify a CA bundle file to use in verifying a '
|
|
|
|
'TLS (https) server certificate. '
|
|
|
|
'Defaults to env[OS_CACERT].')
|
2016-04-10 23:18:17 +02:00
|
|
|
os_grp.add_argument('--os-cert',
|
|
|
|
metavar='<client-certificate-file>',
|
|
|
|
default=environ.get('OS_CERT'),
|
|
|
|
help='Specify a client certificate file (for client '
|
|
|
|
'auth). Defaults to env[OS_CERT].')
|
|
|
|
os_grp.add_argument('--os-key',
|
|
|
|
metavar='<client-certificate-key-file>',
|
|
|
|
default=environ.get('OS_KEY'),
|
|
|
|
help='Specify a client certificate key file (for '
|
|
|
|
'client auth). Defaults to env[OS_KEY].')
|
2018-06-29 11:07:00 +10:00
|
|
|
|
|
|
|
|
|
|
|
def main(arguments=None):
|
|
|
|
argv = sys_argv if arguments is None else arguments
|
|
|
|
|
|
|
|
parser = argparse.ArgumentParser(
|
|
|
|
add_help=False, formatter_class=HelpFormatter, usage='''
|
|
|
|
%(prog)s [--version] [--help] [--os-help] [--snet] [--verbose]
|
|
|
|
[--debug] [--info] [--quiet] [--auth <auth_url>]
|
|
|
|
[--auth-version <auth_version> |
|
|
|
|
--os-identity-api-version <auth_version> ]
|
|
|
|
[--user <username>]
|
|
|
|
[--key <api_key>] [--retries <num_retries>]
|
|
|
|
[--os-username <auth-user-name>]
|
|
|
|
[--os-password <auth-password>]
|
|
|
|
[--os-user-id <auth-user-id>]
|
|
|
|
[--os-user-domain-id <auth-user-domain-id>]
|
|
|
|
[--os-user-domain-name <auth-user-domain-name>]
|
|
|
|
[--os-tenant-id <auth-tenant-id>]
|
|
|
|
[--os-tenant-name <auth-tenant-name>]
|
|
|
|
[--os-project-id <auth-project-id>]
|
|
|
|
[--os-project-name <auth-project-name>]
|
|
|
|
[--os-project-domain-id <auth-project-domain-id>]
|
|
|
|
[--os-project-domain-name <auth-project-domain-name>]
|
|
|
|
[--os-auth-url <auth-url>]
|
|
|
|
[--os-auth-token <auth-token>]
|
2019-12-18 00:32:36 +08:00
|
|
|
[--os-auth-type <os-auth-type>]
|
|
|
|
[--os-application-credential-id
|
|
|
|
<auth-application-credential-id>]
|
|
|
|
[--os-application-credential-secret
|
|
|
|
<auth-application-credential-secret>]
|
2018-06-29 11:07:00 +10:00
|
|
|
[--os-storage-url <storage-url>]
|
|
|
|
[--os-region-name <region-name>]
|
|
|
|
[--os-service-type <service-type>]
|
|
|
|
[--os-endpoint-type <endpoint-type>]
|
|
|
|
[--os-cacert <ca-certificate>]
|
|
|
|
[--insecure]
|
|
|
|
[--os-cert <client-certificate-file>]
|
|
|
|
[--os-key <client-certificate-key-file>]
|
|
|
|
[--no-ssl-compression]
|
|
|
|
[--force-auth-retry]
|
|
|
|
<subcommand> [--help] [<subcommand options>]
|
|
|
|
|
|
|
|
Command-line interface to the OpenStack Swift API.
|
|
|
|
|
|
|
|
Positional arguments:
|
|
|
|
<subcommand>
|
|
|
|
delete Delete a container or objects within a container.
|
|
|
|
download Download objects from containers.
|
|
|
|
list Lists the containers for the account or the objects
|
|
|
|
for a container.
|
|
|
|
post Updates meta information for the account, container,
|
|
|
|
or object; creates containers if not present.
|
|
|
|
copy Copies an object, optionally adding metadata.
|
|
|
|
stat Displays information for the account, container,
|
|
|
|
or object.
|
|
|
|
upload Uploads files or directories to the given container.
|
|
|
|
capabilities List cluster capabilities.
|
|
|
|
tempurl Create a temporary URL.
|
|
|
|
auth Display auth related environment variables.
|
|
|
|
bash_completion Outputs option and flag cli data ready for
|
|
|
|
bash_completion.
|
|
|
|
|
|
|
|
Examples:
|
|
|
|
%(prog)s download --help
|
|
|
|
|
|
|
|
%(prog)s -A https://api.example.com/v1.0 \\
|
|
|
|
-U user -K api_key stat -v
|
|
|
|
|
|
|
|
%(prog)s --os-auth-url https://api.example.com/v2.0 \\
|
|
|
|
--os-tenant-name tenant \\
|
|
|
|
--os-username user --os-password password list
|
|
|
|
|
|
|
|
%(prog)s --os-auth-url https://api.example.com/v3 --auth-version 3\\
|
|
|
|
--os-project-name project1 --os-project-domain-name domain1 \\
|
|
|
|
--os-username user --os-user-domain-name domain1 \\
|
|
|
|
--os-password password list
|
|
|
|
|
|
|
|
%(prog)s --os-auth-url https://api.example.com/v3 --auth-version 3\\
|
|
|
|
--os-project-id 0123456789abcdef0123456789abcdef \\
|
|
|
|
--os-user-id abcdef0123456789abcdef0123456789 \\
|
|
|
|
--os-password password list
|
|
|
|
|
2019-12-18 00:32:36 +08:00
|
|
|
%(prog)s --os-auth-url https://api.example.com/v3 --auth-version 3\\
|
|
|
|
--os-application-credential-id d78683c92f0e4f9b9b02a2e208039412 \\
|
2021-04-29 17:39:28 +02:00
|
|
|
--os-application-credential-secret APPLICATION_CREDENTIAL_SECRET \\
|
2019-12-18 00:32:36 +08:00
|
|
|
--os-auth-type v3applicationcredential list
|
|
|
|
|
2018-06-29 11:07:00 +10:00
|
|
|
%(prog)s --os-auth-token 6ee5eb33efad4e45ab46806eac010566 \\
|
|
|
|
--os-storage-url https://10.1.5.2:8080/v1/AUTH_ced809b6a4baea7aeab61a \\
|
|
|
|
list
|
|
|
|
|
|
|
|
%(prog)s list --lh
|
|
|
|
'''.strip('\n'))
|
|
|
|
|
|
|
|
version = client_version
|
|
|
|
parser.add_argument('--version', action='version',
|
|
|
|
version='python-swiftclient %s' % version)
|
|
|
|
parser.add_argument('-h', '--help', action='store_true')
|
|
|
|
|
|
|
|
add_default_args(parser)
|
|
|
|
|
2016-03-02 16:02:28 +00:00
|
|
|
options, args = parse_args(parser, argv[1:], enforce_requires=False)
|
|
|
|
|
2016-05-03 11:34:02 -07:00
|
|
|
if options['help'] or options['os_help']:
|
|
|
|
if options['help']:
|
2016-03-02 16:02:28 +00:00
|
|
|
parser._action_groups.pop()
|
|
|
|
parser.print_help()
|
|
|
|
exit()
|
2012-05-08 11:17:04 +01:00
|
|
|
|
|
|
|
if not args or args[0] not in commands:
|
|
|
|
parser.print_usage()
|
|
|
|
if args:
|
|
|
|
exit('no such command: %s' % args[0])
|
|
|
|
exit()
|
|
|
|
|
2013-05-23 16:39:31 -04:00
|
|
|
signal.signal(signal.SIGINT, immediate_exit)
|
2012-06-18 09:46:54 -07:00
|
|
|
|
2014-04-04 21:13:01 +02:00
|
|
|
with OutputManager() as output:
|
2013-06-26 22:47:49 -07:00
|
|
|
parser.usage = globals()['st_%s_help' % args[0]]
|
2016-06-01 16:45:54 -07:00
|
|
|
if options['insecure']:
|
|
|
|
import requests
|
2018-05-16 17:33:40 +00:00
|
|
|
try:
|
|
|
|
from requests.packages.urllib3.exceptions import \
|
|
|
|
InsecureRequestWarning
|
|
|
|
except ImportError:
|
|
|
|
pass
|
|
|
|
else:
|
|
|
|
requests.packages.urllib3.disable_warnings(
|
|
|
|
InsecureRequestWarning)
|
2013-06-26 22:47:49 -07:00
|
|
|
try:
|
2014-04-04 21:13:01 +02:00
|
|
|
globals()['st_%s' % args[0]](parser, argv[1:], output)
|
2016-01-18 17:05:28 -08:00
|
|
|
except ClientException as err:
|
2020-08-14 10:41:15 -07:00
|
|
|
trans_id = err.transaction_id
|
|
|
|
err.transaction_id = None # clear it so we aren't overly noisy
|
2016-01-18 17:05:28 -08:00
|
|
|
output.error(str(err))
|
|
|
|
if trans_id:
|
|
|
|
output.error("Failed Transaction ID: %s",
|
|
|
|
parse_header_string(trans_id))
|
|
|
|
except (RequestException, socket.error) as err:
|
2014-04-04 21:13:01 +02:00
|
|
|
output.error(str(err))
|
2012-05-08 11:17:04 +01:00
|
|
|
|
2014-12-04 16:59:11 +00:00
|
|
|
if output.get_error_count() > 0:
|
2013-05-23 16:39:31 -04:00
|
|
|
exit(1)
|
2014-02-26 09:52:38 +00:00
|
|
|
|
|
|
|
|
|
|
|
if __name__ == '__main__':
|
|
|
|
main()
|