-rw-r--r--  gluster/swift/__init__.py  |  2
-rw-r--r--  gluster/swift/account/utils.py  |  4
-rw-r--r--  gluster/swift/common/DiskDir.py  |  23
-rw-r--r--  gluster/swift/common/constraints.py  |  11
-rw-r--r--  gluster/swift/common/exceptions.py  |  6
-rw-r--r--  gluster/swift/common/middleware/gswauth/swauth/middleware.py  |  6
-rw-r--r--  gluster/swift/common/utils.py  |  208
-rw-r--r--  gluster/swift/container/server.py  |  6
-rw-r--r--  gluster/swift/obj/diskfile.py  |  65
-rw-r--r--  gluster/swift/obj/expirer.py  |  2
-rw-r--r--  glusterfs-openstack-swift.spec  |  15
-rw-r--r--  requirements.txt  |  8
-rw-r--r--  test-requirements.txt  |  5
-rw-r--r--  test/functional/__init__.py  |  286
-rw-r--r--  test/functional/swift_test_client.py  |  148
-rwxr-xr-x  test/functional/test_account.py  |  135
-rwxr-xr-x  test/functional/test_container.py  |  307
-rwxr-xr-x  test/functional/test_object.py  |  579
-rw-r--r--  test/functional/tests.py  |  2447
-rw-r--r--  test/unit/__init__.py  |  296
-rw-r--r--  test/unit/obj/test_diskfile.py  |  2
-rw-r--r--  test/unit/obj/test_expirer.py  |  162
-rw-r--r--  test/unit/proxy/controllers/test_account.py  |  218
-rw-r--r--  test/unit/proxy/controllers/test_base.py  |  741
-rw-r--r--  test/unit/proxy/controllers/test_container.py  |  190
-rw-r--r--  test/unit/proxy/test_server.py  |  20
-rw-r--r--  tox.ini  |  2
27 files changed, 4746 insertions, 1148 deletions
diff --git a/gluster/swift/__init__.py b/gluster/swift/__init__.py
index c0b415a..ac0c566 100644
--- a/gluster/swift/__init__.py
+++ b/gluster/swift/__init__.py
@@ -45,6 +45,6 @@ class PkgInfo(object):
#
# Change the Package version here
#
-_pkginfo = PkgInfo('2.3.0', '0', 'gluster_swift', False)
+_pkginfo = PkgInfo('2.10.1', '0', 'gluster_swift', False)
__version__ = _pkginfo.pretty_version
__canonical_version__ = _pkginfo.canonical_version
diff --git a/gluster/swift/account/utils.py b/gluster/swift/account/utils.py
index 99fe5ea..4424835 100644
--- a/gluster/swift/account/utils.py
+++ b/gluster/swift/account/utils.py
@@ -21,7 +21,7 @@ from xml.sax import saxutils
def account_listing_response(account, req, response_content_type, broker=None,
limit='', marker='', end_marker='', prefix='',
- delimiter=''):
+ delimiter='', reverse=False):
"""
This is an exact copy of swift.account.utils.account_listing_response()
except for one difference, i.e. this method passes response_content_type
@@ -34,7 +34,7 @@ def account_listing_response(account, req, response_content_type, broker=None,
account_list = broker.list_containers_iter(limit, marker, end_marker,
prefix, delimiter,
- response_content_type)
+ response_content_type, reverse)
if response_content_type == 'application/json':
data = []
for (name, object_count, bytes_used, is_subdir) in account_list:
diff --git a/gluster/swift/common/DiskDir.py b/gluster/swift/common/DiskDir.py
index 4f4a2ef..0bc95df 100644
--- a/gluster/swift/common/DiskDir.py
+++ b/gluster/swift/common/DiskDir.py
@@ -33,7 +33,7 @@ from gluster.swift.common.exceptions import FileOrDirNotFoundError, \
from gluster.swift.obj.expirer import delete_tracker_object
from swift.common.constraints import MAX_META_COUNT, MAX_META_OVERALL_SIZE
from swift.common.swob import HTTPBadRequest
-from swift.common.utils import ThreadPool
+from gluster.swift.common.utils import ThreadPool
DATADIR = 'containers'
@@ -399,7 +399,7 @@ class DiskDir(DiskCommon):
def list_objects_iter(self, limit, marker, end_marker,
prefix, delimiter, path=None,
storage_policy_index=0,
- out_content_type=None):
+ out_content_type=None, reverse=False):
"""
Returns tuple of name, created_at, size, content_type, etag.
"""
@@ -427,6 +427,9 @@ class DiskDir(DiskCommon):
# No objects in container, return empty list
return container_list
+ if marker and end_marker and reverse:
+ marker, end_marker = end_marker, marker
+
if end_marker:
objects = filter_end_marker(objects, end_marker)
@@ -471,6 +474,8 @@ class DiskDir(DiskCommon):
container_list.append((obj, '0', 0, 'text/plain', ''))
if len(container_list) >= limit:
break
+ if reverse:
+ container_list.reverse()
return container_list
count = 0
@@ -512,7 +517,8 @@ class DiskDir(DiskCommon):
count += 1
if count >= limit:
break
-
+ if reverse:
+ container_list.reverse()
return container_list
def _update_object_count(self):
@@ -778,7 +784,8 @@ class DiskAccount(DiskCommon):
return containers
def list_containers_iter(self, limit, marker, end_marker,
- prefix, delimiter, response_content_type=None):
+ prefix, delimiter, response_content_type=None,
+ reverse=False):
"""
Return tuple of name, object_count, bytes_used, 0(is_subdir).
Used by account server.
@@ -794,6 +801,9 @@ class DiskAccount(DiskCommon):
# No containers in account, return empty list
return account_list
+ if marker and end_marker and reverse:
+ marker, end_marker = end_marker, marker
+
if containers and end_marker:
containers = filter_end_marker(containers, end_marker)
@@ -841,6 +851,8 @@ class DiskAccount(DiskCommon):
account_list.append((container, 0, 0, 0))
if len(account_list) >= limit:
break
+ if reverse:
+ account_list.reverse()
return account_list
count = 0
@@ -866,7 +878,8 @@ class DiskAccount(DiskCommon):
count += 1
if count >= limit:
break
-
+ if reverse:
+ account_list.reverse()
return account_list
def get_info(self):
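
The reverse support added to both listing paths above follows a single pattern: when a reversed listing is requested with both markers set, the markers are swapped, the ascending page is built exactly as before, and the result is reversed at the end. A minimal standalone sketch of that contract (the names and inputs are illustrative, not taken from the tree):

# Mirrors the marker-swap-then-reverse approach used in the hunks above;
# `names` stands in for the sorted object or container names on disk.
def list_page(names, marker='', end_marker='', limit=10000, reverse=False):
    if marker and end_marker and reverse:
        marker, end_marker = end_marker, marker
    page = []
    for name in sorted(names):
        if marker and name <= marker:
            continue
        if end_marker and name >= end_marker:
            break
        page.append(name)
        if len(page) >= limit:
            break
    if reverse:
        page.reverse()
    return page

print(list_page(['a', 'b', 'c', 'd'], reverse=True))  # ['d', 'c', 'b', 'a']
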
diff --git a/gluster/swift/common/constraints.py b/gluster/swift/common/constraints.py
index 98e2a27..2007b71 100644
--- a/gluster/swift/common/constraints.py
+++ b/gluster/swift/common/constraints.py
@@ -102,3 +102,14 @@ _ring.Ring = ring.Ring
import swift.account.utils
from gluster.swift.account.utils import account_listing_response as gf_als
swift.account.utils.account_listing_response = gf_als
+
+# Monkey patch StoragePolicy.load_ring as POLICIES are initialized already
+from swift.common.storage_policy import StoragePolicy
+
+
+def load_ring(self, swift_dir):
+ if self.object_ring:
+ return
+ self.object_ring = ring.Ring(swift_dir, ring_name='object')
+
+StoragePolicy.load_ring = load_ring
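
A hedged sketch of what the patch above buys: POLICIES is built at import time, and gluster-swift's fake Ring does not read ring files, so every policy can lazily share the one object ring instead of looking for object-N.ring.gz. Assuming a standard /etc/swift with a swift.conf present:

# Illustrative only: after importing the patched constraints module,
# load_ring() becomes a cheap no-op once object_ring is set.
import gluster.swift.common.constraints  # noqa  (applies the monkey patches)
from swift.common.storage_policy import POLICIES

for policy in POLICIES:
    policy.load_ring('/etc/swift')   # no object-N.ring.gz lookup any more
    print(policy.name, policy.object_ring)
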
diff --git a/gluster/swift/common/exceptions.py b/gluster/swift/common/exceptions.py
index 8260dd9..4dc2878 100644
--- a/gluster/swift/common/exceptions.py
+++ b/gluster/swift/common/exceptions.py
@@ -13,6 +13,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from swift.common.exceptions import SwiftException
+
class GlusterFileSystemOSError(OSError):
pass
@@ -48,3 +50,7 @@ class AlreadyExistsAsFile(GlusterfsException):
class DiskFileContainerDoesNotExist(GlusterfsException):
pass
+
+
+class ThreadPoolDead(SwiftException):
+ pass
diff --git a/gluster/swift/common/middleware/gswauth/swauth/middleware.py b/gluster/swift/common/middleware/gswauth/swauth/middleware.py
index 7a6d713..a266d74 100644
--- a/gluster/swift/common/middleware/gswauth/swauth/middleware.py
+++ b/gluster/swift/common/middleware/gswauth/swauth/middleware.py
@@ -379,7 +379,7 @@ class Swauth(object):
if memcache_client:
memcache_client.set(
memcache_key, (time() + expires_from_now, groups),
- timeout=expires_from_now)
+ time=expires_from_now)
else:
path = quote('/v1/%s/.token_%s/%s' %
(self.auth_account, token[-1], token))
@@ -401,7 +401,7 @@ class Swauth(object):
memcache_client.set(
memcache_key,
(detail['expires'], groups),
- timeout=float(detail['expires'] - time()))
+ time=float(detail['expires'] - time()))
return groups
def authorize(self, req):
@@ -1448,7 +1448,7 @@ class Swauth(object):
(self.itoken_expires,
'%s,.reseller_admin,%s' % (self.metadata_volume,
self.auth_account)),
- timeout=self.token_life)
+ time=self.token_life)
return self.itoken
def get_admin_detail(self, req):
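
The three hunks above all track the same Swift API change: MemcacheRing.set() now takes its expiry as time= (seconds from now); the old timeout= keyword is gone. A hedged, self-contained sketch (the memcached endpoint and cache key are assumptions for illustration):

from time import time
from swift.common.memcached import MemcacheRing

memcache_client = MemcacheRing(['127.0.0.1:11211'])    # assumed local memcached
expires_from_now = 3600
memcache_client.set('AUTH_/auth/test:tester',          # hypothetical cache key
                    (time() + expires_from_now, 'test,test:tester'),
                    time=expires_from_now)
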
diff --git a/gluster/swift/common/utils.py b/gluster/swift/common/utils.py
index 8f68319..ac41698 100644
--- a/gluster/swift/common/utils.py
+++ b/gluster/swift/common/utils.py
@@ -14,17 +14,23 @@
# limitations under the License.
import os
+import sys
import stat
import json
import errno
import random
import logging
from hashlib import md5
-from eventlet import sleep
+from eventlet import sleep, Timeout, tpool, greenthread, \
+ greenio, event
+from Queue import Queue, Empty
+import threading as stdlib_threading
+
import cPickle as pickle
from cStringIO import StringIO
import pickletools
-from gluster.swift.common.exceptions import GlusterFileSystemIOError
+from gluster.swift.common.exceptions import GlusterFileSystemIOError, \
+ ThreadPoolDead
from swift.common.exceptions import DiskFileNoSpace
from swift.common.db import utf8encodekeys
from gluster.swift.common.fs_utils import do_getctime, do_getmtime, do_stat, \
@@ -69,6 +75,204 @@ PICKLE_PROTOCOL = 2
CHUNK_SIZE = 65536
+class ThreadPool(object):
+ """
+ Perform blocking operations in background threads.
+
+ Call its methods from within greenlets to green-wait for results without
+ blocking the eventlet reactor (hopefully).
+ """
+
+ BYTE = 'a'.encode('utf-8')
+
+ def __init__(self, nthreads=2):
+ self.nthreads = nthreads
+ self._run_queue = Queue()
+ self._result_queue = Queue()
+ self._threads = []
+ self._alive = True
+
+ if nthreads <= 0:
+ return
+
+ # We spawn a greenthread whose job it is to pull results from the
+ # worker threads via a real Queue and send them to eventlet Events so
+ # that the calling greenthreads can be awoken.
+ #
+ # Since each OS thread has its own collection of greenthreads, it
+ # doesn't work to have the worker thread send stuff to the event, as
+ # it then notifies its own thread-local eventlet hub to wake up, which
+ # doesn't do anything to help out the actual calling greenthread over
+ # in the main thread.
+ #
+ # Thus, each worker sticks its results into a result queue and then
+ # writes a byte to a pipe, signaling the result-consuming greenlet (in
+ # the main thread) to wake up and consume results.
+ #
+ # This is all stuff that eventlet.tpool does, but that code can't have
+ # multiple instances instantiated. Since the object server uses one
+ # pool per disk, we have to reimplement this stuff.
+ _raw_rpipe, self.wpipe = os.pipe()
+ self.rpipe = greenio.GreenPipe(_raw_rpipe, 'rb', bufsize=0)
+
+ for _junk in xrange(nthreads):
+ thr = stdlib_threading.Thread(
+ target=self._worker,
+ args=(self._run_queue, self._result_queue))
+ thr.daemon = True
+ thr.start()
+ self._threads.append(thr)
+ # This is the result-consuming greenthread that runs in the main OS
+ # thread, as described above.
+ self._consumer_coro = greenthread.spawn_n(self._consume_results,
+ self._result_queue)
+
+ def _worker(self, work_queue, result_queue):
+ """
+ Pulls an item from the queue and runs it, then puts the result into
+ the result queue. Repeats forever.
+
+ :param work_queue: queue from which to pull work
+ :param result_queue: queue into which to place results
+ """
+ while True:
+ item = work_queue.get()
+ if item is None:
+ break
+ ev, func, args, kwargs = item
+ try:
+ result = func(*args, **kwargs)
+ result_queue.put((ev, True, result))
+ except BaseException:
+ result_queue.put((ev, False, sys.exc_info()))
+ finally:
+ work_queue.task_done()
+ os.write(self.wpipe, self.BYTE)
+
+ def _consume_results(self, queue):
+ """
+ Runs as a greenthread in the same OS thread as callers of
+ run_in_thread().
+
+ Takes results from the worker OS threads and sends them to the waiting
+ greenthreads.
+ """
+ while True:
+ try:
+ self.rpipe.read(1)
+ except ValueError:
+ # can happen at process shutdown when pipe is closed
+ break
+
+ while True:
+ try:
+ ev, success, result = queue.get(block=False)
+ except Empty:
+ break
+
+ try:
+ if success:
+ ev.send(result)
+ else:
+ ev.send_exception(*result)
+ finally:
+ queue.task_done()
+
+ def run_in_thread(self, func, *args, **kwargs):
+ """
+ Runs func(*args, **kwargs) in a thread. Blocks the current greenlet
+ until results are available.
+
+ Exceptions thrown will be reraised in the calling thread.
+
+ If the threadpool was initialized with nthreads=0, it invokes
+ func(*args, **kwargs) directly, followed by eventlet.sleep() to ensure
+ the eventlet hub has a chance to execute. It is more likely the hub
+ will be invoked when queuing operations to an external thread.
+
+ :returns: result of calling func
+ :raises: whatever func raises
+ """
+ if not self._alive:
+ raise ThreadPoolDead()
+
+ if self.nthreads <= 0:
+ result = func(*args, **kwargs)
+ sleep()
+ return result
+
+ ev = event.Event()
+ self._run_queue.put((ev, func, args, kwargs), block=False)
+
+ # blocks this greenlet (and only *this* greenlet) until the real
+ # thread calls ev.send().
+ result = ev.wait()
+ return result
+
+ def _run_in_eventlet_tpool(self, func, *args, **kwargs):
+ """
+ Really run something in an external thread, even if we haven't got any
+ threads of our own.
+ """
+ def inner():
+ try:
+ return (True, func(*args, **kwargs))
+ except (Timeout, BaseException) as err:
+ return (False, err)
+
+ success, result = tpool.execute(inner)
+ if success:
+ return result
+ else:
+ raise result
+
+ def force_run_in_thread(self, func, *args, **kwargs):
+ """
+ Runs func(*args, **kwargs) in a thread. Blocks the current greenlet
+ until results are available.
+
+ Exceptions thrown will be reraised in the calling thread.
+
+ If the threadpool was initialized with nthreads=0, uses eventlet.tpool
+ to run the function. This is in contrast to run_in_thread(), which
+ will (in that case) simply execute func in the calling thread.
+
+ :returns: result of calling func
+ :raises: whatever func raises
+ """
+ if not self._alive:
+ raise ThreadPoolDead()
+
+ if self.nthreads <= 0:
+ return self._run_in_eventlet_tpool(func, *args, **kwargs)
+ else:
+ return self.run_in_thread(func, *args, **kwargs)
+
+ def terminate(self):
+ """
+ Releases the threadpool's resources (OS threads, greenthreads, pipes,
+ etc.) and renders it unusable.
+
+ Don't call run_in_thread() or force_run_in_thread() after calling
+ terminate().
+ """
+ self._alive = False
+ if self.nthreads <= 0:
+ return
+
+ for _junk in range(self.nthreads):
+ self._run_queue.put(None)
+ for thr in self._threads:
+ thr.join()
+ self._threads = []
+ self.nthreads = 0
+
+ greenthread.kill(self._consumer_coro)
+
+ self.rpipe.close()
+ os.close(self.wpipe)
+
+
class SafeUnpickler(object):
"""
Loading a pickled stream is potentially unsafe and exploitable because
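
A hedged usage sketch of the ThreadPool vendored above (the import swaps elsewhere in this diff now point at this local copy); the path is an illustrative stand-in for a Gluster mount:

import os
from gluster.swift.common.utils import ThreadPool

pool = ThreadPool(nthreads=2)
try:
    # os.stat() runs in a worker OS thread; only the calling greenlet waits.
    st = pool.run_in_thread(os.stat, '/tmp')
    print(st.st_mode)
finally:
    pool.terminate()   # afterwards run_in_thread() raises ThreadPoolDead
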
diff --git a/gluster/swift/container/server.py b/gluster/swift/container/server.py
index e62076a..82e682a 100644
--- a/gluster/swift/container/server.py
+++ b/gluster/swift/container/server.py
@@ -21,7 +21,7 @@ import gluster.swift.common.constraints # noqa
from swift.container import server
from gluster.swift.common.DiskDir import DiskDir
-from swift.common.utils import public, timing_stats
+from swift.common.utils import public, timing_stats, config_true_value
from swift.common.exceptions import DiskFileNoSpace
from swift.common.swob import HTTPInsufficientStorage, HTTPNotFound, \
HTTPPreconditionFailed
@@ -105,6 +105,8 @@ class ContainerController(server.ContainerController):
end_marker = get_param(req, 'end_marker')
limit = constraints.CONTAINER_LISTING_LIMIT
given_limit = get_param(req, 'limit')
+ reverse = config_true_value(get_param(req, 'reverse'))
+
if given_limit and given_limit.isdigit():
limit = int(given_limit)
if limit > constraints.CONTAINER_LISTING_LIMIT:
@@ -125,7 +127,7 @@ class ContainerController(server.ContainerController):
container_list = broker.list_objects_iter(
limit, marker, end_marker, prefix, delimiter, path,
storage_policy_index=info['storage_policy_index'],
- out_content_type=out_content_type)
+ out_content_type=out_content_type, reverse=reverse)
return self.create_listing(req, out_content_type, info, resp_headers,
broker.metadata, container_list, container)
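
A small hedged illustration of how the new reverse query parameter is interpreted; config_true_value is the same Swift helper imported in the hunk above:

from swift.common.utils import config_true_value

print(config_true_value('true'))   # True  -> listing is returned in reverse order
print(config_true_value('no'))     # False
print(config_true_value(None))     # False -> parameter absent, normal order
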
diff --git a/gluster/swift/obj/diskfile.py b/gluster/swift/obj/diskfile.py
index b94cf3d..be0669f 100644
--- a/gluster/swift/obj/diskfile.py
+++ b/gluster/swift/obj/diskfile.py
@@ -16,6 +16,7 @@
import os
import stat
import errno
+from collections import defaultdict
try:
from random import SystemRandom
random = SystemRandom()
@@ -25,10 +26,11 @@ import logging
import time
from uuid import uuid4
from eventlet import sleep
+from swift.common.utils import Timestamp
from contextlib import contextmanager
from gluster.swift.common.exceptions import AlreadyExistsAsFile, \
AlreadyExistsAsDir, DiskFileContainerDoesNotExist
-from swift.common.utils import ThreadPool
+from gluster.swift.common.utils import ThreadPool
from swift.common.exceptions import DiskFileNotExist, DiskFileError, \
DiskFileNoSpace, DiskFileDeviceUnavailable, DiskFileNotOpen, \
DiskFileExpired
@@ -212,9 +214,16 @@ class DiskFileManager(SwiftDiskFileManager):
:param conf: caller provided configuration object
:param logger: caller provided logger
"""
+ def __init__(self, conf, logger):
+ super(DiskFileManager, self).__init__(conf, logger)
+ threads_per_disk = int(conf.get('threads_per_disk', '0'))
+ self.threadpools = defaultdict(
+ lambda: ThreadPool(nthreads=threads_per_disk))
+
def get_diskfile(self, device, partition, account, container, obj,
policy=None, **kwargs):
dev_path = self.get_dev_path(device, self.mount_check)
+
if not dev_path:
raise DiskFileDeviceUnavailable()
return DiskFile(self, dev_path, self.threadpools[device],
@@ -553,7 +562,7 @@ class DiskFile(object):
"""
def __init__(self, mgr, dev_path, threadpool, partition,
account=None, container=None, obj=None,
- policy=None, uid=DEFAULT_UID, gid=DEFAULT_GID):
+ policy=None, uid=DEFAULT_UID, gid=DEFAULT_GID, **kwargs):
# Variables partition and policy are currently unused.
self._mgr = mgr
self._device_path = dev_path
@@ -588,6 +597,48 @@ class DiskFile(object):
self._data_file = os.path.join(self._put_datadir, self._obj)
self._disk_file_open = False
+ @property
+ def timestamp(self):
+ if self._metadata is None:
+ raise DiskFileNotOpen()
+ return Timestamp(self._metadata.get(X_TIMESTAMP))
+
+ @property
+ def data_timestamp(self):
+ return self.timestamp
+
+ @property
+ def durable_timestamp(self):
+ """
+ Provides the timestamp of the newest data file found in the object
+ directory.
+
+ :return: A Timestamp instance, or None if no data file was found.
+ :raises DiskFileNotOpen: if the open() method has not been previously
+ called on this instance.
+ """
+ if self._metadata:
+ return Timestamp(self._metadata.get(X_TIMESTAMP))
+ return None
+
+ @property
+ def fragments(self):
+ return None
+
+ @property
+ def content_type(self):
+ if self._metadata is None:
+ raise DiskFileNotOpen()
+ return self._metadata.get(X_CONTENT_TYPE)
+
+ @property
+ def content_type_timestamp(self):
+ if self._metadata is None:
+ raise DiskFileNotOpen()
+ t = self._metadata.get('Content-Type-Timestamp') or \
+ self._metadata.get(X_TIMESTAMP)
+ return Timestamp(t)
+
def open(self):
"""
Open the object.
@@ -710,6 +761,15 @@ class DiskFile(object):
self._disk_file_open = False
self._close_fd()
+ def get_datafile_metadata(self):
+ '''gluster-swift does not have separate data and meta files'''
+ if self._metadata is None:
+ raise DiskFileNotOpen()
+ return self._metadata
+
+ def get_metafile_metadata(self):
+ return None
+
def get_metadata(self):
"""
Provide the metadata for a previously opened object as a dictionary.
@@ -879,7 +939,6 @@ class DiskFile(object):
:raises AlreadyExistsAsFile: if path or part of a path is not a \
directory
"""
-
data_file = os.path.join(self._put_datadir, self._obj)
# Assume the full directory path exists to the file already, and
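
A hedged illustration of the Timestamp values behind the new properties above; the metadata dict mimics what gluster-swift keeps in xattrs, and the values are invented for the example:

from swift.common.utils import Timestamp

metadata = {'X-Timestamp': '1494403200.00000', 'Content-Type': 'text/plain'}
ts = Timestamp(metadata['X-Timestamp'])
print(ts.internal)                    # '1494403200.00000'
print(ts.isoformat)                   # '2017-05-10T08:00:00.000000'
print(ts == Timestamp(1494403200))    # True: string and numeric forms compare equal
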
diff --git a/gluster/swift/obj/expirer.py b/gluster/swift/obj/expirer.py
index 38f870e..97e08e9 100644
--- a/gluster/swift/obj/expirer.py
+++ b/gluster/swift/obj/expirer.py
@@ -25,7 +25,7 @@ from gluster.swift.common.utils import delete_tracker_object
from swift.obj.expirer import ObjectExpirer as SwiftObjectExpirer
from swift.common.http import HTTP_NOT_FOUND
from swift.common.internal_client import InternalClient, UnexpectedResponse
-from swift.common.utils import ThreadPool
+from gluster.swift.common.utils import ThreadPool
EXCLUDE_DIRS = ('.trashcan', '.glusterfs')
diff --git a/glusterfs-openstack-swift.spec b/glusterfs-openstack-swift.spec
index 4f265c4..8547eea 100644
--- a/glusterfs-openstack-swift.spec
+++ b/glusterfs-openstack-swift.spec
@@ -24,14 +24,14 @@ Requires : memcached
Requires : openssl
Requires : python
Requires : python-prettytable
-Requires : openstack-swift = 2.3.0
-Requires : openstack-swift-account = 2.3.0
-Requires : openstack-swift-container = 2.3.0
-Requires : openstack-swift-object = 2.3.0
-Requires : openstack-swift-proxy = 2.3.0
+Requires : openstack-swift = 2.10.1
+Requires : openstack-swift-account = 2.10.1
+Requires : openstack-swift-container = 2.10.1
+Requires : openstack-swift-object = 2.10.1
+Requires : openstack-swift-proxy = 2.10.1
# gluster-swift has no hard-dependency on particular version of glusterfs
# so don't bump this up unless you want to force users to upgrade their
-# glusterfs deployment.
+# glusterfs deployment
Requires : python-gluster >= 3.8.0
Obsoletes: glusterfs-swift-plugin
Obsoletes: glusterfs-swift
@@ -102,6 +102,9 @@ done
%config(noreplace) %{_confdir}/object-expirer.conf-gluster
%changelog
+* Wed May 10 2017 Venkata R Edara <redara@redhat.com> - 2.10.1
+- Rebase to Swift 2.10.1 (newton)
+
* Tue Mar 15 2016 Prashanth Pai <ppai@redhat.com> - 2.3.0-0
- Rebase to swift kilo (2.3.0)
diff --git a/requirements.txt b/requirements.txt
index 81ae220..4537934 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,14 +2,14 @@
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.
-dnspython>=1.9.4
-eventlet>=0.16.1,!=0.17.0
+dnspython>=1.14.0 # http://www.dnspython.org/LICENSE
+eventlet>=0.17.4 # MIT
greenlet>=0.3.1
netifaces>=0.5,!=0.10.0,!=0.10.1
pastedeploy>=1.3.3
-simplejson>=2.0.9
+six>=1.9.0
xattr>=0.4
-PyECLib==1.0.7 # BSD
+PyECLib>=1.2.0 # BSD
# gluster-swift specific requirements
prettytable # needed by gswauth
diff --git a/test-requirements.txt b/test-requirements.txt
index 27953e7..92aa503 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -3,10 +3,13 @@
# process, which may cause wedges in the gate later.
# Hacking already pins down pep8, pyflakes and flake8
-hacking>=0.8.0,<0.9
+hacking>=0.10.0,<0.11
coverage
nose
nosexcover
nosehtmloutput
+os-api-ref>=0.1.0 # Apache-2.0
+os-testr>=0.4.1
mock>=1.0
python-swiftclient
+python-keystoneclient>=1.3.0
diff --git a/test/functional/__init__.py b/test/functional/__init__.py
index 580de56..4d0b71f 100644
--- a/test/functional/__init__.py
+++ b/test/functional/__init__.py
@@ -13,9 +13,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import httplib
+from __future__ import print_function
import mock
import os
+from six.moves.urllib.parse import urlparse
import sys
import pickle
import socket
@@ -24,20 +25,23 @@ import eventlet
import eventlet.debug
import functools
import random
-from ConfigParser import ConfigParser, NoSectionError
+
from time import time, sleep
-from httplib import HTTPException
-from urlparse import urlparse
-from nose import SkipTest
from contextlib import closing
from gzip import GzipFile
from shutil import rmtree
from tempfile import mkdtemp
+from unittest2 import SkipTest
+
+from six.moves.configparser import ConfigParser, NoSectionError
+from six.moves import http_client
+from six.moves.http_client import HTTPException
+
from swift.common.middleware.memcache import MemcacheMiddleware
from swift.common.storage_policy import parse_storage_policies, PolicyError
from test import get_config
-from test.functional.swift_test_client import Account, Connection, \
+from test.functional.swift_test_client import Account, Connection, Container, \
ResponseError
# This has the side effect of mocking out the xattr module so that unit tests
# (and in this case, when in-process functional tests are called for) can run
@@ -47,13 +51,13 @@ from test.unit import debug_logger, FakeMemcache
from swift.common import constraints, utils, ring, storage_policy
from swift.common.ring import Ring
from swift.common.wsgi import monkey_patch_mimetools, loadapp
-from swift.common.utils import config_true_value
+from swift.common.utils import config_true_value, split_path
from swift.account import server as account_server
from swift.container import server as container_server
from swift.obj import server as object_server, mem_server as mem_object_server
import swift.proxy.controllers.obj
-httplib._MAXHEADERS = constraints.MAX_HEADER_COUNT
+http_client._MAXHEADERS = constraints.MAX_HEADER_COUNT
DEBUG = True
# In order to get the proper blocking behavior of sockets without using
@@ -88,15 +92,16 @@ normalized_urls = None
# If no config was read, we will fall back to old school env vars
swift_test_auth_version = None
swift_test_auth = os.environ.get('SWIFT_TEST_AUTH')
-swift_test_user = [os.environ.get('SWIFT_TEST_USER'), None, None, '', '']
-swift_test_key = [os.environ.get('SWIFT_TEST_KEY'), None, None, '', '']
-swift_test_tenant = ['', '', '', '', '']
-swift_test_perm = ['', '', '', '', '']
-swift_test_domain = ['', '', '', '', '']
-swift_test_user_id = ['', '', '', '', '']
-swift_test_tenant_id = ['', '', '', '', '']
+swift_test_user = [os.environ.get('SWIFT_TEST_USER'), None, None, '', '', '']
+swift_test_key = [os.environ.get('SWIFT_TEST_KEY'), None, None, '', '', '']
+swift_test_tenant = ['', '', '', '', '', '']
+swift_test_perm = ['', '', '', '', '', '']
+swift_test_domain = ['', '', '', '', '', '']
+swift_test_user_id = ['', '', '', '', '', '']
+swift_test_tenant_id = ['', '', '', '', '', '']
-skip, skip2, skip3, skip_service_tokens = False, False, False, False
+skip, skip2, skip3, skip_service_tokens, skip_if_no_reseller_admin = \
+ False, False, False, False, False
orig_collate = ''
insecure = False
@@ -105,7 +110,8 @@ orig_hash_path_suff_pref = ('', '')
orig_swift_conf_name = None
in_process = False
-_testdir = _test_servers = _test_coros = None
+_testdir = _test_servers = _test_coros = _test_socks = None
+policy_specified = None
class FakeMemcacheMiddleware(MemcacheMiddleware):
@@ -124,7 +130,7 @@ class InProcessException(BaseException):
def _info(msg):
- print >> sys.stderr, msg
+ print(msg, file=sys.stderr)
def _debug(msg):
@@ -210,7 +216,6 @@ def _in_process_setup_ring(swift_conf, conf_src_dir, testdir):
for policy in policies:
conf.remove_section(sp_prefix + str(policy.idx))
- policy_specified = os.environ.get('SWIFT_TEST_POLICY')
if policy_specified:
policy_to_test = policies.get_by_name(policy_specified)
if policy_to_test is None:
@@ -282,10 +287,61 @@ def _in_process_setup_ring(swift_conf, conf_src_dir, testdir):
return obj_sockets
+def _load_encryption(proxy_conf_file, **kwargs):
+ """
+ Load encryption configuration and override proxy-server.conf contents.
+
+ :param proxy_conf_file: Source proxy conf filename
+ :returns: Path to the test proxy conf file to use
+ :raises InProcessException: raised if proxy conf contents are invalid
+ """
+ _debug('Setting configuration for encryption')
+
+ # The global conf dict cannot be used to modify the pipeline.
+ # The pipeline loader requires the pipeline to be set in the local_conf.
+ # If pipeline is set in the global conf dict (which in turn populates the
+ # DEFAULTS options) then it prevents pipeline being loaded into the local
+ # conf during wsgi load_app.
+ # Therefore we must modify the [pipeline:main] section.
+
+ conf = ConfigParser()
+ conf.read(proxy_conf_file)
+ try:
+ section = 'pipeline:main'
+ pipeline = conf.get(section, 'pipeline')
+ pipeline = pipeline.replace(
+ "proxy-logging proxy-server",
+ "keymaster encryption proxy-logging proxy-server")
+ conf.set(section, 'pipeline', pipeline)
+ root_secret = os.urandom(32).encode("base64")
+ conf.set('filter:keymaster', 'encryption_root_secret', root_secret)
+ except NoSectionError as err:
+ msg = 'Problem with proxy conf file %s: %s' % \
+ (proxy_conf_file, err)
+ raise InProcessException(msg)
+
+ test_conf_file = os.path.join(_testdir, 'proxy-server.conf')
+ with open(test_conf_file, 'w') as fp:
+ conf.write(fp)
+
+ return test_conf_file
+
+
+# Mapping from possible values of the variable
+# SWIFT_TEST_IN_PROCESS_CONF_LOADER
+# to the method to call for loading the associated configuration
+# The expected signature for these methods is:
+# conf_filename_to_use loader(input_conf_filename, **kwargs)
+conf_loaders = {
+ 'encryption': _load_encryption
+}
+
+
def in_process_setup(the_object_server=object_server):
_info('IN-PROCESS SERVERS IN USE FOR FUNCTIONAL TESTS')
_info('Using object_server class: %s' % the_object_server.__name__)
conf_src_dir = os.environ.get('SWIFT_TEST_IN_PROCESS_CONF_DIR')
+ show_debug_logs = os.environ.get('SWIFT_TEST_DEBUG_LOGS')
if conf_src_dir is not None:
if not os.path.isdir(conf_src_dir):
@@ -312,6 +368,26 @@ def in_process_setup(the_object_server=object_server):
utils.mkdirs(os.path.join(_testdir, 'sdb1'))
utils.mkdirs(os.path.join(_testdir, 'sdb1', 'tmp'))
+ # Call the associated method for the value of
+ # 'SWIFT_TEST_IN_PROCESS_CONF_LOADER', if one exists
+ conf_loader_label = os.environ.get(
+ 'SWIFT_TEST_IN_PROCESS_CONF_LOADER')
+ if conf_loader_label is not None:
+ try:
+ conf_loader = conf_loaders[conf_loader_label]
+ _debug('Calling method %s mapped to conf loader %s' %
+ (conf_loader.__name__, conf_loader_label))
+ except KeyError as missing_key:
+ raise InProcessException('No function mapped for conf loader %s' %
+ missing_key)
+
+ try:
+ # Pass-in proxy_conf
+ proxy_conf = conf_loader(proxy_conf)
+ _debug('Now using proxy conf %s' % proxy_conf)
+ except Exception as err: # noqa
+ raise InProcessException(err)
+
swift_conf = _in_process_setup_swift_conf(swift_conf_src, _testdir)
obj_sockets = _in_process_setup_ring(swift_conf, conf_src_dir, _testdir)
@@ -335,10 +411,13 @@ def in_process_setup(the_object_server=object_server):
orig_hash_path_suff_pref = utils.HASH_PATH_PREFIX, utils.HASH_PATH_SUFFIX
utils.validate_hash_conf()
+ global _test_socks
+ _test_socks = []
# We create the proxy server listening socket to get its port number so
# that we can add it as the "auth_port" value for the functional test
# clients.
prolis = eventlet.listen(('localhost', 0))
+ _test_socks.append(prolis)
# The following set of configuration values is used both for the
# functional test frame work and for the various proxy, account, container
@@ -377,13 +456,27 @@ def in_process_setup(the_object_server=object_server):
'service_prefix': 'SERVICE',
# For tempauth middleware. Update reseller_prefix
'reseller_prefix': 'AUTH, SERVICE',
- 'SERVICE_require_group': 'service'
+ 'SERVICE_require_group': 'service',
+ # Reseller admin user (needs reseller_admin_role)
+ 'account6': 'test6',
+ 'username6': 'tester6',
+ 'password6': 'testing6'
})
+ # If an env var explicitly specifies the proxy-server object_post_as_copy
+ # option then use its value, otherwise leave default config unchanged.
+ object_post_as_copy = os.environ.get(
+ 'SWIFT_TEST_IN_PROCESS_OBJECT_POST_AS_COPY')
+ if object_post_as_copy is not None:
+ object_post_as_copy = config_true_value(object_post_as_copy)
+ config['object_post_as_copy'] = str(object_post_as_copy)
+ _debug('Setting object_post_as_copy to %r' % object_post_as_copy)
+
acc1lis = eventlet.listen(('localhost', 0))
acc2lis = eventlet.listen(('localhost', 0))
con1lis = eventlet.listen(('localhost', 0))
con2lis = eventlet.listen(('localhost', 0))
+ _test_socks += [acc1lis, acc2lis, con1lis, con2lis] + obj_sockets
account_ring_path = os.path.join(_testdir, 'account.ring.gz')
with closing(GzipFile(account_ring_path, 'wb')) as f:
@@ -412,23 +505,30 @@ def in_process_setup(the_object_server=object_server):
# Default to only 4 seconds for in-process functional test runs
eventlet.wsgi.WRITE_TIMEOUT = 4
+ def get_logger_name(name):
+ if show_debug_logs:
+ return debug_logger(name)
+ else:
+ return None
+
acc1srv = account_server.AccountController(
- config, logger=debug_logger('acct1'))
+ config, logger=get_logger_name('acct1'))
acc2srv = account_server.AccountController(
- config, logger=debug_logger('acct2'))
+ config, logger=get_logger_name('acct2'))
con1srv = container_server.ContainerController(
- config, logger=debug_logger('cont1'))
+ config, logger=get_logger_name('cont1'))
con2srv = container_server.ContainerController(
- config, logger=debug_logger('cont2'))
+ config, logger=get_logger_name('cont2'))
objsrvs = [
(obj_sockets[index],
the_object_server.ObjectController(
- config, logger=debug_logger('obj%d' % (index + 1))))
+ config, logger=get_logger_name('obj%d' % (index + 1))))
for index in range(len(obj_sockets))
]
- logger = debug_logger('proxy')
+ if show_debug_logs:
+ logger = debug_logger('proxy')
def get_logger(name, *args, **kwargs):
return logger
@@ -442,6 +542,8 @@ def in_process_setup(the_object_server=object_server):
raise InProcessException(e)
nl = utils.NullLogger()
+ global proxy_srv
+ proxy_srv = prolis
prospa = eventlet.spawn(eventlet.wsgi.server, prolis, app, nl)
acc1spa = eventlet.spawn(eventlet.wsgi.server, acc1lis, acc1srv, nl)
acc2spa = eventlet.spawn(eventlet.wsgi.server, acc2lis, acc2srv, nl)
@@ -483,6 +585,7 @@ def get_cluster_info():
# We'll update those constraints based on what the /info API provides, if
# anything.
global cluster_info
+ global config
try:
conn = Connection(config)
conn.authenticate()
@@ -498,7 +601,7 @@ def get_cluster_info():
# Most likely the swift cluster has "expose_info = false" set
# in its proxy-server.conf file, so we'll just do the best we
# can.
- print >>sys.stderr, "** Swift Cluster not exposing /info **"
+ print("** Swift Cluster not exposing /info **", file=sys.stderr)
# Finally, we'll allow any constraint present in the swift-constraints
# section of test.conf to override everything. Note that only those
@@ -510,8 +613,8 @@ def get_cluster_info():
except KeyError:
pass
except ValueError:
- print >>sys.stderr, "Invalid constraint value: %s = %s" % (
- k, test_constraints[k])
+ print("Invalid constraint value: %s = %s" % (
+ k, test_constraints[k]), file=sys.stderr)
eff_constraints.update(test_constraints)
# Just make it look like these constraints were loaded from a /info call,
@@ -521,6 +624,9 @@ def get_cluster_info():
def setup_package():
+
+ global policy_specified
+ policy_specified = os.environ.get('SWIFT_TEST_POLICY')
in_process_env = os.environ.get('SWIFT_TEST_IN_PROCESS')
if in_process_env is not None:
use_in_process = utils.config_true_value(in_process_env)
@@ -529,6 +635,7 @@ def setup_package():
global in_process
+ global config
if use_in_process:
# Explicitly set to True, so barrel on ahead with in-process
# functional test setup.
@@ -540,10 +647,13 @@ def setup_package():
# if the test.conf file is not found, or does not provide a usable
# configuration.
config.update(get_config('func_test'))
- if config:
- in_process = False
- else:
+ if not config:
in_process = True
+ # else... leave in_process value unchanged. It may be that
+ # setup_package is called twice, in which case in_process_setup may
+ # have loaded config before we reach here a second time, so the
+ # existence of config is not a reliable way to determine that in_process
+ # should be False. Anyway, its default value is False.
else:
# Explicitly set to False, do not attempt to use in-process
# functional tests, be sure we attempt to read from local
@@ -558,8 +668,8 @@ def setup_package():
in_process_setup(the_object_server=(
mem_object_server if in_mem_obj else object_server))
except InProcessException as exc:
- print >> sys.stderr, ('Exception during in-process setup: %s'
- % str(exc))
+ print(('Exception during in-process setup: %s'
+ % str(exc)), file=sys.stderr)
raise
global web_front_end
@@ -660,6 +770,10 @@ def setup_package():
swift_test_user[4] = config['username5']
swift_test_tenant[4] = config['account5']
swift_test_key[4] = config['password5']
+ if 'username6' in config:
+ swift_test_user[5] = config['username6']
+ swift_test_tenant[5] = config['account6']
+ swift_test_key[5] = config['password6']
for _ in range(5):
swift_test_perm[_] = swift_test_tenant[_] + ':' \
@@ -668,20 +782,19 @@ def setup_package():
global skip
skip = not all([swift_test_auth, swift_test_user[0], swift_test_key[0]])
if skip:
- print >>sys.stderr, 'SKIPPING FUNCTIONAL TESTS DUE TO NO CONFIG'
+ print('SKIPPING FUNCTIONAL TESTS DUE TO NO CONFIG', file=sys.stderr)
global skip2
skip2 = not all([not skip, swift_test_user[1], swift_test_key[1]])
if not skip and skip2:
- print >>sys.stderr, \
- 'SKIPPING SECOND ACCOUNT FUNCTIONAL TESTS' \
- ' DUE TO NO CONFIG FOR THEM'
+ print('SKIPPING SECOND ACCOUNT FUNCTIONAL TESTS '
+ 'DUE TO NO CONFIG FOR THEM', file=sys.stderr)
global skip3
skip3 = not all([not skip, swift_test_user[2], swift_test_key[2]])
if not skip and skip3:
- print >>sys.stderr, \
- 'SKIPPING THIRD ACCOUNT FUNCTIONAL TESTS DUE TO NO CONFIG FOR THEM'
+ print('SKIPPING THIRD ACCOUNT FUNCTIONAL TESTS'
+ ' DUE TO NO CONFIG FOR THEM', file=sys.stderr)
global skip_if_not_v3
skip_if_not_v3 = (swift_test_auth_version != '3'
@@ -689,16 +802,42 @@ def setup_package():
swift_test_user[3],
swift_test_key[3]]))
if not skip and skip_if_not_v3:
- print >>sys.stderr, \
- 'SKIPPING FUNCTIONAL TESTS SPECIFIC TO AUTH VERSION 3'
+ print('SKIPPING FUNCTIONAL TESTS SPECIFIC TO AUTH VERSION 3',
+ file=sys.stderr)
global skip_service_tokens
skip_service_tokens = not all([not skip, swift_test_user[4],
swift_test_key[4], swift_test_tenant[4],
swift_test_service_prefix])
if not skip and skip_service_tokens:
- print >>sys.stderr, \
- 'SKIPPING FUNCTIONAL TESTS SPECIFIC TO SERVICE TOKENS'
+ print(
+ 'SKIPPING FUNCTIONAL TESTS SPECIFIC TO SERVICE TOKENS',
+ file=sys.stderr)
+
+ if policy_specified:
+ policies = FunctionalStoragePolicyCollection.from_info()
+ for p in policies:
+ # policy names are case-insensitive
+ if policy_specified.lower() == p['name'].lower():
+ _info('Using specified policy %s' % policy_specified)
+ FunctionalStoragePolicyCollection.policy_specified = p
+ Container.policy_specified = policy_specified
+ break
+ else:
+ _info(
+ 'SKIPPING FUNCTIONAL TESTS: Failed to find specified policy %s'
+ % policy_specified)
+ raise Exception('Failed to find specified policy %s'
+ % policy_specified)
+
+ global skip_if_no_reseller_admin
+ skip_if_no_reseller_admin = not all([not skip, swift_test_user[5],
+ swift_test_key[5],
+ swift_test_tenant[5]])
+ if not skip and skip_if_no_reseller_admin:
+ print(
+ 'SKIPPING FUNCTIONAL TESTS DUE TO NO CONFIG FOR RESELLER ADMIN',
+ file=sys.stderr)
get_cluster_info()
@@ -708,16 +847,23 @@ def teardown_package():
locale.setlocale(locale.LC_COLLATE, orig_collate)
# clean up containers and objects left behind after running tests
- conn = Connection(config)
- conn.authenticate()
- account = Account(conn, config.get('account', config['username']))
- account.delete_containers()
+ global config
+
+ if config:
+ conn = Connection(config)
+ conn.authenticate()
+ account = Account(conn, config.get('account', config['username']))
+ account.delete_containers()
global in_process
+ global _test_socks
if in_process:
try:
- for server in _test_coros:
+ for i, server in enumerate(_test_coros):
server.kill()
+ if not server.dead:
+ # kill it from the socket level
+ _test_socks[i].close()
except Exception:
pass
try:
@@ -728,6 +874,7 @@ def teardown_package():
orig_hash_path_suff_pref
utils.SWIFT_CONF_FILE = orig_swift_conf_name
constraints.reload_constraints()
+ reset_globals()
class AuthError(Exception):
@@ -745,10 +892,37 @@ parsed = [None, None, None, None, None]
conn = [None, None, None, None, None]
+def reset_globals():
+ global url, token, service_token, parsed, conn, config
+ url = [None, None, None, None, None]
+ token = [None, None, None, None, None]
+ service_token = [None, None, None, None, None]
+ parsed = [None, None, None, None, None]
+ conn = [None, None, None, None, None]
+ if config:
+ config = {}
+
+
def connection(url):
if has_insecure:
- return http_connection(url, insecure=insecure)
- return http_connection(url)
+ parsed_url, http_conn = http_connection(url, insecure=insecure)
+ else:
+ parsed_url, http_conn = http_connection(url)
+
+ orig_request = http_conn.request
+
+ # Add the policy header if policy_specified is set
+ def request_with_policy(method, url, body=None, headers={}):
+ version, account, container, obj = split_path(url, 1, 4, True)
+ if policy_specified and method == 'PUT' and container and not obj \
+ and 'X-Storage-Policy' not in headers:
+ headers['X-Storage-Policy'] = policy_specified
+
+ return orig_request(method, url, body, headers)
+
+ http_conn.request = request_with_policy
+
+ return parsed_url, http_conn
def get_url_token(user_index, os_options):
@@ -899,6 +1073,9 @@ def requires_acls(f):
class FunctionalStoragePolicyCollection(object):
+ # policy_specified is set in __init__.py when tests are being set up.
+ policy_specified = None
+
def __init__(self, policies):
self._all = policies
self.default = None
@@ -940,7 +1117,12 @@ class FunctionalStoragePolicyCollection(object):
p.get(k) != v for k, v in kwargs.items())])
def select(self):
- return random.choice(self)
+ # check that a policy was specified and that it is available
+ # in the current list (i.e., hasn't been excluded from the current list)
+ if self.policy_specified and self.policy_specified in self:
+ return self.policy_specified
+ else:
+ return random.choice(self)
def requires_policies(f):
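
A hedged sketch of how the new hooks above are driven purely by environment variables when running the functional tests in-process (the policy name here is hypothetical):

import os

os.environ['SWIFT_TEST_IN_PROCESS'] = '1'
os.environ['SWIFT_TEST_IN_PROCESS_CONF_LOADER'] = 'encryption'  # key in conf_loaders
os.environ['SWIFT_TEST_POLICY'] = 'gold'                        # hypothetical policy

import test.functional as tf
tf.setup_package()      # _load_encryption() rewrites the proxy conf before startup
tf.teardown_package()   # now also closes leftover sockets and calls reset_globals()
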
diff --git a/test/functional/swift_test_client.py b/test/functional/swift_test_client.py
index 5c0ab87..d98af92 100644
--- a/test/functional/swift_test_client.py
+++ b/test/functional/swift_test_client.py
@@ -14,19 +14,18 @@
# limitations under the License.
import hashlib
-import httplib
+import json
import os
import random
import socket
-import StringIO
import time
-import urllib
-import simplejson as json
-
-from nose import SkipTest
+from unittest2 import SkipTest
from xml.dom import minidom
+import six
+from six.moves import http_client
+from six.moves import urllib
from swiftclient import get_auth
from swift.common import constraints
@@ -34,7 +33,7 @@ from swift.common.utils import config_true_value
from test import safe_repr
-httplib._MAXHEADERS = constraints.MAX_HEADER_COUNT
+http_client._MAXHEADERS = constraints.MAX_HEADER_COUNT
class AuthenticationFailed(Exception):
@@ -71,7 +70,7 @@ class ResponseError(Exception):
def listing_empty(method):
- for i in xrange(6):
+ for i in range(6):
if len(method()) == 0:
return True
@@ -166,10 +165,10 @@ class Connection(object):
x = storage_url.split('/')
if x[0] == 'http:':
- self.conn_class = httplib.HTTPConnection
+ self.conn_class = http_client.HTTPConnection
self.storage_port = 80
elif x[0] == 'https:':
- self.conn_class = httplib.HTTPSConnection
+ self.conn_class = http_client.HTTPSConnection
self.storage_port = 443
else:
raise ValueError('unexpected protocol %s' % (x[0]))
@@ -209,7 +208,7 @@ class Connection(object):
def http_connect(self):
self.connection = self.conn_class(self.storage_host,
port=self.storage_port)
- #self.connection.set_debuglevel(3)
+ # self.connection.set_debuglevel(3)
def make_path(self, path=None, cfg=None):
if path is None:
@@ -221,7 +220,7 @@ class Connection(object):
return '/' + self.storage_url.split('/')[1]
if path:
- quote = urllib.quote
+ quote = urllib.parse.quote
if cfg.get('no_quote') or cfg.get('no_path_quote'):
quote = lambda x: x
return '%s/%s' % (self.storage_url,
@@ -237,6 +236,9 @@ class Connection(object):
if not cfg.get('no_auth_token'):
headers['X-Auth-Token'] = self.storage_token
+ if cfg.get('use_token'):
+ headers['X-Auth-Token'] = cfg.get('use_token')
+
if isinstance(hdrs, dict):
headers.update(hdrs)
return headers
@@ -258,7 +260,7 @@ class Connection(object):
path = self.make_path(path, cfg=cfg)
headers = self.make_headers(hdrs, cfg=cfg)
if isinstance(parms, dict) and parms:
- quote = urllib.quote
+ quote = urllib.parse.quote
if cfg.get('no_quote') or cfg.get('no_parms_quote'):
quote = lambda x: x
query_args = ['%s=%s' % (quote(x), quote(str(y)))
@@ -283,7 +285,7 @@ class Connection(object):
try:
self.response = try_request()
- except httplib.HTTPException as e:
+ except http_client.HTTPException as e:
fail_messages.append(safe_repr(e))
continue
@@ -326,7 +328,7 @@ class Connection(object):
headers.pop('Content-Length', None)
if isinstance(parms, dict) and parms:
- quote = urllib.quote
+ quote = urllib.parse.quote
if cfg.get('no_quote') or cfg.get('no_parms_quote'):
quote = lambda x: x
query_args = ['%s=%s' % (quote(x), quote(str(y)))
@@ -335,9 +337,9 @@ class Connection(object):
self.connection = self.conn_class(self.storage_host,
port=self.storage_port)
- #self.connection.set_debuglevel(3)
+ # self.connection.set_debuglevel(3)
self.connection.putrequest('PUT', path)
- for key, value in headers.iteritems():
+ for key, value in headers.items():
self.connection.putheader(key, value)
self.connection.endheaders()
@@ -423,7 +425,6 @@ class Account(Base):
raise RequestError('Invalid format: %s' % format_type)
if format_type is None and 'format' in parms:
del parms['format']
-
status = self.conn.make_request('GET', self.path, hdrs=hdrs,
parms=parms, cfg=cfg)
if status == 200:
@@ -458,6 +459,7 @@ class Account(Base):
def delete_containers(self):
for c in listing_items(self.containers):
cont = self.container(c)
+ cont.update_metadata(hdrs={'x-versions-location': ''})
if not cont.delete_recursive():
return False
@@ -488,6 +490,9 @@ class Account(Base):
class Container(Base):
+ # policy_specified is set in __init__.py when tests are being set up.
+ policy_specified = None
+
def __init__(self, conn, account, name):
self.conn = conn
self.account = str(account)
@@ -500,9 +505,23 @@ class Container(Base):
parms = {}
if cfg is None:
cfg = {}
+ if self.policy_specified and 'X-Storage-Policy' not in hdrs:
+ hdrs['X-Storage-Policy'] = self.policy_specified
return self.conn.make_request('PUT', self.path, hdrs=hdrs,
parms=parms, cfg=cfg) in (201, 202)
+ def update_metadata(self, hdrs=None, cfg=None):
+ if hdrs is None:
+ hdrs = {}
+ if cfg is None:
+ cfg = {}
+
+ self.conn.make_request('POST', self.path, hdrs=hdrs, cfg=cfg)
+ if not 200 <= self.conn.response.status <= 299:
+ raise ResponseError(self.conn.response, 'POST',
+ self.conn.make_path(self.path))
+ return True
+
def delete(self, hdrs=None, parms=None):
if hdrs is None:
hdrs = {}
@@ -537,7 +556,6 @@ class Container(Base):
raise RequestError('Invalid format: %s' % format_type)
if format_type is None and 'format' in parms:
del parms['format']
-
status = self.conn.make_request('GET', self.path, hdrs=hdrs,
parms=parms, cfg=cfg)
if status == 200:
@@ -545,26 +563,38 @@ class Container(Base):
files = json.loads(self.conn.response.read())
for file_item in files:
- file_item['name'] = file_item['name'].encode('utf-8')
- file_item['content_type'] = file_item['content_type'].\
- encode('utf-8')
+ for key in ('name', 'subdir', 'content_type'):
+ if key in file_item:
+ file_item[key] = file_item[key].encode('utf-8')
return files
elif format_type == 'xml':
files = []
tree = minidom.parseString(self.conn.response.read())
- for x in tree.getElementsByTagName('object'):
+ container = tree.getElementsByTagName('container')[0]
+ for x in container.childNodes:
file_item = {}
- for key in ['name', 'hash', 'bytes', 'content_type',
- 'last_modified']:
-
- file_item[key] = x.getElementsByTagName(key)[0].\
- childNodes[0].nodeValue
+ if x.tagName == 'object':
+ for key in ['name', 'hash', 'bytes', 'content_type',
+ 'last_modified']:
+ file_item[key] = x.getElementsByTagName(key)[0].\
+ childNodes[0].nodeValue
+ elif x.tagName == 'subdir':
+ file_item['subdir'] = x.getElementsByTagName(
+ 'name')[0].childNodes[0].nodeValue
+ else:
+ raise ValueError('Found unexpected element %s'
+ % x.tagName)
files.append(file_item)
for file_item in files:
- file_item['name'] = file_item['name'].encode('utf-8')
- file_item['content_type'] = file_item['content_type'].\
- encode('utf-8')
+ if 'subdir' in file_item:
+ file_item['subdir'] = file_item['subdir'].\
+ encode('utf-8')
+ else:
+ file_item['name'] = file_item['name'].encode('utf-8')
+ file_item['content_type'] = file_item['content_type'].\
+ encode('utf-8')
+ file_item['bytes'] = int(file_item['bytes'])
return files
else:
content = self.conn.response.read()
@@ -593,7 +623,8 @@ class Container(Base):
if self.conn.response.status == 204:
required_fields = [['bytes_used', 'x-container-bytes-used'],
- ['object_count', 'x-container-object-count']]
+ ['object_count', 'x-container-object-count'],
+ ['last_modified', 'last-modified']]
optional_fields = [
['versions', 'x-versions-location'],
['tempurl_key', 'x-container-meta-temp-url-key'],
@@ -618,6 +649,7 @@ class File(Base):
self.chunked_write_in_progress = False
self.content_type = None
+ self.content_range = None
self.size = None
self.metadata = {}
@@ -633,6 +665,9 @@ class File(Base):
else:
headers['Content-Length'] = 0
+ if cfg.get('use_token'):
+ headers['X-Auth-Token'] = cfg.get('use_token')
+
if cfg.get('no_content_type'):
pass
elif self.content_type:
@@ -655,7 +690,7 @@ class File(Base):
block_size = 4096
if isinstance(data, str):
- data = StringIO.StringIO(data)
+ data = six.StringIO(data)
checksum = hashlib.md5()
buff = data.read(block_size)
@@ -681,7 +716,7 @@ class File(Base):
headers.update(hdrs)
if 'Destination' in headers:
- headers['Destination'] = urllib.quote(headers['Destination'])
+ headers['Destination'] = urllib.parse.quote(headers['Destination'])
return self.conn.make_request('COPY', self.path, hdrs=headers,
parms=parms) == 201
@@ -705,20 +740,20 @@ class File(Base):
if 'Destination-Account' in headers:
headers['Destination-Account'] = \
- urllib.quote(headers['Destination-Account'])
+ urllib.parse.quote(headers['Destination-Account'])
if 'Destination' in headers:
- headers['Destination'] = urllib.quote(headers['Destination'])
+ headers['Destination'] = urllib.parse.quote(headers['Destination'])
return self.conn.make_request('COPY', self.path, hdrs=headers,
parms=parms) == 201
- def delete(self, hdrs=None, parms=None):
+ def delete(self, hdrs=None, parms=None, cfg=None):
if hdrs is None:
hdrs = {}
if parms is None:
parms = {}
if self.conn.make_request('DELETE', self.path, hdrs=hdrs,
- parms=parms) != 204:
+ cfg=cfg, parms=parms) != 204:
raise ResponseError(self.conn.response, 'DELETE',
self.conn.make_path(self.path))
@@ -823,6 +858,8 @@ class File(Base):
for hdr in self.conn.response.getheaders():
if hdr[0].lower() == 'content-type':
self.content_type = hdr[1]
+ if hdr[0].lower() == 'content-range':
+ self.content_range = hdr[1]
if hasattr(buffer, 'write'):
scratch = self.conn.response.read(8192)
@@ -861,7 +898,7 @@ class File(Base):
finally:
fobj.close()
- def sync_metadata(self, metadata=None, cfg=None):
+ def sync_metadata(self, metadata=None, cfg=None, parms=None):
if metadata is None:
metadata = {}
if cfg is None:
@@ -877,8 +914,8 @@ class File(Base):
cfg.get('set_content_length')
else:
headers['Content-Length'] = 0
-
- self.conn.make_request('POST', self.path, hdrs=headers, cfg=cfg)
+ self.conn.make_request('POST', self.path, hdrs=headers,
+ parms=parms, cfg=cfg)
if self.conn.response.status not in (201, 202):
raise ResponseError(self.conn.response, 'POST',
@@ -931,7 +968,7 @@ class File(Base):
pass
self.size = int(os.fstat(data.fileno())[6])
else:
- data = StringIO.StringIO(data)
+ data = six.StringIO(data)
self.size = data.len
headers = self.make_headers(cfg=cfg)
@@ -983,7 +1020,7 @@ class File(Base):
if not self.write(data, hdrs=hdrs, parms=parms, cfg=cfg):
raise ResponseError(self.conn.response, 'PUT',
self.conn.make_path(self.path))
- self.md5 = self.compute_md5sum(StringIO.StringIO(data))
+ self.md5 = self.compute_md5sum(six.StringIO(data))
return data
def write_random_return_resp(self, size=None, hdrs=None, parms=None,
@@ -1000,5 +1037,28 @@ class File(Base):
return_resp=True)
if not resp:
raise ResponseError(self.conn.response)
- self.md5 = self.compute_md5sum(StringIO.StringIO(data))
+ self.md5 = self.compute_md5sum(six.StringIO(data))
return resp
+
+ def post(self, hdrs=None, parms=None, cfg=None, return_resp=False):
+ if hdrs is None:
+ hdrs = {}
+ if parms is None:
+ parms = {}
+ if cfg is None:
+ cfg = {}
+
+ headers = self.make_headers(cfg=cfg)
+ headers.update(hdrs)
+
+ self.conn.make_request('POST', self.path, hdrs=headers,
+ parms=parms, cfg=cfg)
+
+ if self.conn.response.status not in (201, 202):
+ raise ResponseError(self.conn.response, 'POST',
+ self.conn.make_path(self.path))
+
+ if return_resp:
+ return self.conn.response
+
+ return True
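
A hedged usage sketch of the client helpers added above, following the same Connection(config) pattern the teardown code uses; it assumes setup_package() has already populated tf.config from test.conf:

import test.functional as tf
from test.functional.swift_test_client import Account, Connection

conn = Connection(tf.config)
conn.authenticate()
account = Account(conn, tf.config.get('account', tf.config['username']))
cont = account.container('c1')
cont.create()
cont.update_metadata(hdrs={'x-versions-location': ''})  # new helper: clear versioning
obj = cont.file('o1')
obj.write_random(1024)
obj.post(hdrs={'X-Object-Meta-Color': 'blue'})          # new File.post() helper
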
diff --git a/test/functional/test_account.py b/test/functional/test_account.py
index 30a8e74..57bbe6b 100755
--- a/test/functional/test_account.py
+++ b/test/functional/test_account.py
@@ -15,12 +15,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import unittest
+import unittest2
import json
from uuid import uuid4
-from nose import SkipTest
-from string import letters
+from unittest2 import SkipTest
+from string import ascii_letters
+from six.moves import range
from swift.common.middleware.acl import format_acl
from test.functional import check_response, retry, requires_acls, \
@@ -28,7 +29,15 @@ from test.functional import check_response, retry, requires_acls, \
import test.functional as tf
-class TestAccount(unittest.TestCase):
+def setUpModule():
+ tf.setup_package()
+
+
+def tearDownModule():
+ tf.teardown_package()
+
+
+class TestAccount(unittest2.TestCase):
def setUp(self):
self.max_meta_count = load_constraint('max_meta_count')
@@ -88,22 +97,22 @@ class TestAccount(unittest.TestCase):
self.assertEqual(resp.status, 204)
resp = retry(head)
resp.read()
- self.assert_(resp.status in (200, 204), resp.status)
- self.assertEqual(resp.getheader('x-account-meta-test'), None)
+ self.assertIn(resp.status, (200, 204))
+ self.assertIsNone(resp.getheader('x-account-meta-test'))
resp = retry(get)
resp.read()
- self.assert_(resp.status in (200, 204), resp.status)
- self.assertEqual(resp.getheader('x-account-meta-test'), None)
+ self.assertIn(resp.status, (200, 204))
+ self.assertIsNone(resp.getheader('x-account-meta-test'))
resp = retry(post, 'Value')
resp.read()
self.assertEqual(resp.status, 204)
resp = retry(head)
resp.read()
- self.assert_(resp.status in (200, 204), resp.status)
+ self.assertIn(resp.status, (200, 204))
self.assertEqual(resp.getheader('x-account-meta-test'), 'Value')
resp = retry(get)
resp.read()
- self.assert_(resp.status in (200, 204), resp.status)
+ self.assertIn(resp.status, (200, 204))
self.assertEqual(resp.getheader('x-account-meta-test'), 'Value')
def test_invalid_acls(self):
@@ -118,7 +127,7 @@ class TestAccount(unittest.TestCase):
# needs to be an acceptable header size
num_keys = 8
max_key_size = load_constraint('max_header_size') / num_keys
- acl = {'admin': [c * max_key_size for c in letters[:num_keys]]}
+ acl = {'admin': [c * max_key_size for c in ascii_letters[:num_keys]]}
headers = {'x-account-access-control': format_acl(
version=2, acl_dict=acl)}
resp = retry(post, headers=headers, use_account=1)
@@ -126,7 +135,8 @@ class TestAccount(unittest.TestCase):
self.assertEqual(resp.status, 400)
# and again a touch smaller
- acl = {'admin': [c * max_key_size for c in letters[:num_keys - 1]]}
+ acl = {'admin': [c * max_key_size for c
+ in ascii_letters[:num_keys - 1]]}
headers = {'x-account-access-control': format_acl(
version=2, acl_dict=acl)}
resp = retry(post, headers=headers, use_account=1)
@@ -154,7 +164,7 @@ class TestAccount(unittest.TestCase):
resp = retry(post, headers, use_account=1)
resp.read()
self.assertEqual(resp.status, 400)
- self.assertEqual(resp.getheader('X-Account-Access-Control'), None)
+ self.assertIsNone(resp.getheader('X-Account-Access-Control'))
@requires_acls
def test_invalid_acl_values(self):
@@ -170,7 +180,7 @@ class TestAccount(unittest.TestCase):
resp = retry(post, headers=headers, use_account=1)
resp.read()
self.assertEqual(resp.status, 400)
- self.assertEqual(resp.getheader('X-Account-Access-Control'), None)
+ self.assertIsNone(resp.getheader('X-Account-Access-Control'))
@requires_acls
def test_read_only_acl(self):
@@ -189,7 +199,7 @@ class TestAccount(unittest.TestCase):
# cannot read account
resp = retry(get, use_account=3)
resp.read()
- self.assertEquals(resp.status, 403)
+ self.assertEqual(resp.status, 403)
# grant read access
acl_user = tf.swift_test_user[2]
@@ -203,9 +213,9 @@ class TestAccount(unittest.TestCase):
# read-only can read account headers
resp = retry(get, use_account=3)
resp.read()
- self.assert_(resp.status in (200, 204))
+ self.assertIn(resp.status, (200, 204))
# but not acls
- self.assertEqual(resp.getheader('X-Account-Access-Control'), None)
+ self.assertIsNone(resp.getheader('X-Account-Access-Control'))
# read-only can not write metadata
headers = {'x-account-meta-test': 'value'}
@@ -220,7 +230,7 @@ class TestAccount(unittest.TestCase):
self.assertEqual(resp.status, 204)
resp = retry(get, use_account=3)
resp.read()
- self.assert_(resp.status in (200, 204))
+ self.assertIn(resp.status, (200, 204))
self.assertEqual(resp.getheader('X-Account-Meta-Test'), 'value')
@requires_acls
@@ -240,7 +250,7 @@ class TestAccount(unittest.TestCase):
# cannot read account
resp = retry(get, use_account=3)
resp.read()
- self.assertEquals(resp.status, 403)
+ self.assertEqual(resp.status, 403)
# grant read-write access
acl_user = tf.swift_test_user[2]
@@ -254,9 +264,9 @@ class TestAccount(unittest.TestCase):
# read-write can read account headers
resp = retry(get, use_account=3)
resp.read()
- self.assert_(resp.status in (200, 204))
+ self.assertIn(resp.status, (200, 204))
# but not acls
- self.assertEqual(resp.getheader('X-Account-Access-Control'), None)
+ self.assertIsNone(resp.getheader('X-Account-Access-Control'))
# read-write can not write account metadata
headers = {'x-account-meta-test': 'value'}
@@ -281,7 +291,7 @@ class TestAccount(unittest.TestCase):
# cannot read account
resp = retry(get, use_account=3)
resp.read()
- self.assertEquals(resp.status, 403)
+ self.assertEqual(resp.status, 403)
# grant admin access
acl_user = tf.swift_test_user[2]
@@ -295,7 +305,7 @@ class TestAccount(unittest.TestCase):
# admin can read account headers
resp = retry(get, use_account=3)
resp.read()
- self.assert_(resp.status in (200, 204))
+ self.assertIn(resp.status, (200, 204))
# including acls
self.assertEqual(resp.getheader('X-Account-Access-Control'),
acl_json_str)
@@ -308,7 +318,7 @@ class TestAccount(unittest.TestCase):
self.assertEqual(resp.status, 204)
resp = retry(get, use_account=3)
resp.read()
- self.assert_(resp.status in (200, 204))
+ self.assertIn(resp.status, (200, 204))
self.assertEqual(resp.getheader('X-Account-Meta-Test'), value)
# admin can even revoke their own access
@@ -320,7 +330,7 @@ class TestAccount(unittest.TestCase):
# and again, cannot read account
resp = retry(get, use_account=3)
resp.read()
- self.assertEquals(resp.status, 403)
+ self.assertEqual(resp.status, 403)
@requires_acls
def test_protected_tempurl(self):
@@ -358,11 +368,11 @@ class TestAccount(unittest.TestCase):
# read-only tester3 can read account metadata
resp = retry(get, use_account=3)
resp.read()
- self.assert_(resp.status in (200, 204),
- 'Expected status in (200, 204), got %s' % resp.status)
+ self.assertIn(resp.status, (200, 204),
+ 'Expected status in (200, 204), got %s' % resp.status)
self.assertEqual(resp.getheader('X-Account-Meta-Test'), value)
# but not temp-url-key
- self.assertEqual(resp.getheader('X-Account-Meta-Temp-Url-Key'), None)
+ self.assertIsNone(resp.getheader('X-Account-Meta-Temp-Url-Key'))
# grant read-write access to tester3
acl_user = tf.swift_test_user[2]
@@ -376,11 +386,11 @@ class TestAccount(unittest.TestCase):
# read-write tester3 can read account metadata
resp = retry(get, use_account=3)
resp.read()
- self.assert_(resp.status in (200, 204),
- 'Expected status in (200, 204), got %s' % resp.status)
+ self.assertIn(resp.status, (200, 204),
+ 'Expected status in (200, 204), got %s' % resp.status)
self.assertEqual(resp.getheader('X-Account-Meta-Test'), value)
# but not temp-url-key
- self.assertEqual(resp.getheader('X-Account-Meta-Temp-Url-Key'), None)
+ self.assertIsNone(resp.getheader('X-Account-Meta-Temp-Url-Key'))
# grant admin access to tester3
acl_user = tf.swift_test_user[2]
@@ -394,8 +404,8 @@ class TestAccount(unittest.TestCase):
# admin tester3 can read account metadata
resp = retry(get, use_account=3)
resp.read()
- self.assert_(resp.status in (200, 204),
- 'Expected status in (200, 204), got %s' % resp.status)
+ self.assertIn(resp.status, (200, 204),
+ 'Expected status in (200, 204), got %s' % resp.status)
self.assertEqual(resp.getheader('X-Account-Meta-Test'), value)
# including temp-url-key
self.assertEqual(resp.getheader('X-Account-Meta-Temp-Url-Key'),
@@ -411,8 +421,8 @@ class TestAccount(unittest.TestCase):
self.assertEqual(resp.status, 204)
resp = retry(get, use_account=3)
resp.read()
- self.assert_(resp.status in (200, 204),
- 'Expected status in (200, 204), got %s' % resp.status)
+ self.assertIn(resp.status, (200, 204),
+ 'Expected status in (200, 204), got %s' % resp.status)
self.assertEqual(resp.getheader('X-Account-Meta-Temp-Url-Key'),
secret)
@@ -450,13 +460,13 @@ class TestAccount(unittest.TestCase):
use_account=1)
resp.read()
self.assertEqual(resp.status, 204)
- self.assertEqual(resp.getheader('X-Account-Access-Control'), None)
+ self.assertIsNone(resp.getheader('X-Account-Access-Control'))
# User1 can GET their own empty account
resp = retry(get, use_account=1)
resp.read()
self.assertEqual(resp.status // 100, 2)
- self.assertEqual(resp.getheader('X-Account-Access-Control'), None)
+ self.assertIsNone(resp.getheader('X-Account-Access-Control'))
# User2 can't GET User1's account
resp = retry(get, use_account=2, url_account=1)
@@ -500,7 +510,7 @@ class TestAccount(unittest.TestCase):
resp = retry(head, use_account=2, url_account=1)
resp.read()
self.assertEqual(resp.status, 204)
- self.assertEqual(resp.getheader('x-account-access-control'), None)
+ self.assertIsNone(resp.getheader('x-account-access-control'))
# User2 can PUT and DELETE a container
resp = retry(put, use_account=2, url_account=1,
@@ -525,7 +535,7 @@ class TestAccount(unittest.TestCase):
resp = retry(head, use_account=2, url_account=1)
resp.read()
self.assertEqual(resp.status, 204)
- self.assertEqual(resp.getheader('x-account-access-control'), None)
+ self.assertIsNone(resp.getheader('x-account-access-control'))
# User2 can't PUT a container
resp = retry(put, use_account=2, url_account=1,
@@ -563,13 +573,13 @@ class TestAccount(unittest.TestCase):
resp = retry(post, headers={'X-Account-Access-Control': '{}'})
resp.read()
self.assertEqual(resp.status, 204)
- self.assertEqual(resp.getheader('X-Account-Access-Control'), None)
+ self.assertIsNone(resp.getheader('X-Account-Access-Control'))
# User1 can GET their own empty account
resp = retry(get)
resp.read()
self.assertEqual(resp.status // 100, 2)
- self.assertEqual(resp.getheader('X-Account-Access-Control'), None)
+ self.assertIsNone(resp.getheader('X-Account-Access-Control'))
# User1 can POST non-empty data
acl_json = '{"admin":["bob"]}'
@@ -622,13 +632,13 @@ class TestAccount(unittest.TestCase):
resp = retry(post, headers={'X-Account-Access-Control': '{}'})
resp.read()
self.assertEqual(resp.status, 204)
- self.assertEqual(resp.getheader('X-Account-Access-Control'), None)
+ self.assertIsNone(resp.getheader('X-Account-Access-Control'))
# User1 can GET their own empty account
resp = retry(get)
resp.read()
self.assertEqual(resp.status // 100, 2)
- self.assertEqual(resp.getheader('X-Account-Access-Control'), None)
+ self.assertIsNone(resp.getheader('X-Account-Access-Control'))
# User1 can POST non-empty data
acl_json = '{"admin":["bob"]}'
@@ -688,17 +698,17 @@ class TestAccount(unittest.TestCase):
if (tf.web_front_end == 'integral'):
resp = retry(post, uni_key, '1')
resp.read()
- self.assertTrue(resp.status in (201, 204))
+ self.assertIn(resp.status, (201, 204))
resp = retry(head)
resp.read()
- self.assert_(resp.status in (200, 204), resp.status)
+ self.assertIn(resp.status, (200, 204))
self.assertEqual(resp.getheader(uni_key.encode('utf-8')), '1')
resp = retry(post, 'X-Account-Meta-uni', uni_value)
resp.read()
self.assertEqual(resp.status, 204)
resp = retry(head)
resp.read()
- self.assert_(resp.status in (200, 204), resp.status)
+ self.assertIn(resp.status, (200, 204))
self.assertEqual(resp.getheader('X-Account-Meta-uni'),
uni_value.encode('utf-8'))
if (tf.web_front_end == 'integral'):
@@ -707,7 +717,7 @@ class TestAccount(unittest.TestCase):
self.assertEqual(resp.status, 204)
resp = retry(head)
resp.read()
- self.assert_(resp.status in (200, 204), resp.status)
+ self.assertIn(resp.status, (200, 204))
self.assertEqual(resp.getheader(uni_key.encode('utf-8')),
uni_value.encode('utf-8'))
@@ -729,14 +739,14 @@ class TestAccount(unittest.TestCase):
self.assertEqual(resp.status, 204)
resp = retry(head)
resp.read()
- self.assert_(resp.status in (200, 204), resp.status)
+ self.assertIn(resp.status, (200, 204))
self.assertEqual(resp.getheader('x-account-meta-one'), '1')
resp = retry(post, 'X-Account-Meta-Two', '2')
resp.read()
self.assertEqual(resp.status, 204)
resp = retry(head)
resp.read()
- self.assert_(resp.status in (200, 204), resp.status)
+ self.assertIn(resp.status, (200, 204))
self.assertEqual(resp.getheader('x-account-meta-one'), '1')
self.assertEqual(resp.getheader('x-account-meta-two'), '2')
@@ -790,13 +800,13 @@ class TestAccount(unittest.TestCase):
resp = retry(post, headers)
headers = {}
- for x in xrange(self.max_meta_count):
+ for x in range(self.max_meta_count):
headers['X-Account-Meta-%d' % x] = 'v'
resp = retry(post, headers)
resp.read()
self.assertEqual(resp.status, 204)
headers = {}
- for x in xrange(self.max_meta_count + 1):
+ for x in range(self.max_meta_count + 1):
headers['X-Account-Meta-%d' % x] = 'v'
resp = retry(post, headers)
resp.read()
@@ -827,14 +837,29 @@ class TestAccount(unittest.TestCase):
resp = retry(post, headers)
resp.read()
self.assertEqual(resp.status, 204)
+        # this POST includes metadata whose size is over the limit
headers['X-Account-Meta-k'] = \
- 'v' * (self.max_meta_overall_size - size)
+ 'x' * (self.max_meta_overall_size - size)
+ resp = retry(post, headers)
+ resp.read()
+ self.assertEqual(resp.status, 400)
+ # this POST would be ok and the aggregate backend metadata
+ # size is on the border
+ headers = {'X-Account-Meta-k':
+ 'y' * (self.max_meta_overall_size - size - 1)}
+ resp = retry(post, headers)
+ resp.read()
+ self.assertEqual(resp.status, 204)
+        # this last POST would be ok by itself but takes the aggregate
+        # backend metadata size over the limit
+ headers = {'X-Account-Meta-k':
+ 'z' * (self.max_meta_overall_size - size)}
resp = retry(post, headers)
resp.read()
self.assertEqual(resp.status, 400)
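
The boundary assertions just added exercise the aggregate backend metadata-size constraint rather than a single request's header size. A minimal sketch of the kind of accounting being tested, assuming the size is counted as header-name length plus value length (this is only an illustration, not Swift's actual constraint code):

def would_exceed_overall_limit(existing_meta, incoming_meta, max_overall_size):
    # Incoming user metadata replaces existing values key-by-key, so the
    # aggregate size is computed over the merged mapping, not just the
    # headers present in this one POST.
    merged = dict(existing_meta)
    merged.update(incoming_meta)
    total = sum(len(name) + len(value) for name, value in merged.items())
    return total > max_overall_size
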
-class TestAccountInNonDefaultDomain(unittest.TestCase):
+class TestAccountInNonDefaultDomain(unittest2.TestCase):
def setUp(self):
if tf.skip or tf.skip2 or tf.skip_if_not_v3:
raise SkipTest('AUTH VERSION 3 SPECIFIC TEST')
@@ -859,8 +884,8 @@ class TestAccountInNonDefaultDomain(unittest.TestCase):
resp = retry(head, use_account=4)
resp.read()
self.assertEqual(resp.status, 204)
- self.assertTrue('X-Account-Project-Domain-Id' in resp.headers)
+ self.assertIn('X-Account-Project-Domain-Id', resp.headers)
if __name__ == '__main__':
- unittest.main()
+ unittest2.main()
diff --git a/test/functional/test_container.py b/test/functional/test_container.py
index d7896a4..5abaaa5 100755
--- a/test/functional/test_container.py
+++ b/test/functional/test_container.py
@@ -16,16 +16,26 @@
# limitations under the License.
import json
-import unittest
-from nose import SkipTest
+import unittest2
+from unittest2 import SkipTest
from uuid import uuid4
-from test.functional import check_response, retry, requires_acls, \
- load_constraint, requires_policies
+from test.functional import check_response, cluster_info, retry, \
+ requires_acls, load_constraint, requires_policies
import test.functional as tf
+from six.moves import range
-class TestContainer(unittest.TestCase):
+
+def setUpModule():
+ tf.setup_package()
+
+
+def tearDownModule():
+ tf.teardown_package()
+
+
+class TestContainer(unittest2.TestCase):
def setUp(self):
if tf.skip:
@@ -70,7 +80,7 @@ class TestContainer(unittest.TestCase):
body = resp.read()
if resp.status == 404:
break
- self.assert_(resp.status // 100 == 2, resp.status)
+ self.assertEqual(resp.status // 100, 2, resp.status)
objs = json.loads(body)
if not objs:
break
@@ -91,7 +101,7 @@ class TestContainer(unittest.TestCase):
# container may have not been created
resp = retry(delete, self.container)
resp.read()
- self.assert_(resp.status in (204, 404))
+ self.assertIn(resp.status, (204, 404))
def test_multi_metadata(self):
if tf.skip:
@@ -112,14 +122,14 @@ class TestContainer(unittest.TestCase):
self.assertEqual(resp.status, 204)
resp = retry(head)
resp.read()
- self.assert_(resp.status in (200, 204), resp.status)
+ self.assertIn(resp.status, (200, 204))
self.assertEqual(resp.getheader('x-container-meta-one'), '1')
resp = retry(post, 'X-Container-Meta-Two', '2')
resp.read()
self.assertEqual(resp.status, 204)
resp = retry(head)
resp.read()
- self.assert_(resp.status in (200, 204), resp.status)
+ self.assertIn(resp.status, (200, 204))
self.assertEqual(resp.getheader('x-container-meta-one'), '1')
self.assertEqual(resp.getheader('x-container-meta-two'), '2')
@@ -145,14 +155,14 @@ class TestContainer(unittest.TestCase):
self.assertEqual(resp.status, 204)
resp = retry(head)
resp.read()
- self.assert_(resp.status in (200, 204), resp.status)
+ self.assertIn(resp.status, (200, 204))
self.assertEqual(resp.getheader(uni_key.encode('utf-8')), '1')
resp = retry(post, 'X-Container-Meta-uni', uni_value)
resp.read()
self.assertEqual(resp.status, 204)
resp = retry(head)
resp.read()
- self.assert_(resp.status in (200, 204), resp.status)
+ self.assertIn(resp.status, (200, 204))
self.assertEqual(resp.getheader('X-Container-Meta-uni'),
uni_value.encode('utf-8'))
if (tf.web_front_end == 'integral'):
@@ -161,7 +171,7 @@ class TestContainer(unittest.TestCase):
self.assertEqual(resp.status, 204)
resp = retry(head)
resp.read()
- self.assert_(resp.status in (200, 204), resp.status)
+ self.assertIn(resp.status, (200, 204))
self.assertEqual(resp.getheader(uni_key.encode('utf-8')),
uni_value.encode('utf-8'))
@@ -196,11 +206,11 @@ class TestContainer(unittest.TestCase):
self.assertEqual(resp.status, 201)
resp = retry(head, name)
resp.read()
- self.assert_(resp.status in (200, 204), resp.status)
+ self.assertIn(resp.status, (200, 204))
self.assertEqual(resp.getheader('x-container-meta-test'), 'Value')
resp = retry(get, name)
resp.read()
- self.assert_(resp.status in (200, 204), resp.status)
+ self.assertIn(resp.status, (200, 204))
self.assertEqual(resp.getheader('x-container-meta-test'), 'Value')
resp = retry(delete, name)
resp.read()
@@ -212,12 +222,12 @@ class TestContainer(unittest.TestCase):
self.assertEqual(resp.status, 201)
resp = retry(head, name)
resp.read()
- self.assert_(resp.status in (200, 204), resp.status)
- self.assertEqual(resp.getheader('x-container-meta-test'), None)
+ self.assertIn(resp.status, (200, 204))
+ self.assertIsNone(resp.getheader('x-container-meta-test'))
resp = retry(get, name)
resp.read()
- self.assert_(resp.status in (200, 204), resp.status)
- self.assertEqual(resp.getheader('x-container-meta-test'), None)
+ self.assertIn(resp.status, (200, 204))
+ self.assertIsNone(resp.getheader('x-container-meta-test'))
resp = retry(delete, name)
resp.read()
self.assertEqual(resp.status, 204)
@@ -244,22 +254,22 @@ class TestContainer(unittest.TestCase):
resp = retry(head)
resp.read()
- self.assert_(resp.status in (200, 204), resp.status)
- self.assertEqual(resp.getheader('x-container-meta-test'), None)
+ self.assertIn(resp.status, (200, 204))
+ self.assertIsNone(resp.getheader('x-container-meta-test'))
resp = retry(get)
resp.read()
- self.assert_(resp.status in (200, 204), resp.status)
- self.assertEqual(resp.getheader('x-container-meta-test'), None)
+ self.assertIn(resp.status, (200, 204))
+ self.assertIsNone(resp.getheader('x-container-meta-test'))
resp = retry(post, 'Value')
resp.read()
self.assertEqual(resp.status, 204)
resp = retry(head)
resp.read()
- self.assert_(resp.status in (200, 204), resp.status)
+ self.assertIn(resp.status, (200, 204))
self.assertEqual(resp.getheader('x-container-meta-test'), 'Value')
resp = retry(get)
resp.read()
- self.assert_(resp.status in (200, 204), resp.status)
+ self.assertIn(resp.status, (200, 204))
self.assertEqual(resp.getheader('x-container-meta-test'), 'Value')
def test_PUT_bad_metadata(self):
@@ -319,7 +329,7 @@ class TestContainer(unittest.TestCase):
name = uuid4().hex
headers = {}
- for x in xrange(self.max_meta_count):
+ for x in range(self.max_meta_count):
headers['X-Container-Meta-%d' % x] = 'v'
resp = retry(put, name, headers)
resp.read()
@@ -329,7 +339,7 @@ class TestContainer(unittest.TestCase):
self.assertEqual(resp.status, 204)
name = uuid4().hex
headers = {}
- for x in xrange(self.max_meta_count + 1):
+ for x in range(self.max_meta_count + 1):
headers['X-Container-Meta-%d' % x] = 'v'
resp = retry(put, name, headers)
resp.read()
@@ -412,13 +422,13 @@ class TestContainer(unittest.TestCase):
return check_response(conn)
headers = {}
- for x in xrange(self.max_meta_count):
+ for x in range(self.max_meta_count):
headers['X-Container-Meta-%d' % x] = 'v'
resp = retry(post, headers)
resp.read()
self.assertEqual(resp.status, 204)
headers = {}
- for x in xrange(self.max_meta_count + 1):
+ for x in range(self.max_meta_count + 1):
headers['X-Container-Meta-%d' % x] = 'v'
resp = retry(post, headers)
resp.read()
@@ -449,8 +459,23 @@ class TestContainer(unittest.TestCase):
resp = retry(post, headers)
resp.read()
self.assertEqual(resp.status, 204)
+        # this POST includes metadata whose size is over the limit
headers['X-Container-Meta-k'] = \
- 'v' * (self.max_meta_overall_size - size)
+ 'x' * (self.max_meta_overall_size - size)
+ resp = retry(post, headers)
+ resp.read()
+ self.assertEqual(resp.status, 400)
+ # this POST would be ok and the aggregate backend metadata
+ # size is on the border
+ headers = {'X-Container-Meta-k':
+ 'y' * (self.max_meta_overall_size - size - 1)}
+ resp = retry(post, headers)
+ resp.read()
+ self.assertEqual(resp.status, 204)
+        # this last POST would be ok by itself but takes the aggregate
+        # backend metadata size over the limit
+ headers = {'X-Container-Meta-k':
+ 'z' * (self.max_meta_overall_size - size)}
resp = retry(post, headers)
resp.read()
self.assertEqual(resp.status, 400)
@@ -467,7 +492,7 @@ class TestContainer(unittest.TestCase):
resp = retry(get)
raise Exception('Should not have been able to GET')
except Exception as err:
- self.assert_(str(err).startswith('No result after '), err)
+ self.assertTrue(str(err).startswith('No result after '), err)
def post(url, token, parsed, conn):
conn.request('POST', parsed.path + '/' + self.name, '',
@@ -494,7 +519,7 @@ class TestContainer(unittest.TestCase):
resp = retry(get)
raise Exception('Should not have been able to GET')
except Exception as err:
- self.assert_(str(err).startswith('No result after '), err)
+ self.assertTrue(str(err).startswith('No result after '), err)
def test_cross_account_container(self):
if tf.skip or tf.skip2:
@@ -707,12 +732,13 @@ class TestContainer(unittest.TestCase):
def put(url, token, parsed, conn, name):
conn.request('PUT', parsed.path + '/%s' % name, '',
{'X-Auth-Token': token})
+ print("PUT X-Auth-Token:%s"%(token))
return check_response(conn)
# cannot list containers
resp = retry(get, use_account=3)
resp.read()
- self.assertEquals(resp.status, 403)
+ self.assertEqual(resp.status, 403)
# grant read-only access
acl_user = tf.swift_test_user[2]
@@ -725,23 +751,23 @@ class TestContainer(unittest.TestCase):
# read-only can list containers
resp = retry(get, use_account=3)
listing = resp.read()
- self.assertEquals(resp.status, 200)
- self.assert_(self.name in listing)
+ self.assertEqual(resp.status, 200)
+ self.assertIn(self.name, listing)
# read-only can not create containers
new_container_name = str(uuid4())
resp = retry(put, new_container_name, use_account=3)
resp.read()
- self.assertEquals(resp.status, 403)
+ self.assertEqual(resp.status, 403)
# but it can see newly created ones
resp = retry(put, new_container_name, use_account=1)
resp.read()
- self.assertEquals(resp.status, 201)
+ self.assertEqual(resp.status, 201)
resp = retry(get, use_account=3)
listing = resp.read()
- self.assertEquals(resp.status, 200)
- self.assert_(new_container_name in listing)
+ self.assertEqual(resp.status, 200)
+ self.assertIn(new_container_name, listing)
@requires_acls
def test_read_only_acl_metadata(self):
@@ -771,13 +797,13 @@ class TestContainer(unittest.TestCase):
self.assertEqual(resp.status, 204)
resp = retry(get, self.name, use_account=1)
resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertEqual(resp.status, 204)
self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
# cannot see metadata
resp = retry(get, self.name, use_account=3)
resp.read()
- self.assertEquals(resp.status, 403)
+ self.assertEqual(resp.status, 403)
# grant read-only access
acl_user = tf.swift_test_user[2]
@@ -797,7 +823,7 @@ class TestContainer(unittest.TestCase):
# read-only can read container metadata
resp = retry(get, self.name, use_account=3)
resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertEqual(resp.status, 204)
self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
@requires_acls
@@ -827,7 +853,7 @@ class TestContainer(unittest.TestCase):
# cannot list containers
resp = retry(get, use_account=3)
resp.read()
- self.assertEquals(resp.status, 403)
+ self.assertEqual(resp.status, 403)
# grant read-write access
acl_user = tf.swift_test_user[2]
@@ -840,36 +866,36 @@ class TestContainer(unittest.TestCase):
# can list containers
resp = retry(get, use_account=3)
listing = resp.read()
- self.assertEquals(resp.status, 200)
- self.assert_(self.name in listing)
+ self.assertEqual(resp.status, 200)
+ self.assertIn(self.name, listing)
# can create new containers
new_container_name = str(uuid4())
resp = retry(put, new_container_name, use_account=3)
resp.read()
- self.assertEquals(resp.status, 201)
+ self.assertEqual(resp.status, 201)
resp = retry(get, use_account=3)
listing = resp.read()
- self.assertEquals(resp.status, 200)
- self.assert_(new_container_name in listing)
+ self.assertEqual(resp.status, 200)
+ self.assertIn(new_container_name, listing)
# can also delete them
resp = retry(delete, new_container_name, use_account=3)
resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertEqual(resp.status, 204)
resp = retry(get, use_account=3)
listing = resp.read()
- self.assertEquals(resp.status, 200)
- self.assert_(new_container_name not in listing)
+ self.assertEqual(resp.status, 200)
+ self.assertNotIn(new_container_name, listing)
# even if they didn't create them
empty_container_name = str(uuid4())
resp = retry(put, empty_container_name, use_account=1)
resp.read()
- self.assertEquals(resp.status, 201)
+ self.assertEqual(resp.status, 201)
resp = retry(delete, empty_container_name, use_account=3)
resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertEqual(resp.status, 204)
@requires_acls
def test_read_write_acl_metadata(self):
@@ -899,13 +925,13 @@ class TestContainer(unittest.TestCase):
self.assertEqual(resp.status, 204)
resp = retry(get, self.name, use_account=1)
resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertEqual(resp.status, 204)
self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
# cannot see metadata
resp = retry(get, self.name, use_account=3)
resp.read()
- self.assertEquals(resp.status, 403)
+ self.assertEqual(resp.status, 403)
# grant read-write access
acl_user = tf.swift_test_user[2]
@@ -918,7 +944,7 @@ class TestContainer(unittest.TestCase):
# read-write can read container metadata
resp = retry(get, self.name, use_account=3)
resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertEqual(resp.status, 204)
self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
# read-write can also write container metadata
@@ -926,21 +952,21 @@ class TestContainer(unittest.TestCase):
headers = {'x-container-meta-test': new_value}
resp = retry(post, self.name, headers=headers, use_account=3)
resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertEqual(resp.status, 204)
resp = retry(get, self.name, use_account=3)
resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertEqual(resp.status, 204)
self.assertEqual(resp.getheader('X-Container-Meta-Test'), new_value)
# and remove it
headers = {'x-remove-container-meta-test': 'true'}
resp = retry(post, self.name, headers=headers, use_account=3)
resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertEqual(resp.status, 204)
resp = retry(get, self.name, use_account=3)
resp.read()
- self.assertEquals(resp.status, 204)
- self.assertEqual(resp.getheader('X-Container-Meta-Test'), None)
+ self.assertEqual(resp.status, 204)
+ self.assertIsNone(resp.getheader('X-Container-Meta-Test'))
@requires_acls
def test_admin_acl_listing(self):
@@ -969,7 +995,7 @@ class TestContainer(unittest.TestCase):
# cannot list containers
resp = retry(get, use_account=3)
resp.read()
- self.assertEquals(resp.status, 403)
+ self.assertEqual(resp.status, 403)
# grant admin access
acl_user = tf.swift_test_user[2]
@@ -982,36 +1008,36 @@ class TestContainer(unittest.TestCase):
# can list containers
resp = retry(get, use_account=3)
listing = resp.read()
- self.assertEquals(resp.status, 200)
- self.assert_(self.name in listing)
+ self.assertEqual(resp.status, 200)
+ self.assertIn(self.name, listing)
# can create new containers
new_container_name = str(uuid4())
resp = retry(put, new_container_name, use_account=3)
resp.read()
- self.assertEquals(resp.status, 201)
+ self.assertEqual(resp.status, 201)
resp = retry(get, use_account=3)
listing = resp.read()
- self.assertEquals(resp.status, 200)
- self.assert_(new_container_name in listing)
+ self.assertEqual(resp.status, 200)
+ self.assertIn(new_container_name, listing)
# can also delete them
resp = retry(delete, new_container_name, use_account=3)
resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertEqual(resp.status, 204)
resp = retry(get, use_account=3)
listing = resp.read()
- self.assertEquals(resp.status, 200)
- self.assert_(new_container_name not in listing)
+ self.assertEqual(resp.status, 200)
+ self.assertNotIn(new_container_name, listing)
# even if they didn't create them
empty_container_name = str(uuid4())
resp = retry(put, empty_container_name, use_account=1)
resp.read()
- self.assertEquals(resp.status, 201)
+ self.assertEqual(resp.status, 201)
resp = retry(delete, empty_container_name, use_account=3)
resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertEqual(resp.status, 204)
@requires_acls
def test_admin_acl_metadata(self):
@@ -1041,13 +1067,13 @@ class TestContainer(unittest.TestCase):
self.assertEqual(resp.status, 204)
resp = retry(get, self.name, use_account=1)
resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertEqual(resp.status, 204)
self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
# cannot see metadata
resp = retry(get, self.name, use_account=3)
resp.read()
- self.assertEquals(resp.status, 403)
+ self.assertEqual(resp.status, 403)
# grant access
acl_user = tf.swift_test_user[2]
@@ -1060,7 +1086,7 @@ class TestContainer(unittest.TestCase):
# can read container metadata
resp = retry(get, self.name, use_account=3)
resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertEqual(resp.status, 204)
self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
# can also write container metadata
@@ -1068,21 +1094,21 @@ class TestContainer(unittest.TestCase):
headers = {'x-container-meta-test': new_value}
resp = retry(post, self.name, headers=headers, use_account=3)
resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertEqual(resp.status, 204)
resp = retry(get, self.name, use_account=3)
resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertEqual(resp.status, 204)
self.assertEqual(resp.getheader('X-Container-Meta-Test'), new_value)
# and remove it
headers = {'x-remove-container-meta-test': 'true'}
resp = retry(post, self.name, headers=headers, use_account=3)
resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertEqual(resp.status, 204)
resp = retry(get, self.name, use_account=3)
resp.read()
- self.assertEquals(resp.status, 204)
- self.assertEqual(resp.getheader('X-Container-Meta-Test'), None)
+ self.assertEqual(resp.status, 204)
+ self.assertIsNone(resp.getheader('X-Container-Meta-Test'))
@requires_acls
def test_protected_container_sync(self):
@@ -1115,7 +1141,7 @@ class TestContainer(unittest.TestCase):
self.assertEqual(resp.status, 204)
resp = retry(get, self.name, use_account=1)
resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertEqual(resp.status, 204)
self.assertEqual(resp.getheader('X-Container-Sync-Key'), 'secret')
self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
@@ -1130,10 +1156,10 @@ class TestContainer(unittest.TestCase):
# can read container metadata
resp = retry(get, self.name, use_account=3)
resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertEqual(resp.status, 204)
self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
# but not sync-key
- self.assertEqual(resp.getheader('X-Container-Sync-Key'), None)
+ self.assertIsNone(resp.getheader('X-Container-Sync-Key'))
# and can not write
headers = {'x-container-sync-key': str(uuid4())}
@@ -1152,15 +1178,15 @@ class TestContainer(unittest.TestCase):
# can read container metadata
resp = retry(get, self.name, use_account=3)
resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertEqual(resp.status, 204)
self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
# but not sync-key
- self.assertEqual(resp.getheader('X-Container-Sync-Key'), None)
+ self.assertIsNone(resp.getheader('X-Container-Sync-Key'))
# sanity check sync-key w/ account1
resp = retry(get, self.name, use_account=1)
resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertEqual(resp.status, 204)
self.assertEqual(resp.getheader('X-Container-Sync-Key'), 'secret')
# and can write
@@ -1174,7 +1200,7 @@ class TestContainer(unittest.TestCase):
self.assertEqual(resp.status, 204)
resp = retry(get, self.name, use_account=1) # validate w/ account1
resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertEqual(resp.status, 204)
self.assertEqual(resp.getheader('X-Container-Meta-Test'), new_value)
# but can not write sync-key
self.assertEqual(resp.getheader('X-Container-Sync-Key'), 'secret')
@@ -1190,7 +1216,7 @@ class TestContainer(unittest.TestCase):
# admin can read container metadata
resp = retry(get, self.name, use_account=3)
resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertEqual(resp.status, 204)
self.assertEqual(resp.getheader('X-Container-Meta-Test'), new_value)
# and ALSO sync-key
self.assertEqual(resp.getheader('X-Container-Sync-Key'), 'secret')
@@ -1203,7 +1229,7 @@ class TestContainer(unittest.TestCase):
self.assertEqual(resp.status, 204)
resp = retry(get, self.name, use_account=3)
resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertEqual(resp.status, 204)
self.assertEqual(resp.getheader('X-Container-Sync-Key'), new_secret)
@requires_acls
@@ -1238,7 +1264,7 @@ class TestContainer(unittest.TestCase):
self.assertEqual(resp.status, 204)
resp = retry(get, self.name, use_account=1)
resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertEqual(resp.status, 204)
self.assertEqual(resp.getheader('X-Container-Read'), 'jdoe')
self.assertEqual(resp.getheader('X-Container-Write'), 'jdoe')
self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
@@ -1254,11 +1280,11 @@ class TestContainer(unittest.TestCase):
# can read container metadata
resp = retry(get, self.name, use_account=3)
resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertEqual(resp.status, 204)
self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
# but not container acl
- self.assertEqual(resp.getheader('X-Container-Read'), None)
- self.assertEqual(resp.getheader('X-Container-Write'), None)
+ self.assertIsNone(resp.getheader('X-Container-Read'))
+ self.assertIsNone(resp.getheader('X-Container-Write'))
# and can not write
headers = {
@@ -1280,16 +1306,16 @@ class TestContainer(unittest.TestCase):
# can read container metadata
resp = retry(get, self.name, use_account=3)
resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertEqual(resp.status, 204)
self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
# but not container acl
- self.assertEqual(resp.getheader('X-Container-Read'), None)
- self.assertEqual(resp.getheader('X-Container-Write'), None)
+ self.assertIsNone(resp.getheader('X-Container-Read'))
+ self.assertIsNone(resp.getheader('X-Container-Write'))
# sanity check container acls with account1
resp = retry(get, self.name, use_account=1)
resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertEqual(resp.status, 204)
self.assertEqual(resp.getheader('X-Container-Read'), 'jdoe')
self.assertEqual(resp.getheader('X-Container-Write'), 'jdoe')
@@ -1305,7 +1331,7 @@ class TestContainer(unittest.TestCase):
self.assertEqual(resp.status, 204)
resp = retry(get, self.name, use_account=1) # validate w/ account1
resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertEqual(resp.status, 204)
self.assertEqual(resp.getheader('X-Container-Meta-Test'), new_value)
# but can not write container acls
self.assertEqual(resp.getheader('X-Container-Read'), 'jdoe')
@@ -1322,7 +1348,7 @@ class TestContainer(unittest.TestCase):
# admin can read container metadata
resp = retry(get, self.name, use_account=3)
resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertEqual(resp.status, 204)
self.assertEqual(resp.getheader('X-Container-Meta-Test'), new_value)
# and ALSO container acls
self.assertEqual(resp.getheader('X-Container-Read'), 'jdoe')
@@ -1338,7 +1364,7 @@ class TestContainer(unittest.TestCase):
self.assertEqual(resp.status, 204)
resp = retry(get, self.name, use_account=3)
resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertEqual(resp.status, 204)
self.assertEqual(resp.getheader('X-Container-Read'), '.r:*')
def test_long_name_content_type(self):
@@ -1381,8 +1407,11 @@ class TestContainer(unittest.TestCase):
raise SkipTest()
def put(url, token, parsed, conn):
+ # using the empty storage policy header value here to ensure
+ # that the default policy is chosen in case policy_specified is set
+ # see __init__.py for details on policy_specified
conn.request('PUT', parsed.path + '/' + self.container, '',
- {'X-Auth-Token': token})
+ {'X-Auth-Token': token, 'X-Storage-Policy': ''})
return check_response(conn)
resp = retry(put)
resp.read()
@@ -1395,8 +1424,8 @@ class TestContainer(unittest.TestCase):
resp = retry(head)
resp.read()
headers = dict((k.lower(), v) for k, v in resp.getheaders())
- self.assertEquals(headers.get('x-storage-policy'),
- default_policy['name'])
+ self.assertEqual(headers.get('x-storage-policy'),
+ default_policy['name'])
def test_error_invalid_storage_policy_name(self):
def put(url, token, parsed, conn, headers):
@@ -1433,8 +1462,8 @@ class TestContainer(unittest.TestCase):
resp = retry(head)
resp.read()
headers = dict((k.lower(), v) for k, v in resp.getheaders())
- self.assertEquals(headers.get('x-storage-policy'),
- policy['name'])
+ self.assertEqual(headers.get('x-storage-policy'),
+ policy['name'])
# and test recreate with-out specifying Storage Policy
resp = retry(put)
@@ -1444,8 +1473,8 @@ class TestContainer(unittest.TestCase):
resp = retry(head)
resp.read()
headers = dict((k.lower(), v) for k, v in resp.getheaders())
- self.assertEquals(headers.get('x-storage-policy'),
- policy['name'])
+ self.assertEqual(headers.get('x-storage-policy'),
+ policy['name'])
# delete it
def delete(url, token, parsed, conn):
@@ -1460,7 +1489,7 @@ class TestContainer(unittest.TestCase):
resp = retry(head)
resp.read()
headers = dict((k.lower(), v) for k, v in resp.getheaders())
- self.assertEquals(headers.get('x-storage-policy'), None)
+ self.assertIsNone(headers.get('x-storage-policy'))
@requires_policies
def test_conflict_change_storage_policy_with_put(self):
@@ -1490,8 +1519,8 @@ class TestContainer(unittest.TestCase):
resp = retry(head)
resp.read()
headers = dict((k.lower(), v) for k, v in resp.getheaders())
- self.assertEquals(headers.get('x-storage-policy'),
- policy['name'])
+ self.assertEqual(headers.get('x-storage-policy'),
+ policy['name'])
@requires_policies
def test_noop_change_storage_policy_with_post(self):
@@ -1527,11 +1556,61 @@ class TestContainer(unittest.TestCase):
resp = retry(head)
resp.read()
headers = dict((k.lower(), v) for k, v in resp.getheaders())
- self.assertEquals(headers.get('x-storage-policy'),
- policy['name'])
+ self.assertEqual(headers.get('x-storage-policy'),
+ policy['name'])
+
+ def test_container_quota_bytes(self):
+ if 'container_quotas' not in cluster_info:
+ raise SkipTest('Container quotas not enabled')
+
+ def post(url, token, parsed, conn, name, value):
+ conn.request('POST', parsed.path + '/' + self.name, '',
+ {'X-Auth-Token': token, name: value})
+ return check_response(conn)
+
+ def head(url, token, parsed, conn):
+ conn.request('HEAD', parsed.path + '/' + self.name, '',
+ {'X-Auth-Token': token})
+ return check_response(conn)
+
+        # set X-Container-Meta-Quota-Bytes to 10
+ resp = retry(post, 'X-Container-Meta-Quota-Bytes', '10')
+ resp.read()
+ self.assertEqual(resp.status, 204)
+ resp = retry(head)
+ resp.read()
+ self.assertIn(resp.status, (200, 204))
+ # confirm X-Container-Meta-Quota-Bytes
+ self.assertEqual(resp.getheader('X-Container-Meta-Quota-Bytes'), '10')
+
+ def put(url, token, parsed, conn, data):
+ conn.request('PUT', parsed.path + '/' + self.name + '/object',
+ data, {'X-Auth-Token': token})
+ return check_response(conn)
+
+        # upload an 11-byte object (over the 10-byte quota)
+ resp = retry(put, '01234567890')
+ resp.read()
+ self.assertEqual(resp.status, 413)
+
+        # upload a 10-byte object (exactly at the quota)
+ resp = retry(put, '0123456789')
+ resp.read()
+ self.assertEqual(resp.status, 201)
+
+ def get(url, token, parsed, conn):
+ conn.request('GET', parsed.path + '/' + self.name + '/object',
+ '', {'X-Auth-Token': token})
+ return check_response(conn)
+
+        # download the 10-byte object
+ resp = retry(get)
+ body = resp.read()
+ self.assertEqual(resp.status, 200)
+ self.assertEqual(body, '0123456789')
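
For reference, the rule this new quota test relies on amounts to a simple size comparison enforced server-side by the container_quotas middleware; the helper below only illustrates that rule, and its name and parameters are hypothetical:

def put_allowed_by_quota(bytes_used, quota_bytes_header, object_size):
    # No X-Container-Meta-Quota-Bytes header means no byte quota at all.
    if quota_bytes_header is None:
        return True
    # An upload is accepted only while the container's total stays within
    # the quota; anything larger is rejected with 413, as asserted above.
    return bytes_used + object_size <= int(quota_bytes_header)

# e.g. put_allowed_by_quota(0, '10', 11) -> False (413)
#      put_allowed_by_quota(0, '10', 10) -> True  (201)
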
-class BaseTestContainerACLs(unittest.TestCase):
+class BaseTestContainerACLs(unittest2.TestCase):
# subclasses can change the account in which container
# is created/deleted by setUp/tearDown
account = 1
@@ -1575,7 +1654,7 @@ class BaseTestContainerACLs(unittest.TestCase):
while True:
resp = retry(get, use_account=self.account)
body = resp.read()
- self.assert_(resp.status // 100 == 2, resp.status)
+ self.assertEqual(resp.status // 100, 2, resp.status)
objs = json.loads(body)
if not objs:
break
@@ -1706,4 +1785,4 @@ class TestContainerACLsAccount4(BaseTestContainerACLs):
if __name__ == '__main__':
- unittest.main()
+ unittest2.main()
diff --git a/test/functional/test_object.py b/test/functional/test_object.py
index e74a7f6..f23ccbc 100755
--- a/test/functional/test_object.py
+++ b/test/functional/test_object.py
@@ -15,18 +15,29 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import unittest
-from nose import SkipTest
+import datetime
+import json
+import unittest2
+from unittest2 import SkipTest
from uuid import uuid4
+import time
-from swift.common.utils import json
+from six.moves import range
from test.functional import check_response, retry, requires_acls, \
requires_policies
import test.functional as tf
-class TestObject(unittest.TestCase):
+def setUpModule():
+ tf.setup_package()
+
+
+def tearDownModule():
+ tf.teardown_package()
+
+
+class TestObject(unittest2.TestCase):
def setUp(self):
if tf.skip:
@@ -62,6 +73,20 @@ class TestObject(unittest.TestCase):
resp = retry(put, name, use_account=use_account)
resp.read()
self.assertEqual(resp.status, 201)
+
+ # With keystoneauth we need the accounts to have had the project
+ # domain id persisted as sysmeta prior to testing ACLs. This may
+ # not be the case if, for example, the account was created using
+ # a request with reseller_admin role, when project domain id may
+ # not have been known. So we ensure that the project domain id is
+ # in sysmeta by making a POST to the accounts using an admin role.
+ def post(url, token, parsed, conn):
+ conn.request('POST', parsed.path, '', {'X-Auth-Token': token})
+ return check_response(conn)
+ resp = retry(post, use_account=use_account)
+ resp.read()
+ self.assertEqual(resp.status, 204)
+
return name
def tearDown(self):
@@ -88,14 +113,14 @@ class TestObject(unittest.TestCase):
body = resp.read()
if resp.status == 404:
break
- self.assert_(resp.status // 100 == 2, resp.status)
+ self.assertEqual(resp.status // 100, 2, resp.status)
objs = json.loads(body)
if not objs:
break
for obj in objs:
resp = retry(delete, container, obj)
resp.read()
- self.assertEqual(resp.status, 204)
+ self.assertIn(resp.status, (204, 404))
# delete the container
def delete(url, token, parsed, conn, name):
@@ -106,7 +131,146 @@ class TestObject(unittest.TestCase):
for container in self.containers:
resp = retry(delete, container)
resp.read()
- self.assert_(resp.status in (204, 404))
+ self.assertIn(resp.status, (204, 404))
+
+ def test_metadata(self):
+ obj = 'test_metadata'
+ req_metadata = {}
+
+ def put(url, token, parsed, conn):
+ headers = {'X-Auth-Token': token}
+ headers.update(req_metadata)
+ conn.request('PUT', '%s/%s/%s' % (
+ parsed.path, self.container, obj
+ ), '', headers)
+ return check_response(conn)
+
+ def get(url, token, parsed, conn):
+ conn.request(
+ 'GET',
+ '%s/%s/%s' % (parsed.path, self.container, obj),
+ '',
+ {'X-Auth-Token': token})
+ return check_response(conn)
+
+ def post(url, token, parsed, conn):
+ headers = {'X-Auth-Token': token}
+ headers.update(req_metadata)
+ conn.request('POST', '%s/%s/%s' % (
+ parsed.path, self.container, obj
+ ), '', headers)
+ return check_response(conn)
+
+ def metadata(resp):
+ metadata = {}
+ for k, v in resp.headers.items():
+ if 'meta' in k.lower():
+ metadata[k] = v
+ return metadata
+
+ # empty put
+ resp = retry(put)
+ resp.read()
+ self.assertEqual(resp.status, 201)
+ resp = retry(get)
+ self.assertEqual('', resp.read())
+ self.assertEqual(resp.status, 200)
+ self.assertEqual(metadata(resp), {})
+ # empty post
+ resp = retry(post)
+ resp.read()
+ self.assertEqual(resp.status, 202)
+ resp = retry(get)
+ self.assertEqual('', resp.read())
+ self.assertEqual(resp.status, 200)
+ self.assertEqual(metadata(resp), {})
+
+ # metadata put
+ req_metadata = {
+ 'x-object-meta-Color': 'blUe',
+ 'X-Object-Meta-food': 'PizZa',
+ }
+ resp = retry(put)
+ resp.read()
+ self.assertEqual(resp.status, 201)
+ resp = retry(get)
+ self.assertEqual('', resp.read())
+ self.assertEqual(resp.status, 200)
+ self.assertEqual(metadata(resp), {
+ 'X-Object-Meta-Color': 'blUe',
+ 'X-Object-Meta-Food': 'PizZa',
+ })
+ # metadata post
+ req_metadata = {'X-Object-Meta-color': 'oraNge'}
+ resp = retry(post)
+ resp.read()
+ self.assertEqual(resp.status, 202)
+ resp = retry(get)
+ self.assertEqual('', resp.read())
+ self.assertEqual(resp.status, 200)
+ self.assertEqual(metadata(resp), {
+ 'X-Object-Meta-Color': 'oraNge'
+ })
+
+ # sysmeta put
+ req_metadata = {
+ 'X-Object-Meta-Color': 'Red',
+ 'X-Object-Sysmeta-Color': 'Green',
+ 'X-Object-Transient-Sysmeta-Color': 'Blue',
+ }
+ resp = retry(put)
+ resp.read()
+ self.assertEqual(resp.status, 201)
+ resp = retry(get)
+ self.assertEqual('', resp.read())
+ self.assertEqual(resp.status, 200)
+ self.assertEqual(metadata(resp), {
+ 'X-Object-Meta-Color': 'Red',
+ })
+ # sysmeta post
+ req_metadata = {
+ 'X-Object-Meta-Food': 'Burger',
+ 'X-Object-Meta-Animal': 'Cat',
+ 'X-Object-Sysmeta-Animal': 'Cow',
+ 'X-Object-Transient-Sysmeta-Food': 'Burger',
+ }
+ resp = retry(post)
+ resp.read()
+ self.assertEqual(resp.status, 202)
+ resp = retry(get)
+ self.assertEqual('', resp.read())
+ self.assertEqual(resp.status, 200)
+ self.assertEqual(metadata(resp), {
+ 'X-Object-Meta-Food': 'Burger',
+ 'X-Object-Meta-Animal': 'Cat',
+ })
+
+ # non-ascii put
+ req_metadata = {
+ 'X-Object-Meta-Foo': u'B\u00e2r',
+ }
+ resp = retry(put)
+ resp.read()
+ self.assertEqual(resp.status, 201)
+ resp = retry(get)
+ self.assertEqual('', resp.read())
+ self.assertEqual(resp.status, 200)
+ self.assertEqual(metadata(resp), {
+ 'X-Object-Meta-Foo': 'B\xc3\xa2r',
+ })
+ # non-ascii post
+ req_metadata = {
+ 'X-Object-Meta-Foo': u'B\u00e5z',
+ }
+ resp = retry(post)
+ resp.read()
+ self.assertEqual(resp.status, 202)
+ resp = retry(get)
+ self.assertEqual('', resp.read())
+ self.assertEqual(resp.status, 200)
+ self.assertEqual(metadata(resp), {
+ 'X-Object-Meta-Foo': 'B\xc3\xa5z',
+ })
def test_if_none_match(self):
def put(url, token, parsed, conn):
@@ -118,10 +282,10 @@ class TestObject(unittest.TestCase):
return check_response(conn)
resp = retry(put)
resp.read()
- self.assertEquals(resp.status, 201)
+ self.assertEqual(resp.status, 201)
resp = retry(put)
resp.read()
- self.assertEquals(resp.status, 412)
+ self.assertEqual(resp.status, 412)
def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s/%s' % (
@@ -132,7 +296,148 @@ class TestObject(unittest.TestCase):
return check_response(conn)
resp = retry(put)
resp.read()
- self.assertEquals(resp.status, 400)
+ self.assertEqual(resp.status, 400)
+
+ def test_too_small_x_timestamp(self):
+ def put(url, token, parsed, conn):
+ conn.request('PUT', '%s/%s/%s' % (parsed.path, self.container,
+ 'too_small_x_timestamp'),
+ '', {'X-Auth-Token': token,
+ 'Content-Length': '0',
+ 'X-Timestamp': '-1'})
+ return check_response(conn)
+
+ def head(url, token, parsed, conn):
+ conn.request('HEAD', '%s/%s/%s' % (parsed.path, self.container,
+ 'too_small_x_timestamp'),
+ '', {'X-Auth-Token': token,
+ 'Content-Length': '0'})
+ return check_response(conn)
+ ts_before = time.time()
+ resp = retry(put)
+ body = resp.read()
+ ts_after = time.time()
+ if resp.status == 400:
+ # shunt_inbound_x_timestamp must be false
+ self.assertIn(
+ 'X-Timestamp should be a UNIX timestamp float value', body)
+ else:
+ self.assertEqual(resp.status, 201)
+ self.assertEqual(body, '')
+ resp = retry(head)
+ resp.read()
+ self.assertGreater(float(resp.headers['x-timestamp']), ts_before)
+ self.assertLess(float(resp.headers['x-timestamp']), ts_after)
+
+ def test_too_big_x_timestamp(self):
+ def put(url, token, parsed, conn):
+ conn.request('PUT', '%s/%s/%s' % (parsed.path, self.container,
+ 'too_big_x_timestamp'),
+ '', {'X-Auth-Token': token,
+ 'Content-Length': '0',
+ 'X-Timestamp': '99999999999.9999999999'})
+ return check_response(conn)
+
+ def head(url, token, parsed, conn):
+ conn.request('HEAD', '%s/%s/%s' % (parsed.path, self.container,
+ 'too_big_x_timestamp'),
+ '', {'X-Auth-Token': token,
+ 'Content-Length': '0'})
+ return check_response(conn)
+ ts_before = time.time()
+ resp = retry(put)
+ body = resp.read()
+ ts_after = time.time()
+ if resp.status == 400:
+ # shunt_inbound_x_timestamp must be false
+ self.assertIn(
+ 'X-Timestamp should be a UNIX timestamp float value', body)
+ else:
+ self.assertEqual(resp.status, 201)
+ self.assertEqual(body, '')
+ resp = retry(head)
+ resp.read()
+ self.assertGreater(float(resp.headers['x-timestamp']), ts_before)
+ self.assertLess(float(resp.headers['x-timestamp']), ts_after)
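
Both timestamp tests accept either of two cluster behaviours: reject an out-of-range client X-Timestamp with a 400, or, when inbound timestamps are shunted, discard it and stamp the object with the server's own time. A hedged sketch of that decision, derived only from the assertions above:

def effective_x_timestamp(client_ts, shunt_inbound, server_now):
    # With shunting enabled the bad client value is discarded and the
    # object gets the server's current time, which is why the tests check
    # that the stored x-timestamp falls between ts_before and ts_after.
    if shunt_inbound:
        return server_now
    # Otherwise the request is refused outright, which the tests see as a
    # 400 whose body mentions the expected UNIX-timestamp error message.
    raise ValueError('X-Timestamp should be a UNIX timestamp float value')
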
+
+ def test_x_delete_after(self):
+ def put(url, token, parsed, conn):
+ conn.request('PUT', '%s/%s/%s' % (parsed.path, self.container,
+ 'x_delete_after'),
+ '', {'X-Auth-Token': token,
+ 'Content-Length': '0',
+ 'X-Delete-After': '1'})
+ return check_response(conn)
+ resp = retry(put)
+ resp.read()
+ self.assertEqual(resp.status, 201)
+
+ def get(url, token, parsed, conn):
+ conn.request(
+ 'GET',
+ '%s/%s/%s' % (parsed.path, self.container, 'x_delete_after'),
+ '',
+ {'X-Auth-Token': token})
+ return check_response(conn)
+
+ resp = retry(get)
+ resp.read()
+ count = 0
+ while resp.status == 200 and count < 10:
+ resp = retry(get)
+ resp.read()
+ count += 1
+ time.sleep(1)
+
+ self.assertEqual(resp.status, 404)
+
+        # To avoid an error during object deletion in tearDown(),
+        # the object is added again.
+ resp = retry(put)
+ resp.read()
+ self.assertEqual(resp.status, 201)
+
+ def test_x_delete_at(self):
+ def put(url, token, parsed, conn):
+ dt = datetime.datetime.now()
+ epoch = time.mktime(dt.timetuple())
+ delete_time = str(int(epoch) + 3)
+ conn.request(
+ 'PUT',
+ '%s/%s/%s' % (parsed.path, self.container, 'x_delete_at'),
+ '',
+ {'X-Auth-Token': token,
+ 'Content-Length': '0',
+ 'X-Delete-At': delete_time})
+ return check_response(conn)
+ resp = retry(put)
+ resp.read()
+ self.assertEqual(resp.status, 201)
+
+ def get(url, token, parsed, conn):
+ conn.request(
+ 'GET',
+ '%s/%s/%s' % (parsed.path, self.container, 'x_delete_at'),
+ '',
+ {'X-Auth-Token': token})
+ return check_response(conn)
+
+ resp = retry(get)
+ resp.read()
+ count = 0
+ while resp.status == 200 and count < 10:
+ resp = retry(get)
+ resp.read()
+ count += 1
+ time.sleep(1)
+
+ self.assertEqual(resp.status, 404)
+
+        # To avoid an error during object deletion in tearDown(),
+        # the object is added again.
+ resp = retry(put)
+ resp.read()
+ self.assertEqual(resp.status, 201)
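
Both expiry tests build on the same relationship between the two headers: X-Delete-After is shorthand for an absolute X-Delete-At of roughly "now" plus the given number of seconds. A small sketch of that conversion (illustrative only; the helper name is made up):

import time

def delete_at_from_delete_after(delete_after_seconds, now=None):
    # Mirrors how the test_x_delete_at PUT computes its header: take the
    # current epoch time and add the desired lifetime in whole seconds.
    now = time.time() if now is None else now
    return str(int(now) + int(delete_after_seconds))

# e.g. {'X-Delete-At': delete_at_from_delete_after(3)}
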
def test_non_integer_x_delete_after(self):
def put(url, token, parsed, conn):
@@ -144,7 +449,7 @@ class TestObject(unittest.TestCase):
return check_response(conn)
resp = retry(put)
body = resp.read()
- self.assertEquals(resp.status, 400)
+ self.assertEqual(resp.status, 400)
self.assertEqual(body, 'Non-integer X-Delete-After')
def test_non_integer_x_delete_at(self):
@@ -157,7 +462,7 @@ class TestObject(unittest.TestCase):
return check_response(conn)
resp = retry(put)
body = resp.read()
- self.assertEquals(resp.status, 400)
+ self.assertEqual(resp.status, 400)
self.assertEqual(body, 'Non-integer X-Delete-At')
def test_x_delete_at_in_the_past(self):
@@ -170,7 +475,7 @@ class TestObject(unittest.TestCase):
return check_response(conn)
resp = retry(put)
body = resp.read()
- self.assertEquals(resp.status, 400)
+ self.assertEqual(resp.status, 400)
self.assertEqual(body, 'X-Delete-At in past')
def test_copy_object(self):
@@ -220,7 +525,7 @@ class TestObject(unittest.TestCase):
return check_response(conn)
resp = retry(delete)
resp.read()
- self.assertEqual(resp.status, 204)
+ self.assertIn(resp.status, (204, 404))
# verify dest does not exist
resp = retry(get_dest)
resp.read()
@@ -242,10 +547,27 @@ class TestObject(unittest.TestCase):
self.assertEqual(resp.status, 200)
self.assertEqual(dest_contents, source_contents)
+ # copy source to dest with COPY and range
+ def copy(url, token, parsed, conn):
+ conn.request('COPY', '%s/%s' % (parsed.path, source), '',
+ {'X-Auth-Token': token,
+ 'Destination': dest,
+ 'Range': 'bytes=1-2'})
+ return check_response(conn)
+ resp = retry(copy)
+ resp.read()
+ self.assertEqual(resp.status, 201)
+
+ # contents of dest should be the same as source
+ resp = retry(get_dest)
+ dest_contents = resp.read()
+ self.assertEqual(resp.status, 200)
+ self.assertEqual(dest_contents, source_contents[1:3])
+
# delete the copy
resp = retry(delete)
resp.read()
- self.assertEqual(resp.status, 204)
+ self.assertIn(resp.status, (204, 404))
def test_copy_between_accounts(self):
if tf.skip:
@@ -311,7 +633,7 @@ class TestObject(unittest.TestCase):
return check_response(conn)
resp = retry(delete, use_account=2)
resp.read()
- self.assertEqual(resp.status, 204)
+ self.assertIn(resp.status, (204, 404))
# verify dest does not exist
resp = retry(get_dest, use_account=2)
resp.read()
@@ -355,7 +677,7 @@ class TestObject(unittest.TestCase):
# delete the copy
resp = retry(delete, use_account=2)
resp.read()
- self.assertEqual(resp.status, 204)
+ self.assertIn(resp.status, (204, 404))
def test_public_object(self):
if tf.skip:
@@ -369,7 +691,7 @@ class TestObject(unittest.TestCase):
resp = retry(get)
raise Exception('Should not have been able to GET')
except Exception as err:
- self.assert_(str(err).startswith('No result after '))
+ self.assertTrue(str(err).startswith('No result after '))
def post(url, token, parsed, conn):
conn.request('POST', parsed.path + '/' + self.container, '',
@@ -394,7 +716,7 @@ class TestObject(unittest.TestCase):
resp = retry(get)
raise Exception('Should not have been able to GET')
except Exception as err:
- self.assert_(str(err).startswith('No result after '))
+ self.assertTrue(str(err).startswith('No result after '))
def test_private_object(self):
if tf.skip or tf.skip3:
@@ -477,7 +799,7 @@ class TestObject(unittest.TestCase):
return check_response(conn)
resp = retry(delete)
resp.read()
- self.assertEqual(resp.status, 204)
+ self.assertIn(resp.status, (204, 404))
# clean up shared_container
def delete(url, token, parsed, conn):
@@ -487,7 +809,86 @@ class TestObject(unittest.TestCase):
return check_response(conn)
resp = retry(delete)
resp.read()
- self.assertEqual(resp.status, 204)
+ self.assertIn(resp.status, (204, 404))
+
+ def test_container_write_only(self):
+ if tf.skip or tf.skip3:
+ raise SkipTest
+
+ # Ensure we can't access the object with the third account
+ def get(url, token, parsed, conn):
+ conn.request('GET', '%s/%s/%s' % (
+ parsed.path, self.container, self.obj), '',
+ {'X-Auth-Token': token})
+ return check_response(conn)
+ resp = retry(get, use_account=3)
+ resp.read()
+ self.assertEqual(resp.status, 403)
+
+ # create a shared container writable (but not readable) by account3
+ shared_container = uuid4().hex
+
+ def put(url, token, parsed, conn):
+ conn.request('PUT', '%s/%s' % (
+ parsed.path, shared_container), '',
+ {'X-Auth-Token': token,
+ 'X-Container-Write': tf.swift_test_perm[2]})
+ return check_response(conn)
+ resp = retry(put)
+ resp.read()
+ self.assertEqual(resp.status, 201)
+
+ # verify third account can write "obj1" to shared container
+ def put(url, token, parsed, conn):
+ conn.request('PUT', '%s/%s/%s' % (
+ parsed.path, shared_container, 'obj1'), 'test',
+ {'X-Auth-Token': token})
+ return check_response(conn)
+ resp = retry(put, use_account=3)
+ resp.read()
+ self.assertEqual(resp.status, 201)
+
+ # verify third account cannot copy "obj1" to shared container
+ def copy(url, token, parsed, conn):
+ conn.request('COPY', '%s/%s/%s' % (
+ parsed.path, shared_container, 'obj1'), '',
+ {'X-Auth-Token': token,
+ 'Destination': '%s/%s' % (shared_container, 'obj2')})
+ return check_response(conn)
+ resp = retry(copy, use_account=3)
+ resp.read()
+ self.assertEqual(resp.status, 403)
+
+ # verify third account can POST to "obj1" in shared container
+ def post(url, token, parsed, conn):
+ conn.request('POST', '%s/%s/%s' % (
+ parsed.path, shared_container, 'obj1'), '',
+ {'X-Auth-Token': token,
+ 'X-Object-Meta-Color': 'blue'})
+ return check_response(conn)
+ resp = retry(post, use_account=3)
+ resp.read()
+ self.assertEqual(resp.status, 202)
+
+ # verify third account can DELETE from shared container
+ def delete(url, token, parsed, conn):
+ conn.request('DELETE', '%s/%s/%s' % (
+ parsed.path, shared_container, 'obj1'), '',
+ {'X-Auth-Token': token})
+ return check_response(conn)
+ resp = retry(delete, use_account=3)
+ resp.read()
+ self.assertIn(resp.status, (204, 404))
+
+ # clean up shared_container
+ def delete(url, token, parsed, conn):
+ conn.request('DELETE',
+ parsed.path + '/' + shared_container, '',
+ {'X-Auth-Token': token})
+ return check_response(conn)
+ resp = retry(delete)
+ resp.read()
+ self.assertIn(resp.status, (204, 404))
@requires_acls
def test_read_only(self):
@@ -525,12 +926,12 @@ class TestObject(unittest.TestCase):
# cannot list objects
resp = retry(get_listing, use_account=3)
resp.read()
- self.assertEquals(resp.status, 403)
+ self.assertEqual(resp.status, 403)
# cannot get object
resp = retry(get, self.obj, use_account=3)
resp.read()
- self.assertEquals(resp.status, 403)
+ self.assertEqual(resp.status, 403)
# grant read-only access
acl_user = tf.swift_test_user[2]
@@ -543,32 +944,32 @@ class TestObject(unittest.TestCase):
# can list objects
resp = retry(get_listing, use_account=3)
listing = resp.read()
- self.assertEquals(resp.status, 200)
- self.assert_(self.obj in listing)
+ self.assertEqual(resp.status, 200)
+ self.assertIn(self.obj, listing)
# can get object
resp = retry(get, self.obj, use_account=3)
body = resp.read()
- self.assertEquals(resp.status, 200)
- self.assertEquals(body, 'test')
+ self.assertEqual(resp.status, 200)
+ self.assertEqual(body, 'test')
# can not put an object
obj_name = str(uuid4())
resp = retry(put, obj_name, use_account=3)
body = resp.read()
- self.assertEquals(resp.status, 403)
+ self.assertEqual(resp.status, 403)
# can not delete an object
resp = retry(delete, self.obj, use_account=3)
body = resp.read()
- self.assertEquals(resp.status, 403)
+ self.assertEqual(resp.status, 403)
# sanity with account1
resp = retry(get_listing, use_account=3)
listing = resp.read()
- self.assertEquals(resp.status, 200)
- self.assert_(obj_name not in listing)
- self.assert_(self.obj in listing)
+ self.assertEqual(resp.status, 200)
+ self.assertNotIn(obj_name, listing)
+ self.assertIn(self.obj, listing)
@requires_acls
def test_read_write(self):
@@ -606,12 +1007,12 @@ class TestObject(unittest.TestCase):
# cannot list objects
resp = retry(get_listing, use_account=3)
resp.read()
- self.assertEquals(resp.status, 403)
+ self.assertEqual(resp.status, 403)
# cannot get object
resp = retry(get, self.obj, use_account=3)
resp.read()
- self.assertEquals(resp.status, 403)
+ self.assertEqual(resp.status, 403)
# grant read-write access
acl_user = tf.swift_test_user[2]
@@ -624,32 +1025,32 @@ class TestObject(unittest.TestCase):
# can list objects
resp = retry(get_listing, use_account=3)
listing = resp.read()
- self.assertEquals(resp.status, 200)
- self.assert_(self.obj in listing)
+ self.assertEqual(resp.status, 200)
+ self.assertIn(self.obj, listing)
# can get object
resp = retry(get, self.obj, use_account=3)
body = resp.read()
- self.assertEquals(resp.status, 200)
- self.assertEquals(body, 'test')
+ self.assertEqual(resp.status, 200)
+ self.assertEqual(body, 'test')
# can put an object
obj_name = str(uuid4())
resp = retry(put, obj_name, use_account=3)
body = resp.read()
- self.assertEquals(resp.status, 201)
+ self.assertEqual(resp.status, 201)
# can delete an object
resp = retry(delete, self.obj, use_account=3)
body = resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertIn(resp.status, (204, 404))
# sanity with account1
resp = retry(get_listing, use_account=3)
listing = resp.read()
- self.assertEquals(resp.status, 200)
- self.assert_(obj_name in listing)
- self.assert_(self.obj not in listing)
+ self.assertEqual(resp.status, 200)
+ self.assertIn(obj_name, listing)
+ self.assertNotIn(self.obj, listing)
@requires_acls
def test_admin(self):
@@ -687,12 +1088,12 @@ class TestObject(unittest.TestCase):
# cannot list objects
resp = retry(get_listing, use_account=3)
resp.read()
- self.assertEquals(resp.status, 403)
+ self.assertEqual(resp.status, 403)
# cannot get object
resp = retry(get, self.obj, use_account=3)
resp.read()
- self.assertEquals(resp.status, 403)
+ self.assertEqual(resp.status, 403)
# grant admin access
acl_user = tf.swift_test_user[2]
@@ -705,32 +1106,32 @@ class TestObject(unittest.TestCase):
# can list objects
resp = retry(get_listing, use_account=3)
listing = resp.read()
- self.assertEquals(resp.status, 200)
- self.assert_(self.obj in listing)
+ self.assertEqual(resp.status, 200)
+ self.assertIn(self.obj, listing)
# can get object
resp = retry(get, self.obj, use_account=3)
body = resp.read()
- self.assertEquals(resp.status, 200)
- self.assertEquals(body, 'test')
+ self.assertEqual(resp.status, 200)
+ self.assertEqual(body, 'test')
# can put an object
obj_name = str(uuid4())
resp = retry(put, obj_name, use_account=3)
body = resp.read()
- self.assertEquals(resp.status, 201)
+ self.assertEqual(resp.status, 201)
# can delete an object
resp = retry(delete, self.obj, use_account=3)
body = resp.read()
- self.assertEquals(resp.status, 204)
+ self.assertIn(resp.status, (204, 404))
# sanity with account1
resp = retry(get_listing, use_account=3)
listing = resp.read()
- self.assertEquals(resp.status, 200)
- self.assert_(obj_name in listing)
- self.assert_(self.obj not in listing)
+ self.assertEqual(resp.status, 200)
+ self.assertIn(obj_name, listing)
+ self.assertNotIn(self.obj, listing)
def test_manifest(self):
if tf.skip:
@@ -746,7 +1147,7 @@ class TestObject(unittest.TestCase):
parsed.path, self.container, str(objnum)), segments1[objnum],
{'X-Auth-Token': token})
return check_response(conn)
- for objnum in xrange(len(segments1)):
+ for objnum in range(len(segments1)):
resp = retry(put, objnum)
resp.read()
self.assertEqual(resp.status, 201)
@@ -809,7 +1210,7 @@ class TestObject(unittest.TestCase):
parsed.path, self.container, str(objnum)), segments2[objnum],
{'X-Auth-Token': token})
return check_response(conn)
- for objnum in xrange(len(segments2)):
+ for objnum in range(len(segments2)):
resp = retry(put, objnum)
resp.read()
self.assertEqual(resp.status, 201)
@@ -891,7 +1292,7 @@ class TestObject(unittest.TestCase):
parsed.path, acontainer, str(objnum)), segments3[objnum],
{'X-Auth-Token': token})
return check_response(conn)
- for objnum in xrange(len(segments3)):
+ for objnum in range(len(segments3)):
resp = retry(put, objnum)
resp.read()
self.assertEqual(resp.status, 201)
@@ -958,7 +1359,7 @@ class TestObject(unittest.TestCase):
return check_response(conn)
resp = retry(delete, objnum)
resp.read()
- self.assertEqual(resp.status, 204)
+ self.assertIn(resp.status, (204, 404))
# Delete the third set of segments
def delete(url, token, parsed, conn, objnum):
@@ -966,10 +1367,10 @@ class TestObject(unittest.TestCase):
parsed.path, acontainer, str(objnum)), '',
{'X-Auth-Token': token})
return check_response(conn)
- for objnum in xrange(len(segments3)):
+ for objnum in range(len(segments3)):
resp = retry(delete, objnum)
resp.read()
- self.assertEqual(resp.status, 204)
+ self.assertIn(resp.status, (204, 404))
# Delete the second set of segments
def delete(url, token, parsed, conn, objnum):
@@ -977,10 +1378,10 @@ class TestObject(unittest.TestCase):
parsed.path, self.container, str(objnum)), '',
{'X-Auth-Token': token})
return check_response(conn)
- for objnum in xrange(len(segments2)):
+ for objnum in range(len(segments2)):
resp = retry(delete, objnum)
resp.read()
- self.assertEqual(resp.status, 204)
+ self.assertIn(resp.status, (204, 404))
# Delete the first set of segments
def delete(url, token, parsed, conn, objnum):
@@ -988,10 +1389,10 @@ class TestObject(unittest.TestCase):
parsed.path, self.container, str(objnum)), '',
{'X-Auth-Token': token})
return check_response(conn)
- for objnum in xrange(len(segments1)):
+ for objnum in range(len(segments1)):
resp = retry(delete, objnum)
resp.read()
- self.assertEqual(resp.status, 204)
+ self.assertIn(resp.status, (204, 404))
# Delete the extra container
def delete(url, token, parsed, conn):
@@ -1000,7 +1401,7 @@ class TestObject(unittest.TestCase):
return check_response(conn)
resp = retry(delete)
resp.read()
- self.assertEqual(resp.status, 204)
+ self.assertIn(resp.status, (204, 404))
def test_delete_content_type(self):
if tf.skip:
@@ -1020,7 +1421,7 @@ class TestObject(unittest.TestCase):
return check_response(conn)
resp = retry(delete)
resp.read()
- self.assertEqual(resp.status, 204)
+ self.assertIn(resp.status, (204, 404))
self.assertEqual(resp.getheader('Content-Type'),
'text/html; charset=UTF-8')
@@ -1095,78 +1496,78 @@ class TestObject(unittest.TestCase):
resp = retry(put_cors_cont, '*')
resp.read()
- self.assertEquals(resp.status // 100, 2)
+ self.assertEqual(resp.status // 100, 2)
resp = retry(put_obj, 'cat')
resp.read()
- self.assertEquals(resp.status // 100, 2)
+ self.assertEqual(resp.status // 100, 2)
resp = retry(check_cors,
'OPTIONS', 'cat', {'Origin': 'http://m.com'})
- self.assertEquals(resp.status, 401)
+ self.assertEqual(resp.status, 401)
resp = retry(check_cors,
'OPTIONS', 'cat',
{'Origin': 'http://m.com',
'Access-Control-Request-Method': 'GET'})
- self.assertEquals(resp.status, 200)
+ self.assertEqual(resp.status, 200)
resp.read()
headers = dict((k.lower(), v) for k, v in resp.getheaders())
- self.assertEquals(headers.get('access-control-allow-origin'),
- '*')
+ self.assertEqual(headers.get('access-control-allow-origin'),
+ '*')
resp = retry(check_cors,
'GET', 'cat', {'Origin': 'http://m.com'})
- self.assertEquals(resp.status, 200)
+ self.assertEqual(resp.status, 200)
headers = dict((k.lower(), v) for k, v in resp.getheaders())
- self.assertEquals(headers.get('access-control-allow-origin'),
- '*')
+ self.assertEqual(headers.get('access-control-allow-origin'),
+ '*')
resp = retry(check_cors,
'GET', 'cat', {'Origin': 'http://m.com',
'X-Web-Mode': 'True'})
- self.assertEquals(resp.status, 200)
+ self.assertEqual(resp.status, 200)
headers = dict((k.lower(), v) for k, v in resp.getheaders())
- self.assertEquals(headers.get('access-control-allow-origin'),
- '*')
+ self.assertEqual(headers.get('access-control-allow-origin'),
+ '*')
####################
resp = retry(put_cors_cont, 'http://secret.com')
resp.read()
- self.assertEquals(resp.status // 100, 2)
+ self.assertEqual(resp.status // 100, 2)
resp = retry(check_cors,
'OPTIONS', 'cat',
{'Origin': 'http://m.com',
'Access-Control-Request-Method': 'GET'})
resp.read()
- self.assertEquals(resp.status, 401)
+ self.assertEqual(resp.status, 401)
if strict_cors:
resp = retry(check_cors,
'GET', 'cat', {'Origin': 'http://m.com'})
resp.read()
- self.assertEquals(resp.status, 200)
+ self.assertEqual(resp.status, 200)
headers = dict((k.lower(), v) for k, v in resp.getheaders())
- self.assertTrue('access-control-allow-origin' not in headers)
+ self.assertNotIn('access-control-allow-origin', headers)
resp = retry(check_cors,
'GET', 'cat', {'Origin': 'http://secret.com'})
resp.read()
- self.assertEquals(resp.status, 200)
+ self.assertEqual(resp.status, 200)
headers = dict((k.lower(), v) for k, v in resp.getheaders())
- self.assertEquals(headers.get('access-control-allow-origin'),
- 'http://secret.com')
+ self.assertEqual(headers.get('access-control-allow-origin'),
+ 'http://secret.com')
else:
resp = retry(check_cors,
'GET', 'cat', {'Origin': 'http://m.com'})
resp.read()
- self.assertEquals(resp.status, 200)
+ self.assertEqual(resp.status, 200)
headers = dict((k.lower(), v) for k, v in resp.getheaders())
- self.assertEquals(headers.get('access-control-allow-origin'),
- 'http://m.com')
+ self.assertEqual(headers.get('access-control-allow-origin'),
+ 'http://m.com')
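(Illustrative sketch, not part of the patch.) The CORS checks above reduce to one preflight rule: an OPTIONS request is only honoured when both Origin and Access-Control-Request-Method are supplied, and the allow-origin response header echoes '*' or the matching origin depending on the container metadata and the cluster's strict_cors setting. A rough sketch of such a preflight, with placeholder names:

    # Rough preflight sketch; header names match the requests issued above.
    from six.moves import http_client

    def preflight(host, obj_path, token, origin, method='GET'):
        conn = http_client.HTTPConnection(host)
        conn.request('OPTIONS', obj_path, '',
                     {'X-Auth-Token': token,
                      'Origin': origin,
                      'Access-Control-Request-Method': method})
        resp = conn.getresponse()
        resp.read()
        headers = dict((k.lower(), v) for k, v in resp.getheaders())
        # 200 plus access-control-allow-origin when the origin is allowed;
        # 401 when Access-Control-Request-Method is omitted or it is not.
        return resp.status, headers.get('access-control-allow-origin')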
@requires_policies
def test_cross_policy_copy(self):
@@ -1228,4 +1629,4 @@ class TestObject(unittest.TestCase):
if __name__ == '__main__':
- unittest.main()
+ unittest2.main()
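(Illustrative sketch, not part of the patch.) Every test in this file follows the same shape: a small closure taking (url, token, parsed, conn) issues one request and returns check_response(conn), and retry() supplies the token, the parsed storage URL and an open connection. A minimal sketch of that shape, using the helpers this patch imports from test.functional; the container and object names are placeholders:

    # Minimal sketch of the request-closure pattern used throughout this file.
    from test.functional import check_response, retry

    def head_obj(url, token, parsed, conn, container='c', obj='o'):
        # retry() passes in a fresh token, the parsed storage URL and an
        # open connection; the closure only issues a single request.
        conn.request('HEAD', '%s/%s/%s' % (parsed.path, container, obj), '',
                     {'X-Auth-Token': token})
        return check_response(conn)

    resp = retry(head_obj)            # or retry(head_obj, use_account=3)
    resp.read()
    assert resp.status in (200, 404)  # object may or may not exist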
diff --git a/test/functional/tests.py b/test/functional/tests.py
index 9cb328d..2412147 100644
--- a/test/functional/tests.py
+++ b/test/functional/tests.py
@@ -15,24 +15,26 @@
# limitations under the License.
from datetime import datetime
+import email.parser
import hashlib
import hmac
+import itertools
import json
import locale
import random
-import StringIO
-import time
+import six
+from six.moves import urllib
import os
-import unittest
-import urllib
+import time
+import unittest2
import uuid
from copy import deepcopy
import eventlet
-from nose import SkipTest
+from unittest2 import SkipTest
from swift.common.http import is_success, is_client_error
from test.functional import normalized_urls, load_constraint, cluster_info
-from test.functional import check_response, retry
+from test.functional import check_response, retry, requires_acls
import test.functional as tf
from test.functional.swift_test_client import Account, Connection, File, \
ResponseError
@@ -41,6 +43,7 @@ from gluster.swift.common.constraints import \
set_object_name_component_length()
+
def create_limit_filename(name_limit):
"""
Convert a split a large object name with
@@ -66,6 +69,14 @@ def create_limit_filename(name_limit):
return "".join(filename_list)
+def setUpModule():
+ tf.setup_package()
+
+
+def tearDownModule():
+ tf.teardown_package()
+
+
class Utils(object):
@classmethod
def create_ascii_name(cls, length=None):
@@ -84,12 +95,12 @@ class Utils(object):
u'\u3705\u1803\u0902\uF112\uD210\uB30E\u940C\u850B'\
u'\u5608\u3706\u1804\u0903\u03A9\u2603'
return ''.join([random.choice(utf8_chars)
- for x in xrange(length)]).encode('utf-8')
+ for x in range(length)]).encode('utf-8')
create_name = create_ascii_name
-class Base(unittest.TestCase):
+class Base(unittest2.TestCase):
def setUp(self):
cls = type(self)
if not cls.set_up:
@@ -98,15 +109,24 @@ class Base(unittest.TestCase):
def assert_body(self, body):
response_body = self.env.conn.response.read()
- self.assert_(response_body == body,
- 'Body returned: %s' % (response_body))
+ self.assertEqual(response_body, body,
+ 'Body returned: %s' % (response_body))
def assert_status(self, status_or_statuses):
- self.assert_(self.env.conn.response.status == status_or_statuses or
- (hasattr(status_or_statuses, '__iter__') and
- self.env.conn.response.status in status_or_statuses),
- 'Status returned: %d Expected: %s' %
- (self.env.conn.response.status, status_or_statuses))
+ self.assertTrue(
+ self.env.conn.response.status == status_or_statuses or
+ (hasattr(status_or_statuses, '__iter__') and
+ self.env.conn.response.status in status_or_statuses),
+ 'Status returned: %d Expected: %s' %
+ (self.env.conn.response.status, status_or_statuses))
+
+ def assert_header(self, header_name, expected_value):
+ try:
+ actual_value = self.env.conn.response.getheader(header_name)
+ except KeyError:
+ self.fail(
+ 'Expected header name %r not found in response.' % header_name)
+ self.assertEqual(expected_value, actual_value)
class Base2(object):
@@ -161,7 +181,7 @@ class TestAccount(Base):
def testInvalidUTF8Path(self):
invalid_utf8 = Utils.create_utf8_name()[::-1]
container = self.env.account.container(invalid_utf8)
- self.assert_(not container.create(cfg={'no_path_quote': True}))
+ self.assertFalse(container.create(cfg={'no_path_quote': True}))
self.assert_status(412)
self.assert_body('Invalid UTF8 or contains NULL')
@@ -183,7 +203,9 @@ class TestAccount(Base):
finally:
self.env.account.conn.storage_url = was_url
- def testPUT(self):
+ def testPUTError(self):
+ if load_constraint('allow_account_management'):
+ raise SkipTest("Allow account management is enabled")
self.env.account.conn.make_request('PUT')
self.assert_status([403, 405])
@@ -194,7 +216,7 @@ class TestAccount(Base):
info = self.env.account.info()
for field in ['object_count', 'container_count', 'bytes_used']:
- self.assert_(info[field] >= 0)
+ self.assertGreaterEqual(info[field], 0)
if info['container_count'] == len(self.env.containers):
break
@@ -221,8 +243,8 @@ class TestAccount(Base):
for format_type in ['json', 'xml']:
for a in self.env.account.containers(
parms={'format': format_type}):
- self.assert_(a['count'] >= 0)
- self.assert_(a['bytes'] >= 0)
+ self.assertGreaterEqual(a['count'], 0)
+ self.assertGreaterEqual(a['bytes'], 0)
headers = dict(self.env.conn.response.getheaders())
if format_type == 'json':
@@ -238,7 +260,8 @@ class TestAccount(Base):
p = {'limit': l}
if l <= limit:
- self.assert_(len(self.env.account.containers(parms=p)) <= l)
+ self.assertLessEqual(len(self.env.account.containers(parms=p)),
+ l)
self.assert_status(200)
else:
self.assertRaises(ResponseError,
@@ -285,11 +308,12 @@ class TestAccount(Base):
parms={'format': format_type,
'marker': marker,
'limit': limit})
- self.assert_(len(containers) <= limit)
+ self.assertLessEqual(len(containers), limit)
if containers:
if isinstance(containers[0], dict):
containers = [x['name'] for x in containers]
- self.assert_(locale.strcoll(containers[0], marker) > 0)
+ self.assertGreater(locale.strcoll(containers[0], marker),
+ 0)
def testContainersOrderedByName(self):
for format_type in [None, 'json', 'xml']:
@@ -308,19 +332,17 @@ class TestAccount(Base):
inserted_html = '<b>Hello World'
hax = 'AUTH_haxx"\nContent-Length: %d\n\n%s' % (len(inserted_html),
inserted_html)
- quoted_hax = urllib.quote(hax)
+ quoted_hax = urllib.parse.quote(hax)
conn.connection.request('GET', '/v1/' + quoted_hax, None, {})
resp = conn.connection.getresponse()
resp_headers = dict(resp.getheaders())
- self.assertTrue('www-authenticate' in resp_headers,
- 'www-authenticate not found in %s' % resp_headers)
+ self.assertIn('www-authenticate', resp_headers)
actual = resp_headers['www-authenticate']
expected = 'Swift realm="%s"' % quoted_hax
# other middleware e.g. auth_token may also set www-authenticate
# headers in which case actual values will be a comma separated list.
# check that expected value is among the actual values
- self.assertTrue(expected in actual,
- '%s not found in %s' % (expected, actual))
+ self.assertIn(expected, actual)
class TestAccountUTF8(Base2, TestAccount):
@@ -343,7 +365,7 @@ class TestAccountNoContainers(Base):
def testGetRequest(self):
for format_type in [None, 'json', 'xml']:
- self.assert_(not self.env.account.containers(
+ self.assertFalse(self.env.account.containers(
parms={'format': format_type}))
if format_type is None:
@@ -356,6 +378,92 @@ class TestAccountNoContainersUTF8(Base2, TestAccountNoContainers):
set_up = False
+class TestAccountSortingEnv(object):
+ @classmethod
+ def setUp(cls):
+ cls.conn = Connection(tf.config)
+ cls.conn.authenticate()
+ cls.account = Account(cls.conn, tf.config.get('account',
+ tf.config['username']))
+ cls.account.delete_containers()
+
+ postfix = Utils.create_name()
+ cls.cont_items = ('a1', 'a2', 'A3', 'b1', 'B2', 'a10', 'b10', 'zz')
+ cls.cont_items = ['%s%s' % (x, postfix) for x in cls.cont_items]
+
+ for container in cls.cont_items:
+ c = cls.account.container(container)
+ if not c.create():
+ raise ResponseError(cls.conn.response)
+
+
+class TestAccountSorting(Base):
+ env = TestAccountSortingEnv
+ set_up = False
+
+ def testAccountContainerListSorting(self):
+ # name (byte order) sorting.
+ cont_list = sorted(self.env.cont_items)
+ for reverse in ('false', 'no', 'off', '', 'garbage'):
+ cont_listing = self.env.account.containers(
+ parms={'reverse': reverse})
+ self.assert_status(200)
+ self.assertEqual(cont_list, cont_listing,
+ 'Expected %s but got %s with reverse param %r'
+ % (cont_list, cont_listing, reverse))
+
+ def testAccountContainerListSortingReverse(self):
+ # name (byte order) sorting.
+ cont_list = sorted(self.env.cont_items)
+ cont_list.reverse()
+ for reverse in ('true', '1', 'yes', 'on', 't', 'y'):
+ cont_listing = self.env.account.containers(
+ parms={'reverse': reverse})
+ self.assert_status(200)
+ self.assertEqual(cont_list, cont_listing,
+ 'Expected %s but got %s with reverse param %r'
+ % (cont_list, cont_listing, reverse))
+
+ def testAccountContainerListSortingByPrefix(self):
+ cont_list = sorted(c for c in self.env.cont_items if c.startswith('a'))
+ cont_list.reverse()
+ cont_listing = self.env.account.containers(parms={
+ 'reverse': 'on', 'prefix': 'a'})
+ self.assert_status(200)
+ self.assertEqual(cont_list, cont_listing)
+
+ def testAccountContainerListSortingByMarkersExclusive(self):
+ first_item = self.env.cont_items[3] # 'b1' + postfix
+ last_item = self.env.cont_items[4] # 'B2' + postfix
+
+ cont_list = sorted(c for c in self.env.cont_items
+ if last_item < c < first_item)
+ cont_list.reverse()
+ cont_listing = self.env.account.containers(parms={
+ 'reverse': 'on', 'marker': first_item, 'end_marker': last_item})
+ self.assert_status(200)
+ self.assertEqual(cont_list, cont_listing)
+
+ def testAccountContainerListSortingByMarkersInclusive(self):
+ first_item = self.env.cont_items[3] # 'b1' + postfix
+ last_item = self.env.cont_items[4] # 'B2' + postfix
+
+ cont_list = sorted(c for c in self.env.cont_items
+ if last_item <= c <= first_item)
+ cont_list.reverse()
+ cont_listing = self.env.account.containers(parms={
+ 'reverse': 'on', 'marker': first_item + '\x00',
+ 'end_marker': last_item[:-1] + chr(ord(last_item[-1]) - 1)})
+ self.assert_status(200)
+ self.assertEqual(cont_list, cont_listing)
+
+ def testAccountContainerListSortingByReversedMarkers(self):
+ cont_listing = self.env.account.containers(parms={
+ 'reverse': 'on', 'marker': 'B', 'end_marker': 'b1'})
+ self.assert_status(204)
+ self.assertEqual([], cont_listing)
+
+
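(Illustrative sketch, not part of the patch.) The sorting tests above pin down the semantics of the reverse listing parameter: values such as 'true', '1', 'yes', 'on', 't' and 'y' flip the byte-order listing, while anything else (including garbage) is treated as false. A rough client-side sketch of the same request, assuming the requests library purely for illustration:

    # Illustration of the 'reverse' query parameter covered by the tests above.
    import requests  # assumed available; any HTTP client works the same way

    def list_containers(storage_url, token, reverse=False, prefix=None):
        params = {'format': 'json'}
        if reverse:
            params['reverse'] = 'on'  # 'true', '1', 'yes', 't', 'y' also work
        if prefix is not None:
            params['prefix'] = prefix
        resp = requests.get(storage_url, params=params,
                            headers={'X-Auth-Token': token})
        resp.raise_for_status()
        return [c['name'] for c in resp.json()]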
class TestContainerEnv(object):
@classmethod
def setUp(cls):
@@ -398,48 +506,48 @@ class TestContainer(Base):
limit + 1, limit + 10, limit + 100):
cont = self.env.account.container('a' * l)
if l <= limit:
- self.assert_(cont.create())
+ self.assertTrue(cont.create())
self.assert_status(201)
else:
- self.assert_(not cont.create())
+ self.assertFalse(cont.create())
self.assert_status(400)
def testFileThenContainerDelete(self):
cont = self.env.account.container(Utils.create_name())
- self.assert_(cont.create())
+ self.assertTrue(cont.create())
file_item = cont.file(Utils.create_name())
- self.assert_(file_item.write_random())
+ self.assertTrue(file_item.write_random())
- self.assert_(file_item.delete())
+ self.assertTrue(file_item.delete())
self.assert_status(204)
- self.assert_(file_item.name not in cont.files())
+ self.assertNotIn(file_item.name, cont.files())
- self.assert_(cont.delete())
+ self.assertTrue(cont.delete())
self.assert_status(204)
- self.assert_(cont.name not in self.env.account.containers())
+ self.assertNotIn(cont.name, self.env.account.containers())
def testFileListingLimitMarkerPrefix(self):
cont = self.env.account.container(Utils.create_name())
- self.assert_(cont.create())
+ self.assertTrue(cont.create())
- files = sorted([Utils.create_name() for x in xrange(10)])
+ files = sorted([Utils.create_name() for x in range(10)])
for f in files:
file_item = cont.file(f)
- self.assert_(file_item.write_random())
+ self.assertTrue(file_item.write_random())
- for i in xrange(len(files)):
+ for i in range(len(files)):
f = files[i]
- for j in xrange(1, len(files) - i):
- self.assert_(cont.files(parms={'limit': j, 'marker': f}) ==
- files[i + 1: i + j + 1])
- self.assert_(cont.files(parms={'marker': f}) == files[i + 1:])
- self.assert_(cont.files(parms={'marker': f, 'prefix': f}) == [])
- self.assert_(cont.files(parms={'prefix': f}) == [f])
+ for j in range(1, len(files) - i):
+ self.assertEqual(cont.files(parms={'limit': j, 'marker': f}),
+ files[i + 1: i + j + 1])
+ self.assertEqual(cont.files(parms={'marker': f}), files[i + 1:])
+ self.assertEqual(cont.files(parms={'marker': f, 'prefix': f}), [])
+ self.assertEqual(cont.files(parms={'prefix': f}), [f])
def testPrefixAndLimit(self):
load_constraint('container_listing_limit')
cont = self.env.account.container(Utils.create_name())
- self.assert_(cont.create())
+ self.assertTrue(cont.create())
prefix_file_count = 10
limit_count = 2
@@ -456,23 +564,93 @@ class TestContainer(Base):
for format_type in [None, 'json', 'xml']:
for prefix in prefixs:
- files = cont.files(parms={'prefix': prefix})
+ files = cont.files(parms={'prefix': prefix,
+ 'format': format_type})
+ if isinstance(files[0], dict):
+ files = [x.get('name', x.get('subdir')) for x in files]
self.assertEqual(files, sorted(prefix_files[prefix]))
for format_type in [None, 'json', 'xml']:
for prefix in prefixs:
files = cont.files(parms={'limit': limit_count,
- 'prefix': prefix})
+ 'prefix': prefix,
+ 'format': format_type})
+ if isinstance(files[0], dict):
+ files = [x.get('name', x.get('subdir')) for x in files]
self.assertEqual(len(files), limit_count)
for file_item in files:
- self.assert_(file_item.startswith(prefix))
+ self.assertTrue(file_item.startswith(prefix))
+
+ def testListDelimiter(self):
+ raise SkipTest("Bad Test")
+ cont = self.env.account.container(Utils.create_name())
+ self.assertTrue(cont.create())
+
+ delimiter = '-'
+ files = ['test', delimiter.join(['test', 'bar']),
+ delimiter.join(['test', 'foo'])]
+ for f in files:
+ file_item = cont.file(f)
+ self.assertTrue(file_item.write_random())
+
+ for format_type in [None, 'json', 'xml']:
+ results = cont.files(parms={'format': format_type})
+ if isinstance(results[0], dict):
+ results = [x.get('name', x.get('subdir')) for x in results]
+ self.assertEqual(results, ['test', 'test-bar', 'test-foo'])
+
+ results = cont.files(parms={'delimiter': delimiter,
+ 'format': format_type})
+ if isinstance(results[0], dict):
+ results = [x.get('name', x.get('subdir')) for x in results]
+ self.assertEqual(results, ['test', 'test-'])
+
+ results = cont.files(parms={'delimiter': delimiter,
+ 'format': format_type,
+ 'reverse': 'yes'})
+ if isinstance(results[0], dict):
+ results = [x.get('name', x.get('subdir')) for x in results]
+ self.assertEqual(results, ['test-', 'test'])
+
+ def testListDelimiterAndPrefix(self):
+ cont = self.env.account.container(Utils.create_name())
+ self.assertTrue(cont.create())
+
+ delimiter = 'a'
+ files = ['bar', 'bazar']
+ for f in files:
+ file_item = cont.file(f)
+ self.assertTrue(file_item.write_random())
+
+ results = cont.files(parms={'delimiter': delimiter, 'prefix': 'ba'})
+ self.assertEqual(results, ['bar', 'baza'])
+
+ results = cont.files(parms={'delimiter': delimiter,
+ 'prefix': 'ba',
+ 'reverse': 'yes'})
+ self.assertEqual(results, ['baza', 'bar'])
+
+ def testLeadingDelimiter(self):
+ raise SkipTest("Bad Test, NO support for double // ")
+ cont = self.env.account.container(Utils.create_name())
+ self.assertTrue(cont.create())
+
+ delimiter = '/'
+ files = ['test', delimiter.join(['', 'test', 'bar']),
+ delimiter.join(['', 'test', 'bar', 'foo'])]
+ for f in files:
+ file_item = cont.file(f)
+ self.assertTrue(file_item.write_random())
+
+ results = cont.files(parms={'delimiter': delimiter})
+ self.assertEqual(results, [delimiter, 'test'])
def testCreate(self):
cont = self.env.account.container(Utils.create_name())
- self.assert_(cont.create())
+ self.assertTrue(cont.create())
self.assert_status(201)
- self.assert_(cont.name in self.env.account.containers())
+ self.assertIn(cont.name, self.env.account.containers())
def testContainerFileListOnContainerThatDoesNotExist(self):
for format_type in [None, 'json', 'xml']:
@@ -485,13 +663,13 @@ class TestContainer(Base):
valid_utf8 = Utils.create_utf8_name()
invalid_utf8 = valid_utf8[::-1]
container = self.env.account.container(valid_utf8)
- self.assert_(container.create(cfg={'no_path_quote': True}))
- self.assert_(container.name in self.env.account.containers())
+ self.assertTrue(container.create(cfg={'no_path_quote': True}))
+ self.assertIn(container.name, self.env.account.containers())
self.assertEqual(container.files(), [])
- self.assert_(container.delete())
+ self.assertTrue(container.delete())
container = self.env.account.container(invalid_utf8)
- self.assert_(not container.create(cfg={'no_path_quote': True}))
+ self.assertFalse(container.create(cfg={'no_path_quote': True}))
self.assert_status(412)
self.assertRaises(ResponseError, container.files,
cfg={'no_path_quote': True})
@@ -499,14 +677,14 @@ class TestContainer(Base):
def testCreateOnExisting(self):
cont = self.env.account.container(Utils.create_name())
- self.assert_(cont.create())
+ self.assertTrue(cont.create())
self.assert_status(201)
- self.assert_(cont.create())
+ self.assertTrue(cont.create())
self.assert_status(202)
def testSlashInName(self):
if Utils.create_name == Utils.create_utf8_name:
- cont_name = list(unicode(Utils.create_name(), 'utf-8'))
+ cont_name = list(six.text_type(Utils.create_name(), 'utf-8'))
else:
cont_name = list(Utils.create_name())
@@ -517,31 +695,31 @@ class TestContainer(Base):
cont_name = cont_name.encode('utf-8')
cont = self.env.account.container(cont_name)
- self.assert_(not cont.create(cfg={'no_path_quote': True}),
- 'created container with name %s' % (cont_name))
+ self.assertFalse(cont.create(cfg={'no_path_quote': True}),
+ 'created container with name %s' % (cont_name))
self.assert_status(404)
- self.assert_(cont.name not in self.env.account.containers())
+ self.assertNotIn(cont.name, self.env.account.containers())
def testDelete(self):
cont = self.env.account.container(Utils.create_name())
- self.assert_(cont.create())
+ self.assertTrue(cont.create())
self.assert_status(201)
- self.assert_(cont.delete())
+ self.assertTrue(cont.delete())
self.assert_status(204)
- self.assert_(cont.name not in self.env.account.containers())
+ self.assertNotIn(cont.name, self.env.account.containers())
def testDeleteOnContainerThatDoesNotExist(self):
cont = self.env.account.container(Utils.create_name())
- self.assert_(not cont.delete())
+ self.assertFalse(cont.delete())
self.assert_status(404)
def testDeleteOnContainerWithFiles(self):
cont = self.env.account.container(Utils.create_name())
- self.assert_(cont.create())
+ self.assertTrue(cont.create())
file_item = cont.file(Utils.create_name())
file_item.write_random(self.env.file_size)
- self.assert_(file_item.name in cont.files())
- self.assert_(not cont.delete())
+ self.assertIn(file_item.name, cont.files())
+ self.assertFalse(cont.delete())
self.assert_status(409)
def testFileCreateInContainerThatDoesNotExist(self):
@@ -573,10 +751,34 @@ class TestContainer(Base):
files = [x['name'] for x in files]
for file_item in self.env.files:
- self.assert_(file_item in files)
+ self.assertIn(file_item, files)
for file_item in files:
- self.assert_(file_item in self.env.files)
+ self.assertIn(file_item, self.env.files)
+
+ def _testContainerFormattedFileList(self, format_type):
+ expected = {}
+ for name in self.env.files:
+ expected[name] = self.env.container.file(name).info()
+
+ file_list = self.env.container.files(parms={'format': format_type})
+ self.assert_status(200)
+ for actual in file_list:
+ name = actual['name']
+ self.assertIn(name, expected)
+ self.assertEqual(expected[name]['etag'], actual['hash'])
+ self.assertEqual(
+ expected[name]['content_type'], actual['content_type'])
+ self.assertEqual(
+ expected[name]['content_length'], actual['bytes'])
+ expected.pop(name)
+ self.assertFalse(expected) # sanity check
+
+ def testContainerJsonFileList(self):
+ self._testContainerFormattedFileList('json')
+
+ def testContainerXmlFileList(self):
+ self._testContainerFormattedFileList('xml')
def testMarkerLimitFileList(self):
for format_type in [None, 'json', 'xml']:
@@ -593,11 +795,11 @@ class TestContainer(Base):
if isinstance(files[0], dict):
files = [x['name'] for x in files]
- self.assert_(len(files) <= limit)
+ self.assertLessEqual(len(files), limit)
if files:
if isinstance(files[0], dict):
files = [x['name'] for x in files]
- self.assert_(locale.strcoll(files[0], marker) > 0)
+ self.assertGreater(locale.strcoll(files[0], marker), 0)
def testFileOrder(self):
for format_type in [None, 'json', 'xml']:
@@ -626,31 +828,183 @@ class TestContainer(Base):
def testTooLongName(self):
cont = self.env.account.container('x' * 257)
- self.assert_(not cont.create(),
- 'created container with name %s' % (cont.name))
+ self.assertFalse(cont.create(),
+ 'created container with name %s' % (cont.name))
self.assert_status(400)
def testContainerExistenceCachingProblem(self):
cont = self.env.account.container(Utils.create_name())
self.assertRaises(ResponseError, cont.files)
- self.assert_(cont.create())
+ self.assertTrue(cont.create())
cont.files()
cont = self.env.account.container(Utils.create_name())
self.assertRaises(ResponseError, cont.files)
- self.assert_(cont.create())
+ self.assertTrue(cont.create())
file_item = cont.file(Utils.create_name())
file_item.write_random()
+ def testContainerLastModified(self):
+ raise SkipTest("NA")
+ container = self.env.account.container(Utils.create_name())
+ self.assertTrue(container.create())
+ info = container.info()
+ t0 = info['last_modified']
+        # the last-modified header only has one-second resolution, so wait
+        # a second so that the next update produces a newer timestamp.
+ eventlet.sleep(1)
+
+ # POST container change last modified timestamp
+ self.assertTrue(
+ container.update_metadata({'x-container-meta-japan': 'mitaka'}))
+ info = container.info()
+ t1 = info['last_modified']
+ self.assertNotEqual(t0, t1)
+ eventlet.sleep(1)
+
+ # PUT container (overwrite) also change last modified
+ self.assertTrue(container.create())
+ info = container.info()
+ t2 = info['last_modified']
+ self.assertNotEqual(t1, t2)
+ eventlet.sleep(1)
+
+ # PUT object doesn't change container last modified timestamp
+ obj = container.file(Utils.create_name())
+ self.assertTrue(
+ obj.write("aaaaa", hdrs={'Content-Type': 'text/plain'}))
+ info = container.info()
+ t3 = info['last_modified']
+ self.assertEqual(t2, t3)
+
+ # POST object also doesn't change container last modified timestamp
+ self.assertTrue(
+ obj.sync_metadata({'us': 'austin'}))
+ info = container.info()
+ t4 = info['last_modified']
+ self.assertEqual(t2, t4)
+
class TestContainerUTF8(Base2, TestContainer):
set_up = False
+class TestContainerSortingEnv(object):
+ @classmethod
+ def setUp(cls):
+ cls.conn = Connection(tf.config)
+ cls.conn.authenticate()
+ cls.account = Account(cls.conn, tf.config.get('account',
+ tf.config['username']))
+ cls.account.delete_containers()
+
+ cls.container = cls.account.container(Utils.create_name())
+ if not cls.container.create():
+ raise ResponseError(cls.conn.response)
+
+ cls.file_items = ('a1', 'a2', 'A3', 'b1', 'B2', 'a10', 'b10', 'zz')
+ cls.files = list()
+ cls.file_size = 128
+ for name in cls.file_items:
+ file_item = cls.container.file(name)
+ file_item.write_random(cls.file_size)
+ cls.files.append(file_item.name)
+
+
+class TestContainerSorting(Base):
+ env = TestContainerSortingEnv
+ set_up = False
+
+ def testContainerFileListSortingReversed(self):
+ file_list = list(sorted(self.env.file_items))
+ file_list.reverse()
+ for reverse in ('true', '1', 'yes', 'on', 't', 'y'):
+ cont_files = self.env.container.files(parms={'reverse': reverse})
+ self.assert_status(200)
+ self.assertEqual(file_list, cont_files,
+ 'Expected %s but got %s with reverse param %r'
+ % (file_list, cont_files, reverse))
+
+ def testContainerFileSortingByPrefixReversed(self):
+ cont_list = sorted(c for c in self.env.file_items if c.startswith('a'))
+ cont_list.reverse()
+ cont_listing = self.env.container.files(parms={
+ 'reverse': 'on', 'prefix': 'a'})
+ self.assert_status(200)
+ self.assertEqual(cont_list, cont_listing)
+
+ def testContainerFileSortingByMarkersExclusiveReversed(self):
+        first_item = self.env.file_items[3]  # 'b1'
+        last_item = self.env.file_items[4]  # 'B2'
+
+ cont_list = sorted(c for c in self.env.file_items
+ if last_item < c < first_item)
+ cont_list.reverse()
+ cont_listing = self.env.container.files(parms={
+ 'reverse': 'on', 'marker': first_item, 'end_marker': last_item})
+ self.assert_status(200)
+ self.assertEqual(cont_list, cont_listing)
+
+ def testContainerFileSortingByMarkersInclusiveReversed(self):
+        first_item = self.env.file_items[3]  # 'b1'
+        last_item = self.env.file_items[4]  # 'B2'
+
+ cont_list = sorted(c for c in self.env.file_items
+ if last_item <= c <= first_item)
+ cont_list.reverse()
+ cont_listing = self.env.container.files(parms={
+ 'reverse': 'on', 'marker': first_item + '\x00',
+ 'end_marker': last_item[:-1] + chr(ord(last_item[-1]) - 1)})
+ self.assert_status(200)
+ self.assertEqual(cont_list, cont_listing)
+
+ def testContainerFileSortingByReversedMarkersReversed(self):
+ cont_listing = self.env.container.files(parms={
+ 'reverse': 'on', 'marker': 'B', 'end_marker': 'b1'})
+ self.assert_status(204)
+ self.assertEqual([], cont_listing)
+
+ def testContainerFileListSorting(self):
+ file_list = list(sorted(self.env.file_items))
+ cont_files = self.env.container.files()
+ self.assert_status(200)
+ self.assertEqual(file_list, cont_files)
+
+        # Let's try again, but with reverse specifically turned off
+ cont_files = self.env.container.files(parms={'reverse': 'off'})
+ self.assert_status(200)
+ self.assertEqual(file_list, cont_files)
+
+ cont_files = self.env.container.files(parms={'reverse': 'false'})
+ self.assert_status(200)
+ self.assertEqual(file_list, cont_files)
+
+ cont_files = self.env.container.files(parms={'reverse': 'no'})
+ self.assert_status(200)
+ self.assertEqual(file_list, cont_files)
+
+ cont_files = self.env.container.files(parms={'reverse': ''})
+ self.assert_status(200)
+ self.assertEqual(file_list, cont_files)
+
+        # Let's try again, but with incorrect reverse values
+ cont_files = self.env.container.files(parms={'reverse': 'foo'})
+ self.assert_status(200)
+ self.assertEqual(file_list, cont_files)
+
+ cont_files = self.env.container.files(parms={'reverse': 'hai'})
+ self.assert_status(200)
+ self.assertEqual(file_list, cont_files)
+
+ cont_files = self.env.container.files(parms={'reverse': 'o=[]::::>'})
+ self.assert_status(200)
+ self.assertEqual(file_list, cont_files)
+
+
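(Illustrative sketch, not part of the patch.) The *MarkersInclusive cases above lean on marker and end_marker being exclusive bounds: appending '\x00' to the marker and decrementing the last byte of end_marker pulls both endpoints back into the reversed listing. Sketched on its own, with the same example names the tests use:

    # The marker nudge used by the *MarkersInclusive tests: both bounds are
    # exclusive, so shift each by one byte to include the endpoints.
    def inclusive_reverse_params(first_item, last_item):
        return {
            'reverse': 'on',
            'marker': first_item + '\x00',  # just past first_item
            'end_marker': last_item[:-1] + chr(ord(last_item[-1]) - 1),
        }

    params = inclusive_reverse_params('b1', 'B2')
    # params['marker'] == 'b1\x00', params['end_marker'] == 'B1'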
class TestContainerPathsEnv(object):
@classmethod
def setUp(cls):
- raise SkipTest('Objects ending in / are not supported')
+ raise SkipTest('Objects ending in / are not supported')
cls.conn = Connection(tf.config)
cls.conn.authenticate()
cls.account = Account(cls.conn, tf.config.get('account',
@@ -737,7 +1091,7 @@ class TestContainerPaths(Base):
raise ValueError('too deep recursion')
for file_item in self.env.container.files(parms={'path': path}):
- self.assert_(file_item.startswith(path))
+ self.assertTrue(file_item.startswith(path))
if file_item.endswith('/'):
recurse_path(file_item, count + 1)
found_dirs.append(file_item)
@@ -747,28 +1101,28 @@ class TestContainerPaths(Base):
recurse_path('')
for file_item in self.env.stored_files:
if file_item.startswith('/'):
- self.assert_(file_item not in found_dirs)
- self.assert_(file_item not in found_files)
+ self.assertNotIn(file_item, found_dirs)
+ self.assertNotIn(file_item, found_files)
elif file_item.endswith('/'):
- self.assert_(file_item in found_dirs)
- self.assert_(file_item not in found_files)
+ self.assertIn(file_item, found_dirs)
+ self.assertNotIn(file_item, found_files)
else:
- self.assert_(file_item in found_files)
- self.assert_(file_item not in found_dirs)
+ self.assertIn(file_item, found_files)
+ self.assertNotIn(file_item, found_dirs)
found_files = []
found_dirs = []
recurse_path('/')
for file_item in self.env.stored_files:
if not file_item.startswith('/'):
- self.assert_(file_item not in found_dirs)
- self.assert_(file_item not in found_files)
+ self.assertNotIn(file_item, found_dirs)
+ self.assertNotIn(file_item, found_files)
elif file_item.endswith('/'):
- self.assert_(file_item in found_dirs)
- self.assert_(file_item not in found_files)
+ self.assertIn(file_item, found_dirs)
+ self.assertNotIn(file_item, found_files)
else:
- self.assert_(file_item in found_files)
- self.assert_(file_item not in found_dirs)
+ self.assertIn(file_item, found_files)
+ self.assertNotIn(file_item, found_dirs)
def testContainerListing(self):
for format_type in (None, 'json', 'xml'):
@@ -782,8 +1136,8 @@ class TestContainerPaths(Base):
for format_type in ('json', 'xml'):
for file_item in self.env.container.files(parms={'format':
format_type}):
- self.assert_(int(file_item['bytes']) >= 0)
- self.assert_('last_modified' in file_item)
+ self.assertGreaterEqual(int(file_item['bytes']), 0)
+ self.assertIn('last_modified', file_item)
if file_item['name'].endswith('/'):
self.assertEqual(file_item['content_type'],
'application/directory')
@@ -855,6 +1209,15 @@ class TestFileEnv(object):
cls.file_size = 128
+ # With keystoneauth we need the accounts to have had the project
+ # domain id persisted as sysmeta prior to testing ACLs. This may
+ # not be the case if, for example, the account was created using
+ # a request with reseller_admin role, when project domain id may
+ # not have been known. So we ensure that the project domain id is
+ # in sysmeta by making a POST to the accounts using an admin role.
+ cls.account.update_metadata()
+ cls.account2.update_metadata()
+
class TestFileDev(Base):
env = TestFileEnv
@@ -875,31 +1238,179 @@ class TestFile(Base):
file_item = self.env.container.file(source_filename)
metadata = {}
- for i in range(1):
- metadata[Utils.create_ascii_name()] = Utils.create_name()
+ metadata[Utils.create_ascii_name()] = Utils.create_name()
+ put_headers = {'Content-Type': 'application/test',
+ 'Content-Encoding': 'gzip',
+ 'Content-Disposition': 'attachment; filename=myfile'}
+ file_item.metadata = metadata
+ data = file_item.write_random(hdrs=put_headers)
+
+ # the allowed headers are configurable in object server, so we cannot
+ # assert that content-encoding and content-disposition get *copied*
+ # unless they were successfully set on the original PUT, so populate
+ # expected_headers by making a HEAD on the original object
+ file_item.initialize()
+ self.assertEqual('application/test', file_item.content_type)
+ resp_headers = dict(file_item.conn.response.getheaders())
+ expected_headers = {}
+ for k, v in put_headers.items():
+ if k.lower() in resp_headers:
+ expected_headers[k] = v
- data = file_item.write_random()
+ dest_cont = self.env.account.container(Utils.create_name())
+ self.assertTrue(dest_cont.create())
+
+ # copy both from within and across containers
+ for cont in (self.env.container, dest_cont):
+ # copy both with and without initial slash
+ for prefix in ('', '/'):
+ dest_filename = Utils.create_name()
+
+ extra_hdrs = {'X-Object-Meta-Extra': 'fresh'}
+ self.assertTrue(file_item.copy(
+ '%s%s' % (prefix, cont), dest_filename, hdrs=extra_hdrs))
+
+ # verify container listing for copy
+ listing = cont.files(parms={'format': 'json'})
+ for obj in listing:
+ if obj['name'] == dest_filename:
+ break
+ else:
+ self.fail('Failed to find %s in listing' % dest_filename)
+
+ self.assertEqual(file_item.size, obj['bytes'])
+ self.assertEqual(file_item.etag, obj['hash'])
+ self.assertEqual(file_item.content_type, obj['content_type'])
+
+ file_copy = cont.file(dest_filename)
+
+ self.assertEqual(data, file_copy.read())
+ self.assertTrue(file_copy.initialize())
+ expected_metadata = dict(metadata)
+ # new metadata should be merged with existing
+ expected_metadata['extra'] = 'fresh'
+ self.assertDictEqual(expected_metadata, file_copy.metadata)
+ resp_headers = dict(file_copy.conn.response.getheaders())
+ for k, v in expected_headers.items():
+ self.assertIn(k.lower(), resp_headers)
+ self.assertEqual(v, resp_headers[k.lower()])
+
+ # repeat copy with updated content-type, content-encoding and
+ # content-disposition, which should get updated
+ extra_hdrs = {
+ 'X-Object-Meta-Extra': 'fresher',
+ 'Content-Type': 'application/test-changed',
+ 'Content-Encoding': 'not_gzip',
+ 'Content-Disposition': 'attachment; filename=notmyfile'}
+ self.assertTrue(file_item.copy(
+ '%s%s' % (prefix, cont), dest_filename, hdrs=extra_hdrs))
+
+ self.assertIn(dest_filename, cont.files())
+
+ file_copy = cont.file(dest_filename)
+
+ self.assertEqual(data, file_copy.read())
+ self.assertTrue(file_copy.initialize())
+ expected_metadata['extra'] = 'fresher'
+ self.assertDictEqual(expected_metadata, file_copy.metadata)
+ resp_headers = dict(file_copy.conn.response.getheaders())
+ # if k is in expected_headers then we can assert its new value
+ for k, v in expected_headers.items():
+ v = extra_hdrs.get(k, v)
+ self.assertIn(k.lower(), resp_headers)
+ self.assertEqual(v, resp_headers[k.lower()])
+
+ # verify container listing for copy
+ listing = cont.files(parms={'format': 'json'})
+ for obj in listing:
+ if obj['name'] == dest_filename:
+ break
+ else:
+ self.fail('Failed to find %s in listing' % dest_filename)
+
+ self.assertEqual(file_item.size, obj['bytes'])
+ self.assertEqual(file_item.etag, obj['hash'])
+ self.assertEqual(
+ 'application/test-changed', obj['content_type'])
+
+ # repeat copy with X-Fresh-Metadata header - existing user
+ # metadata should not be copied, new completely replaces it.
+ extra_hdrs = {'Content-Type': 'application/test-updated',
+ 'X-Object-Meta-Extra': 'fresher',
+ 'X-Fresh-Metadata': 'true'}
+ self.assertTrue(file_item.copy(
+ '%s%s' % (prefix, cont), dest_filename, hdrs=extra_hdrs))
+
+ self.assertIn(dest_filename, cont.files())
+
+ file_copy = cont.file(dest_filename)
+
+ self.assertEqual(data, file_copy.read())
+ self.assertTrue(file_copy.initialize())
+ self.assertEqual('application/test-updated',
+ file_copy.content_type)
+ expected_metadata = {'extra': 'fresher'}
+ self.assertDictEqual(expected_metadata, file_copy.metadata)
+ resp_headers = dict(file_copy.conn.response.getheaders())
+ for k in ('Content-Disposition', 'Content-Encoding'):
+ self.assertNotIn(k.lower(), resp_headers)
+
+ # verify container listing for copy
+ listing = cont.files(parms={'format': 'json'})
+ for obj in listing:
+ if obj['name'] == dest_filename:
+ break
+ else:
+ self.fail('Failed to find %s in listing' % dest_filename)
+
+ self.assertEqual(file_item.size, obj['bytes'])
+ self.assertEqual(file_item.etag, obj['hash'])
+ self.assertEqual(
+ 'application/test-updated', obj['content_type'])
+
+ def testCopyRange(self):
+ # makes sure to test encoded characters
+ source_filename = 'dealde%2Fl04 011e%204c8df/flash.png'
+ file_item = self.env.container.file(source_filename)
+
+ metadata = {Utils.create_ascii_name(): Utils.create_name()}
+
+ data = file_item.write_random(1024)
file_item.sync_metadata(metadata)
+ file_item.initialize()
dest_cont = self.env.account.container(Utils.create_name())
- self.assert_(dest_cont.create())
+ self.assertTrue(dest_cont.create())
+ expected_body = data[100:201]
+ expected_etag = hashlib.md5(expected_body)
# copy both from within and across containers
for cont in (self.env.container, dest_cont):
# copy both with and without initial slash
for prefix in ('', '/'):
dest_filename = Utils.create_name()
- file_item = self.env.container.file(source_filename)
- file_item.copy('%s%s' % (prefix, cont), dest_filename)
+ file_item.copy('%s%s' % (prefix, cont), dest_filename,
+ hdrs={'Range': 'bytes=100-200'})
+ self.assertEqual(201, file_item.conn.response.status)
- self.assert_(dest_filename in cont.files())
+ # verify container listing for copy
+ listing = cont.files(parms={'format': 'json'})
+ for obj in listing:
+ if obj['name'] == dest_filename:
+ break
+ else:
+ self.fail('Failed to find %s in listing' % dest_filename)
- file_item = cont.file(dest_filename)
+ self.assertEqual(101, obj['bytes'])
+ self.assertEqual(expected_etag.hexdigest(), obj['hash'])
+ self.assertEqual(file_item.content_type, obj['content_type'])
- self.assert_(data == file_item.read())
- self.assert_(file_item.initialize())
- self.assert_(metadata == file_item.metadata)
+ # verify copy object
+ copy_file_item = cont.file(dest_filename)
+ self.assertEqual(expected_body, copy_file_item.read())
+ self.assertTrue(copy_file_item.initialize())
+ self.assertEqual(metadata, copy_file_item.metadata)
def testCopyAccount(self):
# makes sure to test encoded characters
@@ -912,7 +1423,7 @@ class TestFile(Base):
file_item.sync_metadata(metadata)
dest_cont = self.env.account.container(Utils.create_name())
- self.assert_(dest_cont.create())
+ self.assertTrue(dest_cont.create())
acct = self.env.conn.account_name
# copy both from within and across containers
@@ -926,16 +1437,16 @@ class TestFile(Base):
'%s%s' % (prefix, cont),
dest_filename)
- self.assert_(dest_filename in cont.files())
+ self.assertIn(dest_filename, cont.files())
file_item = cont.file(dest_filename)
- self.assert_(data == file_item.read())
- self.assert_(file_item.initialize())
- self.assert_(metadata == file_item.metadata)
+ self.assertEqual(data, file_item.read())
+ self.assertTrue(file_item.initialize())
+ self.assertEqual(metadata, file_item.metadata)
dest_cont = self.env.account2.container(Utils.create_name())
- self.assert_(dest_cont.create(hdrs={
+ self.assertTrue(dest_cont.create(hdrs={
'X-Container-Write': self.env.conn.user_acl
}))
@@ -949,13 +1460,13 @@ class TestFile(Base):
'%s%s' % (prefix, dest_cont),
dest_filename)
- self.assert_(dest_filename in dest_cont.files())
+ self.assertIn(dest_filename, dest_cont.files())
file_item = dest_cont.file(dest_filename)
- self.assert_(data == file_item.read())
- self.assert_(file_item.initialize())
- self.assert_(metadata == file_item.metadata)
+ self.assertEqual(data, file_item.read())
+ self.assertTrue(file_item.initialize())
+ self.assertEqual(metadata, file_item.metadata)
def testCopy404s(self):
source_filename = Utils.create_name()
@@ -963,35 +1474,35 @@ class TestFile(Base):
file_item.write_random()
dest_cont = self.env.account.container(Utils.create_name())
- self.assert_(dest_cont.create())
+ self.assertTrue(dest_cont.create())
for prefix in ('', '/'):
# invalid source container
source_cont = self.env.account.container(Utils.create_name())
file_item = source_cont.file(source_filename)
- self.assert_(not file_item.copy(
+ self.assertFalse(file_item.copy(
'%s%s' % (prefix, self.env.container),
Utils.create_name()))
self.assert_status(404)
- self.assert_(not file_item.copy('%s%s' % (prefix, dest_cont),
- Utils.create_name()))
+ self.assertFalse(file_item.copy('%s%s' % (prefix, dest_cont),
+ Utils.create_name()))
self.assert_status(404)
# invalid source object
file_item = self.env.container.file(Utils.create_name())
- self.assert_(not file_item.copy(
+ self.assertFalse(file_item.copy(
'%s%s' % (prefix, self.env.container),
Utils.create_name()))
self.assert_status(404)
- self.assert_(not file_item.copy('%s%s' % (prefix, dest_cont),
+ self.assertFalse(file_item.copy('%s%s' % (prefix, dest_cont),
Utils.create_name()))
self.assert_status(404)
# invalid destination container
file_item = self.env.container.file(source_filename)
- self.assert_(not file_item.copy(
+ self.assertFalse(file_item.copy(
'%s%s' % (prefix, Utils.create_name()),
Utils.create_name()))
@@ -1003,11 +1514,11 @@ class TestFile(Base):
file_item.write_random()
dest_cont = self.env.account.container(Utils.create_name())
- self.assert_(dest_cont.create(hdrs={
+ self.assertTrue(dest_cont.create(hdrs={
'X-Container-Read': self.env.conn2.user_acl
}))
dest_cont2 = self.env.account2.container(Utils.create_name())
- self.assert_(dest_cont2.create(hdrs={
+ self.assertTrue(dest_cont2.create(hdrs={
'X-Container-Write': self.env.conn.user_acl,
'X-Container-Read': self.env.conn.user_acl
}))
@@ -1017,18 +1528,16 @@ class TestFile(Base):
# invalid source container
source_cont = self.env.account.container(Utils.create_name())
file_item = source_cont.file(source_filename)
- self.assert_(not file_item.copy_account(
+ self.assertFalse(file_item.copy_account(
acct,
'%s%s' % (prefix, self.env.container),
Utils.create_name()))
- if acct == acct2:
- # there is no such source container
- # and foreign user can have no permission to read it
- self.assert_status(403)
- else:
- self.assert_status(404)
+ # there is no such source container but user has
+ # permissions to do a GET (done internally via COPY) for
+ # objects in his own account.
+ self.assert_status(404)
- self.assert_(not file_item.copy_account(
+ self.assertFalse(file_item.copy_account(
acct,
'%s%s' % (prefix, cont),
Utils.create_name()))
@@ -1036,18 +1545,16 @@ class TestFile(Base):
# invalid source object
file_item = self.env.container.file(Utils.create_name())
- self.assert_(not file_item.copy_account(
+ self.assertFalse(file_item.copy_account(
acct,
'%s%s' % (prefix, self.env.container),
Utils.create_name()))
- if acct == acct2:
- # there is no such object
- # and foreign user can have no permission to read it
- self.assert_status(403)
- else:
- self.assert_status(404)
+            # there is no such source object but user has
+ # permissions to do a GET (done internally via COPY) for
+ # objects in his own account.
+ self.assert_status(404)
- self.assert_(not file_item.copy_account(
+ self.assertFalse(file_item.copy_account(
acct,
'%s%s' % (prefix, cont),
Utils.create_name()))
@@ -1055,7 +1562,7 @@ class TestFile(Base):
# invalid destination container
file_item = self.env.container.file(source_filename)
- self.assert_(not file_item.copy_account(
+ self.assertFalse(file_item.copy_account(
acct,
'%s%s' % (prefix, Utils.create_name()),
Utils.create_name()))
@@ -1072,9 +1579,9 @@ class TestFile(Base):
file_item.write_random()
file_item = self.env.container.file(source_filename)
- self.assert_(not file_item.copy(Utils.create_name(),
- Utils.create_name(),
- cfg={'no_destination': True}))
+ self.assertFalse(file_item.copy(Utils.create_name(),
+ Utils.create_name(),
+ cfg={'no_destination': True}))
self.assert_status(412)
def testCopyDestinationSlashProblems(self):
@@ -1083,9 +1590,9 @@ class TestFile(Base):
file_item.write_random()
# no slash
- self.assert_(not file_item.copy(Utils.create_name(),
- Utils.create_name(),
- cfg={'destination': Utils.create_name()}))
+ self.assertFalse(file_item.copy(Utils.create_name(),
+ Utils.create_name(),
+ cfg={'destination': Utils.create_name()}))
self.assert_status(412)
def testCopyFromHeader(self):
@@ -1100,7 +1607,7 @@ class TestFile(Base):
data = file_item.write_random()
dest_cont = self.env.account.container(Utils.create_name())
- self.assert_(dest_cont.create())
+ self.assertTrue(dest_cont.create())
# copy both from within and across containers
for cont in (self.env.container, dest_cont):
@@ -1112,18 +1619,18 @@ class TestFile(Base):
file_item.write(hdrs={'X-Copy-From': '%s%s/%s' % (
prefix, self.env.container.name, source_filename)})
- self.assert_(dest_filename in cont.files())
+ self.assertIn(dest_filename, cont.files())
file_item = cont.file(dest_filename)
- self.assert_(data == file_item.read())
- self.assert_(file_item.initialize())
- self.assert_(metadata == file_item.metadata)
+ self.assertEqual(data, file_item.read())
+ self.assertTrue(file_item.initialize())
+ self.assertEqual(metadata, file_item.metadata)
def testCopyFromAccountHeader(self):
acct = self.env.conn.account_name
src_cont = self.env.account.container(Utils.create_name())
- self.assert_(src_cont.create(hdrs={
+ self.assertTrue(src_cont.create(hdrs={
'X-Container-Read': self.env.conn2.user_acl
}))
source_filename = Utils.create_name()
@@ -1137,9 +1644,9 @@ class TestFile(Base):
data = file_item.write_random()
dest_cont = self.env.account.container(Utils.create_name())
- self.assert_(dest_cont.create())
+ self.assertTrue(dest_cont.create())
dest_cont2 = self.env.account2.container(Utils.create_name())
- self.assert_(dest_cont2.create(hdrs={
+ self.assertTrue(dest_cont2.create(hdrs={
'X-Container-Write': self.env.conn.user_acl
}))
@@ -1155,13 +1662,13 @@ class TestFile(Base):
src_cont.name,
source_filename)})
- self.assert_(dest_filename in cont.files())
+ self.assertIn(dest_filename, cont.files())
file_item = cont.file(dest_filename)
- self.assert_(data == file_item.read())
- self.assert_(file_item.initialize())
- self.assert_(metadata == file_item.metadata)
+ self.assertEqual(data, file_item.read())
+ self.assertTrue(file_item.initialize())
+ self.assertEqual(metadata, file_item.metadata)
def testCopyFromHeader404s(self):
source_filename = Utils.create_name()
@@ -1171,40 +1678,41 @@ class TestFile(Base):
for prefix in ('', '/'):
# invalid source container
file_item = self.env.container.file(Utils.create_name())
+ copy_from = ('%s%s/%s'
+ % (prefix, Utils.create_name(), source_filename))
self.assertRaises(ResponseError, file_item.write,
- hdrs={'X-Copy-From': '%s%s/%s' %
- (prefix,
- Utils.create_name(), source_filename)})
+ hdrs={'X-Copy-From': copy_from})
self.assert_status(404)
# invalid source object
+ copy_from = ('%s%s/%s'
+ % (prefix, self.env.container.name,
+ Utils.create_name()))
file_item = self.env.container.file(Utils.create_name())
self.assertRaises(ResponseError, file_item.write,
- hdrs={'X-Copy-From': '%s%s/%s' %
- (prefix,
- self.env.container.name, Utils.create_name())})
+ hdrs={'X-Copy-From': copy_from})
self.assert_status(404)
# invalid destination container
dest_cont = self.env.account.container(Utils.create_name())
file_item = dest_cont.file(Utils.create_name())
+ copy_from = ('%s%s/%s'
+ % (prefix, self.env.container.name, source_filename))
self.assertRaises(ResponseError, file_item.write,
- hdrs={'X-Copy-From': '%s%s/%s' %
- (prefix,
- self.env.container.name, source_filename)})
+ hdrs={'X-Copy-From': copy_from})
self.assert_status(404)
def testCopyFromAccountHeader404s(self):
acct = self.env.conn2.account_name
src_cont = self.env.account2.container(Utils.create_name())
- self.assert_(src_cont.create(hdrs={
+ self.assertTrue(src_cont.create(hdrs={
'X-Container-Read': self.env.conn.user_acl
}))
source_filename = Utils.create_name()
file_item = src_cont.file(source_filename)
file_item.write_random()
dest_cont = self.env.account.container(Utils.create_name())
- self.assert_(dest_cont.create())
+ self.assertTrue(dest_cont.create())
for prefix in ('', '/'):
# invalid source container
@@ -1247,7 +1755,7 @@ class TestFile(Base):
file_item = self.env.container.file(create_limit_filename(l))
if l <= limit:
- self.assert_(file_item.write())
+ self.assertTrue(file_item.write())
self.assert_status(201)
else:
self.assertRaises(ResponseError, file_item.write)
@@ -1262,16 +1770,16 @@ class TestFile(Base):
file_name = Utils.create_name(6) + '?' + Utils.create_name(6)
file_item = self.env.container.file(file_name)
- self.assert_(file_item.write(cfg={'no_path_quote': True}))
- self.assert_(file_name not in self.env.container.files())
- self.assert_(file_name.split('?')[0] in self.env.container.files())
+ self.assertTrue(file_item.write(cfg={'no_path_quote': True}))
+ self.assertNotIn(file_name, self.env.container.files())
+ self.assertIn(file_name.split('?')[0], self.env.container.files())
def testDeleteThen404s(self):
file_item = self.env.container.file(Utils.create_name())
- self.assert_(file_item.write_random())
+ self.assertTrue(file_item.write_random())
self.assert_status(201)
- self.assert_(file_item.delete())
+ self.assertTrue(file_item.delete())
self.assert_status(204)
file_item.metadata = {Utils.create_ascii_name(): Utils.create_name()}
@@ -1301,7 +1809,6 @@ class TestFile(Base):
j = size_limit / (i * 2)
- size = 0
metadata = {}
while len(metadata.keys()) < i:
key = Utils.create_ascii_name()
@@ -1311,28 +1818,24 @@ class TestFile(Base):
if len(key) > j:
key = key[:j]
# This slicing done below can make the 'utf8' byte
- # sequence invalid and hence it cannot be decoded.
+ # sequence invalid and hence it cannot be decoded
val = val[:j]
- size += len(key) + len(val)
metadata[key] = val
file_item = self.env.container.file(Utils.create_name())
file_item.metadata = metadata
if i <= number_limit:
- self.assert_(file_item.write())
+ self.assertTrue(file_item.write())
self.assert_status(201)
- self.assert_(file_item.sync_metadata())
+ self.assertTrue(file_item.sync_metadata())
self.assert_status((201, 202))
- self.assert_(file_item.initialize())
- self.assert_status(200)
- self.assertEqual(file_item.metadata, metadata)
else:
self.assertRaises(ResponseError, file_item.write)
self.assert_status(400)
file_item.metadata = {}
- self.assert_(file_item.write())
+ self.assertTrue(file_item.write())
self.assert_status(201)
file_item.metadata = metadata
self.assertRaises(ResponseError, file_item.sync_metadata)
@@ -1343,7 +1846,7 @@ class TestFile(Base):
'zip': 'application/zip'}
container = self.env.account.container(Utils.create_name())
- self.assert_(container.create())
+ self.assertTrue(container.create())
for i in file_types.keys():
file_item = container.file(Utils.create_name() + '.' + i)
@@ -1369,8 +1872,9 @@ class TestFile(Base):
for i in range(0, file_length, range_size):
range_string = 'bytes=%d-%d' % (i, i + range_size - 1)
hdrs = {'Range': range_string}
- self.assert_(data[i: i + range_size] == file_item.read(hdrs=hdrs),
- range_string)
+ self.assertEqual(
+ data[i: i + range_size], file_item.read(hdrs=hdrs),
+ range_string)
range_string = 'bytes=-%d' % (i)
hdrs = {'Range': range_string}
@@ -1384,33 +1888,185 @@ class TestFile(Base):
self.assert_status(416)
else:
self.assertEqual(file_item.read(hdrs=hdrs), data[-i:])
+ self.assert_header('etag', file_item.md5)
+ self.assert_header('accept-ranges', 'bytes')
range_string = 'bytes=%d-' % (i)
hdrs = {'Range': range_string}
- self.assert_(file_item.read(hdrs=hdrs) == data[i - file_length:],
- range_string)
+ self.assertEqual(
+ file_item.read(hdrs=hdrs), data[i - file_length:],
+ range_string)
range_string = 'bytes=%d-%d' % (file_length + 1000, file_length + 2000)
hdrs = {'Range': range_string}
self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
self.assert_status(416)
+ self.assert_header('etag', file_item.md5)
+ self.assert_header('accept-ranges', 'bytes')
range_string = 'bytes=%d-%d' % (file_length - 1000, file_length + 2000)
hdrs = {'Range': range_string}
- self.assert_(file_item.read(hdrs=hdrs) == data[-1000:], range_string)
+ self.assertEqual(file_item.read(hdrs=hdrs), data[-1000:], range_string)
hdrs = {'Range': '0-4'}
- self.assert_(file_item.read(hdrs=hdrs) == data, range_string)
+ self.assertEqual(file_item.read(hdrs=hdrs), data, '0-4')
# RFC 2616 14.35.1
# "If the entity is shorter than the specified suffix-length, the
# entire entity-body is used."
range_string = 'bytes=-%d' % (file_length + 10)
hdrs = {'Range': range_string}
- self.assert_(file_item.read(hdrs=hdrs) == data, range_string)
+ self.assertEqual(file_item.read(hdrs=hdrs), data, range_string)
+
+ def testMultiRangeGets(self):
+ file_length = 10000
+ range_size = file_length / 10
+ subrange_size = range_size / 10
+ file_item = self.env.container.file(Utils.create_name())
+ data = file_item.write_random(
+ file_length, hdrs={"Content-Type":
+ "lovecraft/rugose; squamous=true"})
+
+ for i in range(0, file_length, range_size):
+ range_string = 'bytes=%d-%d,%d-%d,%d-%d' % (
+ i, i + subrange_size - 1,
+ i + 2 * subrange_size, i + 3 * subrange_size - 1,
+ i + 4 * subrange_size, i + 5 * subrange_size - 1)
+ hdrs = {'Range': range_string}
+
+ fetched = file_item.read(hdrs=hdrs)
+ self.assert_status(206)
+ content_type = file_item.content_type
+ self.assertTrue(content_type.startswith("multipart/byteranges"))
+ self.assertIsNone(file_item.content_range)
+
+ # email.parser.FeedParser wants a message with headers on the
+ # front, then two CRLFs, and then a body (like emails have but
+ # HTTP response bodies don't). We fake it out by constructing a
+ # one-header preamble containing just the Content-Type, then
+ # feeding in the response body.
+ parser = email.parser.FeedParser()
+ parser.feed("Content-Type: %s\r\n\r\n" % content_type)
+ parser.feed(fetched)
+ root_message = parser.close()
+ self.assertTrue(root_message.is_multipart())
+
+ byteranges = root_message.get_payload()
+ self.assertEqual(len(byteranges), 3)
+
+ self.assertEqual(byteranges[0]['Content-Type'],
+ "lovecraft/rugose; squamous=true")
+ self.assertEqual(
+ byteranges[0]['Content-Range'],
+ "bytes %d-%d/%d" % (i, i + subrange_size - 1, file_length))
+ self.assertEqual(
+ byteranges[0].get_payload(),
+ data[i:(i + subrange_size)])
+
+ self.assertEqual(byteranges[1]['Content-Type'],
+ "lovecraft/rugose; squamous=true")
+ self.assertEqual(
+ byteranges[1]['Content-Range'],
+ "bytes %d-%d/%d" % (i + 2 * subrange_size,
+ i + 3 * subrange_size - 1, file_length))
+ self.assertEqual(
+ byteranges[1].get_payload(),
+ data[(i + 2 * subrange_size):(i + 3 * subrange_size)])
+
+ self.assertEqual(byteranges[2]['Content-Type'],
+ "lovecraft/rugose; squamous=true")
+ self.assertEqual(
+ byteranges[2]['Content-Range'],
+ "bytes %d-%d/%d" % (i + 4 * subrange_size,
+ i + 5 * subrange_size - 1, file_length))
+ self.assertEqual(
+ byteranges[2].get_payload(),
+ data[(i + 4 * subrange_size):(i + 5 * subrange_size)])
+
+ # The first two ranges are satisfiable but the third is not; the
+ # result is a multipart/byteranges response containing only the two
+ # satisfiable byteranges.
+ range_string = 'bytes=%d-%d,%d-%d,%d-%d' % (
+ 0, subrange_size - 1,
+ 2 * subrange_size, 3 * subrange_size - 1,
+ file_length, file_length + subrange_size - 1)
+ hdrs = {'Range': range_string}
+ fetched = file_item.read(hdrs=hdrs)
+ self.assert_status(206)
+ content_type = file_item.content_type
+ self.assertTrue(content_type.startswith("multipart/byteranges"))
+ self.assertIsNone(file_item.content_range)
+
+ parser = email.parser.FeedParser()
+ parser.feed("Content-Type: %s\r\n\r\n" % content_type)
+ parser.feed(fetched)
+ root_message = parser.close()
+
+ self.assertTrue(root_message.is_multipart())
+ byteranges = root_message.get_payload()
+ self.assertEqual(len(byteranges), 2)
+
+ self.assertEqual(byteranges[0]['Content-Type'],
+ "lovecraft/rugose; squamous=true")
+ self.assertEqual(
+ byteranges[0]['Content-Range'],
+ "bytes %d-%d/%d" % (0, subrange_size - 1, file_length))
+ self.assertEqual(byteranges[0].get_payload(), data[:subrange_size])
+
+ self.assertEqual(byteranges[1]['Content-Type'],
+ "lovecraft/rugose; squamous=true")
+ self.assertEqual(
+ byteranges[1]['Content-Range'],
+ "bytes %d-%d/%d" % (2 * subrange_size, 3 * subrange_size - 1,
+ file_length))
+ self.assertEqual(
+ byteranges[1].get_payload(),
+ data[(2 * subrange_size):(3 * subrange_size)])
+
+ # The first range is satisfiable but the second is not; the
+ # result is either a multipart/byteranges response containing one
+ # byterange or a normal, non-MIME 206 response.
+ range_string = 'bytes=%d-%d,%d-%d' % (
+ 0, subrange_size - 1,
+ file_length, file_length + subrange_size - 1)
+ hdrs = {'Range': range_string}
+ fetched = file_item.read(hdrs=hdrs)
+ self.assert_status(206)
+ content_type = file_item.content_type
+ if content_type.startswith("multipart/byteranges"):
+ self.assertIsNone(file_item.content_range)
+ parser = email.parser.FeedParser()
+ parser.feed("Content-Type: %s\r\n\r\n" % content_type)
+ parser.feed(fetched)
+ root_message = parser.close()
+
+ self.assertTrue(root_message.is_multipart())
+ byteranges = root_message.get_payload()
+ self.assertEqual(len(byteranges), 1)
+
+ self.assertEqual(byteranges[0]['Content-Type'],
+ "lovecraft/rugose; squamous=true")
+ self.assertEqual(
+ byteranges[0]['Content-Range'],
+ "bytes %d-%d/%d" % (0, subrange_size - 1, file_length))
+ self.assertEqual(byteranges[0].get_payload(), data[:subrange_size])
+ else:
+ self.assertEqual(
+ file_item.content_range,
+ "bytes %d-%d/%d" % (0, subrange_size - 1, file_length))
+ self.assertEqual(content_type, "lovecraft/rugose; squamous=true")
+ self.assertEqual(fetched, data[:subrange_size])
+
+ # No byterange is satisfiable, so we get a 416 response.
+ range_string = 'bytes=%d-%d,%d-%d' % (
+ file_length, file_length + 2,
+ file_length + 100, file_length + 102)
+ hdrs = {'Range': range_string}
+
+ self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
+ self.assert_status(416)
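The FeedParser trick described in the comment inside testMultiRangeGets above generalizes to any multipart/byteranges body. A minimal sketch, assuming the raw body and its Content-Type header are already in hand (the helper name and variables are illustrative, not part of this suite):

import email.parser

def parse_byteranges(content_type, body):
    # Fake a MIME message: a one-header preamble carrying the response's
    # Content-Type, a blank line, then the multipart/byteranges body.
    parser = email.parser.FeedParser()
    parser.feed("Content-Type: %s\r\n\r\n" % content_type)
    parser.feed(body)
    message = parser.close()
    # Each part carries its own Content-Type and Content-Range headers,
    # with the bytes for that range as its payload.
    return [(part['Content-Range'], part.get_payload())
            for part in message.get_payload()]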
def testRangedGetsWithLWSinHeader(self):
- #Skip this test until webob 1.2 can tolerate LWS in Range header.
file_length = 10000
file_item = self.env.container.file(Utils.create_name())
data = file_item.write_random(file_length)
@@ -1418,7 +2074,7 @@ class TestFile(Base):
for r in ('BYTES=0-999', 'bytes = 0-999', 'BYTES = 0 - 999',
'bytes = 0 - 999', 'bytes=0 - 999', 'bytes=0-999 '):
- self.assert_(file_item.read(hdrs={'Range': r}) == data[0:1000])
+ self.assertEqual(file_item.read(hdrs={'Range': r}), data[0:1000])
def testFileSizeLimit(self):
limit = load_constraint('max_file_size')
@@ -1433,14 +2089,24 @@ class TestFile(Base):
else:
return False
+ # This loop will result in fallocate calls for 4x the limit
+ # (minus 111 bytes). With fallocate turned on in the object servers,
+ # this may fail if you don't have 4x the limit available on your
+ # data drives.
+
+ # Note that this test does not actually send any data to the system.
+ # All it does is ensure that a response (success or failure) comes
+ # back within 3 seconds. For the successful tests (size smaller
+ # than limit), the cluster will log a 499.
+
for i in (limit - 100, limit - 10, limit - 1, limit, limit + 1,
limit + 10, limit + 100):
file_item = self.env.container.file(Utils.create_name())
if i <= limit:
- self.assert_(timeout(tsecs, file_item.write,
- cfg={'set_content_length': i}))
+ self.assertTrue(timeout(tsecs, file_item.write,
+ cfg={'set_content_length': i}))
else:
self.assertRaises(ResponseError, timeout, tsecs,
file_item.write,
@@ -1456,9 +2122,9 @@ class TestFile(Base):
file_item = self.env.container.file(Utils.create_name())
file_item.write_random(self.env.file_size)
- self.assert_(file_item.name in self.env.container.files())
- self.assert_(file_item.delete())
- self.assert_(file_item.name not in self.env.container.files())
+ self.assertIn(file_item.name, self.env.container.files())
+ self.assertTrue(file_item.delete())
+ self.assertNotIn(file_item.name, self.env.container.files())
def testBadHeaders(self):
file_length = 100
@@ -1485,15 +2151,16 @@ class TestFile(Base):
self.assert_status(501)
# bad request types
- #for req in ('LICK', 'GETorHEAD_base', 'container_info',
- # 'best_response'):
+ # for req in ('LICK', 'GETorHEAD_base', 'container_info',
+ # 'best_response'):
for req in ('LICK', 'GETorHEAD_base'):
self.env.account.conn.make_request(req)
self.assert_status(405)
# bad range headers
- self.assert_(len(file_item.read(hdrs={'Range': 'parsecs=8-12'})) ==
- file_length)
+ self.assertEqual(
+ len(file_item.read(hdrs={'Range': 'parsecs=8-12'})),
+ file_length)
self.assert_status(200)
def testMetadataLengthLimits(self):
@@ -1510,17 +2177,14 @@ class TestFile(Base):
file_item.metadata = metadata
if l[0] <= key_limit and l[1] <= value_limit:
- self.assert_(file_item.write())
+ self.assertTrue(file_item.write())
self.assert_status(201)
- self.assert_(file_item.sync_metadata())
- self.assert_(file_item.initialize())
- self.assert_status(200)
- self.assertEqual(file_item.metadata, metadata)
+ self.assertTrue(file_item.sync_metadata())
else:
self.assertRaises(ResponseError, file_item.write)
self.assert_status(400)
file_item.metadata = {}
- self.assert_(file_item.write())
+ self.assertTrue(file_item.write())
self.assert_status(201)
file_item.metadata = metadata
self.assertRaises(ResponseError, file_item.sync_metadata)
@@ -1537,7 +2201,7 @@ class TestFile(Base):
file_item = self.env.container.file(Utils.create_name())
data = file_item.write_random()
self.assert_status(201)
- self.assert_(data == file_item.read())
+ self.assertEqual(data, file_item.read())
self.assert_status(200)
def testHead(self):
@@ -1557,7 +2221,7 @@ class TestFile(Base):
self.assertEqual(info['content_length'], self.env.file_size)
self.assertEqual(info['etag'], md5)
self.assertEqual(info['content_type'], content_type)
- self.assert_('last_modified' in info)
+ self.assertIn('last_modified', info)
def testDeleteOfFileThatDoesNotExist(self):
# in container that exists
@@ -1593,11 +2257,11 @@ class TestFile(Base):
metadata[Utils.create_ascii_name()] = Utils.create_name()
file_item.metadata = metadata
- self.assert_(file_item.sync_metadata())
+ self.assertTrue(file_item.sync_metadata())
self.assert_status((201, 202))
file_item = self.env.container.file(file_item.name)
- self.assert_(file_item.initialize())
+ self.assertTrue(file_item.initialize())
self.assert_status(200)
self.assertEqual(file_item.metadata, metadata)
@@ -1651,13 +2315,13 @@ class TestFile(Base):
file_item.write_random(self.env.file_size)
file_item = self.env.container.file(file_item.name)
- self.assert_(file_item.initialize())
+ self.assertTrue(file_item.initialize())
self.assert_status(200)
self.assertEqual(file_item.metadata, metadata)
def testSerialization(self):
container = self.env.account.container(Utils.create_name())
- self.assert_(container.create())
+ self.assertTrue(container.create())
files = []
for i in (0, 1, 10, 100, 1000, 10000):
@@ -1699,8 +2363,9 @@ class TestFile(Base):
f[format_type] = True
found = True
- self.assert_(found, 'Unexpected file %s found in '
- '%s listing' % (file_item['name'], format_type))
+ self.assertTrue(
+ found, 'Unexpected file %s found in '
+ '%s listing' % (file_item['name'], format_type))
headers = dict(self.env.conn.response.getheaders())
if format_type == 'json':
@@ -1712,13 +2377,15 @@ class TestFile(Base):
lm_diff = max([f['last_modified'] for f in files]) -\
min([f['last_modified'] for f in files])
- self.assert_(lm_diff < write_time + 1, 'Diff in last '
- 'modified times should be less than time to write files')
+ self.assertLess(lm_diff, write_time + 1,
+ 'Diff in last modified times '
+ 'should be less than time to write files')
for f in files:
for format_type in ['json', 'xml']:
- self.assert_(f[format_type], 'File %s not found in %s listing'
- % (f['name'], format_type))
+ self.assertTrue(
+ f[format_type], 'File %s not found in %s listing'
+ % (f['name'], format_type))
def testStackedOverwrite(self):
file_item = self.env.container.file(Utils.create_name())
@@ -1727,7 +2394,7 @@ class TestFile(Base):
data = file_item.write_random(512)
file_item.write(data)
- self.assert_(file_item.read() == data)
+ self.assertEqual(file_item.read(), data)
def testTooLongName(self):
file_item = self.env.container.file('x' * 1025)
@@ -1737,18 +2404,18 @@ class TestFile(Base):
def testZeroByteFile(self):
file_item = self.env.container.file(Utils.create_name())
- self.assert_(file_item.write(''))
- self.assert_(file_item.name in self.env.container.files())
- self.assert_(file_item.read() == '')
+ self.assertTrue(file_item.write(''))
+ self.assertIn(file_item.name, self.env.container.files())
+ self.assertEqual(file_item.read(), '')
def testEtagResponse(self):
file_item = self.env.container.file(Utils.create_name())
- data = StringIO.StringIO(file_item.write_random(512))
+ data = six.StringIO(file_item.write_random(512))
etag = File.compute_md5sum(data)
headers = dict(self.env.conn.response.getheaders())
- self.assert_('etag' in headers.keys())
+ self.assertIn('etag', headers.keys())
header_etag = headers['etag'].strip('"')
self.assertEqual(etag, header_etag)
@@ -1773,12 +2440,63 @@ class TestFile(Base):
for j in chunks(data, i):
file_item.chunked_write(j)
- self.assert_(file_item.chunked_write())
- self.assert_(data == file_item.read())
+ self.assertTrue(file_item.chunked_write())
+ self.assertEqual(data, file_item.read())
info = file_item.info()
self.assertEqual(etag, info['etag'])
+ def test_POST(self):
+ raise SkipTest("Gluster preserves orig sys metadata - invalid test")
+ # verify consistency between object and container listing metadata
+ file_name = Utils.create_name()
+ file_item = self.env.container.file(file_name)
+ file_item.content_type = 'text/foobar'
+ file_item.write_random(1024)
+
+ # sanity check
+ file_item = self.env.container.file(file_name)
+ file_item.initialize()
+ self.assertEqual('text/foobar', file_item.content_type)
+ self.assertEqual(1024, file_item.size)
+ etag = file_item.etag
+
+ # check container listing is consistent
+ listing = self.env.container.files(parms={'format': 'json'})
+ for f_dict in listing:
+ if f_dict['name'] == file_name:
+ break
+ else:
+ self.fail('Failed to find file %r in listing' % file_name)
+ self.assertEqual(1024, f_dict['bytes'])
+ self.assertEqual('text/foobar', f_dict['content_type'])
+ self.assertEqual(etag, f_dict['hash'])
+
+ # now POST updated content-type to each file
+ file_item = self.env.container.file(file_name)
+ file_item.content_type = 'image/foobarbaz'
+ file_item.sync_metadata({'Test': 'blah'})
+
+ # sanity check object metadata
+ file_item = self.env.container.file(file_name)
+ file_item.initialize()
+
+ self.assertEqual(1024, file_item.size)
+ self.assertEqual('image/foobarbaz', file_item.content_type)
+ self.assertEqual(etag, file_item.etag)
+ self.assertIn('test', file_item.metadata)
+
+ # check for consistency between object and container listing
+ listing = self.env.container.files(parms={'format': 'json'})
+ for f_dict in listing:
+ if f_dict['name'] == file_name:
+ break
+ else:
+ self.fail('Failed to find file %r in listing' % file_name)
+ self.assertEqual(1024, f_dict['bytes'])
+ self.assertEqual('image/foobarbaz', f_dict['content_type'])
+ self.assertEqual(etag, f_dict['hash'])
+
class TestFileUTF8(Base2, TestFile):
set_up = False
@@ -1789,14 +2507,23 @@ class TestDloEnv(object):
def setUp(cls):
cls.conn = Connection(tf.config)
cls.conn.authenticate()
+
+ config2 = tf.config.copy()
+ config2['username'] = tf.config['username3']
+ config2['password'] = tf.config['password3']
+ cls.conn2 = Connection(config2)
+ cls.conn2.authenticate()
+
cls.account = Account(cls.conn, tf.config.get('account',
tf.config['username']))
cls.account.delete_containers()
cls.container = cls.account.container(Utils.create_name())
+ cls.container2 = cls.account.container(Utils.create_name())
- if not cls.container.create():
- raise ResponseError(cls.conn.response)
+ for cont in (cls.container, cls.container2):
+ if not cont.create():
+ raise ResponseError(cls.conn.response)
# avoid getting a prefix that stops halfway through an encoded
# character
@@ -1810,13 +2537,18 @@ class TestDloEnv(object):
file_item = cls.container.file("%s/seg_upper%s" % (prefix, letter))
file_item.write(letter.upper() * 10)
+ for letter in ('f', 'g', 'h', 'i', 'j'):
+ file_item = cls.container2.file("%s/seg_lower%s" %
+ (prefix, letter))
+ file_item.write(letter * 10)
+
man1 = cls.container.file("man1")
man1.write('man1-contents',
hdrs={"X-Object-Manifest": "%s/%s/seg_lower" %
(cls.container.name, prefix)})
- man1 = cls.container.file("man2")
- man1.write('man2-contents',
+ man2 = cls.container.file("man2")
+ man2.write('man2-contents',
hdrs={"X-Object-Manifest": "%s/%s/seg_upper" %
(cls.container.name, prefix)})
@@ -1825,6 +2557,12 @@ class TestDloEnv(object):
hdrs={"X-Object-Manifest": "%s/%s/seg" %
(cls.container.name, prefix)})
+ mancont2 = cls.container.file("mancont2")
+ mancont2.write(
+ 'mancont2-contents',
+ hdrs={"X-Object-Manifest": "%s/%s/seg_lower" %
+ (cls.container2.name, prefix)})
+
class TestDlo(Base):
env = TestDloEnv
@@ -1892,7 +2630,7 @@ class TestDlo(Base):
file_contents,
"aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeeeffffffffff")
# The copied object must not have X-Object-Manifest
- self.assertTrue("x_object_manifest" not in file_item.info())
+ self.assertNotIn("x_object_manifest", file_item.info())
def test_copy_account(self):
# dlo use same account and same container only
@@ -1918,7 +2656,7 @@ class TestDlo(Base):
file_contents,
"aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeeeffffffffff")
# The copied object must not have X-Object-Manifest
- self.assertTrue("x_object_manifest" not in file_item.info())
+ self.assertNotIn("x_object_manifest", file_item.info())
def test_copy_manifest(self):
# Copying the manifest with multipart-manifest=get query string
@@ -1986,6 +2724,147 @@ class TestDlo(Base):
manifest.info(hdrs={'If-None-Match': "not-%s" % etag})
self.assert_status(200)
+ def test_dlo_referer_on_segment_container(self):
+ # First the account2 (test3) should fail
+ headers = {'X-Auth-Token': self.env.conn2.storage_token,
+ 'Referer': 'http://blah.example.com'}
+ dlo_file = self.env.container.file("mancont2")
+ self.assertRaises(ResponseError, dlo_file.read,
+ hdrs=headers)
+ self.assert_status(403)
+
+ # Now set the referer on the dlo container only
+ referer_metadata = {'X-Container-Read': '.r:*.example.com,.rlistings'}
+ self.env.container.update_metadata(referer_metadata)
+
+ self.assertRaises(ResponseError, dlo_file.read,
+ hdrs=headers)
+ self.assert_status(403)
+
+ # Finally set the referer on the segment container
+ self.env.container2.update_metadata(referer_metadata)
+
+ contents = dlo_file.read(hdrs=headers)
+ self.assertEqual(
+ contents,
+ "ffffffffffgggggggggghhhhhhhhhhiiiiiiiiiijjjjjjjjjj")
+
+ def test_dlo_post_with_manifest_header(self):
+ # verify that performing a POST to a DLO manifest
+ # preserves the fact that it is a manifest file.
+ # verify that the x-object-manifest header may be updated.
+
+ # create a new manifest for this test to avoid test coupling.
+ x_o_m = self.env.container.file('man1').info()['x_object_manifest']
+ file_item = self.env.container.file(Utils.create_name())
+ file_item.write('manifest-contents', hdrs={"X-Object-Manifest": x_o_m})
+
+ # sanity checks
+ manifest_contents = file_item.read(parms={'multipart-manifest': 'get'})
+ self.assertEqual('manifest-contents', manifest_contents)
+ expected_contents = ''.join([(c * 10) for c in 'abcde'])
+ contents = file_item.read(parms={})
+ self.assertEqual(expected_contents, contents)
+
+ # POST a modified x-object-manifest value
+ new_x_o_m = x_o_m.rstrip('lower') + 'upper'
+ file_item.post({'x-object-meta-foo': 'bar',
+ 'x-object-manifest': new_x_o_m})
+
+ # verify that x-object-manifest was updated
+ file_item.info()
+ resp_headers = file_item.conn.response.getheaders()
+ self.assertIn(('x-object-manifest', new_x_o_m), resp_headers)
+ self.assertIn(('x-object-meta-foo', 'bar'), resp_headers)
+
+ # verify that manifest content was not changed
+ manifest_contents = file_item.read(parms={'multipart-manifest': 'get'})
+ self.assertEqual('manifest-contents', manifest_contents)
+
+ # verify that updated manifest points to new content
+ expected_contents = ''.join([(c * 10) for c in 'ABCDE'])
+ contents = file_item.read(parms={})
+ self.assertEqual(expected_contents, contents)
+
+ # Now revert the manifest to point to original segments, including a
+ # multipart-manifest=get param just to check that has no effect
+ file_item.post({'x-object-manifest': x_o_m},
+ parms={'multipart-manifest': 'get'})
+
+ # verify that x-object-manifest was reverted
+ info = file_item.info()
+ self.assertIn('x_object_manifest', info)
+ self.assertEqual(x_o_m, info['x_object_manifest'])
+
+ # verify that manifest content was not changed
+ manifest_contents = file_item.read(parms={'multipart-manifest': 'get'})
+ self.assertEqual('manifest-contents', manifest_contents)
+
+ # verify that the reverted manifest points to the original content
+ expected_contents = ''.join([(c * 10) for c in 'abcde'])
+ contents = file_item.read(parms={})
+ self.assertEqual(expected_contents, contents)
+
+ def test_dlo_post_without_manifest_header(self):
+ # verify that a POST to a DLO manifest object with no
+ # x-object-manifest header will cause the existing x-object-manifest
+ # header to be lost
+
+ # create a new manifest for this test to avoid test coupling.
+ x_o_m = self.env.container.file('man1').info()['x_object_manifest']
+ file_item = self.env.container.file(Utils.create_name())
+ file_item.write('manifest-contents', hdrs={"X-Object-Manifest": x_o_m})
+
+ # sanity checks
+ manifest_contents = file_item.read(parms={'multipart-manifest': 'get'})
+ self.assertEqual('manifest-contents', manifest_contents)
+ expected_contents = ''.join([(c * 10) for c in 'abcde'])
+ contents = file_item.read(parms={})
+ self.assertEqual(expected_contents, contents)
+
+ # POST with no x-object-manifest header
+ file_item.post({})
+
+ # verify that existing x-object-manifest was removed
+ info = file_item.info()
+ self.assertNotIn('x_object_manifest', info)
+
+ # verify that object content was not changed
+ manifest_contents = file_item.read(parms={'multipart-manifest': 'get'})
+ self.assertEqual('manifest-contents', manifest_contents)
+
+ # verify that object is no longer a manifest
+ contents = file_item.read(parms={})
+ self.assertEqual('manifest-contents', contents)
+
+ def test_dlo_post_with_manifest_regular_object(self):
+ # verify that performing a POST to a regular object
+ # with a manifest header will create a DLO.
+
+ # Put a regular object
+ file_item = self.env.container.file(Utils.create_name())
+ file_item.write('file contents', hdrs={})
+
+ # sanity checks
+ file_contents = file_item.read(parms={})
+ self.assertEqual('file contents', file_contents)
+
+ # get the path associated with man1
+ x_o_m = self.env.container.file('man1').info()['x_object_manifest']
+
+ # POST a x-object-manifest value to the regular object
+ file_item.post({'x-object-manifest': x_o_m})
+
+ # verify that the file is now a manifest
+ manifest_contents = file_item.read(parms={'multipart-manifest': 'get'})
+ self.assertEqual('file contents', manifest_contents)
+ expected_contents = ''.join([(c * 10) for c in 'abcde'])
+ contents = file_item.read(parms={})
+ self.assertEqual(expected_contents, contents)
+ file_item.info()
+ resp_headers = file_item.conn.response.getheaders()
+ self.assertIn(('x-object-manifest', x_o_m), resp_headers)
+
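For context on what the DLO tests above exercise: a dynamic large object is an ordinary (often empty) object whose X-Object-Manifest header names "<container>/<prefix>", and a GET of it concatenates every object under that prefix in name order. A rough sketch using the swift_test_client helpers from this suite, where container is an already-created Container and all names are illustrative:

for index, chunk in enumerate(('aaaa', 'bbbb', 'cccc')):
    container.file('photo/seg_%02d' % index).write(chunk)

manifest = container.file('photo')
manifest.write('', hdrs={'X-Object-Manifest': '%s/photo/seg_' %
                         container.name})
# A GET of 'photo' now returns 'aaaabbbbcccc'. As
# test_dlo_post_without_manifest_header shows, a POST that omits
# X-Object-Manifest drops the header and the object reverts to a
# plain (non-manifest) object.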
class TestDloUTF8(Base2, TestDlo):
set_up = False
@@ -2030,82 +2909,118 @@ class TestFileComparison(Base):
def testIfMatch(self):
for file_item in self.env.files:
hdrs = {'If-Match': file_item.md5}
- self.assert_(file_item.read(hdrs=hdrs))
+ self.assertTrue(file_item.read(hdrs=hdrs))
hdrs = {'If-Match': 'bogus'}
self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
self.assert_status(412)
+ self.assert_header('etag', file_item.md5)
+
+ def testIfMatchMultipleEtags(self):
+ for file_item in self.env.files:
+ hdrs = {'If-Match': '"bogus1", "%s", "bogus2"' % file_item.md5}
+ self.assertTrue(file_item.read(hdrs=hdrs))
+
+ hdrs = {'If-Match': '"bogus1", "bogus2", "bogus3"'}
+ self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
+ self.assert_status(412)
+ self.assert_header('etag', file_item.md5)
def testIfNoneMatch(self):
for file_item in self.env.files:
hdrs = {'If-None-Match': 'bogus'}
- self.assert_(file_item.read(hdrs=hdrs))
+ self.assertTrue(file_item.read(hdrs=hdrs))
hdrs = {'If-None-Match': file_item.md5}
self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
self.assert_status(304)
+ self.assert_header('etag', file_item.md5)
+ self.assert_header('accept-ranges', 'bytes')
+
+ def testIfNoneMatchMultipleEtags(self):
+ for file_item in self.env.files:
+ hdrs = {'If-None-Match': '"bogus1", "bogus2", "bogus3"'}
+ self.assertTrue(file_item.read(hdrs=hdrs))
+
+ hdrs = {'If-None-Match':
+ '"bogus1", "bogus2", "%s"' % file_item.md5}
+ self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
+ self.assert_status(304)
+ self.assert_header('etag', file_item.md5)
+ self.assert_header('accept-ranges', 'bytes')
def testIfModifiedSince(self):
for file_item in self.env.files:
hdrs = {'If-Modified-Since': self.env.time_old_f1}
- self.assert_(file_item.read(hdrs=hdrs))
- self.assert_(file_item.info(hdrs=hdrs))
+ self.assertTrue(file_item.read(hdrs=hdrs))
+ self.assertTrue(file_item.info(hdrs=hdrs))
hdrs = {'If-Modified-Since': self.env.time_new}
self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
self.assert_status(304)
+ self.assert_header('etag', file_item.md5)
+ self.assert_header('accept-ranges', 'bytes')
self.assertRaises(ResponseError, file_item.info, hdrs=hdrs)
self.assert_status(304)
+ self.assert_header('etag', file_item.md5)
+ self.assert_header('accept-ranges', 'bytes')
def testIfUnmodifiedSince(self):
for file_item in self.env.files:
hdrs = {'If-Unmodified-Since': self.env.time_new}
- self.assert_(file_item.read(hdrs=hdrs))
- self.assert_(file_item.info(hdrs=hdrs))
+ self.assertTrue(file_item.read(hdrs=hdrs))
+ self.assertTrue(file_item.info(hdrs=hdrs))
hdrs = {'If-Unmodified-Since': self.env.time_old_f2}
self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
self.assert_status(412)
+ self.assert_header('etag', file_item.md5)
self.assertRaises(ResponseError, file_item.info, hdrs=hdrs)
self.assert_status(412)
+ self.assert_header('etag', file_item.md5)
def testIfMatchAndUnmodified(self):
for file_item in self.env.files:
hdrs = {'If-Match': file_item.md5,
'If-Unmodified-Since': self.env.time_new}
- self.assert_(file_item.read(hdrs=hdrs))
+ self.assertTrue(file_item.read(hdrs=hdrs))
hdrs = {'If-Match': 'bogus',
'If-Unmodified-Since': self.env.time_new}
self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
self.assert_status(412)
+ self.assert_header('etag', file_item.md5)
hdrs = {'If-Match': file_item.md5,
'If-Unmodified-Since': self.env.time_old_f3}
self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
self.assert_status(412)
+ self.assert_header('etag', file_item.md5)
def testLastModified(self):
file_name = Utils.create_name()
content_type = Utils.create_name()
- file = self.env.container.file(file_name)
- file.content_type = content_type
- resp = file.write_random_return_resp(self.env.file_size)
+ file_item = self.env.container.file(file_name)
+ file_item.content_type = content_type
+ resp = file_item.write_random_return_resp(self.env.file_size)
put_last_modified = resp.getheader('last-modified')
+ etag = file_item.md5
- file = self.env.container.file(file_name)
- info = file.info()
- self.assert_('last_modified' in info)
+ file_item = self.env.container.file(file_name)
+ info = file_item.info()
+ self.assertIn('last_modified', info)
last_modified = info['last_modified']
self.assertEqual(put_last_modified, info['last_modified'])
hdrs = {'If-Modified-Since': last_modified}
- self.assertRaises(ResponseError, file.read, hdrs=hdrs)
+ self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
self.assert_status(304)
+ self.assert_header('etag', etag)
+ self.assert_header('accept-ranges', 'bytes')
hdrs = {'If-Unmodified-Since': last_modified}
- self.assert_(file.read(hdrs=hdrs))
+ self.assertTrue(file_item.read(hdrs=hdrs))
class TestFileComparisonUTF8(Base2, TestFileComparison):
@@ -2116,6 +3031,23 @@ class TestSloEnv(object):
slo_enabled = None # tri-state: None initially, then True/False
@classmethod
+ def create_segments(cls, container):
+ seg_info = {}
+ for letter, size in (('a', 1024 * 1024),
+ ('b', 1024 * 1024),
+ ('c', 1024 * 1024),
+ ('d', 1024 * 1024),
+ ('e', 1)):
+ seg_name = "seg_%s" % letter
+ file_item = container.file(seg_name)
+ file_item.write(letter * size)
+ seg_info[seg_name] = {
+ 'size_bytes': size,
+ 'etag': file_item.md5,
+ 'path': '/%s/%s' % (container.name, seg_name)}
+ return seg_info
+
+ @classmethod
def setUp(cls):
cls.conn = Connection(tf.config)
cls.conn.authenticate()
@@ -2127,6 +3059,11 @@ class TestSloEnv(object):
cls.conn2.authenticate()
cls.account2 = cls.conn2.get_account()
cls.account2.delete_containers()
+ config3 = tf.config.copy()
+ config3['username'] = tf.config['username3']
+ config3['password'] = tf.config['password3']
+ cls.conn3 = Connection(config3)
+ cls.conn3.authenticate()
if cls.slo_enabled is None:
cls.slo_enabled = 'slo' in cluster_info
@@ -2138,23 +3075,13 @@ class TestSloEnv(object):
cls.account.delete_containers()
cls.container = cls.account.container(Utils.create_name())
+ cls.container2 = cls.account.container(Utils.create_name())
- if not cls.container.create():
- raise ResponseError(cls.conn.response)
+ for cont in (cls.container, cls.container2):
+ if not cont.create():
+ raise ResponseError(cls.conn.response)
- seg_info = {}
- for letter, size in (('a', 1024 * 1024),
- ('b', 1024 * 1024),
- ('c', 1024 * 1024),
- ('d', 1024 * 1024),
- ('e', 1)):
- seg_name = "seg_%s" % letter
- file_item = cls.container.file(seg_name)
- file_item.write(letter * size)
- seg_info[seg_name] = {
- 'size_bytes': size,
- 'etag': file_item.md5,
- 'path': '/%s/%s' % (cls.container.name, seg_name)}
+ cls.seg_info = seg_info = cls.create_segments(cls.container)
file_item = cls.container.file("manifest-abcde")
file_item.write(
@@ -2163,6 +3090,14 @@ class TestSloEnv(object):
seg_info['seg_e']]),
parms={'multipart-manifest': 'put'})
+ # Put the same manifest in the container2
+ file_item = cls.container2.file("manifest-abcde")
+ file_item.write(
+ json.dumps([seg_info['seg_a'], seg_info['seg_b'],
+ seg_info['seg_c'], seg_info['seg_d'],
+ seg_info['seg_e']]),
+ parms={'multipart-manifest': 'put'})
+
file_item = cls.container.file('manifest-cd')
cd_json = json.dumps([seg_info['seg_c'], seg_info['seg_d']])
file_item.write(cd_json, parms={'multipart-manifest': 'put'})
@@ -2193,6 +3128,79 @@ class TestSloEnv(object):
'manifest-bcd-submanifest')},
seg_info['seg_e']]),
parms={'multipart-manifest': 'put'})
+ abcde_submanifest_etag = hashlib.md5(
+ seg_info['seg_a']['etag'] + bcd_submanifest_etag +
+ seg_info['seg_e']['etag']).hexdigest()
+ abcde_submanifest_size = (seg_info['seg_a']['size_bytes'] +
+ seg_info['seg_b']['size_bytes'] +
+ seg_info['seg_c']['size_bytes'] +
+ seg_info['seg_d']['size_bytes'] +
+ seg_info['seg_e']['size_bytes'])
+
+ file_item = cls.container.file("ranged-manifest")
+ file_item.write(
+ json.dumps([
+ {'etag': abcde_submanifest_etag,
+ 'size_bytes': abcde_submanifest_size,
+ 'path': '/%s/%s' % (cls.container.name,
+ 'manifest-abcde-submanifest'),
+ 'range': '-1048578'}, # 'c' + ('d' * 2**20) + 'e'
+ {'etag': abcde_submanifest_etag,
+ 'size_bytes': abcde_submanifest_size,
+ 'path': '/%s/%s' % (cls.container.name,
+ 'manifest-abcde-submanifest'),
+ 'range': '524288-1572863'}, # 'a' * 2**19 + 'b' * 2**19
+ {'etag': abcde_submanifest_etag,
+ 'size_bytes': abcde_submanifest_size,
+ 'path': '/%s/%s' % (cls.container.name,
+ 'manifest-abcde-submanifest'),
+ 'range': '3145727-3145728'}]), # 'cd'
+ parms={'multipart-manifest': 'put'})
+ ranged_manifest_etag = hashlib.md5(
+ abcde_submanifest_etag + ':3145727-4194304;' +
+ abcde_submanifest_etag + ':524288-1572863;' +
+ abcde_submanifest_etag + ':3145727-3145728;').hexdigest()
+ ranged_manifest_size = 2 * 1024 * 1024 + 4
+
+ file_item = cls.container.file("ranged-submanifest")
+ file_item.write(
+ json.dumps([
+ seg_info['seg_c'],
+ {'etag': ranged_manifest_etag,
+ 'size_bytes': ranged_manifest_size,
+ 'path': '/%s/%s' % (cls.container.name,
+ 'ranged-manifest')},
+ {'etag': ranged_manifest_etag,
+ 'size_bytes': ranged_manifest_size,
+ 'path': '/%s/%s' % (cls.container.name,
+ 'ranged-manifest'),
+ 'range': '524289-1572865'},
+ {'etag': ranged_manifest_etag,
+ 'size_bytes': ranged_manifest_size,
+ 'path': '/%s/%s' % (cls.container.name,
+ 'ranged-manifest'),
+ 'range': '-3'}]),
+ parms={'multipart-manifest': 'put'})
+
+ file_item = cls.container.file("manifest-db")
+ file_item.write(
+ json.dumps([
+ {'path': seg_info['seg_d']['path'], 'etag': None,
+ 'size_bytes': None},
+ {'path': seg_info['seg_b']['path'], 'etag': None,
+ 'size_bytes': None},
+ ]), parms={'multipart-manifest': 'put'})
+
+ file_item = cls.container.file("ranged-manifest-repeated-segment")
+ file_item.write(
+ json.dumps([
+ {'path': seg_info['seg_a']['path'], 'etag': None,
+ 'size_bytes': None, 'range': '-1048578'},
+ {'path': seg_info['seg_a']['path'], 'etag': None,
+ 'size_bytes': None},
+ {'path': seg_info['seg_b']['path'], 'etag': None,
+ 'size_bytes': None, 'range': '-1048578'},
+ ]), parms={'multipart-manifest': 'put'})
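For reference, the SLO fixtures above follow the usual static-large-object conventions: the manifest body is a JSON list of {path, etag, size_bytes[, range]} entries uploaded with ?multipart-manifest=put, and the stored manifest's etag is the md5 of the concatenated segment etags, with ':start-end;' appended for ranged entries, as in the submanifest etag calculations above. A small illustrative calculation (the paths and segment contents are made up):

import hashlib
import json

segments = [
    {'path': '/segcont/seg_a',
     'etag': hashlib.md5('a' * 1024 * 1024).hexdigest(),
     'size_bytes': 1024 * 1024},
    {'path': '/segcont/seg_b',
     'etag': hashlib.md5('b' * 1024 * 1024).hexdigest(),
     'size_bytes': 1024 * 1024,
     'range': '0-524287'},
]
manifest_body = json.dumps(segments)   # PUT ...?multipart-manifest=put
expected_manifest_etag = hashlib.md5(
    segments[0]['etag'] +
    segments[1]['etag'] + ':0-524287;').hexdigest()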
class TestSlo(Base):
@@ -2219,6 +3227,70 @@ class TestSlo(Base):
self.assertEqual('d', file_contents[-2])
self.assertEqual('e', file_contents[-1])
+ def test_slo_container_listing(self):
+ raise SkipTest("Gluster preserves orig sys metadata - invalid test")
+ # the listing object size should equal the sum of the size of the
+ # segments, not the size of the manifest body
+ file_item = self.env.container.file(Utils.create_name())
+ file_item.write(
+ json.dumps([self.env.seg_info['seg_a']]),
+ parms={'multipart-manifest': 'put'})
+ # The container listing has the etag of the actual manifest object
+ # contents which we get using multipart-manifest=get. Arguably this
+ # should be the etag that we get when NOT using multipart-manifest=get,
+ # to be consistent with size and content-type. But here we at least
+ # verify that it remains consistent when the object is updated with a
+ # POST.
+ file_item.initialize(parms={'multipart-manifest': 'get'})
+ expected_etag = file_item.etag
+
+ listing = self.env.container.files(parms={'format': 'json'})
+ for f_dict in listing:
+ if f_dict['name'] == file_item.name:
+ self.assertEqual(1024 * 1024, f_dict['bytes'])
+ self.assertEqual('application/octet-stream',
+ f_dict['content_type'])
+ self.assertEqual(expected_etag, f_dict['hash'])
+ break
+ else:
+ self.fail('Failed to find manifest file in container listing')
+
+ # now POST updated content-type file
+ file_item.content_type = 'image/jpeg'
+ file_item.sync_metadata({'X-Object-Meta-Test': 'blah'})
+ file_item.initialize()
+ self.assertEqual('image/jpeg', file_item.content_type) # sanity
+
+ # verify that the container listing is consistent with the file
+ listing = self.env.container.files(parms={'format': 'json'})
+ for f_dict in listing:
+ if f_dict['name'] == file_item.name:
+ self.assertEqual(1024 * 1024, f_dict['bytes'])
+ self.assertEqual(file_item.content_type,
+ f_dict['content_type'])
+ self.assertEqual(expected_etag, f_dict['hash'])
+ break
+ else:
+ self.fail('Failed to find manifest file in container listing')
+
+ # now POST with no change to content-type
+ file_item.sync_metadata({'X-Object-Meta-Test': 'blah'},
+ cfg={'no_content_type': True})
+ file_item.initialize()
+ self.assertEqual('image/jpeg', file_item.content_type) # sanity
+
+ # verify that the container listing is consistent with the file
+ listing = self.env.container.files(parms={'format': 'json'})
+ for f_dict in listing:
+ if f_dict['name'] == file_item.name:
+ self.assertEqual(1024 * 1024, f_dict['bytes'])
+ self.assertEqual(file_item.content_type,
+ f_dict['content_type'])
+ self.assertEqual(expected_etag, f_dict['hash'])
+ break
+ else:
+ self.fail('Failed to find manifest file in container listing')
+
def test_slo_get_nested_manifest(self):
file_item = self.env.container.file('manifest-abcde-submanifest')
file_contents = file_item.read()
@@ -2229,6 +3301,48 @@ class TestSlo(Base):
self.assertEqual('d', file_contents[-2])
self.assertEqual('e', file_contents[-1])
+ def test_slo_get_ranged_manifest(self):
+ file_item = self.env.container.file('ranged-manifest')
+ grouped_file_contents = [
+ (char, sum(1 for _char in grp))
+ for char, grp in itertools.groupby(file_item.read())]
+ self.assertEqual([
+ ('c', 1),
+ ('d', 1024 * 1024),
+ ('e', 1),
+ ('a', 512 * 1024),
+ ('b', 512 * 1024),
+ ('c', 1),
+ ('d', 1)], grouped_file_contents)
+
+ def test_slo_get_ranged_manifest_repeated_segment(self):
+ file_item = self.env.container.file('ranged-manifest-repeated-segment')
+ grouped_file_contents = [
+ (char, sum(1 for _char in grp))
+ for char, grp in itertools.groupby(file_item.read())]
+ self.assertEqual(
+ [('a', 2097152), ('b', 1048576)],
+ grouped_file_contents)
+
+ def test_slo_get_ranged_submanifest(self):
+ file_item = self.env.container.file('ranged-submanifest')
+ grouped_file_contents = [
+ (char, sum(1 for _char in grp))
+ for char, grp in itertools.groupby(file_item.read())]
+ self.assertEqual([
+ ('c', 1024 * 1024 + 1),
+ ('d', 1024 * 1024),
+ ('e', 1),
+ ('a', 512 * 1024),
+ ('b', 512 * 1024),
+ ('c', 1),
+ ('d', 512 * 1024 + 1),
+ ('e', 1),
+ ('a', 512 * 1024),
+ ('b', 1),
+ ('c', 1),
+ ('d', 1)], grouped_file_contents)
+
def test_slo_ranged_get(self):
file_item = self.env.container.file('manifest-abcde')
file_contents = file_item.read(size=1024 * 1024 + 2,
@@ -2301,6 +3415,69 @@ class TestSlo(Base):
else:
self.fail("Expected ResponseError but didn't get it")
+ def test_slo_unspecified_etag(self):
+ file_item = self.env.container.file("manifest-a-unspecified-etag")
+ file_item.write(
+ json.dumps([{
+ 'size_bytes': 1024 * 1024,
+ 'etag': None,
+ 'path': '/%s/%s' % (self.env.container.name, 'seg_a')}]),
+ parms={'multipart-manifest': 'put'})
+ self.assert_status(201)
+
+ def test_slo_unspecified_size(self):
+ file_item = self.env.container.file("manifest-a-unspecified-size")
+ file_item.write(
+ json.dumps([{
+ 'size_bytes': None,
+ 'etag': hashlib.md5('a' * 1024 * 1024).hexdigest(),
+ 'path': '/%s/%s' % (self.env.container.name, 'seg_a')}]),
+ parms={'multipart-manifest': 'put'})
+ self.assert_status(201)
+
+ def test_slo_missing_etag(self):
+ file_item = self.env.container.file("manifest-a-missing-etag")
+ try:
+ file_item.write(
+ json.dumps([{
+ 'size_bytes': 1024 * 1024,
+ 'path': '/%s/%s' % (self.env.container.name, 'seg_a')}]),
+ parms={'multipart-manifest': 'put'})
+ except ResponseError as err:
+ self.assertEqual(400, err.status)
+ else:
+ self.fail("Expected ResponseError but didn't get it")
+
+ def test_slo_missing_size(self):
+ file_item = self.env.container.file("manifest-a-missing-size")
+ try:
+ file_item.write(
+ json.dumps([{
+ 'etag': hashlib.md5('a' * 1024 * 1024).hexdigest(),
+ 'path': '/%s/%s' % (self.env.container.name, 'seg_a')}]),
+ parms={'multipart-manifest': 'put'})
+ except ResponseError as err:
+ self.assertEqual(400, err.status)
+ else:
+ self.fail("Expected ResponseError but didn't get it")
+
+ def test_slo_overwrite_segment_with_manifest(self):
+ file_item = self.env.container.file("seg_b")
+ with self.assertRaises(ResponseError) as catcher:
+ file_item.write(
+ json.dumps([
+ {'size_bytes': 1024 * 1024,
+ 'etag': hashlib.md5('a' * 1024 * 1024).hexdigest(),
+ 'path': '/%s/%s' % (self.env.container.name, 'seg_a')},
+ {'size_bytes': 1024 * 1024,
+ 'etag': hashlib.md5('b' * 1024 * 1024).hexdigest(),
+ 'path': '/%s/%s' % (self.env.container.name, 'seg_b')},
+ {'size_bytes': 1024 * 1024,
+ 'etag': hashlib.md5('c' * 1024 * 1024).hexdigest(),
+ 'path': '/%s/%s' % (self.env.container.name, 'seg_c')}]),
+ parms={'multipart-manifest': 'put'})
+ self.assertEqual(400, catcher.exception.status)
+
def test_slo_copy(self):
file_item = self.env.container.file("manifest-abcde")
file_item.copy(self.env.container.name, "copied-abcde")
@@ -2322,7 +3499,7 @@ class TestSlo(Base):
# copy to different account
acct = self.env.conn2.account_name
dest_cont = self.env.account2.container(Utils.create_name())
- self.assert_(dest_cont.create(hdrs={
+ self.assertTrue(dest_cont.create(hdrs={
'X-Container-Write': self.env.conn.user_acl
}))
file_item = self.env.container.file("manifest-abcde")
@@ -2333,16 +3510,109 @@ class TestSlo(Base):
self.assertEqual(4 * 1024 * 1024 + 1, len(copied_contents))
def test_slo_copy_the_manifest(self):
- file_item = self.env.container.file("manifest-abcde")
- file_item.copy(self.env.container.name, "copied-abcde-manifest-only",
- parms={'multipart-manifest': 'get'})
+ source = self.env.container.file("manifest-abcde")
+ source_contents = source.read(parms={'multipart-manifest': 'get'})
+ source_json = json.loads(source_contents)
+ source.initialize()
+ self.assertEqual('application/octet-stream', source.content_type)
+ source.initialize(parms={'multipart-manifest': 'get'})
+ source_hash = hashlib.md5()
+ source_hash.update(source_contents)
+ self.assertEqual(source_hash.hexdigest(), source.etag)
+
+ self.assertTrue(source.copy(self.env.container.name,
+ "copied-abcde-manifest-only",
+ parms={'multipart-manifest': 'get'}))
copied = self.env.container.file("copied-abcde-manifest-only")
copied_contents = copied.read(parms={'multipart-manifest': 'get'})
try:
- json.loads(copied_contents)
+ copied_json = json.loads(copied_contents)
+ except ValueError:
+ self.fail("COPY didn't copy the manifest (invalid json on GET)")
+ self.assertEqual(source_json, copied_json)
+ copied.initialize()
+ self.assertEqual('application/octet-stream', copied.content_type)
+ copied.initialize(parms={'multipart-manifest': 'get'})
+ copied_hash = hashlib.md5()
+ copied_hash.update(copied_contents)
+ self.assertEqual(copied_hash.hexdigest(), copied.etag)
+
+ # verify the listing metadata
+ listing = self.env.container.files(parms={'format': 'json'})
+ names = {}
+ for f_dict in listing:
+ if f_dict['name'] in ('manifest-abcde',
+ 'copied-abcde-manifest-only'):
+ names[f_dict['name']] = f_dict
+
+ self.assertIn('manifest-abcde', names)
+ actual = names['manifest-abcde']
+ self.assertEqual(4 * 1024 * 1024 + 1, actual['bytes'])
+ self.assertEqual('application/octet-stream', actual['content_type'])
+ self.assertEqual(source.etag, actual['hash'])
+
+ self.assertIn('copied-abcde-manifest-only', names)
+ actual = names['copied-abcde-manifest-only']
+ self.assertEqual(4 * 1024 * 1024 + 1, actual['bytes'])
+ self.assertEqual('application/octet-stream', actual['content_type'])
+ self.assertEqual(copied.etag, actual['hash'])
+
+ def test_slo_copy_the_manifest_updating_metadata(self):
+ source = self.env.container.file("manifest-abcde")
+ source.content_type = 'application/octet-stream'
+ source.sync_metadata({'test': 'original'})
+ source_contents = source.read(parms={'multipart-manifest': 'get'})
+ source_json = json.loads(source_contents)
+ source.initialize()
+ self.assertEqual('application/octet-stream', source.content_type)
+ source.initialize(parms={'multipart-manifest': 'get'})
+ source_hash = hashlib.md5()
+ source_hash.update(source_contents)
+ self.assertEqual(source_hash.hexdigest(), source.etag)
+ self.assertEqual(source.metadata['test'], 'original')
+
+ self.assertTrue(
+ source.copy(self.env.container.name, "copied-abcde-manifest-only",
+ parms={'multipart-manifest': 'get'},
+ hdrs={'Content-Type': 'image/jpeg',
+ 'X-Object-Meta-Test': 'updated'}))
+
+ copied = self.env.container.file("copied-abcde-manifest-only")
+ copied_contents = copied.read(parms={'multipart-manifest': 'get'})
+ try:
+ copied_json = json.loads(copied_contents)
except ValueError:
self.fail("COPY didn't copy the manifest (invalid json on GET)")
+ self.assertEqual(source_json, copied_json)
+ copied.initialize()
+ self.assertEqual('image/jpeg', copied.content_type)
+ copied.initialize(parms={'multipart-manifest': 'get'})
+ copied_hash = hashlib.md5()
+ copied_hash.update(copied_contents)
+ self.assertEqual(copied_hash.hexdigest(), copied.etag)
+ self.assertEqual(copied.metadata['test'], 'updated')
+
+ # verify the listing metadata
+ listing = self.env.container.files(parms={'format': 'json'})
+ names = {}
+ for f_dict in listing:
+ if f_dict['name'] in ('manifest-abcde',
+ 'copied-abcde-manifest-only'):
+ names[f_dict['name']] = f_dict
+
+ self.assertIn('manifest-abcde', names)
+ actual = names['manifest-abcde']
+ self.assertEqual(4 * 1024 * 1024 + 1, actual['bytes'])
+ self.assertEqual('application/octet-stream', actual['content_type'])
+ # the container listing should have the etag of the manifest contents
+ self.assertEqual(source.etag, actual['hash'])
+
+ self.assertIn('copied-abcde-manifest-only', names)
+ actual = names['copied-abcde-manifest-only']
+ self.assertEqual(4 * 1024 * 1024 + 1, actual['bytes'])
+ self.assertEqual('image/jpeg', actual['content_type'])
+ self.assertEqual(copied.etag, actual['hash'])
def test_slo_copy_the_manifest_account(self):
acct = self.env.conn.account_name
@@ -2363,13 +3633,43 @@ class TestSlo(Base):
# different account
acct = self.env.conn2.account_name
dest_cont = self.env.account2.container(Utils.create_name())
- self.assert_(dest_cont.create(hdrs={
+ self.assertTrue(dest_cont.create(hdrs={
'X-Container-Write': self.env.conn.user_acl
}))
- file_item.copy_account(acct,
- dest_cont,
- "copied-abcde-manifest-only",
- parms={'multipart-manifest': 'get'})
+
+ # manifest copy will fail because there is no read access to segments
+ # in destination account
+ file_item.copy_account(
+ acct, dest_cont, "copied-abcde-manifest-only",
+ parms={'multipart-manifest': 'get'})
+ self.assertEqual(400, file_item.conn.response.status)
+ resp_body = file_item.conn.response.read()
+ self.assertEqual(5, resp_body.count('403 Forbidden'),
+ 'Unexpected response body %r' % resp_body)
+
+ # create segments container in account2 with read access for account1
+ segs_container = self.env.account2.container(self.env.container.name)
+ self.assertTrue(segs_container.create(hdrs={
+ 'X-Container-Read': self.env.conn.user_acl
+ }))
+
+ # manifest copy will still fail because there are no segments in
+ # destination account
+ file_item.copy_account(
+ acct, dest_cont, "copied-abcde-manifest-only",
+ parms={'multipart-manifest': 'get'})
+ self.assertEqual(400, file_item.conn.response.status)
+ resp_body = file_item.conn.response.read()
+ self.assertEqual(5, resp_body.count('404 Not Found'),
+ 'Unexpected response body %r' % resp_body)
+
+ # create segments in account2 container with same name as in account1,
+ # manifest copy now succeeds
+ self.env.create_segments(segs_container)
+
+ self.assertTrue(file_item.copy_account(
+ acct, dest_cont, "copied-abcde-manifest-only",
+ parms={'multipart-manifest': 'get'}))
copied = dest_cont.file("copied-abcde-manifest-only")
copied_contents = copied.read(parms={'multipart-manifest': 'get'})
@@ -2378,6 +3678,58 @@ class TestSlo(Base):
except ValueError:
self.fail("COPY didn't copy the manifest (invalid json on GET)")
+ def _make_manifest(self):
+ file_item = self.env.container.file("manifest-post")
+ seg_info = self.env.seg_info
+ file_item.write(
+ json.dumps([seg_info['seg_a'], seg_info['seg_b'],
+ seg_info['seg_c'], seg_info['seg_d'],
+ seg_info['seg_e']]),
+ parms={'multipart-manifest': 'put'})
+ return file_item
+
+ def test_slo_post_the_manifest_metadata_update(self):
+ file_item = self._make_manifest()
+ # sanity check, check the object is an SLO manifest
+ file_item.info()
+ file_item.header_fields([('slo', 'x-static-large-object')])
+
+ # POST a user metadata (i.e. x-object-meta-post)
+ file_item.sync_metadata({'post': 'update'})
+
+ updated = self.env.container.file("manifest-post")
+ updated.info()
+ updated.header_fields([('user-meta', 'x-object-meta-post')]) # sanity
+ updated.header_fields([('slo', 'x-static-large-object')])
+ updated_contents = updated.read(parms={'multipart-manifest': 'get'})
+ try:
+ json.loads(updated_contents)
+ except ValueError:
+ self.fail("Unexpected content on GET, expected a json body")
+
+ def test_slo_post_the_manifest_metadata_update_with_qs(self):
+ # multipart-manifest query should be ignored on post
+ for verb in ('put', 'get', 'delete'):
+ file_item = self._make_manifest()
+ # sanity check, check the object is an SLO manifest
+ file_item.info()
+ file_item.header_fields([('slo', 'x-static-large-object')])
+ # POST a user metadata (i.e. x-object-meta-post)
+ file_item.sync_metadata(metadata={'post': 'update'},
+ parms={'multipart-manifest': verb})
+ updated = self.env.container.file("manifest-post")
+ updated.info()
+ updated.header_fields(
+ [('user-meta', 'x-object-meta-post')]) # sanity
+ updated.header_fields([('slo', 'x-static-large-object')])
+ updated_contents = updated.read(
+ parms={'multipart-manifest': 'get'})
+ try:
+ json.loads(updated_contents)
+ except ValueError:
+ self.fail(
+ "Unexpected content on GET, expected a json body")
+
def test_slo_get_the_manifest(self):
manifest = self.env.container.file("manifest-abcde")
got_body = manifest.read(parms={'multipart-manifest': 'get'})
@@ -2389,6 +3741,63 @@ class TestSlo(Base):
except ValueError:
self.fail("GET with multipart-manifest=get got invalid json")
+ def test_slo_get_the_manifest_with_details_from_server(self):
+ manifest = self.env.container.file("manifest-db")
+ got_body = manifest.read(parms={'multipart-manifest': 'get'})
+
+ self.assertEqual('application/json; charset=utf-8',
+ manifest.content_type)
+ try:
+ value = json.loads(got_body)
+ except ValueError:
+ self.fail("GET with multipart-manifest=get got invalid json")
+
+ self.assertEqual(len(value), 2)
+ self.assertEqual(value[0]['bytes'], 1024 * 1024)
+ self.assertEqual(value[0]['hash'],
+ hashlib.md5('d' * 1024 * 1024).hexdigest())
+ self.assertEqual(value[0]['name'],
+ '/%s/seg_d' % self.env.container.name.decode("utf-8"))
+
+ self.assertEqual(value[1]['bytes'], 1024 * 1024)
+ self.assertEqual(value[1]['hash'],
+ hashlib.md5('b' * 1024 * 1024).hexdigest())
+ self.assertEqual(value[1]['name'],
+ '/%s/seg_b' % self.env.container.name.decode("utf-8"))
+
+ def test_slo_get_raw_the_manifest_with_details_from_server(self):
+ manifest = self.env.container.file("manifest-db")
+ got_body = manifest.read(parms={'multipart-manifest': 'get',
+ 'format': 'raw'})
+
+ # raw format should have the actual manifest object content-type
+ self.assertEqual('application/octet-stream', manifest.content_type)
+ try:
+ value = json.loads(got_body)
+ except ValueError:
+ msg = "GET with multipart-manifest=get&format=raw got invalid json"
+ self.fail(msg)
+
+ self.assertEqual(
+ set(value[0].keys()), set(('size_bytes', 'etag', 'path')))
+ self.assertEqual(len(value), 2)
+ self.assertEqual(value[0]['size_bytes'], 1024 * 1024)
+ self.assertEqual(value[0]['etag'],
+ hashlib.md5('d' * 1024 * 1024).hexdigest())
+ self.assertEqual(value[0]['path'],
+ '/%s/seg_d' % self.env.container.name.decode("utf-8"))
+ self.assertEqual(value[1]['size_bytes'], 1024 * 1024)
+ self.assertEqual(value[1]['etag'],
+ hashlib.md5('b' * 1024 * 1024).hexdigest())
+ self.assertEqual(value[1]['path'],
+ '/%s/seg_b' % self.env.container.name.decode("utf-8"))
+
+ file_item = self.env.container.file("manifest-from-get-raw")
+ file_item.write(got_body, parms={'multipart-manifest': 'put'})
+
+ file_contents = file_item.read()
+ self.assertEqual(2 * 1024 * 1024, len(file_contents))
+
def test_slo_head_the_manifest(self):
manifest = self.env.container.file("manifest-abcde")
got_info = manifest.info(parms={'multipart-manifest': 'get'})
@@ -2407,6 +3816,27 @@ class TestSlo(Base):
manifest.read(hdrs={'If-Match': etag})
self.assert_status(200)
+ def test_slo_if_none_match_put(self):
+ file_item = self.env.container.file("manifest-if-none-match")
+ manifest = json.dumps([{
+ 'size_bytes': 1024 * 1024,
+ 'etag': None,
+ 'path': '/%s/%s' % (self.env.container.name, 'seg_a')}])
+
+ self.assertRaises(ResponseError, file_item.write, manifest,
+ parms={'multipart-manifest': 'put'},
+ hdrs={'If-None-Match': '"not-star"'})
+ self.assert_status(400)
+
+ file_item.write(manifest, parms={'multipart-manifest': 'put'},
+ hdrs={'If-None-Match': '*'})
+ self.assert_status(201)
+
+ self.assertRaises(ResponseError, file_item.write, manifest,
+ parms={'multipart-manifest': 'put'},
+ hdrs={'If-None-Match': '*'})
+ self.assert_status(412)
+
def test_slo_if_none_match_get(self):
manifest = self.env.container.file("manifest-abcde")
etag = manifest.info()['etag']
@@ -2440,6 +3870,33 @@ class TestSlo(Base):
manifest.info(hdrs={'If-None-Match': "not-%s" % etag})
self.assert_status(200)
+ def test_slo_referer_on_segment_container(self):
+ # First the account2 (test3) should fail
+ headers = {'X-Auth-Token': self.env.conn3.storage_token,
+ 'Referer': 'http://blah.example.com'}
+ slo_file = self.env.container2.file('manifest-abcde')
+ self.assertRaises(ResponseError, slo_file.read,
+ hdrs=headers)
+ self.assert_status(403)
+
+ # Now set the referer on the slo container only
+ referer_metadata = {'X-Container-Read': '.r:*.example.com,.rlistings'}
+ self.env.container2.update_metadata(referer_metadata)
+
+ self.assertRaises(ResponseError, slo_file.read,
+ hdrs=headers)
+ self.assert_status(409)
+
+ # Finally set the referer on the segment container
+ self.env.container.update_metadata(referer_metadata)
+ contents = slo_file.read(hdrs=headers)
+ self.assertEqual(4 * 1024 * 1024 + 1, len(contents))
+ self.assertEqual('a', contents[0])
+ self.assertEqual('a', contents[1024 * 1024 - 1])
+ self.assertEqual('b', contents[1024 * 1024])
+ self.assertEqual('d', contents[-2])
+ self.assertEqual('e', contents[-1])
+
class TestSloUTF8(Base2, TestSlo):
set_up = False
@@ -2451,7 +3908,7 @@ class TestObjectVersioningEnv(object):
@classmethod
def setUp(cls):
cls.conn = Connection(tf.config)
- cls.conn.authenticate()
+ cls.storage_url, cls.storage_token = cls.conn.authenticate()
cls.account = Account(cls.conn, tf.config.get('account',
tf.config['username']))
@@ -2475,12 +3932,39 @@ class TestObjectVersioningEnv(object):
cls.container = cls.account.container(prefix + "-objs")
if not cls.container.create(
hdrs={'X-Versions-Location': cls.versions_container.name}):
+ if cls.conn.response.status == 412:
+ cls.versioning_enabled = False
+ return
raise ResponseError(cls.conn.response)
container_info = cls.container.info()
# if versioning is off, then X-Versions-Location won't persist
cls.versioning_enabled = 'versions' in container_info
+ # setup another account to test ACLs
+ config2 = deepcopy(tf.config)
+ config2['account'] = tf.config['account2']
+ config2['username'] = tf.config['username2']
+ config2['password'] = tf.config['password2']
+ cls.conn2 = Connection(config2)
+ cls.storage_url2, cls.storage_token2 = cls.conn2.authenticate()
+ cls.account2 = cls.conn2.get_account()
+ cls.account2.delete_containers()
+
+ # setup another account with no access to anything to test ACLs
+ config3 = deepcopy(tf.config)
+ config3['account'] = tf.config['account']
+ config3['username'] = tf.config['username3']
+ config3['password'] = tf.config['password3']
+ cls.conn3 = Connection(config3)
+ cls.storage_url3, cls.storage_token3 = cls.conn3.authenticate()
+ cls.account3 = cls.conn3.get_account()
+
+ @classmethod
+ def tearDown(cls):
+ cls.account.delete_containers()
+ cls.account2.delete_containers()
+
class TestCrossPolicyObjectVersioningEnv(object):
# tri-state: None initially, then True/False
@@ -2503,12 +3987,10 @@ class TestCrossPolicyObjectVersioningEnv(object):
cls.multiple_policies_enabled = True
else:
cls.multiple_policies_enabled = False
- # We have to lie here that versioning is enabled. We actually
- # don't know, but it does not matter. We know these tests cannot
- # run without multiple policies present. If multiple policies are
- # present, we won't be setting this field to any value, so it
- # should all still work.
cls.versioning_enabled = True
+ # We don't actually know the state of versioning, but without
+ # multiple policies the tests should be skipped anyway. Claiming
+ # versioning support lets us report the right reason for skipping.
return
policy = cls.policies.select()
@@ -2538,12 +4020,39 @@ class TestCrossPolicyObjectVersioningEnv(object):
if not cls.container.create(
hdrs={'X-Versions-Location': cls.versions_container.name,
'X-Storage-Policy': version_policy['name']}):
+ if cls.conn.response.status == 412:
+ cls.versioning_enabled = False
+ return
raise ResponseError(cls.conn.response)
container_info = cls.container.info()
# if versioning is off, then X-Versions-Location won't persist
cls.versioning_enabled = 'versions' in container_info
+ # setup another account to test ACLs
+ config2 = deepcopy(tf.config)
+ config2['account'] = tf.config['account2']
+ config2['username'] = tf.config['username2']
+ config2['password'] = tf.config['password2']
+ cls.conn2 = Connection(config2)
+ cls.storage_url2, cls.storage_token2 = cls.conn2.authenticate()
+ cls.account2 = cls.conn2.get_account()
+ cls.account2.delete_containers()
+
+ # setup another account with no access to anything to test ACLs
+ config3 = deepcopy(tf.config)
+ config3['account'] = tf.config['account']
+ config3['username'] = tf.config['username3']
+ config3['password'] = tf.config['password3']
+ cls.conn3 = Connection(config3)
+ cls.storage_url3, cls.storage_token3 = cls.conn3.authenticate()
+ cls.account3 = cls.conn3.get_account()
+
+ @classmethod
+ def tearDown(cls):
+ cls.account.delete_containers()
+ cls.account2.delete_containers()
+
class TestObjectVersioning(Base):
env = TestObjectVersioningEnv
@@ -2559,43 +4068,136 @@ class TestObjectVersioning(Base):
"Expected versioning_enabled to be True/False, got %r" %
(self.env.versioning_enabled,))
- def tearDown(self):
- super(TestObjectVersioning, self).tearDown()
+ def _tear_down_files(self):
try:
- # delete versions first!
+ # only delete files and not containers
+ # as they were configured in self.env
self.env.versions_container.delete_files()
self.env.container.delete_files()
except ResponseError:
pass
+ def tearDown(self):
+ super(TestObjectVersioning, self).tearDown()
+ self._tear_down_files()
+
+ def test_clear_version_option(self):
+ # sanity
+ self.assertEqual(self.env.container.info()['versions'],
+ self.env.versions_container.name)
+ self.env.container.update_metadata(
+ hdrs={'X-Versions-Location': ''})
+ self.assertIsNone(self.env.container.info().get('versions'))
+
+ # set location back to the way it was
+ self.env.container.update_metadata(
+ hdrs={'X-Versions-Location': self.env.versions_container.name})
+ self.assertEqual(self.env.container.info()['versions'],
+ self.env.versions_container.name)
+
def test_overwriting(self):
container = self.env.container
versions_container = self.env.versions_container
+ cont_info = container.info()
+ self.assertEqual(cont_info['versions'], versions_container.name)
+
obj_name = Utils.create_name()
versioned_obj = container.file(obj_name)
- versioned_obj.write("aaaaa")
+ put_headers = {'Content-Type': 'text/jibberish01',
+ 'Content-Encoding': 'gzip',
+ 'Content-Disposition': 'attachment; filename=myfile'}
+ versioned_obj.write("aaaaa", hdrs=put_headers)
+ obj_info = versioned_obj.info()
+ self.assertEqual('text/jibberish01', obj_info['content_type'])
+
+        # the allowed headers are configurable in the object server, so even
+        # though content-encoding and content-disposition were sent on the
+        # original PUT we cannot assume they were stored; build
+        # expected_headers from a HEAD of the original object so we only
+        # assert on the headers that actually round-tripped
+ resp_headers = dict(versioned_obj.conn.response.getheaders())
+ expected_headers = {}
+ for k, v in put_headers.items():
+ if k.lower() in resp_headers:
+ expected_headers[k] = v
self.assertEqual(0, versions_container.info()['object_count'])
-
- versioned_obj.write("bbbbb")
+ versioned_obj.write("bbbbb", hdrs={'Content-Type': 'text/jibberish02',
+ 'X-Object-Meta-Foo': 'Bar'})
+ versioned_obj.initialize()
+ self.assertEqual(versioned_obj.content_type, 'text/jibberish02')
+ self.assertEqual(versioned_obj.metadata['foo'], 'Bar')
# the old version got saved off
self.assertEqual(1, versions_container.info()['object_count'])
versioned_obj_name = versions_container.files()[0]
- self.assertEqual(
- "aaaaa", versions_container.file(versioned_obj_name).read())
+ prev_version = versions_container.file(versioned_obj_name)
+ prev_version.initialize()
+ self.assertEqual("aaaaa", prev_version.read())
+ self.assertEqual(prev_version.content_type, 'text/jibberish01')
+
+ resp_headers = dict(prev_version.conn.response.getheaders())
+ for k, v in expected_headers.items():
+ self.assertIn(k.lower(), resp_headers)
+ self.assertEqual(v, resp_headers[k.lower()])
+
+ # make sure the new obj metadata did not leak to the prev. version
+ self.assertNotIn('foo', prev_version.metadata)
+
+ # check that POST does not create a new version
+ versioned_obj.sync_metadata(metadata={'fu': 'baz'})
+ self.assertEqual(1, versions_container.info()['object_count'])
# if we overwrite it again, there are two versions
versioned_obj.write("ccccc")
self.assertEqual(2, versions_container.info()['object_count'])
+ versioned_obj_name = versions_container.files()[1]
+ prev_version = versions_container.file(versioned_obj_name)
+ prev_version.initialize()
+ self.assertEqual("bbbbb", prev_version.read())
+ self.assertEqual(prev_version.content_type, 'text/jibberish02')
+ self.assertIn('foo', prev_version.metadata)
+ self.assertIn('fu', prev_version.metadata)
# as we delete things, the old contents return
self.assertEqual("ccccc", versioned_obj.read())
+
+ # test copy from a different container
+ src_container = self.env.account.container(Utils.create_name())
+ self.assertTrue(src_container.create())
+ src_name = Utils.create_name()
+ src_obj = src_container.file(src_name)
+ src_obj.write("ddddd", hdrs={'Content-Type': 'text/jibberish04'})
+ src_obj.copy(container.name, obj_name)
+
+ self.assertEqual("ddddd", versioned_obj.read())
+ versioned_obj.initialize()
+ self.assertEqual(versioned_obj.content_type, 'text/jibberish04')
+
+ # make sure versions container has the previous version
+ self.assertEqual(3, versions_container.info()['object_count'])
+ versioned_obj_name = versions_container.files()[2]
+ prev_version = versions_container.file(versioned_obj_name)
+ prev_version.initialize()
+ self.assertEqual("ccccc", prev_version.read())
+
+ # test delete
+ versioned_obj.delete()
+ self.assertEqual("ccccc", versioned_obj.read())
versioned_obj.delete()
self.assertEqual("bbbbb", versioned_obj.read())
versioned_obj.delete()
self.assertEqual("aaaaa", versioned_obj.read())
+ self.assertEqual(0, versions_container.info()['object_count'])
+
+ # verify that all the original object headers have been copied back
+ obj_info = versioned_obj.info()
+ self.assertEqual('text/jibberish01', obj_info['content_type'])
+ resp_headers = dict(versioned_obj.conn.response.getheaders())
+ for k, v in expected_headers.items():
+ self.assertIn(k.lower(), resp_headers)
+ self.assertEqual(v, resp_headers[k.lower()])
+
versioned_obj.delete()
self.assertRaises(ResponseError, versioned_obj.read)
@@ -2628,6 +4230,92 @@ class TestObjectVersioning(Base):
self.assertEqual(3, versions_container.info()['object_count'])
self.assertEqual("112233", man_file.read())
+ def test_versioning_container_acl(self):
+ # create versions container and DO NOT give write access to account2
+ versions_container = self.env.account.container(Utils.create_name())
+ self.assertTrue(versions_container.create(hdrs={
+ 'X-Container-Write': ''
+ }))
+
+ # check account2 cannot write to versions container
+ fail_obj_name = Utils.create_name()
+ fail_obj = versions_container.file(fail_obj_name)
+ self.assertRaises(ResponseError, fail_obj.write, "should fail",
+ cfg={'use_token': self.env.storage_token2})
+
+ # create container and give write access to account2
+ # don't set X-Versions-Location just yet
+ container = self.env.account.container(Utils.create_name())
+ self.assertTrue(container.create(hdrs={
+ 'X-Container-Write': self.env.conn2.user_acl}))
+
+ # check account2 cannot set X-Versions-Location on container
+ self.assertRaises(ResponseError, container.update_metadata, hdrs={
+ 'X-Versions-Location': versions_container},
+ cfg={'use_token': self.env.storage_token2})
+
+ # good! now let admin set the X-Versions-Location
+ # p.s.: sticking a 'x-remove' header here to test precedence
+ # of both headers. Setting the location should succeed.
+ self.assertTrue(container.update_metadata(hdrs={
+ 'X-Remove-Versions-Location': versions_container,
+ 'X-Versions-Location': versions_container}))
+
+ # write object twice to container and check version
+ obj_name = Utils.create_name()
+ versioned_obj = container.file(obj_name)
+ self.assertTrue(versioned_obj.write("never argue with the data",
+ cfg={'use_token': self.env.storage_token2}))
+ self.assertEqual(versioned_obj.read(), "never argue with the data")
+
+ self.assertTrue(
+ versioned_obj.write("we don't have no beer, just tequila",
+ cfg={'use_token': self.env.storage_token2}))
+ self.assertEqual(versioned_obj.read(),
+ "we don't have no beer, just tequila")
+ self.assertEqual(1, versions_container.info()['object_count'])
+
+ # read the original uploaded object
+ for filename in versions_container.files():
+ backup_file = versions_container.file(filename)
+ break
+ self.assertEqual(backup_file.read(), "never argue with the data")
+
+ # user3 (some random user with no access to anything)
+ # tries to read from versioned container
+ self.assertRaises(ResponseError, backup_file.read,
+ cfg={'use_token': self.env.storage_token3})
+
+ # user3 cannot write or delete from source container either
+ number_of_versions = versions_container.info()['object_count']
+ self.assertRaises(ResponseError, versioned_obj.write,
+ "some random user trying to write data",
+ cfg={'use_token': self.env.storage_token3})
+ self.assertEqual(number_of_versions,
+ versions_container.info()['object_count'])
+ self.assertRaises(ResponseError, versioned_obj.delete,
+ cfg={'use_token': self.env.storage_token3})
+ self.assertEqual(number_of_versions,
+ versions_container.info()['object_count'])
+
+ # user2 can't read or delete from versions-location
+ self.assertRaises(ResponseError, backup_file.read,
+ cfg={'use_token': self.env.storage_token2})
+ self.assertRaises(ResponseError, backup_file.delete,
+ cfg={'use_token': self.env.storage_token2})
+
+        # but user2 is able to delete from the source container.
+        # This could be a helpful scenario for dev ops who want to set up
+        # just one container to hold the object versions of multiple
+        # containers, each of which is owned by a different user.
+ self.assertTrue(versioned_obj.delete(
+ cfg={'use_token': self.env.storage_token2}))
+
+ # tear-down since we create these containers here
+ # and not in self.env
+ versions_container.delete_recursive()
+ container.delete_recursive()
+
def test_versioning_check_acl(self):
container = self.env.container
versions_container = self.env.versions_container
@@ -2659,6 +4347,10 @@ class TestObjectVersioning(Base):
class TestObjectVersioningUTF8(Base2, TestObjectVersioning):
set_up = False
+ def tearDown(self):
+ self._tear_down_files()
+ super(TestObjectVersioningUTF8, self).tearDown()
+
class TestCrossPolicyObjectVersioning(TestObjectVersioning):
env = TestCrossPolicyObjectVersioningEnv
@@ -2675,6 +4367,107 @@ class TestCrossPolicyObjectVersioning(TestObjectVersioning):
self.env.versioning_enabled,))
+class TestSloWithVersioning(Base):
+
+ def setUp(self):
+ if 'slo' not in cluster_info:
+ raise SkipTest("SLO not enabled")
+
+ self.conn = Connection(tf.config)
+ self.conn.authenticate()
+ self.account = Account(
+ self.conn, tf.config.get('account', tf.config['username']))
+ self.account.delete_containers()
+
+ # create a container with versioning
+ self.versions_container = self.account.container(Utils.create_name())
+ self.container = self.account.container(Utils.create_name())
+ self.segments_container = self.account.container(Utils.create_name())
+ if not self.container.create(
+ hdrs={'X-Versions-Location': self.versions_container.name}):
+ raise ResponseError(self.conn.response)
+ if 'versions' not in self.container.info():
+ raise SkipTest("Object versioning not enabled")
+
+ for cont in (self.versions_container, self.segments_container):
+ if not cont.create():
+ raise ResponseError(self.conn.response)
+
+ # create some segments
+ self.seg_info = {}
+ for letter, size in (('a', 1024 * 1024),
+ ('b', 1024 * 1024)):
+ seg_name = letter
+ file_item = self.segments_container.file(seg_name)
+ file_item.write(letter * size)
+ self.seg_info[seg_name] = {
+ 'size_bytes': size,
+ 'etag': file_item.md5,
+ 'path': '/%s/%s' % (self.segments_container.name, seg_name)}
+
+ def _create_manifest(self, seg_name):
+ # create a manifest in the versioning container
+ file_item = self.container.file("my-slo-manifest")
+ file_item.write(
+ json.dumps([self.seg_info[seg_name]]),
+ parms={'multipart-manifest': 'put'})
+ return file_item
+
+ def _assert_is_manifest(self, file_item, seg_name):
+ manifest_body = file_item.read(parms={'multipart-manifest': 'get'})
+ resp_headers = dict(file_item.conn.response.getheaders())
+ self.assertIn('x-static-large-object', resp_headers)
+ self.assertEqual('application/json; charset=utf-8',
+ file_item.content_type)
+ try:
+ manifest = json.loads(manifest_body)
+ except ValueError:
+ self.fail("GET with multipart-manifest=get got invalid json")
+
+ self.assertEqual(1, len(manifest))
+ key_map = {'etag': 'hash', 'size_bytes': 'bytes', 'path': 'name'}
+ for k_client, k_slo in key_map.items():
+ self.assertEqual(self.seg_info[seg_name][k_client],
+ manifest[0][k_slo])
+
+ def _assert_is_object(self, file_item, seg_name):
+ file_contents = file_item.read()
+ self.assertEqual(1024 * 1024, len(file_contents))
+ self.assertEqual(seg_name, file_contents[0])
+ self.assertEqual(seg_name, file_contents[-1])
+
+ def tearDown(self):
+ # remove versioning to allow simple container delete
+ self.container.update_metadata(hdrs={'X-Versions-Location': ''})
+ self.account.delete_containers()
+
+ def test_slo_manifest_version(self):
+ file_item = self._create_manifest('a')
+ # sanity check: read the manifest, then the large object
+ self._assert_is_manifest(file_item, 'a')
+ self._assert_is_object(file_item, 'a')
+
+ # upload new manifest
+ file_item = self._create_manifest('b')
+ # sanity check: read the manifest, then the large object
+ self._assert_is_manifest(file_item, 'b')
+ self._assert_is_object(file_item, 'b')
+
+ versions_list = self.versions_container.files()
+ self.assertEqual(1, len(versions_list))
+ version_file = self.versions_container.file(versions_list[0])
+ # check the version is still a manifest
+ self._assert_is_manifest(version_file, 'a')
+ self._assert_is_object(version_file, 'a')
+
+ # delete the newest manifest
+ file_item.delete()
+
+ # expect the original manifest file to be restored
+ self._assert_is_manifest(file_item, 'a')
+ self._assert_is_object(file_item, 'a')
+
+
class TestTempurlEnv(object):
tempurl_enabled = None # tri-state: None initially, then True/False
@@ -2733,7 +4526,7 @@ class TestTempurl(Base):
def tempurl_sig(self, method, expires, path, key):
return hmac.new(
key,
- '%s\n%s\n%s' % (method, expires, urllib.unquote(path)),
+ '%s\n%s\n%s' % (method, expires, urllib.parse.unquote(path)),
hashlib.sha1).hexdigest()
def test_GET(self):
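
The tempurl_sig() helpers here and in TestContainerTempurl/TestSloTempurl below all compute the same HMAC-SHA1 over "METHOD\nexpires\npath", and the tests then pass the result as temp_url_sig/temp_url_expires query parameters. A minimal standalone sketch of that flow, assuming Python 2 string semantics as in these tests and purely hypothetical path/key values:

    import hashlib
    import hmac
    import time

    def make_temp_url(path, key, method='GET', ttl=86400):
        # same signature base as tempurl_sig(): "METHOD\nexpires\npath"
        expires = int(time.time()) + ttl
        sig = hmac.new(key, '%s\n%s\n%s' % (method, expires, path),
                       hashlib.sha1).hexdigest()
        # the functional tests send these as the temp_url_sig and
        # temp_url_expires request parameters rather than building a URL
        return '%s?temp_url_sig=%s&temp_url_expires=%s' % (path, sig, expires)

    print(make_temp_url('/v1/AUTH_test/c/o', 's3kr1t'))
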
@@ -2743,8 +4536,8 @@ class TestTempurl(Base):
self.assertEqual(contents, "obj contents")
# GET tempurls also allow HEAD requests
- self.assert_(self.env.obj.info(parms=self.obj_tempurl_parms,
- cfg={'no_auth_token': True}))
+ self.assertTrue(self.env.obj.info(parms=self.obj_tempurl_parms,
+ cfg={'no_auth_token': True}))
def test_GET_with_key_2(self):
expires = int(time.time()) + 86400
@@ -2825,8 +4618,8 @@ class TestTempurl(Base):
self.assertEqual(new_obj.read(), "new obj contents")
# PUT tempurls also allow HEAD requests
- self.assert_(new_obj.info(parms=put_parms,
- cfg={'no_auth_token': True}))
+ self.assertTrue(new_obj.info(parms=put_parms,
+ cfg={'no_auth_token': True}))
def test_PUT_manifest_access(self):
new_obj = self.env.container.file(Utils.create_name())
@@ -2864,6 +4657,22 @@ class TestTempurl(Base):
else:
self.fail('request did not error')
+ # try again using a tempurl POST to an already created object
+ new_obj.write('', {}, parms=put_parms, cfg={'no_auth_token': True})
+ expires = int(time.time()) + 86400
+ sig = self.tempurl_sig(
+ 'POST', expires, self.env.conn.make_path(new_obj.path),
+ self.env.tempurl_key)
+ post_parms = {'temp_url_sig': sig,
+ 'temp_url_expires': str(expires)}
+ try:
+ new_obj.post({'x-object-manifest': '%s/foo' % other_container},
+ parms=post_parms, cfg={'no_auth_token': True})
+ except ResponseError as e:
+ self.assertEqual(e.status, 400)
+ else:
+ self.fail('request did not error')
+
def test_HEAD(self):
expires = int(time.time()) + 86400
sig = self.tempurl_sig(
@@ -2872,8 +4681,8 @@ class TestTempurl(Base):
head_parms = {'temp_url_sig': sig,
'temp_url_expires': str(expires)}
- self.assert_(self.env.obj.info(parms=head_parms,
- cfg={'no_auth_token': True}))
+ self.assertTrue(self.env.obj.info(parms=head_parms,
+ cfg={'no_auth_token': True}))
# HEAD tempurls don't allow PUT or GET requests, despite the fact that
# PUT and GET tempurls both allow HEAD requests
self.assertRaises(ResponseError, self.env.other_obj.read,
@@ -3006,7 +4815,7 @@ class TestContainerTempurl(Base):
def tempurl_sig(self, method, expires, path, key):
return hmac.new(
key,
- '%s\n%s\n%s' % (method, expires, urllib.unquote(path)),
+ '%s\n%s\n%s' % (method, expires, urllib.parse.unquote(path)),
hashlib.sha1).hexdigest()
def test_GET(self):
@@ -3016,8 +4825,8 @@ class TestContainerTempurl(Base):
self.assertEqual(contents, "obj contents")
# GET tempurls also allow HEAD requests
- self.assert_(self.env.obj.info(parms=self.obj_tempurl_parms,
- cfg={'no_auth_token': True}))
+ self.assertTrue(self.env.obj.info(parms=self.obj_tempurl_parms,
+ cfg={'no_auth_token': True}))
def test_GET_with_key_2(self):
expires = int(time.time()) + 86400
@@ -3045,8 +4854,8 @@ class TestContainerTempurl(Base):
self.assertEqual(new_obj.read(), "new obj contents")
# PUT tempurls also allow HEAD requests
- self.assert_(new_obj.info(parms=put_parms,
- cfg={'no_auth_token': True}))
+ self.assertTrue(new_obj.info(parms=put_parms,
+ cfg={'no_auth_token': True}))
def test_HEAD(self):
expires = int(time.time()) + 86400
@@ -3056,8 +4865,8 @@ class TestContainerTempurl(Base):
head_parms = {'temp_url_sig': sig,
'temp_url_expires': str(expires)}
- self.assert_(self.env.obj.info(parms=head_parms,
- cfg={'no_auth_token': True}))
+ self.assertTrue(self.env.obj.info(parms=head_parms,
+ cfg={'no_auth_token': True}))
# HEAD tempurls don't allow PUT or GET requests, despite the fact that
# PUT and GET tempurls both allow HEAD requests
self.assertRaises(ResponseError, self.env.other_obj.read,
@@ -3116,6 +4925,7 @@ class TestContainerTempurl(Base):
parms=parms)
self.assert_status([401])
+ @requires_acls
def test_tempurl_keys_visible_to_account_owner(self):
if not tf.cluster_info.get('tempauth'):
raise SkipTest('TEMP AUTH SPECIFIC TEST')
@@ -3123,6 +4933,7 @@ class TestContainerTempurl(Base):
self.assertEqual(metadata.get('tempurl_key'), self.env.tempurl_key)
self.assertEqual(metadata.get('tempurl_key2'), self.env.tempurl_key2)
+ @requires_acls
def test_tempurl_keys_hidden_from_acl_readonly(self):
if not tf.cluster_info.get('tempauth'):
raise SkipTest('TEMP AUTH SPECIFIC TEST')
@@ -3131,12 +4942,14 @@ class TestContainerTempurl(Base):
metadata = self.env.container.info()
self.env.container.conn.storage_token = original_token
- self.assertTrue('tempurl_key' not in metadata,
- 'Container TempURL key found, should not be visible '
- 'to readonly ACLs')
- self.assertTrue('tempurl_key2' not in metadata,
- 'Container TempURL key-2 found, should not be visible '
- 'to readonly ACLs')
+ self.assertNotIn(
+ 'tempurl_key', metadata,
+ 'Container TempURL key found, should not be visible '
+ 'to readonly ACLs')
+ self.assertNotIn(
+ 'tempurl_key2', metadata,
+ 'Container TempURL key-2 found, should not be visible '
+ 'to readonly ACLs')
def test_GET_DLO_inside_container(self):
seg1 = self.env.container.file(
@@ -3267,7 +5080,7 @@ class TestSloTempurl(Base):
def tempurl_sig(self, method, expires, path, key):
return hmac.new(
key,
- '%s\n%s\n%s' % (method, expires, urllib.unquote(path)),
+ '%s\n%s\n%s' % (method, expires, urllib.parse.unquote(path)),
hashlib.sha1).hexdigest()
def test_GET(self):
@@ -3283,7 +5096,7 @@ class TestSloTempurl(Base):
self.assertEqual(len(contents), 2 * 1024 * 1024)
# GET tempurls also allow HEAD requests
- self.assert_(self.env.manifest.info(
+ self.assertTrue(self.env.manifest.info(
parms=parms, cfg={'no_auth_token': True}))
@@ -3291,7 +5104,7 @@ class TestSloTempurlUTF8(Base2, TestSloTempurl):
set_up = False
-class TestServiceToken(unittest.TestCase):
+class TestServiceToken(unittest2.TestCase):
def setUp(self):
if tf.skip_service_tokens:
@@ -3346,7 +5159,7 @@ class TestServiceToken(unittest.TestCase):
a token from the test service user. We save options here so that
do_request() can make the appropriate request.
- :param method: The operation (e.g'. 'HEAD')
+ :param method: The operation (e.g. 'HEAD')
:param use_service_account: Optional. Set True to change the path to
be the service account
:param container: Optional. Adds a container name to the path
@@ -3390,8 +5203,6 @@ class TestServiceToken(unittest.TestCase):
headers = {}
if self.body:
headers.update({'Content-Length': len(self.body)})
- if self.headers:
- headers.update(self.headers)
if self.x_auth_token == self.SET_TO_USERS_TOKEN:
headers.update({'X-Auth-Token': token})
elif self.x_auth_token == self.SET_TO_SERVICE_TOKEN:
@@ -3424,7 +5235,7 @@ class TestServiceToken(unittest.TestCase):
self.prepare_request('HEAD')
resp = retry(self.do_request)
resp.read()
- self.assert_(resp.status in (200, 204), resp.status)
+ self.assertIn(resp.status, (200, 204))
def test_user_cannot_access_service_account(self):
for method, container, obj in self._scenario_generator():
@@ -3461,4 +5272,4 @@ class TestServiceToken(unittest.TestCase):
if __name__ == '__main__':
- unittest.main()
+ unittest2.main()
diff --git a/test/unit/__init__.py b/test/unit/__init__.py
index 372fb58..ee2a262 100644
--- a/test/unit/__init__.py
+++ b/test/unit/__init__.py
@@ -15,10 +15,12 @@
""" Swift tests """
+from __future__ import print_function
import os
import copy
import logging
import errno
+from six.moves import range
import sys
from contextlib import contextmanager, closing
from collections import defaultdict, Iterable
@@ -30,20 +32,28 @@ import eventlet
from eventlet.green import socket
from tempfile import mkdtemp
from shutil import rmtree
-from swift.common.utils import Timestamp
+import signal
+import json
+
+
+from swift.common.utils import Timestamp, NOTICE
from test import get_config
-from swift.common import swob, utils
+from swift.common import utils
+from swift.common.header_key_dict import HeaderKeyDict
from swift.common.ring import Ring, RingData
from hashlib import md5
import logging.handlers
-from httplib import HTTPException
+
+from six.moves.http_client import HTTPException
from swift.common import storage_policy
-from swift.common.storage_policy import StoragePolicy, ECStoragePolicy
+from swift.common.storage_policy import (StoragePolicy, ECStoragePolicy,
+ VALID_EC_TYPES)
import functools
-import cPickle as pickle
+import six.moves.cPickle as pickle
from gzip import GzipFile
import mock as mocklib
import inspect
+from nose import SkipTest
EMPTY_ETAG = md5().hexdigest()
@@ -53,6 +63,22 @@ if not os.path.basename(sys.argv[0]).startswith('swift'):
utils.HASH_PATH_SUFFIX = 'endcap'
+EC_TYPE_PREFERENCE = [
+ 'liberasurecode_rs_vand',
+ 'jerasure_rs_vand',
+]
+for eclib_name in EC_TYPE_PREFERENCE:
+ if eclib_name in VALID_EC_TYPES:
+ break
+else:
+ raise SystemExit('ERROR: unable to find suitable PyECLib type'
+ ' (none of %r found in %r)' % (
+ EC_TYPE_PREFERENCE,
+ VALID_EC_TYPES,
+ ))
+DEFAULT_TEST_EC_TYPE = eclib_name
+
+
def patch_policies(thing_or_policies=None, legacy_only=False,
with_ec_default=False, fake_ring_args=None):
if isinstance(thing_or_policies, (
@@ -67,7 +93,7 @@ def patch_policies(thing_or_policies=None, legacy_only=False,
elif with_ec_default:
default_policies = [
ECStoragePolicy(0, name='ec', is_default=True,
- ec_type='jerasure_rs_vand', ec_ndata=10,
+ ec_type=DEFAULT_TEST_EC_TYPE, ec_ndata=10,
ec_nparity=4, ec_segment_size=4096),
StoragePolicy(1, name='unu'),
]
@@ -183,13 +209,6 @@ class FakeRing(Ring):
def __init__(self, replicas=3, max_more_nodes=0, part_power=0,
base_port=1000):
- """
- :param part_power: make part calculation based on the path
-
- If you set a part_power when you setup your FakeRing the parts you get
- out of ring methods will actually be based on the path - otherwise we
- exercise the real ring code, but ignore the result and return 1.
- """
self._base_port = base_port
self.max_more_nodes = max_more_nodes
self._part_shift = 32 - part_power
@@ -207,7 +226,8 @@ class FakeRing(Ring):
for x in range(self.replicas):
ip = '10.0.0.%s' % x
port = self._base_port + x
- self._devs.append({
+ # round trip through json to ensure unicode like real rings
+ self._devs.append(json.loads(json.dumps({
'ip': ip,
'replication_ip': ip,
'port': port,
@@ -216,7 +236,7 @@ class FakeRing(Ring):
'zone': x % 3,
'region': x % 2,
'id': x,
- })
+ })))
@property
def replica_count(self):
@@ -226,9 +246,7 @@ class FakeRing(Ring):
return [dict(node, index=i) for i, node in enumerate(list(self._devs))]
def get_more_nodes(self, part):
- # replicas^2 is the true cap
- for x in xrange(self.replicas, min(self.replicas + self.max_more_nodes,
- self.replicas * self.replicas)):
+ for x in range(self.replicas, (self.replicas + self.max_more_nodes)):
yield {'ip': '10.0.0.%s' % x,
'replication_ip': '10.0.0.%s' % x,
'port': self._base_port + x,
@@ -244,9 +262,9 @@ def write_fake_ring(path, *devs):
Pretty much just a two node, two replica, 2 part power ring...
"""
dev1 = {'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1',
- 'port': 6000}
+ 'port': 6200}
dev2 = {'id': 0, 'zone': 0, 'device': 'sdb1', 'ip': '127.0.0.1',
- 'port': 6000}
+ 'port': 6200}
dev1_updates, dev2_updates = devs or ({}, {})
@@ -266,7 +284,7 @@ class FabricatedRing(Ring):
your tests needs.
"""
- def __init__(self, replicas=6, devices=8, nodes=4, port=6000,
+ def __init__(self, replicas=6, devices=8, nodes=4, port=6200,
part_power=4):
self.devices = devices
self.nodes = nodes
@@ -459,6 +477,12 @@ class UnmockTimeModule(object):
logging.time = UnmockTimeModule()
+class WARN_DEPRECATED(Exception):
+ def __init__(self, msg):
+ self.msg = msg
+ print(self.msg)
+
+
class FakeLogger(logging.Logger, object):
# a thread safe fake logger
@@ -478,8 +502,21 @@ class FakeLogger(logging.Logger, object):
logging.INFO: 'info',
logging.DEBUG: 'debug',
logging.CRITICAL: 'critical',
+ NOTICE: 'notice',
}
+ def warn(self, *args, **kwargs):
+ raise WARN_DEPRECATED("Deprecated Method warn use warning instead")
+
+ def notice(self, msg, *args, **kwargs):
+ """
+        Convenience function for syslog priority LOG_NOTICE. The Python
+        logging level is set to 25, just above INFO. SysLogHandler is
+        monkey patched to map this logging level to the LOG_NOTICE syslog
+        priority.
+ """
+ self.log(NOTICE, msg, *args, **kwargs)
+
def _log(self, level, msg, *args, **kwargs):
store_name = self.store_in[level]
cargs = [msg]
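
With the 'notice' bucket added to lines_dict above, output from notice() can be inspected with get_lines_for_level() like any other level, while warn() now raises WARN_DEPRECATED to push callers toward warning(). A minimal sketch, assuming FakeLogger is imported from test.unit:

    from test.unit import FakeLogger

    logger = FakeLogger()
    logger.notice('this lands in the notice bucket')
    # NOTICE is registered as level 25 by swift.common.utils
    print(logger.get_lines_for_level('notice'))
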
@@ -495,7 +532,9 @@ class FakeLogger(logging.Logger, object):
def _clear(self):
self.log_dict = defaultdict(list)
self.lines_dict = {'critical': [], 'error': [], 'info': [],
- 'warning': [], 'debug': []}
+ 'warning': [], 'debug': [], 'notice': []}
+
+ clear = _clear # this is a public interface
def get_lines_for_level(self, level):
if level not in self.lines_dict:
@@ -560,8 +599,8 @@ class FakeLogger(logging.Logger, object):
try:
line = record.getMessage()
except TypeError:
- print 'WARNING: unable to format log message %r %% %r' % (
- record.msg, record.args)
+ print('WARNING: unable to format log message %r %% %r' % (
+ record.msg, record.args))
raise
self.lines_dict[record.levelname.lower()].append(line)
@@ -575,17 +614,24 @@ class FakeLogger(logging.Logger, object):
pass
+class DebugSwiftLogFormatter(utils.SwiftLogFormatter):
+
+ def format(self, record):
+ msg = super(DebugSwiftLogFormatter, self).format(record)
+ return msg.replace('#012', '\n')
+
+
class DebugLogger(FakeLogger):
"""A simple stdout logging version of FakeLogger"""
def __init__(self, *args, **kwargs):
FakeLogger.__init__(self, *args, **kwargs)
- self.formatter = logging.Formatter(
+ self.formatter = DebugSwiftLogFormatter(
"%(server)s %(levelname)s: %(message)s")
def handle(self, record):
self._handle(record)
- print self.formatter.format(record)
+ print(self.formatter.format(record))
class DebugLogAdapter(utils.LogAdapter):
@@ -704,6 +750,74 @@ def mock(update):
delattr(module, attr)
+class FakeStatus(object):
+ """
+ This will work with our fake_http_connect, if you hand in one of these
+ instead of a status int or status int tuple to the "codes" iter you can
+ add some eventlet sleep to the expect and response stages of the
+ connection.
+ """
+
+ def __init__(self, status, expect_sleep=None, response_sleep=None):
+ """
+ :param status: the response status int, or a tuple of
+ ([expect_status, ...], response_status)
+        :param expect_sleep: float, time to eventlet sleep during expect, can
+            be an iterable of floats
+ :param response_sleep: float, time to eventlet sleep during response
+ """
+ # connect exception
+ if isinstance(status, (Exception, eventlet.Timeout)):
+ raise status
+ if isinstance(status, tuple):
+ self.expect_status = list(status[:-1])
+ self.status = status[-1]
+ self.explicit_expect_list = True
+ else:
+ self.expect_status, self.status = ([], status)
+ self.explicit_expect_list = False
+ if not self.expect_status:
+ # when a swift backend service returns a status before reading
+ # from the body (mostly an error response) eventlet.wsgi will
+ # respond with that status line immediately instead of 100
+ # Continue, even if the client sent the Expect 100 header.
+ # BufferedHttp and the proxy both see these error statuses
+ # when they call getexpect, so our FakeConn tries to act like
+ # our backend services and return certain types of responses
+ # as expect statuses just like a real backend server would do.
+ if self.status in (507, 412, 409):
+ self.expect_status = [status]
+ else:
+ self.expect_status = [100, 100]
+
+ # setup sleep attributes
+ if not isinstance(expect_sleep, (list, tuple)):
+ expect_sleep = [expect_sleep] * len(self.expect_status)
+ self.expect_sleep_list = list(expect_sleep)
+ while len(self.expect_sleep_list) < len(self.expect_status):
+ self.expect_sleep_list.append(None)
+ self.response_sleep = response_sleep
+
+ def get_response_status(self):
+ if self.response_sleep is not None:
+ eventlet.sleep(self.response_sleep)
+ if self.expect_status and self.explicit_expect_list:
+ raise Exception('Test did not consume all fake '
+ 'expect status: %r' % (self.expect_status,))
+ if isinstance(self.status, (Exception, eventlet.Timeout)):
+ raise self.status
+ return self.status
+
+ def get_expect_status(self):
+ expect_sleep = self.expect_sleep_list.pop(0)
+ if expect_sleep is not None:
+ eventlet.sleep(expect_sleep)
+ expect_status = self.expect_status.pop(0)
+ if isinstance(expect_status, (Exception, eventlet.Timeout)):
+ raise expect_status
+ return expect_status
+
+
class SlowBody(object):
"""
This will work with our fake_http_connect, if you hand in these
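
As the FakeStatus docstring above describes, an instance can be handed to fake_http_connect() in place of a plain status int to add eventlet sleeps to the expect and response stages. A minimal sketch of building such a codes iterable; patching the resulting connect function over the proxy's http_connect is assumed to happen exactly as with the plain-int codes used in the proxy controller tests below:

    from test.unit import FakeStatus, fake_http_connect

    # three backend responses: the second is slow to return its response,
    # the third fails its expect stage (507) after a short delay
    codes = (201,
             FakeStatus(201, response_sleep=0.5),
             FakeStatus(507, expect_sleep=0.1))
    new_connect = fake_http_connect(*codes)
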
@@ -740,30 +854,10 @@ def fake_http_connect(*code_iter, **kwargs):
def __init__(self, status, etag=None, body='', timestamp='1',
headers=None, expect_headers=None, connection_id=None,
- give_send=None):
- # connect exception
- if isinstance(status, (Exception, eventlet.Timeout)):
- raise status
- if isinstance(status, tuple):
- self.expect_status = list(status[:-1])
- self.status = status[-1]
- self.explicit_expect_list = True
- else:
- self.expect_status, self.status = ([], status)
- self.explicit_expect_list = False
- if not self.expect_status:
- # when a swift backend service returns a status before reading
- # from the body (mostly an error response) eventlet.wsgi will
- # respond with that status line immediately instead of 100
- # Continue, even if the client sent the Expect 100 header.
- # BufferedHttp and the proxy both see these error statuses
- # when they call getexpect, so our FakeConn tries to act like
- # our backend services and return certain types of responses
- # as expect statuses just like a real backend server would do.
- if self.status in (507, 412, 409):
- self.expect_status = [status]
- else:
- self.expect_status = [100, 100]
+ give_send=None, give_expect=None):
+ if not isinstance(status, FakeStatus):
+ status = FakeStatus(status)
+ self._status = status
self.reason = 'Fake'
self.host = '1.2.3.4'
self.port = '1234'
@@ -776,6 +870,8 @@ def fake_http_connect(*code_iter, **kwargs):
self.timestamp = timestamp
self.connection_id = connection_id
self.give_send = give_send
+ self.give_expect = give_expect
+ self.closed = False
if 'slow' in kwargs and isinstance(kwargs['slow'], list):
try:
self._next_sleep = kwargs['slow'].pop(0)
@@ -785,11 +881,6 @@ def fake_http_connect(*code_iter, **kwargs):
eventlet.sleep()
def getresponse(self):
- if self.expect_status and self.explicit_expect_list:
- raise Exception('Test did not consume all fake '
- 'expect status: %r' % (self.expect_status,))
- if isinstance(self.status, (Exception, eventlet.Timeout)):
- raise self.status
exc = kwargs.get('raise_exc')
if exc:
if isinstance(exc, (Exception, eventlet.Timeout)):
@@ -797,16 +888,21 @@ def fake_http_connect(*code_iter, **kwargs):
raise Exception('test')
if kwargs.get('raise_timeout_exc'):
raise eventlet.Timeout()
+ self.status = self._status.get_response_status()
return self
def getexpect(self):
- expect_status = self.expect_status.pop(0)
- if isinstance(self.expect_status, (Exception, eventlet.Timeout)):
- raise self.expect_status
+ if self.give_expect:
+ self.give_expect(self)
+ expect_status = self._status.get_expect_status()
headers = dict(self.expect_headers)
if expect_status == 409:
headers['X-Backend-Timestamp'] = self.timestamp
- return FakeConn(expect_status, headers=headers)
+ response = FakeConn(expect_status,
+ timestamp=self.timestamp,
+ headers=headers)
+ response.status = expect_status
+ return response
def getheaders(self):
etag = self.etag
@@ -816,7 +912,7 @@ def fake_http_connect(*code_iter, **kwargs):
else:
etag = '"68b329da9893e34099c7d8ad5cb9c940"'
- headers = swob.HeaderKeyDict({
+ headers = HeaderKeyDict({
'content-length': len(self.body),
'content-type': 'x-application/test',
'x-timestamp': self.timestamp,
@@ -834,7 +930,7 @@ def fake_http_connect(*code_iter, **kwargs):
# when timestamp is None, HeaderKeyDict raises KeyError
headers.pop('x-timestamp', None)
try:
- if container_ts_iter.next() is False:
+ if next(container_ts_iter) is False:
headers['x-container-timestamp'] = '1'
except StopIteration:
pass
@@ -865,9 +961,9 @@ def fake_http_connect(*code_iter, **kwargs):
self.body = self.body[amt:]
return rv
- def send(self, amt=None):
+ def send(self, data=None):
if self.give_send:
- self.give_send(self.connection_id, amt)
+ self.give_send(self, data)
am_slow, value = self.get_slow()
if am_slow:
if self.received < 4:
@@ -875,10 +971,10 @@ def fake_http_connect(*code_iter, **kwargs):
eventlet.sleep(value)
def getheader(self, name, default=None):
- return swob.HeaderKeyDict(self.getheaders()).get(name, default)
+ return HeaderKeyDict(self.getheaders()).get(name, default)
def close(self):
- pass
+ self.closed = True
timestamps_iter = iter(kwargs.get('timestamps') or ['1'] * len(code_iter))
etag_iter = iter(kwargs.get('etags') or [None] * len(code_iter))
@@ -911,27 +1007,28 @@ def fake_http_connect(*code_iter, **kwargs):
kwargs['give_content_type'](args[6]['Content-Type'])
else:
kwargs['give_content_type']('')
- i, status = conn_id_and_code_iter.next()
+ i, status = next(conn_id_and_code_iter)
if 'give_connect' in kwargs:
give_conn_fn = kwargs['give_connect']
argspec = inspect.getargspec(give_conn_fn)
if argspec.keywords or 'connection_id' in argspec.args:
ckwargs['connection_id'] = i
give_conn_fn(*args, **ckwargs)
- etag = etag_iter.next()
- headers = headers_iter.next()
- expect_headers = expect_headers_iter.next()
- timestamp = timestamps_iter.next()
+ etag = next(etag_iter)
+ headers = next(headers_iter)
+ expect_headers = next(expect_headers_iter)
+ timestamp = next(timestamps_iter)
if status <= 0:
raise HTTPException()
if body_iter is None:
body = static_body or ''
else:
- body = body_iter.next()
+ body = next(body_iter)
return FakeConn(status, etag, body=body, timestamp=timestamp,
headers=headers, expect_headers=expect_headers,
- connection_id=i, give_send=kwargs.get('give_send'))
+ connection_id=i, give_send=kwargs.get('give_send'),
+ give_expect=kwargs.get('give_expect'))
connect.code_iter = code_iter
@@ -966,3 +1063,58 @@ def mocked_http_conn(*args, **kwargs):
def make_timestamp_iter():
return iter(Timestamp(t) for t in itertools.count(int(time.time())))
+
+
+class Timeout(object):
+ def __init__(self, seconds):
+ self.seconds = seconds
+
+ def __enter__(self):
+ signal.signal(signal.SIGALRM, self._exit)
+ signal.alarm(self.seconds)
+
+ def __exit__(self, type, value, traceback):
+ signal.alarm(0)
+
+ def _exit(self, signum, frame):
+ class TimeoutException(Exception):
+ pass
+ raise TimeoutException
+
+
+def requires_o_tmpfile_support(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ if not utils.o_tmpfile_supported():
+ raise SkipTest('Requires O_TMPFILE support')
+ return func(*args, **kwargs)
+ return wrapper
+
+
+def encode_frag_archive_bodies(policy, body):
+ """
+ Given a stub body produce a list of complete frag_archive bodies as
+ strings in frag_index order.
+
+ :param policy: a StoragePolicy instance, with policy_type EC_POLICY
+ :param body: a string, the body to encode into frag archives
+
+ :returns: list of strings, the complete frag_archive bodies for the given
+ plaintext
+ """
+ segment_size = policy.ec_segment_size
+ # split up the body into buffers
+ chunks = [body[x:x + segment_size]
+ for x in range(0, len(body), segment_size)]
+ # encode the buffers into fragment payloads
+ fragment_payloads = []
+ for chunk in chunks:
+ fragments = policy.pyeclib_driver.encode(chunk)
+ if not fragments:
+ break
+ fragment_payloads.append(fragments)
+
+ # join up the fragment payloads per node
+ ec_archive_bodies = [''.join(frags)
+ for frags in zip(*fragment_payloads)]
+ return ec_archive_bodies
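
A minimal usage sketch for this helper, reusing the EC policy parameters that patch_policies(with_ec_default=True) sets up above and assuming one of the PyECLib backends from EC_TYPE_PREFERENCE is installed:

    from swift.common.storage_policy import ECStoragePolicy
    from test.unit import DEFAULT_TEST_EC_TYPE, encode_frag_archive_bodies

    policy = ECStoragePolicy(0, name='ec', is_default=True,
                             ec_type=DEFAULT_TEST_EC_TYPE, ec_ndata=10,
                             ec_nparity=4, ec_segment_size=4096)
    body = 'x' * (3 * policy.ec_segment_size + 123)
    frag_archives = encode_frag_archive_bodies(policy, body)
    # one complete frag archive per node: ec_ndata + ec_nparity fragments
    print(len(frag_archives))
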
diff --git a/test/unit/obj/test_diskfile.py b/test/unit/obj/test_diskfile.py
index 9701f44..2aed737 100644
--- a/test/unit/obj/test_diskfile.py
+++ b/test/unit/obj/test_diskfile.py
@@ -31,7 +31,7 @@ from gluster.swift.common.exceptions import AlreadyExistsAsDir, \
AlreadyExistsAsFile
from swift.common.exceptions import DiskFileNoSpace, DiskFileNotOpen, \
DiskFileNotExist, DiskFileExpired
-from swift.common.utils import ThreadPool
+from gluster.swift.common.utils import ThreadPool
import gluster.swift.common.utils
from gluster.swift.common.utils import normalize_timestamp
diff --git a/test/unit/obj/test_expirer.py b/test/unit/obj/test_expirer.py
index 9701027..4830a90 100644
--- a/test/unit/obj/test_expirer.py
+++ b/test/unit/obj/test_expirer.py
@@ -13,7 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import urllib
from time import time
from unittest import main, TestCase
from test.unit import FakeRing, mocked_http_conn, debug_logger
@@ -22,8 +21,10 @@ from tempfile import mkdtemp
from shutil import rmtree
import mock
+import six
+from six.moves import urllib
-from swift.common import internal_client, utils
+from swift.common import internal_client, utils, swob
from swift.obj import expirer
@@ -54,7 +55,7 @@ class TestObjectExpirer(TestCase):
self.rcache = mkdtemp()
self.conf = {'recon_cache_path': self.rcache}
- self.logger = debug_logger('test-recon')
+ self.logger = debug_logger('test-expirer')
def tearDown(self):
rmtree(self.rcache)
@@ -88,10 +89,16 @@ class TestObjectExpirer(TestCase):
}
# from config
x = expirer.ObjectExpirer(vals)
- self.assertRaises(ValueError, x.get_process_values, {})
+ expected_msg = 'process must be an integer greater' \
+ ' than or equal to 0'
+ with self.assertRaises(ValueError) as ctx:
+ x.get_process_values({})
+ self.assertEqual(str(ctx.exception), expected_msg)
# from kwargs
x = expirer.ObjectExpirer({})
- self.assertRaises(ValueError, x.get_process_values, vals)
+ with self.assertRaises(ValueError) as ctx:
+ x.get_process_values(vals)
+ self.assertEqual(str(ctx.exception), expected_msg)
def test_get_process_values_negative_processes(self):
vals = {
@@ -100,10 +107,16 @@ class TestObjectExpirer(TestCase):
}
# from config
x = expirer.ObjectExpirer(vals)
- self.assertRaises(ValueError, x.get_process_values, {})
+ expected_msg = 'processes must be an integer greater' \
+ ' than or equal to 0'
+ with self.assertRaises(ValueError) as ctx:
+ x.get_process_values({})
+ self.assertEqual(str(ctx.exception), expected_msg)
# from kwargs
x = expirer.ObjectExpirer({})
- self.assertRaises(ValueError, x.get_process_values, vals)
+ with self.assertRaises(ValueError) as ctx:
+ x.get_process_values(vals)
+ self.assertEqual(str(ctx.exception), expected_msg)
def test_get_process_values_process_greater_than_processes(self):
vals = {
@@ -112,10 +125,32 @@ class TestObjectExpirer(TestCase):
}
# from config
x = expirer.ObjectExpirer(vals)
- self.assertRaises(ValueError, x.get_process_values, {})
+ expected_msg = 'process must be less than processes'
+ with self.assertRaises(ValueError) as ctx:
+ x.get_process_values({})
+ self.assertEqual(str(ctx.exception), expected_msg)
# from kwargs
x = expirer.ObjectExpirer({})
- self.assertRaises(ValueError, x.get_process_values, vals)
+ with self.assertRaises(ValueError) as ctx:
+ x.get_process_values(vals)
+ self.assertEqual(str(ctx.exception), expected_msg)
+
+ def test_get_process_values_process_equal_to_processes(self):
+ vals = {
+ 'processes': 5,
+ 'process': 5,
+ }
+ # from config
+ x = expirer.ObjectExpirer(vals)
+ expected_msg = 'process must be less than processes'
+ with self.assertRaises(ValueError) as ctx:
+ x.get_process_values({})
+ self.assertEqual(str(ctx.exception), expected_msg)
+ # from kwargs
+ x = expirer.ObjectExpirer({})
+ with self.assertRaises(ValueError) as ctx:
+ x.get_process_values(vals)
+ self.assertEqual(str(ctx.exception), expected_msg)
def test_init_concurrency_too_small(self):
conf = {
@@ -153,10 +188,11 @@ class TestObjectExpirer(TestCase):
sum([len(self.containers[x]) for x in self.containers])
def iter_containers(self, *a, **kw):
- return [{'name': unicode(x)} for x in self.containers.keys()]
+ return [{'name': six.text_type(x)}
+ for x in self.containers.keys()]
def iter_objects(self, account, container):
- return [{'name': unicode(x)}
+ return [{'name': six.text_type(x)}
for x in self.containers[container]]
def delete_container(*a, **kw):
@@ -172,7 +208,7 @@ class TestObjectExpirer(TestCase):
x.swift = InternalClient(containers)
deleted_objects = {}
- for i in xrange(3):
+ for i in range(3):
x.process = i
x.run_once()
self.assertNotEqual(deleted_objects, x.deleted_objects)
@@ -183,52 +219,55 @@ class TestObjectExpirer(TestCase):
self.assertEqual(len(set(x.obj_containers_in_order[:4])), 4)
def test_delete_object(self):
- class InternalClient(object):
-
- container_ring = None
-
- def __init__(self, test, account, container, obj):
- self.test = test
- self.account = account
- self.container = container
- self.obj = obj
- self.delete_object_called = False
-
- class DeleteActualObject(object):
- def __init__(self, test, actual_obj, timestamp):
- self.test = test
- self.actual_obj = actual_obj
- self.timestamp = timestamp
- self.called = False
-
- def __call__(self, actual_obj, timestamp):
- self.test.assertEqual(self.actual_obj, actual_obj)
- self.test.assertEqual(self.timestamp, timestamp)
- self.called = True
-
+ x = expirer.ObjectExpirer({}, logger=self.logger)
+ actual_obj = 'actual_obj'
+ timestamp = int(time())
+ reclaim_ts = timestamp - x.reclaim_age
container = 'container'
obj = 'obj'
- actual_obj = 'actual_obj'
- timestamp = 'timestamp'
-
- x = expirer.ObjectExpirer({}, logger=self.logger)
- x.swift = \
- InternalClient(self, x.expiring_objects_account, container, obj)
- x.delete_actual_object = \
- DeleteActualObject(self, actual_obj, timestamp)
- delete_object_called = []
-
- def pop_queue(c, o):
- self.assertEqual(container, c)
- self.assertEqual(obj, o)
- delete_object_called[:] = [True]
-
- x.pop_queue = pop_queue
-
- x.delete_object(actual_obj, timestamp, container, obj)
- self.assertTrue(delete_object_called)
- self.assertTrue(x.delete_actual_object.called)
+ http_exc = {
+ resp_code:
+ internal_client.UnexpectedResponse(
+ str(resp_code), swob.HTTPException(status=resp_code))
+ for resp_code in {404, 412, 500}
+ }
+ exc_other = Exception()
+
+ def check_call_to_delete_object(exc, ts, should_pop):
+ x.logger.clear()
+ start_reports = x.report_objects
+ with mock.patch.object(x, 'delete_actual_object',
+ side_effect=exc) as delete_actual:
+ with mock.patch.object(x, 'pop_queue') as pop_queue:
+ x.delete_object(actual_obj, ts, container, obj)
+
+ delete_actual.assert_called_once_with(actual_obj, ts)
+ log_lines = x.logger.get_lines_for_level('error')
+ if should_pop:
+ pop_queue.assert_called_once_with(container, obj)
+ self.assertEqual(start_reports + 1, x.report_objects)
+ self.assertFalse(log_lines)
+ else:
+ self.assertFalse(pop_queue.called)
+ self.assertEqual(start_reports, x.report_objects)
+ self.assertEqual(1, len(log_lines))
+ self.assertIn('Exception while deleting object container obj',
+ log_lines[0])
+
+ # verify pop_queue logic on exceptions
+ for exc, ts, should_pop in [(None, timestamp, True),
+ (http_exc[404], timestamp, False),
+ (http_exc[412], timestamp, False),
+ (http_exc[500], reclaim_ts, False),
+ (exc_other, reclaim_ts, False),
+ (http_exc[404], reclaim_ts, True),
+ (http_exc[412], reclaim_ts, True)]:
+
+ try:
+ check_call_to_delete_object(exc, ts, should_pop)
+ except AssertionError as err:
+ self.fail("Failed on %r at %f: %s" % (exc, ts, err))
def test_report(self):
x = expirer.ObjectExpirer({}, logger=self.logger)
@@ -525,7 +564,7 @@ class TestObjectExpirer(TestCase):
got_unicode = [False]
def delete_actual_object_test_for_unicode(actual_obj, timestamp):
- if isinstance(actual_obj, unicode):
+ if isinstance(actual_obj, six.text_type):
got_unicode[0] = True
fake_swift = InternalClient(
@@ -673,6 +712,8 @@ class TestObjectExpirer(TestCase):
ts = '1234'
x.delete_actual_object('/path/to/object', ts)
self.assertEqual(got_env[0]['HTTP_X_IF_DELETE_AT'], ts)
+ self.assertEqual(got_env[0]['HTTP_X_TIMESTAMP'],
+ got_env[0]['HTTP_X_IF_DELETE_AT'])
def test_delete_actual_object_nourlquoting(self):
# delete_actual_object should not do its own url quoting because
@@ -690,6 +731,8 @@ class TestObjectExpirer(TestCase):
ts = '1234'
x.delete_actual_object('/path/to/object name', ts)
self.assertEqual(got_env[0]['HTTP_X_IF_DELETE_AT'], ts)
+ self.assertEqual(got_env[0]['HTTP_X_TIMESTAMP'],
+ got_env[0]['HTTP_X_IF_DELETE_AT'])
self.assertEqual(got_env[0]['PATH_INFO'], '/v1/path/to/object name')
def test_delete_actual_object_raises_404(self):
@@ -704,7 +747,7 @@ class TestObjectExpirer(TestCase):
self.assertRaises(internal_client.UnexpectedResponse,
x.delete_actual_object, '/path/to/object', '1234')
- def test_delete_actual_object_handles_412(self):
+ def test_delete_actual_object_raises_412(self):
def fake_app(env, start_response):
start_response('412 Precondition Failed',
@@ -714,7 +757,8 @@ class TestObjectExpirer(TestCase):
internal_client.loadapp = lambda *a, **kw: fake_app
x = expirer.ObjectExpirer({})
- x.delete_actual_object('/path/to/object', '1234')
+ self.assertRaises(internal_client.UnexpectedResponse,
+ x.delete_actual_object, '/path/to/object', '1234')
def test_delete_actual_object_does_not_handle_odd_stuff(self):
@@ -744,7 +788,7 @@ class TestObjectExpirer(TestCase):
x.delete_actual_object(name, timestamp)
self.assertEqual(x.swift.make_request.call_count, 1)
self.assertEqual(x.swift.make_request.call_args[0][1],
- '/v1/' + urllib.quote(name))
+ '/v1/' + urllib.parse.quote(name))
def test_pop_queue(self):
class InternalClient(object):
diff --git a/test/unit/proxy/controllers/test_account.py b/test/unit/proxy/controllers/test_account.py
index 23ad0a1..a46dcc9 100644
--- a/test/unit/proxy/controllers/test_account.py
+++ b/test/unit/proxy/controllers/test_account.py
@@ -25,6 +25,7 @@ from test.unit import fake_http_connect, FakeRing, FakeMemcache
from swift.common.storage_policy import StoragePolicy
from swift.common.request_helpers import get_sys_meta_prefix
import swift.proxy.controllers.base
+from swift.proxy.controllers.base import get_account_info
from test.unit import patch_policies
@@ -36,6 +37,31 @@ class TestAccountController(unittest.TestCase):
None, FakeMemcache(),
account_ring=FakeRing(), container_ring=FakeRing())
+ def _make_callback_func(self, context):
+ def callback(ipaddr, port, device, partition, method, path,
+ headers=None, query_string=None, ssl=False):
+ context['method'] = method
+ context['path'] = path
+ context['headers'] = headers or {}
+ return callback
+
+ def _assert_responses(self, method, test_cases):
+ if method in ('PUT', 'DELETE'):
+ self.app.allow_account_management = True
+ controller = proxy_server.AccountController(self.app, 'AUTH_bob')
+
+ for responses, expected in test_cases:
+ with mock.patch(
+ 'swift.proxy.controllers.base.http_connect',
+ fake_http_connect(*responses)):
+ req = Request.blank('/v1/AUTH_bob')
+ resp = getattr(controller, method)(req)
+
+ self.assertEqual(expected,
+ resp.status_int,
+ 'Expected %s but got %s. Failed case: %s' %
+ (expected, resp.status_int, str(responses)))
+
def test_account_info_in_response_env(self):
controller = proxy_server.AccountController(self.app, 'AUTH_bob')
with mock.patch('swift.proxy.controllers.base.http_connect',
@@ -43,9 +69,10 @@ class TestAccountController(unittest.TestCase):
req = Request.blank('/v1/AUTH_bob', {'PATH_INFO': '/v1/AUTH_bob'})
resp = controller.HEAD(req)
self.assertEqual(2, resp.status_int // 100)
- self.assertTrue('swift.account/AUTH_bob' in resp.environ)
- self.assertEqual(headers_to_account_info(resp.headers),
- resp.environ['swift.account/AUTH_bob'])
+ self.assertIn('account/AUTH_bob', resp.environ['swift.infocache'])
+ self.assertEqual(
+ headers_to_account_info(resp.headers),
+ resp.environ['swift.infocache']['account/AUTH_bob'])
def test_swift_owner(self):
owner_headers = {
@@ -57,17 +84,17 @@ class TestAccountController(unittest.TestCase):
with mock.patch('swift.proxy.controllers.base.http_connect',
fake_http_connect(200, headers=owner_headers)):
resp = controller.HEAD(req)
- self.assertEquals(2, resp.status_int // 100)
+ self.assertEqual(2, resp.status_int // 100)
for key in owner_headers:
- self.assertTrue(key not in resp.headers)
+ self.assertNotIn(key, resp.headers)
req = Request.blank('/v1/a', environ={'swift_owner': True})
with mock.patch('swift.proxy.controllers.base.http_connect',
fake_http_connect(200, headers=owner_headers)):
resp = controller.HEAD(req)
- self.assertEquals(2, resp.status_int // 100)
+ self.assertEqual(2, resp.status_int // 100)
for key in owner_headers:
- self.assertTrue(key in resp.headers)
+ self.assertIn(key, resp.headers)
def test_get_deleted_account(self):
resp_headers = {
@@ -79,7 +106,7 @@ class TestAccountController(unittest.TestCase):
with mock.patch('swift.proxy.controllers.base.http_connect',
fake_http_connect(404, headers=resp_headers)):
resp = controller.HEAD(req)
- self.assertEquals(410, resp.status_int)
+ self.assertEqual(410, resp.status_int)
def test_long_acct_names(self):
long_acct_name = '%sLongAccountName' % (
@@ -90,25 +117,17 @@ class TestAccountController(unittest.TestCase):
with mock.patch('swift.proxy.controllers.base.http_connect',
fake_http_connect(200)):
resp = controller.HEAD(req)
- self.assertEquals(400, resp.status_int)
+ self.assertEqual(400, resp.status_int)
with mock.patch('swift.proxy.controllers.base.http_connect',
fake_http_connect(200)):
resp = controller.GET(req)
- self.assertEquals(400, resp.status_int)
+ self.assertEqual(400, resp.status_int)
with mock.patch('swift.proxy.controllers.base.http_connect',
fake_http_connect(200)):
resp = controller.POST(req)
- self.assertEquals(400, resp.status_int)
-
- def _make_callback_func(self, context):
- def callback(ipaddr, port, device, partition, method, path,
- headers=None, query_string=None, ssl=False):
- context['method'] = method
- context['path'] = path
- context['headers'] = headers or {}
- return callback
+ self.assertEqual(400, resp.status_int)
def test_sys_meta_headers_PUT(self):
# check that headers in sys meta namespace make it through
@@ -129,9 +148,9 @@ class TestAccountController(unittest.TestCase):
fake_http_connect(200, 200, give_connect=callback)):
controller.PUT(req)
self.assertEqual(context['method'], 'PUT')
- self.assertTrue(sys_meta_key in context['headers'])
+ self.assertIn(sys_meta_key, context['headers'])
self.assertEqual(context['headers'][sys_meta_key], 'foo')
- self.assertTrue(user_meta_key in context['headers'])
+ self.assertIn(user_meta_key, context['headers'])
self.assertEqual(context['headers'][user_meta_key], 'bar')
self.assertNotEqual(context['headers']['x-timestamp'], '1.0')
@@ -152,9 +171,9 @@ class TestAccountController(unittest.TestCase):
fake_http_connect(200, 200, give_connect=callback)):
controller.POST(req)
self.assertEqual(context['method'], 'POST')
- self.assertTrue(sys_meta_key in context['headers'])
+ self.assertIn(sys_meta_key, context['headers'])
self.assertEqual(context['headers'][sys_meta_key], 'foo')
- self.assertTrue(user_meta_key in context['headers'])
+ self.assertIn(user_meta_key, context['headers'])
self.assertEqual(context['headers'][user_meta_key], 'bar')
self.assertNotEqual(context['headers']['x-timestamp'], '1.0')
@@ -193,7 +212,7 @@ class TestAccountController(unittest.TestCase):
self.assertEqual(resp.headers.get(header), value)
else:
# blank ACLs should result in no header
- self.assert_(header not in resp.headers)
+ self.assertNotIn(header, resp.headers)
def test_add_acls_impossible_cases(self):
# For test coverage: verify that defensive coding does defend, in cases
@@ -208,19 +227,25 @@ class TestAccountController(unittest.TestCase):
self.assertEqual(1, len(resp.headers)) # we always get Content-Type
self.assertEqual(2, len(resp.environ))
- def test_memcache_key_impossible_cases(self):
+ def test_cache_key_impossible_cases(self):
# For test coverage: verify that defensive coding does defend, in cases
# that shouldn't arise naturally
- self.assertRaises(
- ValueError,
- lambda: swift.proxy.controllers.base.get_container_memcache_key(
- '/a', None))
+ with self.assertRaises(ValueError):
+ # Container needs account
+ swift.proxy.controllers.base.get_cache_key(None, 'c')
+
+ with self.assertRaises(ValueError):
+ # Object needs account
+ swift.proxy.controllers.base.get_cache_key(None, 'c', 'o')
+
+ with self.assertRaises(ValueError):
+ # Object needs container
+ swift.proxy.controllers.base.get_cache_key('a', None, 'o')
def test_stripping_swift_admin_headers(self):
# Verify that a GET/HEAD which receives privileged headers from the
# account server will strip those headers for non-swift_owners
- hdrs_ext, hdrs_int = self._make_user_and_sys_acl_headers_data()
headers = {
'x-account-meta-harmless': 'hi mom',
'x-account-meta-temp-url-key': 's3kr1t',
@@ -243,6 +268,139 @@ class TestAccountController(unittest.TestCase):
'x-account-meta-temp-url-key' in resp.headers)
self.assertEqual(privileged_header_present, env['swift_owner'])
+ def test_response_code_for_PUT(self):
+ PUT_TEST_CASES = [
+ ((201, 201, 201), 201),
+ ((201, 201, 404), 201),
+ ((201, 201, 503), 201),
+ ((201, 404, 404), 404),
+ ((201, 404, 503), 503),
+ ((201, 503, 503), 503),
+ ((404, 404, 404), 404),
+ ((404, 404, 503), 404),
+ ((404, 503, 503), 503),
+ ((503, 503, 503), 503)
+ ]
+ self._assert_responses('PUT', PUT_TEST_CASES)
+
+ def test_response_code_for_DELETE(self):
+ DELETE_TEST_CASES = [
+ ((204, 204, 204), 204),
+ ((204, 204, 404), 204),
+ ((204, 204, 503), 204),
+ ((204, 404, 404), 404),
+ ((204, 404, 503), 503),
+ ((204, 503, 503), 503),
+ ((404, 404, 404), 404),
+ ((404, 404, 503), 404),
+ ((404, 503, 503), 503),
+ ((503, 503, 503), 503)
+ ]
+ self._assert_responses('DELETE', DELETE_TEST_CASES)
+
+ def test_response_code_for_POST(self):
+ POST_TEST_CASES = [
+ ((204, 204, 204), 204),
+ ((204, 204, 404), 204),
+ ((204, 204, 503), 204),
+ ((204, 404, 404), 404),
+ ((204, 404, 503), 503),
+ ((204, 503, 503), 503),
+ ((404, 404, 404), 404),
+ ((404, 404, 503), 404),
+ ((404, 503, 503), 503),
+ ((503, 503, 503), 503)
+ ]
+ self._assert_responses('POST', POST_TEST_CASES)
+
+
+@patch_policies(
+ [StoragePolicy(0, 'zero', True, object_ring=FakeRing(replicas=4))])
+class TestAccountController4Replicas(TestAccountController):
+ def setUp(self):
+ self.app = proxy_server.Application(
+ None,
+ FakeMemcache(),
+ account_ring=FakeRing(replicas=4),
+ container_ring=FakeRing(replicas=4))
+
+ def test_response_code_for_PUT(self):
+ PUT_TEST_CASES = [
+ ((201, 201, 201, 201), 201),
+ ((201, 201, 201, 404), 201),
+ ((201, 201, 201, 503), 201),
+ ((201, 201, 404, 404), 201),
+ ((201, 201, 404, 503), 201),
+ ((201, 201, 503, 503), 201),
+ ((201, 404, 404, 404), 404),
+ ((201, 404, 404, 503), 404),
+ ((201, 404, 503, 503), 503),
+ ((201, 503, 503, 503), 503),
+ ((404, 404, 404, 404), 404),
+ ((404, 404, 404, 503), 404),
+ ((404, 404, 503, 503), 404),
+ ((404, 503, 503, 503), 503),
+ ((503, 503, 503, 503), 503)
+ ]
+ self._assert_responses('PUT', PUT_TEST_CASES)
+
+ def test_response_code_for_DELETE(self):
+ DELETE_TEST_CASES = [
+ ((204, 204, 204, 204), 204),
+ ((204, 204, 204, 404), 204),
+ ((204, 204, 204, 503), 204),
+ ((204, 204, 404, 404), 204),
+ ((204, 204, 404, 503), 204),
+ ((204, 204, 503, 503), 204),
+ ((204, 404, 404, 404), 404),
+ ((204, 404, 404, 503), 404),
+ ((204, 404, 503, 503), 503),
+ ((204, 503, 503, 503), 503),
+ ((404, 404, 404, 404), 404),
+ ((404, 404, 404, 503), 404),
+ ((404, 404, 503, 503), 404),
+ ((404, 503, 503, 503), 503),
+ ((503, 503, 503, 503), 503)
+ ]
+ self._assert_responses('DELETE', DELETE_TEST_CASES)
+
+ def test_response_code_for_POST(self):
+ POST_TEST_CASES = [
+ ((204, 204, 204, 204), 204),
+ ((204, 204, 204, 404), 204),
+ ((204, 204, 204, 503), 204),
+ ((204, 204, 404, 404), 204),
+ ((204, 204, 404, 503), 204),
+ ((204, 204, 503, 503), 204),
+ ((204, 404, 404, 404), 404),
+ ((204, 404, 404, 503), 404),
+ ((204, 404, 503, 503), 503),
+ ((204, 503, 503, 503), 503),
+ ((404, 404, 404, 404), 404),
+ ((404, 404, 404, 503), 404),
+ ((404, 404, 503, 503), 404),
+ ((404, 503, 503, 503), 503),
+ ((503, 503, 503, 503), 503)
+ ]
+ self._assert_responses('POST', POST_TEST_CASES)
+
+
+@patch_policies([StoragePolicy(0, 'zero', True, object_ring=FakeRing())])
+class TestGetAccountInfo(unittest.TestCase):
+ def setUp(self):
+ self.app = proxy_server.Application(
+ None, FakeMemcache(),
+ account_ring=FakeRing(), container_ring=FakeRing())
+
+ def test_get_deleted_account_410(self):
+ resp_headers = {'x-account-status': 'deleted'}
+
+ req = Request.blank('/v1/a')
+ with mock.patch('swift.proxy.controllers.base.http_connect',
+ fake_http_connect(404, headers=resp_headers)):
+ info = get_account_info(req.environ, self.app)
+ self.assertEqual(410, info.get('status'))
+
if __name__ == '__main__':
unittest.main()
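
The PUT/DELETE/POST tables added above encode the proxy's quorum handling for account requests: a status class only wins if enough backend nodes agree, otherwise the proxy falls back to 503 Service Unavailable. A minimal Python sketch of that rule, written only to reproduce the expectations tabulated above (it is not Swift's actual best_response() implementation):

    def pick_status(statuses):
        # Illustrative only: prefer a 2xx quorum, then a 4xx quorum,
        # otherwise report a server error.
        quorum = (len(statuses) + 1) // 2
        for status_class in (2, 4):
            group = [s for s in statuses if s // 100 == status_class]
            if len(group) >= quorum:
                return max(group)
        return 503

    # pick_status((201, 201, 404)) == 201
    # pick_status((201, 404, 404)) == 404
    # pick_status((201, 404, 503)) == 503   # no quorum in any class
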
diff --git a/test/unit/proxy/controllers/test_base.py b/test/unit/proxy/controllers/test_base.py
index 037e28b..1ab0037 100644
--- a/test/unit/proxy/controllers/test_base.py
+++ b/test/unit/proxy/controllers/test_base.py
@@ -16,22 +16,22 @@
import itertools
from collections import defaultdict
import unittest
-from mock import patch
+import mock
from swift.proxy.controllers.base import headers_to_container_info, \
headers_to_account_info, headers_to_object_info, get_container_info, \
- get_container_memcache_key, get_account_info, get_account_memcache_key, \
- get_object_env_key, get_info, get_object_info, \
- Controller, GetOrHeadHandler, _set_info_cache, _set_object_info_cache, \
- bytes_to_skip
-from swift.common.swob import Request, HTTPException, HeaderKeyDict, \
- RESPONSE_REASONS
+ get_cache_key, get_account_info, get_info, get_object_info, \
+ Controller, GetOrHeadHandler, bytes_to_skip
+from swift.common.swob import Request, HTTPException, RESPONSE_REASONS
from swift.common import exceptions
from swift.common.utils import split_path
+from swift.common.header_key_dict import HeaderKeyDict
from swift.common.http import is_success
from swift.common.storage_policy import StoragePolicy
from test.unit import fake_http_connect, FakeRing, FakeMemcache
from swift.proxy import server as proxy_server
-from swift.common.request_helpers import get_sys_meta_prefix
+from swift.common.request_helpers import (
+ get_sys_meta_prefix, get_object_transient_sysmeta
+)
from test.unit import patch_policies
@@ -95,7 +95,7 @@ class DynamicResponseFactory(object):
def _get_response(self, type_):
self.stats[type_] += 1
class_ = self.response_type[type_]
- return class_(self.statuses.next())
+ return class_(next(self.statuses))
def get_response(self, environ):
(version, account, container, obj) = split_path(
@@ -112,6 +112,31 @@ class DynamicResponseFactory(object):
return resp
+class ZeroCacheAccountResponse(FakeResponse):
+ base_headers = {
+ 'X-Backend-Recheck-Account-Existence': '0',
+ 'x-account-container-count': 333,
+ 'x-account-object-count': 1000,
+ 'x-account-bytes-used': 6666,
+ }
+
+
+class ZeroCacheContainerResponse(FakeResponse):
+ base_headers = {
+ 'X-Backend-Recheck-Container-Existence': '0',
+ 'x-container-object-count': 1000,
+ 'x-container-bytes-used': 6666,
+ }
+
+
+class ZeroCacheDynamicResponseFactory(DynamicResponseFactory):
+ response_type = {
+ 'obj': ObjectResponse,
+ 'container': ZeroCacheContainerResponse,
+ 'account': ZeroCacheAccountResponse,
+ }
+
+
class FakeApp(object):
recheck_container_existence = 30
@@ -120,23 +145,14 @@ class FakeApp(object):
def __init__(self, response_factory=None, statuses=None):
self.responses = response_factory or \
DynamicResponseFactory(*statuses or [])
- self.sources = []
+ self.captured_envs = []
def __call__(self, environ, start_response):
- self.sources.append(environ.get('swift.source'))
+ self.captured_envs.append(environ)
response = self.responses.get_response(environ)
reason = RESPONSE_REASONS[response.status_int][0]
start_response('%d %s' % (response.status_int, reason),
[(k, v) for k, v in response.headers.items()])
- # It's a bit strnage, but the get_info cache stuff relies on the
- # app setting some keys in the environment as it makes requests
- # (in particular GETorHEAD_base) - so our fake does the same
- _set_info_cache(self, environ, response.account,
- response.container, response)
- if response.obj:
- _set_object_info_cache(self, environ, response.account,
- response.container, response.obj,
- response)
return iter(response.body)
@@ -158,40 +174,80 @@ class TestFuncs(unittest.TestCase):
account_ring=FakeRing(),
container_ring=FakeRing())
- def test_GETorHEAD_base(self):
- base = Controller(self.app)
- req = Request.blank('/v1/a/c/o/with/slashes')
- ring = FakeRing()
- nodes = list(ring.get_part_nodes(0)) + list(ring.get_more_nodes(0))
- with patch('swift.proxy.controllers.base.'
- 'http_connect', fake_http_connect(200)):
- resp = base.GETorHEAD_base(req, 'object', iter(nodes), 'part',
- '/a/c/o/with/slashes')
- self.assertTrue('swift.object/a/c/o/with/slashes' in resp.environ)
- self.assertEqual(
- resp.environ['swift.object/a/c/o/with/slashes']['status'], 200)
- req = Request.blank('/v1/a/c/o')
- with patch('swift.proxy.controllers.base.'
- 'http_connect', fake_http_connect(200)):
- resp = base.GETorHEAD_base(req, 'object', iter(nodes), 'part',
- '/a/c/o')
- self.assertTrue('swift.object/a/c/o' in resp.environ)
- self.assertEqual(resp.environ['swift.object/a/c/o']['status'], 200)
- req = Request.blank('/v1/a/c')
- with patch('swift.proxy.controllers.base.'
- 'http_connect', fake_http_connect(200)):
- resp = base.GETorHEAD_base(req, 'container', iter(nodes), 'part',
- '/a/c')
- self.assertTrue('swift.container/a/c' in resp.environ)
- self.assertEqual(resp.environ['swift.container/a/c']['status'], 200)
-
- req = Request.blank('/v1/a')
- with patch('swift.proxy.controllers.base.'
- 'http_connect', fake_http_connect(200)):
- resp = base.GETorHEAD_base(req, 'account', iter(nodes), 'part',
- '/a')
- self.assertTrue('swift.account/a' in resp.environ)
- self.assertEqual(resp.environ['swift.account/a']['status'], 200)
+ def test_get_info_zero_recheck(self):
+ mock_cache = mock.Mock()
+ mock_cache.get.return_value = None
+ app = FakeApp(ZeroCacheDynamicResponseFactory())
+ env = {'swift.cache': mock_cache}
+ info_a = get_info(app, env, 'a')
+ # Check that you got proper info
+ self.assertEqual(info_a['status'], 200)
+ self.assertEqual(info_a['bytes'], 6666)
+ self.assertEqual(info_a['total_object_count'], 1000)
+ self.assertEqual(info_a['container_count'], 333)
+ # Make sure the env cache is set
+ exp_cached_info_a = {
+ k: str(v) if k in (
+ 'bytes', 'container_count', 'total_object_count') else v
+ for k, v in info_a.items()}
+ self.assertEqual(env['swift.infocache'].get('account/a'),
+ exp_cached_info_a)
+ # Make sure the app was called
+ self.assertEqual(app.responses.stats['account'], 1)
+ self.assertEqual(app.responses.stats['container'], 0)
+ # Make sure memcache was called
+ self.assertEqual(mock_cache.mock_calls, [
+ mock.call.get('account/a'),
+ mock.call.set('account/a', exp_cached_info_a, time=0),
+ ])
+
+ mock_cache.reset_mock()
+ info_c = get_info(app, env, 'a', 'c')
+ # Check that you got proper info
+ self.assertEqual(info_c['status'], 200)
+ self.assertEqual(info_c['bytes'], 6666)
+ self.assertEqual(info_c['object_count'], 1000)
+ # Make sure the env cache is set
+ exp_cached_info_c = {
+ k: str(v) if k in ('bytes', 'object_count') else v
+ for k, v in info_c.items()}
+ self.assertEqual(env['swift.infocache'].get('account/a'),
+ exp_cached_info_a)
+ self.assertEqual(env['swift.infocache'].get('container/a/c'),
+ exp_cached_info_c)
+ # Check app call for container, but no new calls for account
+ self.assertEqual(app.responses.stats['account'], 1)
+ self.assertEqual(app.responses.stats['container'], 1)
+ # Make sure container info was cached
+ self.assertEqual(mock_cache.mock_calls, [
+ mock.call.get('container/a/c'),
+ mock.call.set('container/a/c', exp_cached_info_c, time=0),
+ ])
+
+ # reset call counts
+ app = FakeApp(ZeroCacheDynamicResponseFactory())
+ env = {'swift.cache': mock_cache}
+ mock_cache.reset_mock()
+ info_c = get_info(app, env, 'a', 'c')
+ # Check that you got proper info
+ self.assertEqual(info_c['status'], 200)
+ self.assertEqual(info_c['bytes'], 6666)
+ self.assertEqual(info_c['object_count'], 1000)
+ # Make sure the env cache is set
+ self.assertEqual(env['swift.infocache'].get('account/a'),
+ exp_cached_info_a)
+ self.assertEqual(env['swift.infocache'].get('container/a/c'),
+ exp_cached_info_c)
+ # check app calls both account and container
+ self.assertEqual(app.responses.stats['account'], 1)
+ self.assertEqual(app.responses.stats['container'], 1)
+ # Make sure account info was cached but container was not
+ self.assertEqual(mock_cache.mock_calls, [
+ mock.call.get('container/a/c'),
+ mock.call.get('account/a'),
+ mock.call.set('account/a', exp_cached_info_a, time=0),
+ mock.call.set('container/a/c', exp_cached_info_c, time=0),
+ ])
def test_get_info(self):
app = FakeApp()
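
test_get_info_zero_recheck above pins down the lookup order used for account and container info: the per-request swift.infocache dict is consulted first, then memcache, and only on a miss is a backend request made, after which both caches are back-filled under keys such as 'account/a' and 'container/a/c'. A self-contained sketch of that two-tier lookup, with illustrative names rather than Swift's internals:

    def cached_info(env, memcache, key, fetch_from_backend):
        # Request-local cache first, shared memcache second, backend last;
        # every miss back-fills the faster tiers.
        infocache = env.setdefault('swift.infocache', {})
        if key in infocache:
            return infocache[key]
        info = memcache.get(key) if memcache else None
        if info is None:
            info = fetch_from_backend()
            if memcache is not None:
                # cache lifetime mirrors the backend recheck value,
                # which is 0 in the test above
                memcache.set(key, info, time=0)
        infocache[key] = info
        return info
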
@@ -199,196 +255,177 @@ class TestFuncs(unittest.TestCase):
env = {}
info_a = get_info(app, env, 'a')
# Check that you got proper info
- self.assertEquals(info_a['status'], 200)
- self.assertEquals(info_a['bytes'], 6666)
- self.assertEquals(info_a['total_object_count'], 1000)
- # Make sure the env cache is set
- self.assertEquals(env.get('swift.account/a'), info_a)
+ self.assertEqual(info_a['status'], 200)
+ self.assertEqual(info_a['bytes'], 6666)
+ self.assertEqual(info_a['total_object_count'], 1000)
+
# Make sure the app was called
self.assertEqual(app.responses.stats['account'], 1)
+ # Make sure the return value matches get_account_info
+ account_info = get_account_info({'PATH_INFO': '/v1/a'}, app)
+ self.assertEqual(info_a, account_info)
+
# Do an env cached call to account
+ app.responses.stats['account'] = 0
+ app.responses.stats['container'] = 0
+
info_a = get_info(app, env, 'a')
# Check that you got proper info
- self.assertEquals(info_a['status'], 200)
- self.assertEquals(info_a['bytes'], 6666)
- self.assertEquals(info_a['total_object_count'], 1000)
- # Make sure the env cache is set
- self.assertEquals(env.get('swift.account/a'), info_a)
+ self.assertEqual(info_a['status'], 200)
+ self.assertEqual(info_a['bytes'], 6666)
+ self.assertEqual(info_a['total_object_count'], 1000)
+
# Make sure the app was NOT called AGAIN
- self.assertEqual(app.responses.stats['account'], 1)
+ self.assertEqual(app.responses.stats['account'], 0)
# This time do env cached call to account and non cached to container
+ app.responses.stats['account'] = 0
+ app.responses.stats['container'] = 0
+
info_c = get_info(app, env, 'a', 'c')
# Check that you got proper info
- self.assertEquals(info_c['status'], 200)
- self.assertEquals(info_c['bytes'], 6666)
- self.assertEquals(info_c['object_count'], 1000)
- # Make sure the env cache is set
- self.assertEquals(env.get('swift.account/a'), info_a)
- self.assertEquals(env.get('swift.container/a/c'), info_c)
- # Make sure the app was called for container
+ self.assertEqual(info_c['status'], 200)
+ self.assertEqual(info_c['bytes'], 6666)
+ self.assertEqual(info_c['object_count'], 1000)
+ # Make sure the app was called for container but not account
+ self.assertEqual(app.responses.stats['account'], 0)
self.assertEqual(app.responses.stats['container'], 1)
- # This time do a non cached call to account than non cached to
+ # This time do a non-cached call to account then non-cached to
# container
+ app.responses.stats['account'] = 0
+ app.responses.stats['container'] = 0
app = FakeApp()
env = {} # abandon previous call to env
info_c = get_info(app, env, 'a', 'c')
# Check that you got proper info
- self.assertEquals(info_c['status'], 200)
- self.assertEquals(info_c['bytes'], 6666)
- self.assertEquals(info_c['object_count'], 1000)
- # Make sure the env cache is set
- self.assertEquals(env.get('swift.account/a'), info_a)
- self.assertEquals(env.get('swift.container/a/c'), info_c)
+ self.assertEqual(info_c['status'], 200)
+ self.assertEqual(info_c['bytes'], 6666)
+ self.assertEqual(info_c['object_count'], 1000)
# check app calls both account and container
self.assertEqual(app.responses.stats['account'], 1)
self.assertEqual(app.responses.stats['container'], 1)
- # This time do an env cached call to container while account is not
+ # This time do an env-cached call to container while account is not
# cached
- del(env['swift.account/a'])
+ app.responses.stats['account'] = 0
+ app.responses.stats['container'] = 0
info_c = get_info(app, env, 'a', 'c')
# Check that you got proper info
- self.assertEquals(info_a['status'], 200)
- self.assertEquals(info_c['bytes'], 6666)
- self.assertEquals(info_c['object_count'], 1000)
- # Make sure the env cache is set and account still not cached
- self.assertEquals(env.get('swift.container/a/c'), info_c)
- # no additional calls were made
- self.assertEqual(app.responses.stats['account'], 1)
- self.assertEqual(app.responses.stats['container'], 1)
-
- # Do a non cached call to account not found with ret_not_found
- app = FakeApp(statuses=(404,))
- env = {}
- info_a = get_info(app, env, 'a', ret_not_found=True)
- # Check that you got proper info
- self.assertEquals(info_a['status'], 404)
- self.assertEquals(info_a['bytes'], None)
- self.assertEquals(info_a['total_object_count'], None)
- # Make sure the env cache is set
- self.assertEquals(env.get('swift.account/a'), info_a)
- # and account was called
- self.assertEqual(app.responses.stats['account'], 1)
+ self.assertEqual(info_a['status'], 200)
+ self.assertEqual(info_c['bytes'], 6666)
+ self.assertEqual(info_c['object_count'], 1000)
- # Do a cached call to account not found with ret_not_found
- info_a = get_info(app, env, 'a', ret_not_found=True)
- # Check that you got proper info
- self.assertEquals(info_a['status'], 404)
- self.assertEquals(info_a['bytes'], None)
- self.assertEquals(info_a['total_object_count'], None)
- # Make sure the env cache is set
- self.assertEquals(env.get('swift.account/a'), info_a)
- # add account was NOT called AGAIN
- self.assertEqual(app.responses.stats['account'], 1)
-
- # Do a non cached call to account not found without ret_not_found
- app = FakeApp(statuses=(404,))
- env = {}
- info_a = get_info(app, env, 'a')
- # Check that you got proper info
- self.assertEquals(info_a, None)
- self.assertEquals(env['swift.account/a']['status'], 404)
- # and account was called
- self.assertEqual(app.responses.stats['account'], 1)
-
- # Do a cached call to account not found without ret_not_found
- info_a = get_info(None, env, 'a')
- # Check that you got proper info
- self.assertEquals(info_a, None)
- self.assertEquals(env['swift.account/a']['status'], 404)
- # add account was NOT called AGAIN
- self.assertEqual(app.responses.stats['account'], 1)
+ # no additional calls were made
+ self.assertEqual(app.responses.stats['account'], 0)
+ self.assertEqual(app.responses.stats['container'], 0)
def test_get_container_info_swift_source(self):
app = FakeApp()
req = Request.blank("/v1/a/c", environ={'swift.cache': FakeCache()})
get_container_info(req.environ, app, swift_source='MC')
- self.assertEqual(app.sources, ['GET_INFO', 'MC'])
+ self.assertEqual([e['swift.source'] for e in app.captured_envs],
+ ['MC', 'MC'])
def test_get_object_info_swift_source(self):
app = FakeApp()
req = Request.blank("/v1/a/c/o",
environ={'swift.cache': FakeCache()})
get_object_info(req.environ, app, swift_source='LU')
- self.assertEqual(app.sources, ['LU'])
+ self.assertEqual([e['swift.source'] for e in app.captured_envs],
+ ['LU'])
def test_get_container_info_no_cache(self):
req = Request.blank("/v1/AUTH_account/cont",
environ={'swift.cache': FakeCache({})})
resp = get_container_info(req.environ, FakeApp())
- self.assertEquals(resp['storage_policy'], '0')
- self.assertEquals(resp['bytes'], 6666)
- self.assertEquals(resp['object_count'], 1000)
+ self.assertEqual(resp['storage_policy'], '0')
+ self.assertEqual(resp['bytes'], 6666)
+ self.assertEqual(resp['object_count'], 1000)
def test_get_container_info_no_account(self):
- responses = DynamicResponseFactory(404, 200)
- app = FakeApp(responses)
+ app = FakeApp(statuses=[404, 200])
req = Request.blank("/v1/AUTH_does_not_exist/cont")
info = get_container_info(req.environ, app)
self.assertEqual(info['status'], 0)
def test_get_container_info_no_auto_account(self):
- responses = DynamicResponseFactory(404, 200)
- app = FakeApp(responses)
+ app = FakeApp(statuses=[200])
req = Request.blank("/v1/.system_account/cont")
info = get_container_info(req.environ, app)
self.assertEqual(info['status'], 200)
- self.assertEquals(info['bytes'], 6666)
- self.assertEquals(info['object_count'], 1000)
+ self.assertEqual(info['bytes'], 6666)
+ self.assertEqual(info['object_count'], 1000)
def test_get_container_info_cache(self):
cache_stub = {
'status': 404, 'bytes': 3333, 'object_count': 10,
- # simplejson sometimes hands back strings, sometimes unicodes
'versions': u"\u1F4A9"}
req = Request.blank("/v1/account/cont",
environ={'swift.cache': FakeCache(cache_stub)})
resp = get_container_info(req.environ, FakeApp())
- self.assertEquals(resp['storage_policy'], '0')
- self.assertEquals(resp['bytes'], 3333)
- self.assertEquals(resp['object_count'], 10)
- self.assertEquals(resp['status'], 404)
- self.assertEquals(resp['versions'], "\xe1\xbd\x8a\x39")
+ self.assertEqual(resp['storage_policy'], '0')
+ self.assertEqual(resp['bytes'], 3333)
+ self.assertEqual(resp['object_count'], 10)
+ self.assertEqual(resp['status'], 404)
+ self.assertEqual(resp['versions'], "\xe1\xbd\x8a\x39")
def test_get_container_info_env(self):
- cache_key = get_container_memcache_key("account", "cont")
- env_key = 'swift.%s' % cache_key
- req = Request.blank("/v1/account/cont",
- environ={env_key: {'bytes': 3867},
- 'swift.cache': FakeCache({})})
+ cache_key = get_cache_key("account", "cont")
+ req = Request.blank(
+ "/v1/account/cont",
+ environ={'swift.infocache': {cache_key: {'bytes': 3867}},
+ 'swift.cache': FakeCache({})})
resp = get_container_info(req.environ, 'xxx')
- self.assertEquals(resp['bytes'], 3867)
+ self.assertEqual(resp['bytes'], 3867)
def test_get_account_info_swift_source(self):
app = FakeApp()
req = Request.blank("/v1/a", environ={'swift.cache': FakeCache()})
get_account_info(req.environ, app, swift_source='MC')
- self.assertEqual(app.sources, ['MC'])
+ self.assertEqual([e['swift.source'] for e in app.captured_envs],
+ ['MC'])
+
+ def test_get_account_info_swift_owner(self):
+ app = FakeApp()
+ req = Request.blank("/v1/a", environ={'swift.cache': FakeCache()})
+ get_account_info(req.environ, app)
+ self.assertEqual([e['swift_owner'] for e in app.captured_envs],
+ [True])
+
+ def test_get_account_info_infocache(self):
+ app = FakeApp()
+ ic = {}
+ req = Request.blank("/v1/a", environ={'swift.cache': FakeCache(),
+ 'swift.infocache': ic})
+ get_account_info(req.environ, app)
+ got_infocaches = [e['swift.infocache'] for e in app.captured_envs]
+ self.assertEqual(1, len(got_infocaches))
+ self.assertIs(ic, got_infocaches[0])
def test_get_account_info_no_cache(self):
app = FakeApp()
req = Request.blank("/v1/AUTH_account",
environ={'swift.cache': FakeCache({})})
resp = get_account_info(req.environ, app)
- self.assertEquals(resp['bytes'], 6666)
- self.assertEquals(resp['total_object_count'], 1000)
+ self.assertEqual(resp['bytes'], 6666)
+ self.assertEqual(resp['total_object_count'], 1000)
def test_get_account_info_cache(self):
- # The original test that we prefer to preserve
+ # Works with fake apps that return ints in the headers
cached = {'status': 404,
'bytes': 3333,
'total_object_count': 10}
req = Request.blank("/v1/account/cont",
environ={'swift.cache': FakeCache(cached)})
resp = get_account_info(req.environ, FakeApp())
- self.assertEquals(resp['bytes'], 3333)
- self.assertEquals(resp['total_object_count'], 10)
- self.assertEquals(resp['status'], 404)
+ self.assertEqual(resp['bytes'], 3333)
+ self.assertEqual(resp['total_object_count'], 10)
+ self.assertEqual(resp['status'], 404)
- # Here is a more realistic test
+ # Works with strings too, like you get when parsing HTTP headers
+ # that came in through a socket from the account server
cached = {'status': 404,
'bytes': '3333',
'container_count': '234',
@@ -397,33 +434,34 @@ class TestFuncs(unittest.TestCase):
req = Request.blank("/v1/account/cont",
environ={'swift.cache': FakeCache(cached)})
resp = get_account_info(req.environ, FakeApp())
- self.assertEquals(resp['status'], 404)
- self.assertEquals(resp['bytes'], '3333')
- self.assertEquals(resp['container_count'], 234)
- self.assertEquals(resp['meta'], {})
- self.assertEquals(resp['total_object_count'], '10')
+ self.assertEqual(resp['status'], 404)
+ self.assertEqual(resp['bytes'], 3333)
+ self.assertEqual(resp['container_count'], 234)
+ self.assertEqual(resp['meta'], {})
+ self.assertEqual(resp['total_object_count'], 10)
def test_get_account_info_env(self):
- cache_key = get_account_memcache_key("account")
- env_key = 'swift.%s' % cache_key
- req = Request.blank("/v1/account",
- environ={env_key: {'bytes': 3867},
- 'swift.cache': FakeCache({})})
+ cache_key = get_cache_key("account")
+ req = Request.blank(
+ "/v1/account",
+ environ={'swift.infocache': {cache_key: {'bytes': 3867}},
+ 'swift.cache': FakeCache({})})
resp = get_account_info(req.environ, 'xxx')
- self.assertEquals(resp['bytes'], 3867)
+ self.assertEqual(resp['bytes'], 3867)
def test_get_object_info_env(self):
cached = {'status': 200,
'length': 3333,
'type': 'application/json',
'meta': {}}
- env_key = get_object_env_key("account", "cont", "obj")
- req = Request.blank("/v1/account/cont/obj",
- environ={env_key: cached,
- 'swift.cache': FakeCache({})})
+ cache_key = get_cache_key("account", "cont", "obj")
+ req = Request.blank(
+ "/v1/account/cont/obj",
+ environ={'swift.infocache': {cache_key: cached},
+ 'swift.cache': FakeCache({})})
resp = get_object_info(req.environ, 'xxx')
- self.assertEquals(resp['length'], 3333)
- self.assertEquals(resp['type'], 'application/json')
+ self.assertEqual(resp['length'], 3333)
+ self.assertEqual(resp['type'], 'application/json')
def test_get_object_info_no_env(self):
app = FakeApp()
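
The cache keys used throughout these tests ('account/a', 'container/a/c', and the object key built from get_cache_key("account", "cont", "obj") just below) replace the old get_account_memcache_key/get_container_memcache_key/get_object_env_key trio with a single naming scheme. A hypothetical re-implementation, only to illustrate the key shapes and the ValueError cases exercised in test_cache_key_impossible_cases:

    def build_cache_key(account, container=None, obj=None):
        # Illustrative stand-in for get_cache_key(); not Swift's code.
        if obj:
            if not (account and container):
                raise ValueError('object key needs an account and container')
            return 'object/%s/%s/%s' % (account, container, obj)
        if container:
            if not account:
                raise ValueError('container key needs an account')
            return 'container/%s/%s' % (account, container)
        return 'account/%s' % account

    # build_cache_key('a')           -> 'account/a'
    # build_cache_key('a', 'c')      -> 'container/a/c'
    # build_cache_key('a', 'c', 'o') -> 'object/a/c/o'
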
@@ -433,31 +471,84 @@ class TestFuncs(unittest.TestCase):
self.assertEqual(app.responses.stats['account'], 0)
self.assertEqual(app.responses.stats['container'], 0)
self.assertEqual(app.responses.stats['obj'], 1)
- self.assertEquals(resp['length'], 5555)
- self.assertEquals(resp['type'], 'text/plain')
+ self.assertEqual(resp['length'], 5555)
+ self.assertEqual(resp['type'], 'text/plain')
+
+ def test_options(self):
+ base = Controller(self.app)
+ base.account_name = 'a'
+ base.container_name = 'c'
+ origin = 'http://m.com'
+ self.app.cors_allow_origin = [origin]
+ req = Request.blank('/v1/a/c/o',
+ environ={'swift.cache': FakeCache()},
+ headers={'Origin': origin,
+ 'Access-Control-Request-Method': 'GET'})
+
+ with mock.patch('swift.proxy.controllers.base.'
+ 'http_connect', fake_http_connect(200)):
+ resp = base.OPTIONS(req)
+ self.assertEqual(resp.status_int, 200)
+
+ def test_options_with_null_allow_origin(self):
+ base = Controller(self.app)
+ base.account_name = 'a'
+ base.container_name = 'c'
+
+ def my_container_info(*args):
+ return {
+ 'cors': {
+ 'allow_origin': '*',
+ }
+ }
+ base.container_info = my_container_info
+ req = Request.blank('/v1/a/c/o',
+ environ={'swift.cache': FakeCache()},
+ headers={'Origin': '*',
+ 'Access-Control-Request-Method': 'GET'})
+
+ with mock.patch('swift.proxy.controllers.base.'
+ 'http_connect', fake_http_connect(200)):
+ resp = base.OPTIONS(req)
+ self.assertEqual(resp.status_int, 200)
+
+ def test_options_unauthorized(self):
+ base = Controller(self.app)
+ base.account_name = 'a'
+ base.container_name = 'c'
+ self.app.cors_allow_origin = ['http://NOT_IT']
+ req = Request.blank('/v1/a/c/o',
+ environ={'swift.cache': FakeCache()},
+ headers={'Origin': 'http://m.com',
+ 'Access-Control-Request-Method': 'GET'})
+
+ with mock.patch('swift.proxy.controllers.base.'
+ 'http_connect', fake_http_connect(200)):
+ resp = base.OPTIONS(req)
+ self.assertEqual(resp.status_int, 401)
def test_headers_to_container_info_missing(self):
resp = headers_to_container_info({}, 404)
- self.assertEquals(resp['status'], 404)
- self.assertEquals(resp['read_acl'], None)
- self.assertEquals(resp['write_acl'], None)
+ self.assertEqual(resp['status'], 404)
+ self.assertIsNone(resp['read_acl'])
+ self.assertIsNone(resp['write_acl'])
def test_headers_to_container_info_meta(self):
headers = {'X-Container-Meta-Whatevs': 14,
'x-container-meta-somethingelse': 0}
resp = headers_to_container_info(headers.items(), 200)
- self.assertEquals(len(resp['meta']), 2)
- self.assertEquals(resp['meta']['whatevs'], 14)
- self.assertEquals(resp['meta']['somethingelse'], 0)
+ self.assertEqual(len(resp['meta']), 2)
+ self.assertEqual(resp['meta']['whatevs'], 14)
+ self.assertEqual(resp['meta']['somethingelse'], 0)
def test_headers_to_container_info_sys_meta(self):
prefix = get_sys_meta_prefix('container')
headers = {'%sWhatevs' % prefix: 14,
'%ssomethingelse' % prefix: 0}
resp = headers_to_container_info(headers.items(), 200)
- self.assertEquals(len(resp['sysmeta']), 2)
- self.assertEquals(resp['sysmeta']['whatevs'], 14)
- self.assertEquals(resp['sysmeta']['somethingelse'], 0)
+ self.assertEqual(len(resp['sysmeta']), 2)
+ self.assertEqual(resp['sysmeta']['whatevs'], 14)
+ self.assertEqual(resp['sysmeta']['somethingelse'], 0)
def test_headers_to_container_info_values(self):
headers = {
@@ -467,37 +558,47 @@ class TestFuncs(unittest.TestCase):
'x-container-meta-access-control-allow-origin': 'here',
}
resp = headers_to_container_info(headers.items(), 200)
- self.assertEquals(resp['read_acl'], 'readvalue')
- self.assertEquals(resp['write_acl'], 'writevalue')
- self.assertEquals(resp['cors']['allow_origin'], 'here')
+ self.assertEqual(resp['read_acl'], 'readvalue')
+ self.assertEqual(resp['write_acl'], 'writevalue')
+ self.assertEqual(resp['cors']['allow_origin'], 'here')
headers['x-unused-header'] = 'blahblahblah'
- self.assertEquals(
+ self.assertEqual(
resp,
headers_to_container_info(headers.items(), 200))
+ def test_container_info_without_req(self):
+ base = Controller(self.app)
+ base.account_name = 'a'
+ base.container_name = 'c'
+
+ container_info = \
+ base.container_info(base.account_name,
+ base.container_name)
+ self.assertEqual(container_info['status'], 0)
+
def test_headers_to_account_info_missing(self):
resp = headers_to_account_info({}, 404)
- self.assertEquals(resp['status'], 404)
- self.assertEquals(resp['bytes'], None)
- self.assertEquals(resp['container_count'], None)
+ self.assertEqual(resp['status'], 404)
+ self.assertIsNone(resp['bytes'])
+ self.assertIsNone(resp['container_count'])
def test_headers_to_account_info_meta(self):
headers = {'X-Account-Meta-Whatevs': 14,
'x-account-meta-somethingelse': 0}
resp = headers_to_account_info(headers.items(), 200)
- self.assertEquals(len(resp['meta']), 2)
- self.assertEquals(resp['meta']['whatevs'], 14)
- self.assertEquals(resp['meta']['somethingelse'], 0)
+ self.assertEqual(len(resp['meta']), 2)
+ self.assertEqual(resp['meta']['whatevs'], 14)
+ self.assertEqual(resp['meta']['somethingelse'], 0)
def test_headers_to_account_info_sys_meta(self):
prefix = get_sys_meta_prefix('account')
headers = {'%sWhatevs' % prefix: 14,
'%ssomethingelse' % prefix: 0}
resp = headers_to_account_info(headers.items(), 200)
- self.assertEquals(len(resp['sysmeta']), 2)
- self.assertEquals(resp['sysmeta']['whatevs'], 14)
- self.assertEquals(resp['sysmeta']['somethingelse'], 0)
+ self.assertEqual(len(resp['sysmeta']), 2)
+ self.assertEqual(resp['sysmeta']['whatevs'], 14)
+ self.assertEqual(resp['sysmeta']['somethingelse'], 0)
def test_headers_to_account_info_values(self):
headers = {
@@ -505,36 +606,44 @@ class TestFuncs(unittest.TestCase):
'x-account-container-count': '20',
}
resp = headers_to_account_info(headers.items(), 200)
- self.assertEquals(resp['total_object_count'], '10')
- self.assertEquals(resp['container_count'], '20')
+ self.assertEqual(resp['total_object_count'], '10')
+ self.assertEqual(resp['container_count'], '20')
headers['x-unused-header'] = 'blahblahblah'
- self.assertEquals(
+ self.assertEqual(
resp,
headers_to_account_info(headers.items(), 200))
def test_headers_to_object_info_missing(self):
resp = headers_to_object_info({}, 404)
- self.assertEquals(resp['status'], 404)
- self.assertEquals(resp['length'], None)
- self.assertEquals(resp['etag'], None)
+ self.assertEqual(resp['status'], 404)
+ self.assertIsNone(resp['length'])
+ self.assertIsNone(resp['etag'])
def test_headers_to_object_info_meta(self):
headers = {'X-Object-Meta-Whatevs': 14,
'x-object-meta-somethingelse': 0}
resp = headers_to_object_info(headers.items(), 200)
- self.assertEquals(len(resp['meta']), 2)
- self.assertEquals(resp['meta']['whatevs'], 14)
- self.assertEquals(resp['meta']['somethingelse'], 0)
+ self.assertEqual(len(resp['meta']), 2)
+ self.assertEqual(resp['meta']['whatevs'], 14)
+ self.assertEqual(resp['meta']['somethingelse'], 0)
def test_headers_to_object_info_sys_meta(self):
prefix = get_sys_meta_prefix('object')
headers = {'%sWhatevs' % prefix: 14,
'%ssomethingelse' % prefix: 0}
resp = headers_to_object_info(headers.items(), 200)
- self.assertEquals(len(resp['sysmeta']), 2)
- self.assertEquals(resp['sysmeta']['whatevs'], 14)
- self.assertEquals(resp['sysmeta']['somethingelse'], 0)
+ self.assertEqual(len(resp['sysmeta']), 2)
+ self.assertEqual(resp['sysmeta']['whatevs'], 14)
+ self.assertEqual(resp['sysmeta']['somethingelse'], 0)
+
+ def test_headers_to_object_info_transient_sysmeta(self):
+ headers = {get_object_transient_sysmeta('Whatevs'): 14,
+ get_object_transient_sysmeta('somethingelse'): 0}
+ resp = headers_to_object_info(headers.items(), 200)
+ self.assertEqual(len(resp['transient_sysmeta']), 2)
+ self.assertEqual(resp['transient_sysmeta']['whatevs'], 14)
+ self.assertEqual(resp['transient_sysmeta']['somethingelse'], 0)
def test_headers_to_object_info_values(self):
headers = {
@@ -542,26 +651,29 @@ class TestFuncs(unittest.TestCase):
'content-type': 'application/json',
}
resp = headers_to_object_info(headers.items(), 200)
- self.assertEquals(resp['length'], '1024')
- self.assertEquals(resp['type'], 'application/json')
+ self.assertEqual(resp['length'], '1024')
+ self.assertEqual(resp['type'], 'application/json')
headers['x-unused-header'] = 'blahblahblah'
- self.assertEquals(
+ self.assertEqual(
resp,
headers_to_object_info(headers.items(), 200))
def test_base_have_quorum(self):
base = Controller(self.app)
# just throw a bunch of test cases at it
- self.assertEqual(base.have_quorum([201, 404], 3), False)
- self.assertEqual(base.have_quorum([201, 201], 4), False)
- self.assertEqual(base.have_quorum([201, 201, 404, 404], 4), False)
- self.assertEqual(base.have_quorum([201, 503, 503, 201], 4), False)
- self.assertEqual(base.have_quorum([201, 201], 3), True)
- self.assertEqual(base.have_quorum([404, 404], 3), True)
- self.assertEqual(base.have_quorum([201, 201], 2), True)
- self.assertEqual(base.have_quorum([404, 404], 2), True)
- self.assertEqual(base.have_quorum([201, 404, 201, 201], 4), True)
+ self.assertFalse(base.have_quorum([201, 404], 3))
+ self.assertTrue(base.have_quorum([201, 201], 4))
+ self.assertFalse(base.have_quorum([201], 4))
+ self.assertTrue(base.have_quorum([201, 201, 404, 404], 4))
+ self.assertFalse(base.have_quorum([201, 302, 418, 503], 4))
+ self.assertTrue(base.have_quorum([201, 503, 503, 201], 4))
+ self.assertTrue(base.have_quorum([201, 201], 3))
+ self.assertTrue(base.have_quorum([404, 404], 3))
+ self.assertTrue(base.have_quorum([201, 201], 2))
+ self.assertTrue(base.have_quorum([201, 404], 2))
+ self.assertTrue(base.have_quorum([404, 404], 2))
+ self.assertTrue(base.have_quorum([201, 404, 201, 201], 4))
def test_best_response_overrides(self):
base = Controller(self.app)
@@ -579,7 +691,7 @@ class TestFuncs(unittest.TestCase):
overrides = {302: 204, 100: 204}
resp = base.best_response(req, statuses, reasons, bodies, server_type,
headers=headers, overrides=overrides)
- self.assertEqual(resp.status, '503 Internal Server Error')
+ self.assertEqual(resp.status, '503 Service Unavailable')
# next make a 404 quorum and make sure the last delete (real) 404
# status is the one returned.
@@ -593,24 +705,61 @@ class TestFuncs(unittest.TestCase):
req = Request.blank('/')
handler = GetOrHeadHandler(None, req, None, None, None, None, {})
handler.fast_forward(50)
- self.assertEquals(handler.backend_headers['Range'], 'bytes=50-')
+ self.assertEqual(handler.backend_headers['Range'], 'bytes=50-')
handler = GetOrHeadHandler(None, req, None, None, None, None,
{'Range': 'bytes=23-50'})
handler.fast_forward(20)
- self.assertEquals(handler.backend_headers['Range'], 'bytes=43-50')
+ self.assertEqual(handler.backend_headers['Range'], 'bytes=43-50')
self.assertRaises(HTTPException,
handler.fast_forward, 80)
+ self.assertRaises(exceptions.RangeAlreadyComplete,
+ handler.fast_forward, 8)
handler = GetOrHeadHandler(None, req, None, None, None, None,
{'Range': 'bytes=23-'})
handler.fast_forward(20)
- self.assertEquals(handler.backend_headers['Range'], 'bytes=43-')
+ self.assertEqual(handler.backend_headers['Range'], 'bytes=43-')
handler = GetOrHeadHandler(None, req, None, None, None, None,
{'Range': 'bytes=-100'})
handler.fast_forward(20)
- self.assertEquals(handler.backend_headers['Range'], 'bytes=-80')
+ self.assertEqual(handler.backend_headers['Range'], 'bytes=-80')
+ self.assertRaises(HTTPException,
+ handler.fast_forward, 100)
+ self.assertRaises(exceptions.RangeAlreadyComplete,
+ handler.fast_forward, 80)
+
+ handler = GetOrHeadHandler(None, req, None, None, None, None,
+ {'Range': 'bytes=0-0'})
+ self.assertRaises(exceptions.RangeAlreadyComplete,
+ handler.fast_forward, 1)
+
+ def test_range_fast_forward_after_data_timeout(self):
+ req = Request.blank('/')
+
+ # We get a 200 and learn that it's a 1000-byte object, but receive 0
+ # bytes of data, so then we get a new node, fast_forward(0), and
+ # send out a new request. That new request must be for all 1000
+ # bytes.
+ handler = GetOrHeadHandler(None, req, None, None, None, None, {})
+ handler.learn_size_from_content_range(0, 999, 1000)
+ handler.fast_forward(0)
+ self.assertEqual(handler.backend_headers['Range'], 'bytes=0-999')
+
+ # Same story as above, but a 1-byte object so we can have our byte
+ # indices be 0.
+ handler = GetOrHeadHandler(None, req, None, None, None, None, {})
+ handler.learn_size_from_content_range(0, 0, 1)
+ handler.fast_forward(0)
+ self.assertEqual(handler.backend_headers['Range'], 'bytes=0-0')
+
+ # last 100 bytes
+ handler = GetOrHeadHandler(None, req, None, None, None, None,
+ {'Range': 'bytes=-100'})
+ handler.learn_size_from_content_range(900, 999, 1000)
+ handler.fast_forward(0)
+ self.assertEqual(handler.backend_headers['Range'], 'bytes=900-999')
def test_transfer_headers_with_sysmeta(self):
base = Controller(self.app)
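
The fast_forward tests above fix the header arithmetic used when a GET is resumed after some bytes have already been returned: an absolute range is advanced by the bytes received, a suffix range is shrunk, and advancing to or past the end raises (Swift distinguishes an exactly-satisfied range, RangeAlreadyComplete, from an overshoot). A simplified sketch for a single-range 'bytes=' header, illustrative rather than Swift's fast_forward():

    def fast_forward_range(range_header, num_bytes):
        # range_header: None, 'bytes=START-', 'bytes=START-END' or 'bytes=-SUFFIX'
        if not range_header:
            return 'bytes=%d-' % num_bytes
        start, end = range_header.split('=', 1)[1].split('-', 1)
        if start == '':                      # suffix range: the last N bytes
            remaining = int(end) - num_bytes
            if remaining <= 0:
                raise ValueError('range already satisfied')
            return 'bytes=-%d' % remaining
        new_start = int(start) + num_bytes
        if end and new_start > int(end):
            raise ValueError('range already satisfied')
        return 'bytes=%d-%s' % (new_start, end)

    # fast_forward_range(None, 50)          -> 'bytes=50-'
    # fast_forward_range('bytes=23-50', 20) -> 'bytes=43-50'
    # fast_forward_range('bytes=-100', 20)  -> 'bytes=-80'
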
@@ -633,10 +782,10 @@ class TestFuncs(unittest.TestCase):
expected_headers = {'x-base-meta-owner': '',
'x-base-meta-size': '151M',
'connection': 'close'}
- for k, v in expected_headers.iteritems():
- self.assertTrue(k in dst_headers)
+ for k, v in expected_headers.items():
+ self.assertIn(k, dst_headers)
self.assertEqual(v, dst_headers[k])
- self.assertFalse('new-owner' in dst_headers)
+ self.assertNotIn('new-owner', dst_headers)
def test_generate_request_headers_with_sysmeta(self):
base = Controller(self.app)
@@ -647,17 +796,32 @@ class TestFuncs(unittest.TestCase):
hdrs.update(bad_hdrs)
req = Request.blank('/v1/a/c/o', headers=hdrs)
dst_headers = base.generate_request_headers(req, transfer=True)
- for k, v in good_hdrs.iteritems():
- self.assertTrue(k.lower() in dst_headers)
+ for k, v in good_hdrs.items():
+ self.assertIn(k.lower(), dst_headers)
self.assertEqual(v, dst_headers[k.lower()])
- for k, v in bad_hdrs.iteritems():
- self.assertFalse(k.lower() in dst_headers)
+ for k, v in bad_hdrs.items():
+ self.assertNotIn(k.lower(), dst_headers)
+
+ def test_generate_request_headers_with_no_orig_req(self):
+ base = Controller(self.app)
+ src_headers = {'x-remove-base-meta-owner': 'x',
+ 'x-base-meta-size': '151M',
+ 'new-owner': 'Kun'}
+ dst_headers = base.generate_request_headers(None,
+ additional=src_headers)
+ expected_headers = {'x-base-meta-size': '151M',
+ 'connection': 'close'}
+ for k, v in expected_headers.items():
+ self.assertIn(k, dst_headers)
+ self.assertEqual(v, dst_headers[k])
+ self.assertEqual('', dst_headers['Referer'])
def test_client_chunk_size(self):
class TestSource(object):
def __init__(self, chunks):
self.chunks = list(chunks)
+ self.status = 200
def read(self, _read_size):
if self.chunks:
@@ -665,6 +829,13 @@ class TestFuncs(unittest.TestCase):
else:
return ''
+ def getheader(self, header):
+ if header.lower() == "content-length":
+ return str(sum(len(c) for c in self.chunks))
+
+ def getheaders(self):
+ return [('content-length', self.getheader('content-length'))]
+
source = TestSource((
'abcd', '1234', 'abc', 'd1', '234abcd1234abcd1', '2'))
req = Request.blank('/v1/a/c/o')
@@ -682,6 +853,7 @@ class TestFuncs(unittest.TestCase):
class TestSource(object):
def __init__(self, chunks):
self.chunks = list(chunks)
+ self.status = 200
def read(self, _read_size):
if self.chunks:
@@ -693,7 +865,15 @@ class TestFuncs(unittest.TestCase):
else:
return ''
- node = {'ip': '1.2.3.4', 'port': 6000, 'device': 'sda'}
+ def getheader(self, header):
+ if header.lower() == "content-length":
+ return str(sum(len(c) for c in self.chunks
+ if c is not None))
+
+ def getheaders(self):
+ return [('content-length', self.getheader('content-length'))]
+
+ node = {'ip': '1.2.3.4', 'port': 6200, 'device': 'sda'}
source1 = TestSource(['abcd', '1234', 'abc', None])
source2 = TestSource(['efgh5678'])
@@ -703,11 +883,88 @@ class TestFuncs(unittest.TestCase):
client_chunk_size=8)
app_iter = handler._make_app_iter(req, node, source1)
- with patch.object(handler, '_get_source_and_node',
- lambda: (source2, node)):
+ with mock.patch.object(handler, '_get_source_and_node',
+ lambda: (source2, node)):
+ client_chunks = list(app_iter)
+ self.assertEqual(client_chunks, ['abcd1234', 'efgh5678'])
+
+ def test_client_chunk_size_resuming_chunked(self):
+
+ class TestChunkedSource(object):
+ def __init__(self, chunks):
+ self.chunks = list(chunks)
+ self.status = 200
+ self.headers = {'transfer-encoding': 'chunked',
+ 'content-type': 'text/plain'}
+
+ def read(self, _read_size):
+ if self.chunks:
+ chunk = self.chunks.pop(0)
+ if chunk is None:
+ raise exceptions.ChunkReadTimeout()
+ else:
+ return chunk
+ else:
+ return ''
+
+ def getheader(self, header):
+ return self.headers.get(header.lower())
+
+ def getheaders(self):
+ return self.headers
+
+ node = {'ip': '1.2.3.4', 'port': 6200, 'device': 'sda'}
+
+ source1 = TestChunkedSource(['abcd', '1234', 'abc', None])
+ source2 = TestChunkedSource(['efgh5678'])
+ req = Request.blank('/v1/a/c/o')
+ handler = GetOrHeadHandler(
+ self.app, req, 'Object', None, None, None, {},
+ client_chunk_size=8)
+
+ app_iter = handler._make_app_iter(req, node, source1)
+ with mock.patch.object(handler, '_get_source_and_node',
+ lambda: (source2, node)):
client_chunks = list(app_iter)
self.assertEqual(client_chunks, ['abcd1234', 'efgh5678'])
- self.assertEqual(handler.backend_headers['Range'], 'bytes=8-')
+
+ def test_disconnected_warning(self):
+ self.app.logger = mock.Mock()
+ req = Request.blank('/v1/a/c/o')
+
+ class TestSource(object):
+ def __init__(self):
+ self.headers = {'content-type': 'text/plain',
+ 'content-length': len(self.read(-1))}
+ self.status = 200
+
+ def read(self, _read_size):
+ return 'the cake is a lie'
+
+ def getheader(self, header):
+ return self.headers.get(header.lower())
+
+ def getheaders(self):
+ return self.headers
+
+ source = TestSource()
+
+ node = {'ip': '1.2.3.4', 'port': 6200, 'device': 'sda'}
+ handler = GetOrHeadHandler(
+ self.app, req, 'Object', None, None, None, {})
+ app_iter = handler._make_app_iter(req, node, source)
+ app_iter.close()
+ self.app.logger.warning.assert_called_once_with(
+ 'Client disconnected on read')
+
+ self.app.logger = mock.Mock()
+ node = {'ip': '1.2.3.4', 'port': 6200, 'device': 'sda'}
+ handler = GetOrHeadHandler(
+ self.app, req, 'Object', None, None, None, {})
+ app_iter = handler._make_app_iter(req, node, source)
+ next(app_iter)
+ app_iter.close()
+ self.app.logger.warning.assert_not_called()
def test_bytes_to_skip(self):
# if you start at the beginning, skip nothing
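
The resuming and disconnect tests above describe what the client sees when a backend read times out mid-object: bytes already handed out are kept, the unaligned tail is dropped, and the transfer continues from the last client-chunk boundary on another node (in the non-chunked variant this is what fast-forwarding the backend Range to 'bytes=8-' achieves). A self-contained sketch of that pattern, assuming simplified source objects; it is not Swift's GetOrHeadHandler:

    class ChunkReadTimeout(Exception):
        pass

    def resumable_chunks(source, fallback_for_offset, client_chunk_size=8):
        # Yield fixed-size chunks to the client; on a timeout, discard the
        # partial buffer and resume from the last whole-chunk offset using
        # a fallback source (e.g. another replica, range-requested at `sent`).
        buf, sent = '', 0
        while True:
            try:
                piece = source.read(client_chunk_size)
            except ChunkReadTimeout:
                buf = ''
                source = fallback_for_offset(sent)
                continue
            if not piece:
                break
            buf += piece
            while len(buf) >= client_chunk_size:
                yield buf[:client_chunk_size]
                sent += client_chunk_size
                buf = buf[client_chunk_size:]
        if buf:
            yield buf

    # With a primary source yielding 'abcd', '1234', 'abc' and then timing
    # out, and a fallback resumed at offset 8 yielding 'efgh5678', the
    # client sees ['abcd1234', 'efgh5678'], as the tests above expect.
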
diff --git a/test/unit/proxy/controllers/test_container.py b/test/unit/proxy/controllers/test_container.py
index 715cd94..b5367aa 100644
--- a/test/unit/proxy/controllers/test_container.py
+++ b/test/unit/proxy/controllers/test_container.py
@@ -24,22 +24,22 @@ from swift.proxy.controllers.base import headers_to_container_info
from test.unit import fake_http_connect, FakeRing, FakeMemcache
from swift.common.storage_policy import StoragePolicy
from swift.common.request_helpers import get_sys_meta_prefix
-from swift.common import utils
from test.unit import patch_policies, mocked_http_conn, debug_logger
+#from test.unit.common.ring.test_ring import TestRingBase
from test.unit.proxy.test_server import node_error_count
@patch_policies([StoragePolicy(0, 'zero', True, object_ring=FakeRing())])
class TestContainerController(unittest.TestCase):
+
+ CONTAINER_REPLICAS = 3
+
def setUp(self):
- # SOF
- self._orig_hash_suffix = utils.HASH_PATH_SUFFIX
- self._orig_hash_prefix = utils.HASH_PATH_PREFIX
- utils.HASH_PATH_SUFFIX = 'endcap'
- utils.HASH_PATH_PREFIX = ''
+ TestRingBase.setUp(self)
self.logger = debug_logger()
- self.container_ring = FakeRing(max_more_nodes=9)
+ self.container_ring = FakeRing(replicas=self.CONTAINER_REPLICAS,
+ max_more_nodes=9)
self.app = proxy_server.Application(None, FakeMemcache(),
logger=self.logger,
account_ring=FakeRing(),
@@ -58,7 +58,7 @@ class TestContainerController(unittest.TestCase):
proxy_server.ContainerController):
def account_info(controller, *args, **kwargs):
- patch_path = 'swift.proxy.controllers.base.get_info'
+ patch_path = 'swift.proxy.controllers.base.get_account_info'
with mock.patch(patch_path) as mock_get_info:
mock_get_info.return_value = dict(self.account_info)
return super(FakeAccountInfoContainerController,
@@ -72,16 +72,43 @@ class TestContainerController(unittest.TestCase):
return _orig_get_controller(*args, **kwargs)
self.app.get_controller = wrapped_get_controller
- def test_container_info_in_response_env(self):
+ def _make_callback_func(self, context):
+ def callback(ipaddr, port, device, partition, method, path,
+ headers=None, query_string=None, ssl=False):
+ context['method'] = method
+ context['path'] = path
+ context['headers'] = headers or {}
+ return callback
+
+ def _assert_responses(self, method, test_cases):
+ controller = proxy_server.ContainerController(self.app, 'a', 'c')
+
+ for responses, expected in test_cases:
+ with mock.patch(
+ 'swift.proxy.controllers.base.http_connect',
+ fake_http_connect(*responses)):
+ req = Request.blank('/v1/a/c')
+ resp = getattr(controller, method)(req)
+
+ self.assertEqual(expected,
+ resp.status_int,
+ 'Expected %s but got %s. Failed case: %s' %
+ (expected, resp.status_int, str(responses)))
+
+ def test_container_info_got_cached(self):
controller = proxy_server.ContainerController(self.app, 'a', 'c')
with mock.patch('swift.proxy.controllers.base.http_connect',
fake_http_connect(200, 200, body='')):
req = Request.blank('/v1/a/c', {'PATH_INFO': '/v1/a/c'})
resp = controller.HEAD(req)
self.assertEqual(2, resp.status_int // 100)
- self.assertTrue("swift.container/a/c" in resp.environ)
- self.assertEqual(headers_to_container_info(resp.headers),
- resp.environ['swift.container/a/c'])
+ # Make sure it's in both swift.infocache and memcache
+ self.assertIn("container/a/c", resp.environ['swift.infocache'])
+ self.assertEqual(
+ headers_to_container_info(resp.headers),
+ resp.environ['swift.infocache']['container/a/c'])
+ from_memcache = self.app.memcache.get('container/a/c')
+ self.assertTrue(from_memcache)
def test_swift_owner(self):
owner_headers = {
@@ -93,25 +120,17 @@ class TestContainerController(unittest.TestCase):
with mock.patch('swift.proxy.controllers.base.http_connect',
fake_http_connect(200, 200, headers=owner_headers)):
resp = controller.HEAD(req)
- self.assertEquals(2, resp.status_int // 100)
+ self.assertEqual(2, resp.status_int // 100)
for key in owner_headers:
- self.assertTrue(key not in resp.headers)
+ self.assertNotIn(key, resp.headers)
req = Request.blank('/v1/a/c', environ={'swift_owner': True})
with mock.patch('swift.proxy.controllers.base.http_connect',
fake_http_connect(200, 200, headers=owner_headers)):
resp = controller.HEAD(req)
- self.assertEquals(2, resp.status_int // 100)
+ self.assertEqual(2, resp.status_int // 100)
for key in owner_headers:
- self.assertTrue(key in resp.headers)
-
- def _make_callback_func(self, context):
- def callback(ipaddr, port, device, partition, method, path,
- headers=None, query_string=None, ssl=False):
- context['method'] = method
- context['path'] = path
- context['headers'] = headers or {}
- return callback
+ self.assertIn(key, resp.headers)
def test_sys_meta_headers_PUT(self):
# check that headers in sys meta namespace make it through
@@ -131,9 +150,9 @@ class TestContainerController(unittest.TestCase):
fake_http_connect(200, 200, give_connect=callback)):
controller.PUT(req)
self.assertEqual(context['method'], 'PUT')
- self.assertTrue(sys_meta_key in context['headers'])
+ self.assertIn(sys_meta_key, context['headers'])
self.assertEqual(context['headers'][sys_meta_key], 'foo')
- self.assertTrue(user_meta_key in context['headers'])
+ self.assertIn(user_meta_key, context['headers'])
self.assertEqual(context['headers'][user_meta_key], 'bar')
self.assertNotEqual(context['headers']['x-timestamp'], '1.0')
@@ -154,9 +173,9 @@ class TestContainerController(unittest.TestCase):
fake_http_connect(200, 200, give_connect=callback)):
controller.POST(req)
self.assertEqual(context['method'], 'POST')
- self.assertTrue(sys_meta_key in context['headers'])
+ self.assertIn(sys_meta_key, context['headers'])
self.assertEqual(context['headers'][sys_meta_key], 'foo')
- self.assertTrue(user_meta_key in context['headers'])
+ self.assertIn(user_meta_key, context['headers'])
self.assertEqual(context['headers'][user_meta_key], 'bar')
self.assertNotEqual(context['headers']['x-timestamp'], '1.0')
@@ -168,12 +187,11 @@ class TestContainerController(unittest.TestCase):
self.app._error_limiting = {}
req = Request.blank('/v1/a/c', method=method)
with mocked_http_conn(*statuses) as fake_conn:
- print 'a' * 50
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, expected)
for req in fake_conn.requests:
self.assertEqual(req['method'], method)
- self.assert_(req['path'].endswith('/a/c'))
+ self.assertTrue(req['path'].endswith('/a/c'))
base_status = [201] * 3
# test happy path
@@ -207,6 +225,118 @@ class TestContainerController(unittest.TestCase):
self.app, self.container_ring.devs[2]),
self.app.error_suppression_limit + 1)
+ def test_response_code_for_PUT(self):
+ PUT_TEST_CASES = [
+ ((201, 201, 201), 201),
+ ((201, 201, 404), 201),
+ ((201, 201, 503), 201),
+ ((201, 404, 404), 404),
+ ((201, 404, 503), 503),
+ ((201, 503, 503), 503),
+ ((404, 404, 404), 404),
+ ((404, 404, 503), 404),
+ ((404, 503, 503), 503),
+ ((503, 503, 503), 503)
+ ]
+ self._assert_responses('PUT', PUT_TEST_CASES)
+
+ def test_response_code_for_DELETE(self):
+ DELETE_TEST_CASES = [
+ ((204, 204, 204), 204),
+ ((204, 204, 404), 204),
+ ((204, 204, 503), 204),
+ ((204, 404, 404), 404),
+ ((204, 404, 503), 503),
+ ((204, 503, 503), 503),
+ ((404, 404, 404), 404),
+ ((404, 404, 503), 404),
+ ((404, 503, 503), 503),
+ ((503, 503, 503), 503)
+ ]
+ self._assert_responses('DELETE', DELETE_TEST_CASES)
+
+ def test_response_code_for_POST(self):
+ POST_TEST_CASES = [
+ ((204, 204, 204), 204),
+ ((204, 204, 404), 204),
+ ((204, 204, 503), 204),
+ ((204, 404, 404), 404),
+ ((204, 404, 503), 503),
+ ((204, 503, 503), 503),
+ ((404, 404, 404), 404),
+ ((404, 404, 503), 404),
+ ((404, 503, 503), 503),
+ ((503, 503, 503), 503)
+ ]
+ self._assert_responses('POST', POST_TEST_CASES)
+
+
+@patch_policies(
+ [StoragePolicy(0, 'zero', True, object_ring=FakeRing(replicas=4))])
+class TestContainerController4Replicas(TestContainerController):
+
+ CONTAINER_REPLICAS = 4
+
+ def test_response_code_for_PUT(self):
+ PUT_TEST_CASES = [
+ ((201, 201, 201, 201), 201),
+ ((201, 201, 201, 404), 201),
+ ((201, 201, 201, 503), 201),
+ ((201, 201, 404, 404), 201),
+ ((201, 201, 404, 503), 201),
+ ((201, 201, 503, 503), 201),
+ ((201, 404, 404, 404), 404),
+ ((201, 404, 404, 503), 404),
+ ((201, 404, 503, 503), 503),
+ ((201, 503, 503, 503), 503),
+ ((404, 404, 404, 404), 404),
+ ((404, 404, 404, 503), 404),
+ ((404, 404, 503, 503), 404),
+ ((404, 503, 503, 503), 503),
+ ((503, 503, 503, 503), 503)
+ ]
+ self._assert_responses('PUT', PUT_TEST_CASES)
+
+ def test_response_code_for_DELETE(self):
+ DELETE_TEST_CASES = [
+ ((204, 204, 204, 204), 204),
+ ((204, 204, 204, 404), 204),
+ ((204, 204, 204, 503), 204),
+ ((204, 204, 404, 404), 204),
+ ((204, 204, 404, 503), 204),
+ ((204, 204, 503, 503), 204),
+ ((204, 404, 404, 404), 404),
+ ((204, 404, 404, 503), 404),
+ ((204, 404, 503, 503), 503),
+ ((204, 503, 503, 503), 503),
+ ((404, 404, 404, 404), 404),
+ ((404, 404, 404, 503), 404),
+ ((404, 404, 503, 503), 404),
+ ((404, 503, 503, 503), 503),
+ ((503, 503, 503, 503), 503)
+ ]
+ self._assert_responses('DELETE', DELETE_TEST_CASES)
+
+ def test_response_code_for_POST(self):
+ POST_TEST_CASES = [
+ ((204, 204, 204, 204), 204),
+ ((204, 204, 204, 404), 204),
+ ((204, 204, 204, 503), 204),
+ ((204, 204, 404, 404), 204),
+ ((204, 204, 404, 503), 204),
+ ((204, 204, 503, 503), 204),
+ ((204, 404, 404, 404), 404),
+ ((204, 404, 404, 503), 404),
+ ((204, 404, 503, 503), 503),
+ ((204, 503, 503, 503), 503),
+ ((404, 404, 404, 404), 404),
+ ((404, 404, 404, 503), 404),
+ ((404, 404, 503, 503), 404),
+ ((404, 503, 503, 503), 503),
+ ((503, 503, 503, 503), 503)
+ ]
+ self._assert_responses('POST', POST_TEST_CASES)
+
if __name__ == '__main__':
unittest.main()
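
The sys-meta tests in this file (and the headers_to_*_info tests earlier) rely on the reserved x-<type>-sysmeta- header namespace being carried through the proxy and split out separately from user metadata. A hedged sketch of that split, using the prefix shape produced by get_sys_meta_prefix(); it is an illustration, not headers_to_container_info():

    def split_meta(headers, server_type='container'):
        # User metadata lives under x-<type>-meta-, system metadata under
        # the reserved x-<type>-sysmeta- prefix; both are keyed by the
        # remainder of the header name, lower-cased.
        meta_prefix = 'x-%s-meta-' % server_type
        sysmeta_prefix = 'x-%s-sysmeta-' % server_type
        meta, sysmeta = {}, {}
        for key, value in headers.items():
            key = key.lower()
            if key.startswith(sysmeta_prefix):
                sysmeta[key[len(sysmeta_prefix):]] = value
            elif key.startswith(meta_prefix):
                meta[key[len(meta_prefix):]] = value
        return meta, sysmeta

    # split_meta({'X-Container-Meta-Whatevs': 14,
    #             'x-container-sysmeta-somethingelse': 0})
    # -> ({'whatevs': 14}, {'somethingelse': 0})
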
diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py
index bdce41f..64c5825 100644
--- a/test/unit/proxy/test_server.py
+++ b/test/unit/proxy/test_server.py
@@ -66,8 +66,7 @@ from swift.common.ring import RingData
from swift.common.utils import mkdirs, normalize_timestamp, NullLogger
from swift.common.wsgi import monkey_patch_mimetools, loadapp
from swift.proxy.controllers import base as proxy_base
-from swift.proxy.controllers.base import get_container_memcache_key, \
- get_account_memcache_key, cors_validation
+from swift.proxy.controllers.base import get_cache_key, cors_validation
import swift.proxy.controllers
import swift.proxy.controllers.obj
from swift.common.swob import Request, Response, HTTPUnauthorized, \
@@ -474,14 +473,14 @@ class TestController(unittest.TestCase):
self.controller.account_info(self.account)
self.assertEquals(count, 123)
with save_globals():
- cache_key = get_account_memcache_key(self.account)
+ cache_key = get_cache_key(self.account)
account_info = {'status': 200, 'container_count': 1234}
self.memcache.set(cache_key, account_info)
partition, nodes, count = \
self.controller.account_info(self.account)
self.assertEquals(count, 1234)
with save_globals():
- cache_key = get_account_memcache_key(self.account)
+ cache_key = get_cache_key(self.account)
account_info = {'status': 200, 'container_count': '1234'}
self.memcache.set(cache_key, account_info)
partition, nodes, count = \
@@ -509,7 +508,7 @@ class TestController(unittest.TestCase):
# Test the internal representation in memcache
# 'container_count' changed from int to str
- cache_key = get_account_memcache_key(self.account)
+ cache_key = get_cache_key(self.account)
container_info = {'status': 200,
'container_count': '12345',
'total_object_count': None,
@@ -536,7 +535,7 @@ class TestController(unittest.TestCase):
# Test the internal representation in memcache
# 'container_count' changed from 0 to None
- cache_key = get_account_memcache_key(self.account)
+ cache_key = get_cache_key(self.account)
account_info = {'status': 404,
'container_count': None, # internally keep None
'total_object_count': None,
@@ -613,8 +612,7 @@ class TestController(unittest.TestCase):
self.account, self.container, self.request)
self.check_container_info_return(ret)
- cache_key = get_container_memcache_key(self.account,
- self.container)
+            cache_key = get_cache_key(self.account, self.container)
cache_value = self.memcache.get(cache_key)
self.assertTrue(isinstance(cache_value, dict))
self.assertEquals(200, cache_value.get('status'))
@@ -636,8 +634,8 @@ class TestController(unittest.TestCase):
self.account, self.container, self.request)
self.check_container_info_return(ret, True)
- cache_key = get_container_memcache_key(self.account,
- self.container)
+ cache_key = get_cache_key(self.account,
+ self.container)
cache_value = self.memcache.get(cache_key)
self.assertTrue(isinstance(cache_value, dict))
self.assertEquals(404, cache_value.get('status'))
@@ -652,7 +650,7 @@ class TestController(unittest.TestCase):
self.account, self.container, self.request)
self.check_container_info_return(ret, True)
- cache_key = get_container_memcache_key(self.account,
+ cache_key = get_cache_key(self.account,
self.container)
cache_value = self.memcache.get(cache_key)
self.assertTrue(isinstance(cache_value, dict))
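
The changes to test_server.py above are mechanical: wherever a test seeded memcache through get_account_memcache_key() or get_container_memcache_key(), it now goes through the single get_cache_key() helper. Usage, with illustrative account/container names and values:

    from swift.proxy.controllers.base import get_cache_key
    from test.unit import FakeMemcache

    memcache = FakeMemcache()
    memcache.set(get_cache_key('AUTH_acc'),
                 {'status': 200, 'container_count': 1234})
    memcache.set(get_cache_key('AUTH_acc', 'cont'),
                 {'status': 200, 'object_count': 10})
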
diff --git a/tox.ini b/tox.ini
index 6e2bba6..4ff8d89 100644
--- a/tox.ini
+++ b/tox.ini
@@ -12,7 +12,7 @@ setenv = VIRTUAL_ENV={envdir}
NOSE_COVER_BRANCHES=1
NOSE_COVER_PACKAGE=gluster
deps =
- git+https://github.com/openstack/swift.git@kilo-eol
+ git+https://github.com/openstack/swift.git@2.10.1
-r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
# Just having testtools package installed fixes some dependency issue