From 1a57043523e1cfc88f9dc549165a0f4d5889ec0b Mon Sep 17 00:00:00 2001
From: Danny Hermes
Date: Fri, 17 Oct 2014 11:17:38 -0700
Subject: [PATCH 1/2] Adding pylintrc config and custom runner and making it pass.

Some key things to notice:
- The tools (pep8 / pylint) disagree about continuation lines. This is
  because PEP8 is ambiguous about "multi-line constructs". One of the
  examples for bracket indentation in the PEP is

      my_list = [
          1, 2, 3,
          4, 5, 6,
      ]

  but the PEP doesn't explicitly say whether or not

      my_list = [1, 2, 3,
                 4, 5, 6,
                 ]

  would be allowed. As it turns out, pep8 (the tool) considers the
  latter to be a continuation while pylint does not.
- I changed all indentation where this was a problem and went with the
  preferred form

      my_list = [
          1, 2, 3,
          4, 5, 6,
      ]

- I made a _MetadataMixin helper class for methods shared between
  storage.Bucket and storage.Key. I did not refactor the affected tests
  but they do still give 100% coverage.
- I increased the maximum number of arguments from 5 to 10 and the
  maximum public methods from 20 to 30. Offenders were:
  - storage.connection.Connection.make_request (6 arguments)
  - storage.connection.Connection.api_request (9 arguments)
  - storage.connection.Connection.generate_signed_url (6 arguments)
  - storage.bucket.Bucket (27 public methods) and
  - storage.key.Key (23 public methods)

I think refactoring these should be considered (though it is not
mandatory). I certainly don't know a good solution for the class public
methods.
---
 gcloud/__init__.py                   |   2 +
 gcloud/connection.py                 |   4 +-
 gcloud/datastore/_helpers.py         |  14 +-
 gcloud/datastore/connection.py       |  14 +-
 gcloud/datastore/dataset.py          |   3 +
 gcloud/datastore/entity.py           |   8 +-
 gcloud/datastore/key.py              |  14 +-
 gcloud/datastore/query.py            |  14 +-
 gcloud/datastore/test___init__.py    |  28 +-
 gcloud/datastore/test__helpers.py    |  20 +-
 gcloud/datastore/test_connection.py  | 503 +++++++++++++++------------
 gcloud/datastore/test_dataset.py     |   3 +-
 gcloud/datastore/test_entity.py      |  12 +-
 gcloud/datastore/test_key.py         |  15 +-
 gcloud/datastore/test_query.py       |  22 +-
 gcloud/datastore/test_transaction.py |   3 +-
 gcloud/demo.py                       |   3 +-
 gcloud/storage/_helpers.py           | 241 +++++++++++++
 gcloud/storage/acl.py                |  21 +-
 gcloud/storage/bucket.py             | 243 ++-----------
 gcloud/storage/connection.py         |   4 +-
 gcloud/storage/exceptions.py         |   8 +-
 gcloud/storage/iterator.py           |   5 +-
 gcloud/storage/key.py                | 202 ++---------
 gcloud/storage/test___init__.py      |  17 +-
 gcloud/storage/test__helpers.py      |  19 +
 gcloud/storage/test_acl.py           |  27 +-
 gcloud/storage/test_bucket.py        |  37 +-
 gcloud/storage/test_connection.py    | 487 +++++++++++++-------------
 gcloud/storage/test_exceptions.py    |   8 +-
 gcloud/storage/test_iterator.py      |  25 +-
 gcloud/storage/test_key.py           |  46 +--
 gcloud/test_connection.py            |   4 +-
 gcloud/test_credentials.py           |  41 +--
 pylintrc_default                     |  37 ++
 pylintrc_reduced                     |  37 ++
 run_pylint.py                        |  82 +++++
 tox.ini                              |   3 +
 38 files changed, 1250 insertions(+), 1026 deletions(-)
 create mode 100644 gcloud/storage/_helpers.py
 create mode 100644 gcloud/storage/test__helpers.py
 create mode 100644 pylintrc_default
 create mode 100644 pylintrc_reduced
 create mode 100644 run_pylint.py

diff --git a/gcloud/__init__.py b/gcloud/__init__.py
index aaece64626ac..17231bc0b96e 100644
--- a/gcloud/__init__.py
+++ b/gcloud/__init__.py
@@ -1,2 +1,4 @@
 """GCloud API access in idiomatic Python."""
+
+
 __version__ = '0.02.2'
diff --git a/gcloud/connection.py b/gcloud/connection.py
index 10d84e321bed..30eae893b95b 100644
--- a/gcloud/connection.py
+++ b/gcloud/connection.py
@@ -1,4 +1,5 @@
 """ Shared
implementation of connections to API servers.""" + from pkg_resources import get_distribution import httplib2 @@ -28,6 +29,7 @@ def __init__(self, credentials=None): :type credentials: :class:`oauth2client.client.OAuth2Credentials` :param credentials: The OAuth2 Credentials to use for this connection. """ + self._http = None self._credentials = credentials @property @@ -45,7 +47,7 @@ def http(self): :rtype: :class:`httplib2.Http` :returns: A Http object used to transport data. """ - if not hasattr(self, '_http'): + if self._http is None: self._http = httplib2.Http() if self._credentials: self._http = self._credentials.authorize(self._http) diff --git a/gcloud/datastore/_helpers.py b/gcloud/datastore/_helpers.py index 2b23d62484c3..bbb075cee81d 100644 --- a/gcloud/datastore/_helpers.py +++ b/gcloud/datastore/_helpers.py @@ -3,7 +3,7 @@ These functions are *not* part of the API. """ import calendar -from datetime import datetime, timedelta +import datetime from google.protobuf.internal.type_checkers import Int64ValueChecker import pytz @@ -43,7 +43,7 @@ def _get_protobuf_attribute_and_value(val): :returns: A tuple of the attribute name and proper value type. """ - if isinstance(val, datetime): + if isinstance(val, datetime.datetime): name = 'timestamp_microseconds' # If the datetime is naive (no timezone), consider that it was # intended to be UTC and replace the tzinfo to that effect. @@ -91,8 +91,8 @@ def _get_value_from_value_pb(value_pb): result = None if value_pb.HasField('timestamp_microseconds_value'): microseconds = value_pb.timestamp_microseconds_value - naive = (datetime.utcfromtimestamp(0) + - timedelta(microseconds=microseconds)) + naive = (datetime.datetime.utcfromtimestamp(0) + + datetime.timedelta(microseconds=microseconds)) result = naive.replace(tzinfo=pytz.utc) elif value_pb.HasField('key_value'): @@ -163,9 +163,9 @@ def _set_protobuf_value(value_pb, val): key = val.key() if key is not None: e_pb.key.CopyFrom(key.to_protobuf()) - for k, v in val.items(): + for item_key, value in val.iteritems(): p_pb = e_pb.property.add() - p_pb.name = k - _set_protobuf_value(p_pb.value, v) + p_pb.name = item_key + _set_protobuf_value(p_pb.value, value) else: # scalar, just assign setattr(value_pb, attr, val) diff --git a/gcloud/datastore/connection.py b/gcloud/datastore/connection.py index f11c528b2727..c2ead0315f40 100644 --- a/gcloud/datastore/connection.py +++ b/gcloud/datastore/connection.py @@ -1,4 +1,5 @@ """Connections to gcloud datastore API servers.""" + from gcloud import connection from gcloud.datastore import datastore_v1_pb2 as datastore_pb from gcloud.datastore import _helpers @@ -23,7 +24,7 @@ class Connection(connection.Connection): """A template for the URL of a particular API call.""" def __init__(self, credentials=None): - self._credentials = credentials + super(Connection, self).__init__(credentials=credentials) self._current_transaction = None def _request(self, dataset_id, method, data): @@ -240,11 +241,12 @@ def run_query(self, dataset_id, query_pb, namespace=None): request.query.CopyFrom(query_pb) response = self._rpc(dataset_id, 'runQuery', request, datastore_pb.RunQueryResponse) - return ([e.entity for e in response.batch.entity_result], - response.batch.end_cursor, - response.batch.more_results, - response.batch.skipped_results, - ) + return ( + [e.entity for e in response.batch.entity_result], + response.batch.end_cursor, + response.batch.more_results, + response.batch.skipped_results, + ) def lookup(self, dataset_id, key_pbs): """Lookup keys from a dataset in 
the Cloud Datastore. diff --git a/gcloud/datastore/dataset.py b/gcloud/datastore/dataset.py index 63d9ed3a6670..8ec22035c929 100644 --- a/gcloud/datastore/dataset.py +++ b/gcloud/datastore/dataset.py @@ -70,6 +70,7 @@ def query(self, *args, **kwargs): :rtype: :class:`gcloud.datastore.query.Query` :returns: a new Query instance, bound to this dataset. """ + # This import is here to avoid circular references. from gcloud.datastore.query import Query kwargs['dataset'] = self return Query(*args, **kwargs) @@ -83,6 +84,7 @@ def entity(self, kind): :rtype: :class:`gcloud.datastore.entity.Entity` :returns: a new Entity instance, bound to this dataset. """ + # This import is here to avoid circular references. from gcloud.datastore.entity import Entity return Entity(dataset=self, kind=kind) @@ -96,6 +98,7 @@ def transaction(self, *args, **kwargs): :rtype: :class:`gcloud.datastore.transaction.Transaction` :returns: a new Transaction instance, bound to this dataset. """ + # This import is here to avoid circular references. from gcloud.datastore.transaction import Transaction kwargs['dataset'] = self return Transaction(*args, **kwargs) diff --git a/gcloud/datastore/entity.py b/gcloud/datastore/entity.py index 5f3dc7218fb3..4171408345e8 100644 --- a/gcloud/datastore/entity.py +++ b/gcloud/datastore/entity.py @@ -23,7 +23,7 @@ class NoKey(RuntimeError): """Exception raised by Entity methods which require a key.""" -class Entity(dict): # pylint: disable=too-many-public-methods +class Entity(dict): """:type dataset: :class:`gcloud.datastore.dataset.Dataset` :param dataset: The dataset in which this entity belongs. @@ -95,7 +95,9 @@ def key(self, key=None): :type key: :class:`glcouddatastore.key.Key` :param key: The key you want to set on the entity. - :returns: Either the current key or the :class:`Entity`. + :rtype: :class:`gcloud.datastore.key.Key` or :class:`Entity`. + :returns: Either the current key (on get) or the current + object (on set). >>> entity.key(my_other_key) # This returns the original entity. @@ -141,7 +143,7 @@ def from_key(cls, key): return cls().key(key) @classmethod - def from_protobuf(cls, pb, dataset=None): # pylint: disable=invalid-name + def from_protobuf(cls, pb, dataset=None): """Factory method for creating an entity based on a protobuf. The protobuf should be one returned from the Cloud Datastore diff --git a/gcloud/datastore/key.py b/gcloud/datastore/key.py index 9950b1cd9b75..ccc89c2360c9 100644 --- a/gcloud/datastore/key.py +++ b/gcloud/datastore/key.py @@ -159,7 +159,7 @@ def is_partial(self): :returns: True if the last element of the key's path does not have an 'id' or a 'name'. """ - return (self.id_or_name() is None) + return self.id_or_name() is None def dataset(self, dataset=None): """Dataset setter / getter. @@ -231,19 +231,19 @@ def kind(self, kind=None): elif self.path(): return self._path[-1]['kind'] - def id(self, id=None): + def id(self, id_to_set=None): """ID setter / getter. Based on the last element of path. - :type kind: :class:`str` - :param kind: The new kind for the key. + :type id_to_set: :class:`int` + :param id_to_set: The new ID for the key. :rtype: :class:`Key` (for setter); or :class:`int` (for getter) :returns: a new key, cloned from self., with the given id (setter); - or self's id (getter). + or self's id (getter). 
""" - if id: + if id_to_set: clone = self._clone() - clone._path[-1]['id'] = id + clone._path[-1]['id'] = id_to_set return clone elif self.path(): return self._path[-1].get('id') diff --git a/gcloud/datastore/query.py b/gcloud/datastore/query.py index bc829ccfa69f..fa1961864340 100644 --- a/gcloud/datastore/query.py +++ b/gcloud/datastore/query.py @@ -315,11 +315,9 @@ def fetch(self, limit=None): if limit: clone = self.limit(limit) - (entity_pbs, - end_cursor, - more_results, - skipped_results) = self.dataset().connection().run_query( + query_results = self.dataset().connection().run_query( query_pb=clone.to_protobuf(), dataset_id=self.dataset().id()) + entity_pbs, end_cursor = query_results[:2] self._cursor = end_cursor return [Entity.from_protobuf(entity, dataset=self.dataset()) @@ -379,14 +377,14 @@ def order(self, *properties): """ clone = self._clone() - for p in properties: + for prop in properties: property_order = clone._pb.order.add() - if p.startswith('-'): - property_order.property.name = p[1:] + if prop.startswith('-'): + property_order.property.name = prop[1:] property_order.direction = property_order.DESCENDING else: - property_order.property.name = p + property_order.property.name = prop property_order.direction = property_order.ASCENDING return clone diff --git a/gcloud/datastore/test___init__.py b/gcloud/datastore/test___init__.py index bfe7bd5c3ec2..95c5bd374c5e 100644 --- a/gcloud/datastore/test___init__.py +++ b/gcloud/datastore/test___init__.py @@ -3,7 +3,8 @@ class Test_get_connection(unittest2.TestCase): - def _callFUT(self, client_email, private_key_path): + @staticmethod + def _callFUT(client_email, private_key_path): from gcloud.datastore import get_connection return get_connection(client_email, private_key_path) @@ -25,16 +26,18 @@ def test_it(self): found = self._callFUT(CLIENT_EMAIL, f.name) self.assertTrue(isinstance(found, Connection)) self.assertTrue(found._credentials is client._signed) - self.assertEqual(client._called_with, - {'service_account_name': CLIENT_EMAIL, - 'private_key': PRIVATE_KEY, - 'scope': SCOPE, - }) + expected_called_with = { + 'service_account_name': CLIENT_EMAIL, + 'private_key': PRIVATE_KEY, + 'scope': SCOPE, + } + self.assertEqual(client._called_with, expected_called_with) class Test_get_dataset(unittest2.TestCase): - def _callFUT(self, dataset_id, client_email, private_key_path): + @staticmethod + def _callFUT(dataset_id, client_email, private_key_path): from gcloud.datastore import get_dataset return get_dataset(dataset_id, client_email, private_key_path) @@ -59,8 +62,9 @@ def test_it(self): self.assertTrue(isinstance(found, Dataset)) self.assertTrue(isinstance(found.connection(), Connection)) self.assertEqual(found.id(), DATASET_ID) - self.assertEqual(client._called_with, - {'service_account_name': CLIENT_EMAIL, - 'private_key': PRIVATE_KEY, - 'scope': SCOPE, - }) + expected_called_with = { + 'service_account_name': CLIENT_EMAIL, + 'private_key': PRIVATE_KEY, + 'scope': SCOPE, + } + self.assertEqual(client._called_with, expected_called_with) diff --git a/gcloud/datastore/test__helpers.py b/gcloud/datastore/test__helpers.py index 52538301ee18..74e5878d548f 100644 --- a/gcloud/datastore/test__helpers.py +++ b/gcloud/datastore/test__helpers.py @@ -3,7 +3,8 @@ class Test__get_protobuf_attribute_and_value(unittest2.TestCase): - def _callFUT(self, val): + @staticmethod + def _callFUT(val): from gcloud.datastore._helpers import _get_protobuf_attribute_and_value return _get_protobuf_attribute_and_value(val) @@ -96,12 +97,14 @@ def 
test_object(self): class Test__get_value_from_value_pb(unittest2.TestCase): - def _callFUT(self, pb): + @staticmethod + def _callFUT(pb): from gcloud.datastore._helpers import _get_value_from_value_pb return _get_value_from_value_pb(pb) - def _makePB(self, attr_name, value): + @staticmethod + def _makePB(attr_name, value): from gcloud.datastore.datastore_v1_pb2 import Value pb = Value() @@ -182,12 +185,13 @@ def test_unknown(self): from gcloud.datastore.datastore_v1_pb2 import Value pb = Value() - self.assertEqual(self._callFUT(pb), None) # XXX desirable? + self.assertEqual(self._callFUT(pb), None) class Test__get_value_from_property_pb(unittest2.TestCase): - def _callFUT(self, pb): + @staticmethod + def _callFUT(pb): from gcloud.datastore._helpers import _get_value_from_property_pb return _get_value_from_property_pb(pb) @@ -202,12 +206,14 @@ def test_it(self): class Test_set_protobuf_value(unittest2.TestCase): - def _callFUT(self, value_pb, val): + @staticmethod + def _callFUT(value_pb, val): from gcloud.datastore._helpers import _set_protobuf_value return _set_protobuf_value(value_pb, val) - def _makePB(self): + @staticmethod + def _makePB(): from gcloud.datastore.datastore_v1_pb2 import Value return Value() diff --git a/gcloud/datastore/test_connection.py b/gcloud/datastore/test_connection.py index 8471e4cb1c81..ce028594bfc8 100644 --- a/gcloud/datastore/test_connection.py +++ b/gcloud/datastore/test_connection.py @@ -3,7 +3,8 @@ class TestConnection(unittest2.TestCase): - def _getTargetClass(self): + @staticmethod + def _getTargetClass(): from gcloud.datastore.connection import Connection return Connection @@ -26,13 +27,13 @@ def test_http_w_existing(self): self.assertTrue(conn.http is http) def test_http_wo_creds(self): - from httplib2 import Http + import httplib2 conn = self._makeOne() - self.assertTrue(isinstance(conn.http, Http)) + self.assertTrue(isinstance(conn.http, httplib2.Http)) def test_http_w_creds(self): - from httplib2 import Http + import httplib2 authorized = object() @@ -44,31 +45,34 @@ def authorize(self, http): creds = Creds() conn = self._makeOne(creds) self.assertTrue(conn.http is authorized) - self.assertTrue(isinstance(creds._called_with, Http)) + self.assertTrue(isinstance(creds._called_with, httplib2.Http)) def test__request_w_200(self): DATASET_ID = 'DATASET' METHOD = 'METHOD' DATA = b'DATA' conn = self._makeOne() - URI = '/'.join([conn.API_BASE_URL, - 'datastore', - conn.API_VERSION, - 'datasets', - DATASET_ID, - METHOD, - ]) + URI = '/'.join([ + conn.API_BASE_URL, + 'datastore', + conn.API_VERSION, + 'datasets', + DATASET_ID, + METHOD, + ]) http = conn._http = Http({'status': '200'}, 'CONTENT') self.assertEqual(conn._request(DATASET_ID, METHOD, DATA), 'CONTENT') - self.assertEqual(http._called_with, - {'uri': URI, - 'method': 'POST', - 'headers': {'Content-Type': 'application/x-protobuf', - 'Content-Length': '4', - 'User-Agent': conn.USER_AGENT, - }, - 'body': DATA, - }) + expected_called_with = { + 'uri': URI, + 'method': 'POST', + 'headers': { + 'Content-Type': 'application/x-protobuf', + 'Content-Length': '4', + 'User-Agent': conn.USER_AGENT, + }, + 'body': DATA, + } + self.assertEqual(http._called_with, expected_called_with) def test__request_not_200(self): DATASET_ID = 'DATASET' @@ -85,7 +89,8 @@ def test__rpc(self): class ReqPB(object): - def SerializeToString(self): + @staticmethod + def SerializeToString(): return b'REQPB' class RspPB(object): @@ -100,38 +105,42 @@ def FromString(cls, pb): DATASET_ID = 'DATASET' METHOD = 'METHOD' conn = 
self._makeOne() - URI = '/'.join([conn.API_BASE_URL, - 'datastore', - conn.API_VERSION, - 'datasets', - DATASET_ID, - METHOD, - ]) + URI = '/'.join([ + conn.API_BASE_URL, + 'datastore', + conn.API_VERSION, + 'datasets', + DATASET_ID, + METHOD, + ]) http = conn._http = Http({'status': '200'}, 'CONTENT') response = conn._rpc(DATASET_ID, METHOD, ReqPB(), RspPB) self.assertTrue(isinstance(response, RspPB)) self.assertEqual(response._pb, 'CONTENT') - self.assertEqual(http._called_with, - {'uri': URI, - 'method': 'POST', - 'headers': {'Content-Type': 'application/x-protobuf', - 'Content-Length': '5', - 'User-Agent': conn.USER_AGENT, - }, - 'body': b'REQPB', - }) + expected_called_with = { + 'uri': URI, + 'method': 'POST', + 'headers': { + 'Content-Type': 'application/x-protobuf', + 'Content-Length': '5', + 'User-Agent': conn.USER_AGENT, + }, + 'body': b'REQPB', + } + self.assertEqual(http._called_with, expected_called_with) def test_build_api_url_w_default_base_version(self): DATASET_ID = 'DATASET' METHOD = 'METHOD' klass = self._getTargetClass() - URI = '/'.join([klass.API_BASE_URL, - 'datastore', - klass.API_VERSION, - 'datasets', - DATASET_ID, - METHOD, - ]) + URI = '/'.join([ + klass.API_BASE_URL, + 'datastore', + klass.API_VERSION, + 'datasets', + DATASET_ID, + METHOD, + ]) self.assertEqual(klass.build_api_url(DATASET_ID, METHOD), URI) def test_build_api_url_w_explicit_base_version(self): @@ -140,13 +149,14 @@ def test_build_api_url_w_explicit_base_version(self): DATASET_ID = 'DATASET' METHOD = 'METHOD' klass = self._getTargetClass() - URI = '/'.join([BASE, - 'datastore', - VER, - 'datasets', - DATASET_ID, - METHOD, - ]) + URI = '/'.join([ + BASE, + 'datastore', + VER, + 'datasets', + DATASET_ID, + METHOD, + ]) self.assertEqual(klass.build_api_url(DATASET_ID, METHOD, BASE, VER), URI) @@ -177,7 +187,8 @@ class Mutation(object): pass class Xact(object): - def mutation(self): + @staticmethod + def mutation(): return Mutation() conn = self._makeOne() conn.transaction(Xact()) @@ -205,23 +216,25 @@ def test_begin_transaction_default_serialize(self): rsp_pb = datastore_pb.BeginTransactionResponse() rsp_pb.transaction = TRANSACTION conn = self._makeOne() - URI = '/'.join([conn.API_BASE_URL, - 'datastore', - conn.API_VERSION, - 'datasets', - DATASET_ID, - 'beginTransaction', - ]) + URI = '/'.join([ + conn.API_BASE_URL, + 'datastore', + conn.API_VERSION, + 'datasets', + DATASET_ID, + 'beginTransaction', + ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) self.assertEqual(conn.begin_transaction(DATASET_ID), TRANSACTION) cw = http._called_with self.assertEqual(cw['uri'], URI) self.assertEqual(cw['method'], 'POST') - self.assertEqual(cw['headers'], - {'Content-Type': 'application/x-protobuf', - 'Content-Length': '2', - 'User-Agent': conn.USER_AGENT, - }) + expected_headers = { + 'Content-Type': 'application/x-protobuf', + 'Content-Length': '2', + 'User-Agent': conn.USER_AGENT, + } + self.assertEqual(cw['headers'], expected_headers) rq_class = datastore_pb.BeginTransactionRequest request = rq_class() request.ParseFromString(cw['body']) @@ -235,23 +248,25 @@ def test_begin_transaction_explicit_serialize(self): rsp_pb = datastore_pb.BeginTransactionResponse() rsp_pb.transaction = TRANSACTION conn = self._makeOne() - URI = '/'.join([conn.API_BASE_URL, - 'datastore', - conn.API_VERSION, - 'datasets', - DATASET_ID, - 'beginTransaction', - ]) + URI = '/'.join([ + conn.API_BASE_URL, + 'datastore', + conn.API_VERSION, + 'datasets', + DATASET_ID, + 'beginTransaction', + ]) http = 
conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) self.assertEqual(conn.begin_transaction(DATASET_ID, True), TRANSACTION) cw = http._called_with self.assertEqual(cw['uri'], URI) self.assertEqual(cw['method'], 'POST') - self.assertEqual(cw['headers'], - {'Content-Type': 'application/x-protobuf', - 'Content-Length': '2', - 'User-Agent': conn.USER_AGENT, - }) + expected_headers = { + 'Content-Type': 'application/x-protobuf', + 'Content-Length': '2', + 'User-Agent': conn.USER_AGENT, + } + self.assertEqual(cw['headers'], expected_headers) rq_class = datastore_pb.BeginTransactionRequest request = rq_class() request.ParseFromString(cw['body']) @@ -267,7 +282,8 @@ def test_rollback_transaction_w_existing_transaction_no_id(self): class Xact(object): - def id(self): + @staticmethod + def id(): return None DATASET_ID = 'DATASET' conn = self._makeOne() @@ -282,28 +298,31 @@ def test_rollback_transaction_ok(self): class Xact(object): - def id(self): + @staticmethod + def id(): return TRANSACTION rsp_pb = datastore_pb.RollbackResponse() conn = self._makeOne() conn.transaction(Xact()) - URI = '/'.join([conn.API_BASE_URL, - 'datastore', - conn.API_VERSION, - 'datasets', - DATASET_ID, - 'rollback', - ]) + URI = '/'.join([ + conn.API_BASE_URL, + 'datastore', + conn.API_VERSION, + 'datasets', + DATASET_ID, + 'rollback', + ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) self.assertEqual(conn.rollback_transaction(DATASET_ID), None) cw = http._called_with self.assertEqual(cw['uri'], URI) self.assertEqual(cw['method'], 'POST') - self.assertEqual(cw['headers'], - {'Content-Type': 'application/x-protobuf', - 'Content-Length': '6', - 'User-Agent': conn.USER_AGENT, - }) + expected_headers = { + 'Content-Type': 'application/x-protobuf', + 'Content-Length': '6', + 'User-Agent': conn.USER_AGENT, + } + self.assertEqual(cw['headers'], expected_headers) rq_class = datastore_pb.RollbackRequest request = rq_class() request.ParseFromString(cw['body']) @@ -318,13 +337,14 @@ def test_run_query_wo_namespace_empty_result(self): q_pb = Query(KIND, DATASET_ID).to_protobuf() rsp_pb = datastore_pb.RunQueryResponse() conn = self._makeOne() - URI = '/'.join([conn.API_BASE_URL, - 'datastore', - conn.API_VERSION, - 'datasets', - DATASET_ID, - 'runQuery', - ]) + URI = '/'.join([ + conn.API_BASE_URL, + 'datastore', + conn.API_VERSION, + 'datasets', + DATASET_ID, + 'runQuery', + ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) pbs, end, more, skipped = conn.run_query(DATASET_ID, q_pb) self.assertEqual(pbs, []) @@ -334,11 +354,12 @@ def test_run_query_wo_namespace_empty_result(self): cw = http._called_with self.assertEqual(cw['uri'], URI) self.assertEqual(cw['method'], 'POST') - self.assertEqual(cw['headers'], - {'Content-Type': 'application/x-protobuf', - 'Content-Length': '14', - 'User-Agent': conn.USER_AGENT, - }) + expected_headers = { + 'Content-Type': 'application/x-protobuf', + 'Content-Length': '14', + 'User-Agent': conn.USER_AGENT, + } + self.assertEqual(cw['headers'], expected_headers) rq_class = datastore_pb.RunQueryRequest request = rq_class() request.ParseFromString(cw['body']) @@ -358,24 +379,26 @@ def test_run_query_w_namespace_nonempty_result(self): rsp_pb.batch.entity_result_type = 1 # FULL rsp_pb.batch.more_results = 3 # NO_MORE_RESULTS conn = self._makeOne() - URI = '/'.join([conn.API_BASE_URL, - 'datastore', - conn.API_VERSION, - 'datasets', - DATASET_ID, - 'runQuery', - ]) + URI = '/'.join([ + conn.API_BASE_URL, + 'datastore', + conn.API_VERSION, + 
'datasets', + DATASET_ID, + 'runQuery', + ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) - pbs, end, more, skipped = conn.run_query(DATASET_ID, q_pb, 'NS') + pbs = conn.run_query(DATASET_ID, q_pb, 'NS')[0] self.assertEqual(len(pbs), 1) cw = http._called_with self.assertEqual(cw['uri'], URI) self.assertEqual(cw['method'], 'POST') - self.assertEqual(cw['headers'], - {'Content-Type': 'application/x-protobuf', - 'Content-Length': '16', - 'User-Agent': conn.USER_AGENT, - }) + expected_headers = { + 'Content-Type': 'application/x-protobuf', + 'Content-Length': '16', + 'User-Agent': conn.USER_AGENT, + } + self.assertEqual(cw['headers'], expected_headers) rq_class = datastore_pb.RunQueryRequest request = rq_class() request.ParseFromString(cw['body']) @@ -392,23 +415,25 @@ def test_lookup_single_key_empty_response(self): path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() rsp_pb = datastore_pb.LookupResponse() conn = self._makeOne() - URI = '/'.join([conn.API_BASE_URL, - 'datastore', - conn.API_VERSION, - 'datasets', - DATASET_ID, - 'lookup', - ]) + URI = '/'.join([ + conn.API_BASE_URL, + 'datastore', + conn.API_VERSION, + 'datasets', + DATASET_ID, + 'lookup', + ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) self.assertEqual(conn.lookup(DATASET_ID, key_pb), None) cw = http._called_with self.assertEqual(cw['uri'], URI) self.assertEqual(cw['method'], 'POST') - self.assertEqual(cw['headers'], - {'Content-Type': 'application/x-protobuf', - 'Content-Length': '26', - 'User-Agent': conn.USER_AGENT, - }) + expected_headers = { + 'Content-Type': 'application/x-protobuf', + 'Content-Length': '26', + 'User-Agent': conn.USER_AGENT, + } + self.assertEqual(cw['headers'], expected_headers) rq_class = datastore_pb.LookupRequest request = rq_class() request.ParseFromString(cw['body']) @@ -429,13 +454,14 @@ def test_lookup_single_key_nonempty_response(self): entity.key.CopyFrom(key_pb) rsp_pb.found.add(entity=entity) conn = self._makeOne() - URI = '/'.join([conn.API_BASE_URL, - 'datastore', - conn.API_VERSION, - 'datasets', - DATASET_ID, - 'lookup', - ]) + URI = '/'.join([ + conn.API_BASE_URL, + 'datastore', + conn.API_VERSION, + 'datasets', + DATASET_ID, + 'lookup', + ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) found = conn.lookup(DATASET_ID, key_pb) self.assertEqual(found.key.path_element[0].kind, 'Kind') @@ -443,11 +469,12 @@ def test_lookup_single_key_nonempty_response(self): cw = http._called_with self.assertEqual(cw['uri'], URI) self.assertEqual(cw['method'], 'POST') - self.assertEqual(cw['headers'], - {'Content-Type': 'application/x-protobuf', - 'Content-Length': '26', - 'User-Agent': conn.USER_AGENT, - }) + expected_headers = { + 'Content-Type': 'application/x-protobuf', + 'Content-Length': '26', + 'User-Agent': conn.USER_AGENT, + } + self.assertEqual(cw['headers'], expected_headers) rq_class = datastore_pb.LookupRequest request = rq_class() request.ParseFromString(cw['body']) @@ -467,23 +494,25 @@ def test_lookup_multiple_keys_empty_response(self): path=[{'kind': 'Kind', 'id': 2345}]).to_protobuf() rsp_pb = datastore_pb.LookupResponse() conn = self._makeOne() - URI = '/'.join([conn.API_BASE_URL, - 'datastore', - conn.API_VERSION, - 'datasets', - DATASET_ID, - 'lookup', - ]) + URI = '/'.join([ + conn.API_BASE_URL, + 'datastore', + conn.API_VERSION, + 'datasets', + DATASET_ID, + 'lookup', + ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) self.assertEqual(conn.lookup(DATASET_ID, [key_pb1, key_pb2]), 
[]) cw = http._called_with self.assertEqual(cw['uri'], URI) self.assertEqual(cw['method'], 'POST') - self.assertEqual(cw['headers'], - {'Content-Type': 'application/x-protobuf', - 'Content-Length': '52', - 'User-Agent': conn.USER_AGENT, - }) + expected_headers = { + 'Content-Type': 'application/x-protobuf', + 'Content-Length': '52', + 'User-Agent': conn.USER_AGENT, + } + self.assertEqual(cw['headers'], expected_headers) rq_class = datastore_pb.LookupRequest request = rq_class() request.ParseFromString(cw['body']) @@ -508,13 +537,14 @@ def test_commit_wo_transaction(self): prop.name = 'foo' prop.value.string_value = 'Foo' conn = self._makeOne() - URI = '/'.join([conn.API_BASE_URL, - 'datastore', - conn.API_VERSION, - 'datasets', - DATASET_ID, - 'commit', - ]) + URI = '/'.join([ + conn.API_BASE_URL, + 'datastore', + conn.API_VERSION, + 'datasets', + DATASET_ID, + 'commit', + ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) result = conn.commit(DATASET_ID, mutation) self.assertEqual(result.index_updates, 0) @@ -522,11 +552,12 @@ def test_commit_wo_transaction(self): cw = http._called_with self.assertEqual(cw['uri'], URI) self.assertEqual(cw['method'], 'POST') - self.assertEqual(cw['headers'], - {'Content-Type': 'application/x-protobuf', - 'Content-Length': '47', - 'User-Agent': conn.USER_AGENT, - }) + expected_headers = { + 'Content-Type': 'application/x-protobuf', + 'Content-Length': '47', + 'User-Agent': conn.USER_AGENT, + } + self.assertEqual(cw['headers'], expected_headers) rq_class = datastore_pb.CommitRequest request = rq_class() request.ParseFromString(cw['body']) @@ -540,7 +571,8 @@ def test_commit_w_transaction(self): from gcloud.datastore.key import Key class Xact(object): - def id(self): + @staticmethod + def id(): return 'xact' DATASET_ID = 'DATASET' key_pb = Key(dataset=Dataset(DATASET_ID), @@ -554,13 +586,14 @@ def id(self): prop.value.string_value = 'Foo' conn = self._makeOne() conn.transaction(Xact()) - URI = '/'.join([conn.API_BASE_URL, - 'datastore', - conn.API_VERSION, - 'datasets', - DATASET_ID, - 'commit', - ]) + URI = '/'.join([ + conn.API_BASE_URL, + 'datastore', + conn.API_VERSION, + 'datasets', + DATASET_ID, + 'commit', + ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) result = conn.commit(DATASET_ID, mutation) self.assertEqual(result.index_updates, 0) @@ -568,11 +601,12 @@ def id(self): cw = http._called_with self.assertEqual(cw['uri'], URI) self.assertEqual(cw['method'], 'POST') - self.assertEqual(cw['headers'], - {'Content-Type': 'application/x-protobuf', - 'Content-Length': '53', - 'User-Agent': conn.USER_AGENT, - }) + expected_headers = { + 'Content-Type': 'application/x-protobuf', + 'Content-Length': '53', + 'User-Agent': conn.USER_AGENT, + } + self.assertEqual(cw['headers'], expected_headers) rq_class = datastore_pb.CommitRequest request = rq_class() request.ParseFromString(cw['body']) @@ -590,24 +624,26 @@ def test_save_entity_wo_transaction_w_upsert(self): path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() rsp_pb = datastore_pb.CommitResponse() conn = self._makeOne() - URI = '/'.join([conn.API_BASE_URL, - 'datastore', - conn.API_VERSION, - 'datasets', - DATASET_ID, - 'commit', - ]) + URI = '/'.join([ + conn.API_BASE_URL, + 'datastore', + conn.API_VERSION, + 'datasets', + DATASET_ID, + 'commit', + ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) result = conn.save_entity(DATASET_ID, key_pb, {'foo': 'Foo'}) self.assertEqual(result, True) cw = http._called_with 
self.assertEqual(cw['uri'], URI) self.assertEqual(cw['method'], 'POST') - self.assertEqual(cw['headers'], - {'Content-Type': 'application/x-protobuf', - 'Content-Length': '47', - 'User-Agent': conn.USER_AGENT, - }) + expected_headers = { + 'Content-Type': 'application/x-protobuf', + 'Content-Length': '47', + 'User-Agent': conn.USER_AGENT, + } + self.assertEqual(cw['headers'], expected_headers) rq_class = datastore_pb.CommitRequest request = rq_class() request.ParseFromString(cw['body']) @@ -641,24 +677,26 @@ def test_save_entity_wo_transaction_w_auto_id(self): iaik_pb = mr_pb.insert_auto_id_key.add() iaik_pb.CopyFrom(updated_key_pb) conn = self._makeOne() - URI = '/'.join([conn.API_BASE_URL, - 'datastore', - conn.API_VERSION, - 'datasets', - DATASET_ID, - 'commit', - ]) + URI = '/'.join([ + conn.API_BASE_URL, + 'datastore', + conn.API_VERSION, + 'datasets', + DATASET_ID, + 'commit', + ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) result = conn.save_entity(DATASET_ID, key_pb, {'foo': 'Foo'}) self.assertEqual(result, updated_key_pb) cw = http._called_with self.assertEqual(cw['uri'], URI) self.assertEqual(cw['method'], 'POST') - self.assertEqual(cw['headers'], - {'Content-Type': 'application/x-protobuf', - 'Content-Length': '44', - 'User-Agent': conn.USER_AGENT, - }) + expected_headers = { + 'Content-Type': 'application/x-protobuf', + 'Content-Length': '44', + 'User-Agent': conn.USER_AGENT, + } + self.assertEqual(cw['headers'], expected_headers) rq_class = datastore_pb.CommitRequest request = rq_class() request.ParseFromString(cw['body']) @@ -685,7 +723,8 @@ def test_save_entity_w_transaction(self): mutation = datastore_pb.Mutation() class Xact(object): - def mutation(self): + @staticmethod + def mutation(): return mutation DATASET_ID = 'DATASET' key_pb = Key(dataset=Dataset(DATASET_ID), @@ -709,7 +748,8 @@ def test_save_entity_w_transaction_nested_entity(self): mutation = datastore_pb.Mutation() class Xact(object): - def mutation(self): + @staticmethod + def mutation(): return mutation DATASET_ID = 'DATASET' nested = Entity() @@ -736,12 +776,14 @@ def test_delete_entities_wo_transaction(self): path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() rsp_pb = datastore_pb.CommitResponse() conn = self._makeOne() - URI = '/'.join([conn.API_BASE_URL, - 'datastore', - conn.API_VERSION, - 'datasets', - DATASET_ID, - 'commit', ]) + URI = '/'.join([ + conn.API_BASE_URL, + 'datastore', + conn.API_VERSION, + 'datasets', + DATASET_ID, + 'commit', + ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) result = conn.delete_entities(DATASET_ID, [key_pb]) self.assertEqual(result.index_updates, 0) @@ -749,11 +791,12 @@ def test_delete_entities_wo_transaction(self): cw = http._called_with self.assertEqual(cw['uri'], URI) self.assertEqual(cw['method'], 'POST') - self.assertEqual(cw['headers'], - {'Content-Type': 'application/x-protobuf', - 'Content-Length': '30', - 'User-Agent': conn.USER_AGENT, - }) + expected_headers = { + 'Content-Type': 'application/x-protobuf', + 'Content-Length': '30', + 'User-Agent': conn.USER_AGENT, + } + self.assertEqual(cw['headers'], expected_headers) rq_class = datastore_pb.CommitRequest request = rq_class() request.ParseFromString(cw['body']) @@ -775,7 +818,8 @@ def test_delete_entities_w_transaction(self): mutation = datastore_pb.Mutation() class Xact(object): - def mutation(self): + @staticmethod + def mutation(): return mutation DATASET_ID = 'DATASET' key_pb = Key(dataset=Dataset(DATASET_ID), @@ -800,13 +844,14 @@ def 
test_delete_entity_wo_transaction(self): path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() rsp_pb = datastore_pb.CommitResponse() conn = self._makeOne() - URI = '/'.join([conn.API_BASE_URL, - 'datastore', - conn.API_VERSION, - 'datasets', - DATASET_ID, - 'commit', - ]) + URI = '/'.join([ + conn.API_BASE_URL, + 'datastore', + conn.API_VERSION, + 'datasets', + DATASET_ID, + 'commit', + ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) result = conn.delete_entity(DATASET_ID, key_pb) self.assertEqual(result.index_updates, 0) @@ -814,11 +859,12 @@ def test_delete_entity_wo_transaction(self): cw = http._called_with self.assertEqual(cw['uri'], URI) self.assertEqual(cw['method'], 'POST') - self.assertEqual(cw['headers'], - {'Content-Type': 'application/x-protobuf', - 'Content-Length': '30', - 'User-Agent': conn.USER_AGENT, - }) + expected_headers = { + 'Content-Type': 'application/x-protobuf', + 'Content-Length': '30', + 'User-Agent': conn.USER_AGENT, + } + self.assertEqual(cw['headers'], expected_headers) rq_class = datastore_pb.CommitRequest request = rq_class() request.ParseFromString(cw['body']) @@ -840,7 +886,8 @@ def test_delete_entity_w_transaction(self): mutation = datastore_pb.Mutation() class Xact(object): - def mutation(self): + @staticmethod + def mutation(): return mutation DATASET_ID = 'DATASET' key_pb = Key(dataset=Dataset(DATASET_ID), diff --git a/gcloud/datastore/test_dataset.py b/gcloud/datastore/test_dataset.py index a734512119d0..8db28d748dea 100644 --- a/gcloud/datastore/test_dataset.py +++ b/gcloud/datastore/test_dataset.py @@ -3,7 +3,8 @@ class TestDataset(unittest2.TestCase): - def _getTargetClass(self): + @staticmethod + def _getTargetClass(): from gcloud.datastore.dataset import Dataset return Dataset diff --git a/gcloud/datastore/test_entity.py b/gcloud/datastore/test_entity.py index a3171c2bca36..b60582d32b1a 100644 --- a/gcloud/datastore/test_entity.py +++ b/gcloud/datastore/test_entity.py @@ -8,7 +8,8 @@ class TestEntity(unittest2.TestCase): - def _getTargetClass(self): + @staticmethod + def _getTargetClass(): from gcloud.datastore.entity import Entity return Entity @@ -160,7 +161,7 @@ def test_save_w_transaction_w_partial_key(self): self.assertEqual(entity['foo'], 'Foo') self.assertEqual(connection._saved, (_DATASET_ID, 'KEY', {'foo': 'Foo'})) - self.assertEqual(transaction._added, (entity, )) + self.assertEqual(transaction._added, (entity,)) self.assertEqual(key._path, None) def test_save_w_returned_key(self): @@ -238,10 +239,13 @@ def path(self, path=_MARKER): class _Dataset(dict): + def __init__(self, connection=None): + super(_Dataset, self).__init__() self._connection = connection - def id(self): + @staticmethod + def id(): return _DATASET_ID def connection(self): @@ -274,4 +278,4 @@ def __nonzero__(self): __bool__ = __nonzero__ def add_auto_id_entity(self, entity): - self._added += (entity, ) + self._added += (entity,) diff --git a/gcloud/datastore/test_key.py b/gcloud/datastore/test_key.py index 5fca22f24358..fe239b04aa67 100644 --- a/gcloud/datastore/test_key.py +++ b/gcloud/datastore/test_key.py @@ -3,14 +3,16 @@ class TestKey(unittest2.TestCase): - def _getTargetClass(self): + @staticmethod + def _getTargetClass(): from gcloud.datastore.key import Key return Key def _makeOne(self, dataset=None, namespace=None, path=None): return self._getTargetClass()(dataset, namespace, path) - def _makePB(self, dataset_id=None, namespace=None, path=()): + @staticmethod + def _makePB(dataset_id=None, namespace=None, path=()): from 
gcloud.datastore.datastore_v1_pb2 import Key pb = Key() if dataset_id is not None: @@ -212,10 +214,11 @@ def test_from_path_two_elements_second_int(self): def test_from_path_nested(self): key = self._getTargetClass().from_path('abc', 'def', 'ghi', 123) self.assertEqual(key.kind(), 'ghi') - self.assertEqual(key.path(), - [{'kind': 'abc', 'name': 'def'}, - {'kind': 'ghi', 'id': 123}, - ]) + expected_path = [ + {'kind': 'abc', 'name': 'def'}, + {'kind': 'ghi', 'id': 123}, + ] + self.assertEqual(key.path(), expected_path) def test_is_partial_no_name_or_id(self): key = self._makeOne() diff --git a/gcloud/datastore/test_query.py b/gcloud/datastore/test_query.py index 6d0e42aab236..82f3d01b4a3e 100644 --- a/gcloud/datastore/test_query.py +++ b/gcloud/datastore/test_query.py @@ -3,7 +3,8 @@ class TestQuery(unittest2.TestCase): - def _getTargetClass(self): + @staticmethod + def _getTargetClass(): from gcloud.datastore.query import Query return Query @@ -99,7 +100,6 @@ def test_filter_w_known_operator_and_entity(self): def test_ancestor_w_non_key_non_list(self): query = self._makeOne() - # XXX s.b. ValueError self.assertRaises(TypeError, query.ancestor, object()) def test_ancester_wo_existing_ancestor_query_w_key_and_propfilter(self): @@ -255,10 +255,11 @@ def test_fetch_default_limit(self): self.assertEqual(len(entities), 1) self.assertEqual(entities[0].key().path(), [{'kind': _KIND, 'id': _ID}]) - self.assertEqual(connection._called_with, - {'dataset_id': _DATASET, - 'query_pb': query.to_protobuf(), - }) + expected_called_with = { + 'dataset_id': _DATASET, + 'query_pb': query.to_protobuf(), + } + self.assertEqual(connection._called_with, expected_called_with) def test_fetch_explicit_limit(self): from gcloud.datastore.datastore_v1_pb2 import Entity @@ -283,10 +284,11 @@ def test_fetch_explicit_limit(self): self.assertEqual(len(entities), 1) self.assertEqual(entities[0].key().path(), [{'kind': _KIND, 'id': _ID}]) - self.assertEqual(connection._called_with, - {'dataset_id': _DATASET, - 'query_pb': limited.to_protobuf(), - }) + expected_called_with = { + 'dataset_id': _DATASET, + 'query_pb': limited.to_protobuf(), + } + self.assertEqual(connection._called_with, expected_called_with) def test_cursor_not_fetched(self): _DATASET = 'DATASET' diff --git a/gcloud/datastore/test_transaction.py b/gcloud/datastore/test_transaction.py index f6f9bc088dc3..e1393f9125f9 100644 --- a/gcloud/datastore/test_transaction.py +++ b/gcloud/datastore/test_transaction.py @@ -3,7 +3,8 @@ class TestTransaction(unittest2.TestCase): - def _getTargetClass(self): + @staticmethod + def _getTargetClass(): from gcloud.datastore.transaction import Transaction return Transaction diff --git a/gcloud/demo.py b/gcloud/demo.py index 258e9e5444d9..d80328ee8a0b 100644 --- a/gcloud/demo.py +++ b/gcloud/demo.py @@ -35,7 +35,8 @@ def run(self): interact('(Hit CTRL-D to exit...)', local=self.LOCALS) - def wait(self): + @staticmethod + def wait(): raw_input() @classmethod diff --git a/gcloud/storage/_helpers.py b/gcloud/storage/_helpers.py new file mode 100644 index 000000000000..b4400fe6b93b --- /dev/null +++ b/gcloud/storage/_helpers.py @@ -0,0 +1,241 @@ +"""Helper functions for Cloud Storage utility classes. + +These are *not* part of the API. +""" + + +class _MetadataMixin(object): + """Abstract mixin for cloud storage classes with associated metadata. 
+
+    Expected to be subclassed by :class:`gcloud.storage.bucket.Bucket`
+    and :class:`gcloud.storage.key.Key`, and both of those classes
+    will implement the abstract parts:
+    - LOAD_FULL_FIELDS
+    - ACL_CLASS
+    - ACL_KEYWORD
+    - connection
+    - path
+    """
+
+    LOAD_FULL_FIELDS = None
+    """Tuple of fields which pertain to metadata.
+
+    Expected to be set by subclasses. Fields in this tuple will cause
+    `get_metadata()` to do a full reload of all metadata before
+    returning.
+    """
+
+    ACL_CLASS = type(None)
+    """Class which holds ACL data for a given type.
+
+    Expected to be set by subclasses.
+    """
+
+    ACL_KEYWORD = None
+    """Keyword for ACL_CLASS constructor to pass an object in.
+
+    Expected to be set by subclasses.
+    """
+
+    def __init__(self):
+        # These should be set by the superclass.
+        self.metadata = None
+        self.acl = None
+
+    @property
+    def connection(self):
+        """Abstract getter for the connection to use."""
+        raise NotImplementedError
+
+    @property
+    def path(self):
+        """Abstract getter for the object path."""
+        raise NotImplementedError
+
+    def has_metadata(self, field=None):
+        """Check if metadata is available.
+
+        :type field: string
+        :param field: (optional) the particular field to check for.
+
+        :rtype: bool
+        :returns: Whether metadata is available locally.
+        """
+        if not self.metadata:
+            return False
+        elif field and field not in self.metadata:
+            return False
+        else:
+            return True
+
+    def reload_metadata(self, full=False):
+        """Reload metadata.
+
+        :type full: bool
+        :param full: If True, loads all data (including ACL data).
+
+        :rtype: :class:`_MetadataMixin`
+        :returns: The object you just reloaded data for.
+        """
+
+        projection = 'full' if full else 'noAcl'
+        query_params = {'projection': projection}
+        self.metadata = self.connection.api_request(
+            method='GET', path=self.path, query_params=query_params)
+        return self
+
+    def get_metadata(self, field=None, default=None):
+        """Get all metadata or a specific field.
+
+        If you request a field that isn't available,
+        and that field can be retrieved by refreshing data,
+        this method will reload the data using
+        :func:`_MetadataMixin.reload_metadata`.
+
+        :type field: string
+        :param field: (optional) A particular field to retrieve from metadata.
+
+        :type default: anything
+        :param default: The value to return if the field provided wasn't found.
+
+        :rtype: dict or anything
+        :returns: All metadata or the value of the specific field.
+        """
+
+        if not self.has_metadata(field=field):
+            full = (field and field in self.LOAD_FULL_FIELDS)
+            self.reload_metadata(full=full)
+
+        if field:
+            return self.metadata.get(field, default)
+        else:
+            return self.metadata
+
+    def patch_metadata(self, metadata):
+        """Update particular fields of this object's metadata.
+
+        This method will only update the fields provided
+        and will not touch the other fields.
+
+        It will also reload the metadata locally
+        based on the server's response.
+
+        :type metadata: dict
+        :param metadata: The dictionary of values to update.
+
+        :rtype: :class:`_MetadataMixin`
+        :returns: The current object.
+        """
+        self.metadata = self.connection.api_request(
+            method='PATCH', path=self.path, data=metadata,
+            query_params={'projection': 'full'})
+        return self
+
+    def reload_acl(self):
+        """Reload the ACL data.
+
+        :rtype: :class:`_MetadataMixin`
+        :returns: The current object.
+ """ + self.acl = self.ACL_CLASS(**{self.ACL_KEYWORD: self}) + + for entry in self.get_metadata('acl', []): + entity = self.acl.entity_from_dict(entry) + self.acl.add_entity(entity) + + return self + + def get_acl(self): + """Get ACL metadata as an object of type `ACL_CLASS`. + + :returns: An ACL object for the current object. + """ + if not self.acl: + self.reload_acl() + return self.acl + + def save_acl(self, acl=None): + """Save the ACL data for this object. + + If called without arguments, this will save the ACL currently + stored on the current object. + + For example, for a `metadata_object` this will save the ACL + stored in ``some_other_acl``:: + + >>> metadata_object.acl = some_other_acl + >>> metadata_object.save_acl() + + You can also provide a specific ACL to save instead of the one + currently set on the object:: + + >>> metadata_object.save_acl(acl=my_other_acl) + + You can use this to set access controls to be consistent from + one object to another:: + + >>> metadata_object1 = get_object(object1_name) + >>> metadata_object2 = get_object(object2_name) + >>> metadata_object2.save_acl(metadata_object1.get_acl()) + + If you want to **clear** the ACL for the object, you must save + an empty list (``[]``) rather than using ``None`` (which is + interpreted as wanting to save the current ACL):: + + >>> metadata_object.save_acl(None) # Saves current ACL (self.acl). + >>> metadata_object.save_acl([]) # Clears current ACL. + + :param acl: The ACL object to save. + If left blank, this will save the ACL + set locally on the object. + """ + # NOTE: If acl is [], it is False-y but the acl can be set to an + # empty list, so we only override a null input. + if acl is None: + acl = self.acl + + if acl is None: + return self + + self.patch_metadata({'acl': list(acl)}) + self.reload_acl() + return self + + def clear_acl(self): + """Remove all ACL rules from the object. + + Note that this won't actually remove *ALL* the rules, but it + will remove all the non-default rules. In short, you'll still + have access to the object that you created even after you + clear ACL rules with this method. + + For example, imagine that you granted access to a Bucket + (inheriting from this class) to a bunch of coworkers:: + + >>> from gcloud import storage + >>> connection = storage.get_connection(project, email, + private_key_path) + >>> bucket = connection.get_bucket(bucket_name) + >>> acl = bucket.get_acl() + >>> acl.user('coworker1@example.org').grant_read() + >>> acl.user('coworker2@example.org').grant_read() + >>> acl.save() + + Now they work in another part of the company + and you want to 'start fresh' on who has access:: + + >>> acl.clear_acl() + + At this point all the custom rules you created have been removed. + """ + return self.save_acl(acl=[]) + + def make_public(self): + """Make this object public giving all users read access. + + :returns: The current object. + """ + + self.get_acl().all().grant_read() + self.save_acl() + return self diff --git a/gcloud/storage/acl.py b/gcloud/storage/acl.py index 2d40e2733318..f4bb49f715bb 100644 --- a/gcloud/storage/acl.py +++ b/gcloud/storage/acl.py @@ -75,12 +75,9 @@ class ACL(object): """Container class representing a list of access controls.""" - class Role(object): - """Enum style class for role-type constants.""" - - Reader = 'READER' - Writer = 'WRITER' - Owner = 'OWNER' + READER_ROLE = 'READER' + WRITER_ROLE = 'WRITER' + OWNER_ROLE = 'OWNER' class Entity(object): """Class representing a set of roles for an entity. 
@@ -153,32 +150,32 @@ def revoke(self, role): def grant_read(self): """Grant read access to the current entity.""" - return self.grant(ACL.Role.Reader) + return self.grant(ACL.READER_ROLE) def grant_write(self): """Grant write access to the current entity.""" - return self.grant(ACL.Role.Writer) + return self.grant(ACL.WRITER_ROLE) def grant_owner(self): """Grant owner access to the current entity.""" - return self.grant(ACL.Role.Owner) + return self.grant(ACL.OWNER_ROLE) def revoke_read(self): """Revoke read access from the current entity.""" - return self.revoke(ACL.Role.Reader) + return self.revoke(ACL.READER_ROLE) def revoke_write(self): """Revoke write access from the current entity.""" - return self.revoke(ACL.Role.Writer) + return self.revoke(ACL.WRITER_ROLE) def revoke_owner(self): """Revoke owner access from the current entity.""" - return self.revoke(ACL.Role.Owner) + return self.revoke(ACL.OWNER_ROLE) def __init__(self): self.entities = {} diff --git a/gcloud/storage/bucket.py b/gcloud/storage/bucket.py index 6fbf4883fa9b..859e9a131a60 100644 --- a/gcloud/storage/bucket.py +++ b/gcloud/storage/bucket.py @@ -2,6 +2,7 @@ import os +from gcloud.storage._helpers import _MetadataMixin from gcloud.storage import exceptions from gcloud.storage.acl import BucketACL from gcloud.storage.acl import DefaultObjectACL @@ -9,7 +10,7 @@ from gcloud.storage.key import Key -class Bucket(object): +class Bucket(_MetadataMixin): """A class representing a Bucket on Cloud Storage. :type connection: :class:`gcloud.storage.connection.Connection` @@ -19,8 +20,19 @@ class Bucket(object): :param name: The name of the bucket. """ + LOAD_FULL_FIELDS = ('acl', 'defaultObjectAcl') + """Tuple of metadata fields pertaining to bucket ACLs.""" + + ACL_CLASS = BucketACL + """Class which holds ACL data for buckets.""" + + ACL_KEYWORD = 'bucket' + """Keyword for BucketACL constructor to pass a bucket in.""" + def __init__(self, connection=None, name=None, metadata=None): - self.connection = connection + super(Bucket, self).__init__() + + self._connection = connection self.name = name self.metadata = metadata @@ -28,6 +40,7 @@ def __init__(self, connection=None, name=None, metadata=None): self.acl = None self.default_object_acl = None + # NOTE: Could also put this in _MetadataMixin. @classmethod def from_dict(cls, bucket_dict, connection=None): """Construct a new bucket from a dictionary of data from Cloud Storage. @@ -51,6 +64,15 @@ def __iter__(self): def __contains__(self, key): return self.get_key(key) is not None + @property + def connection(self): + """Getter property for the connection to use with this Bucket. + + :rtype: :class:`gcloud.storage.connection.Connection` + :returns: The connection to use. + """ + return self._connection + @property def path(self): """The URL path to this bucket.""" @@ -266,7 +288,7 @@ def upload_file(self, filename, key=None): key = self.new_key(key) return key.set_contents_from_filename(filename) - def upload_file_object(self, fh, key=None): + def upload_file_object(self, file_obj, key=None): """Shortcut method to upload a file object into this bucket. Use this method to quickly put a local file in Cloud Storage. @@ -292,8 +314,8 @@ def upload_file_object(self, fh, key=None): >>> print bucket.get_all_keys() [] - :type fh: file - :param fh: A file handle open for reading. + :type file_obj: file + :param file_obj: A file handle open for reading. 
:type key: string or :class:`gcloud.storage.key.Key` :param key: The key (either an object or a remote path) @@ -307,90 +329,8 @@ def upload_file_object(self, fh, key=None): if key: key = self.new_key(key) else: - key = self.new_key(os.path.basename(fh.name)) - return key.set_contents_from_file(fh) - - def has_metadata(self, field=None): - """Check if metadata is available locally. - - :type field: string - :param field: (optional) the particular field to check for. - - :rtype: bool - :returns: Whether metadata is available locally. - """ - - if not self.metadata: - return False - elif field and field not in self.metadata: - return False - else: - return True - - def reload_metadata(self, full=False): - """Reload metadata from Cloud Storage. - - :type full: bool - :param full: If True, loads all data (include ACL data). - - :rtype: :class:`Bucket` - :returns: The bucket you just reloaded data for. - """ - - projection = 'full' if full else 'noAcl' - query_params = {'projection': projection} - self.metadata = self.connection.api_request( - method='GET', path=self.path, query_params=query_params) - return self - - def get_metadata(self, field=None, default=None): - """Get all metadata or a specific field. - - If you request a field that isn't available, - and that field can be retrieved by refreshing data - from Cloud Storage, - this method will reload the data using - :func:`Bucket.reload_metadata`. - - :type field: string - :param field: (optional) A particular field to retrieve from metadata. - - :type default: anything - :param default: The value to return if the field provided wasn't found. - - :rtype: dict or anything - :returns: All metadata or the value of the specific field. - """ - - if not self.has_metadata(field=field): - full = (field and field in ('acl', 'defaultObjectAcl')) - self.reload_metadata(full=full) - - if field: - return self.metadata.get(field, default) - else: - return self.metadata - - def patch_metadata(self, metadata): - """Update particular fields of this bucket's metadata. - - This method will only update the fields provided - and will not touch the other fields. - - It will also reload the metadata locally - based on the servers response. - - :type metadata: dict - :param metadata: The dictionary of values to update. - - :rtype: :class:`Bucket` - :returns: The current bucket. - """ - - self.metadata = self.connection.api_request( - method='PATCH', path=self.path, data=metadata, - query_params={'projection': 'full'}) - return self + key = self.new_key(os.path.basename(file_obj.name)) + return key.set_contents_from_file(file_obj) def configure_website(self, main_page_suffix=None, not_found_page=None): """Configure website-related metadata. @@ -432,10 +372,12 @@ def configure_website(self, main_page_suffix=None, not_found_page=None): :param not_found_page: The file to use when a page isn't found. """ - data = {'website': {'mainPageSuffix': main_page_suffix, - 'notFoundPage': not_found_page, - } - } + data = { + 'website': { + 'mainPageSuffix': main_page_suffix, + 'notFoundPage': not_found_page, + }, + } return self.patch_metadata(data) def disable_website(self): @@ -447,117 +389,6 @@ def disable_website(self): return self.configure_website(None, None) - def reload_acl(self): - """Reload the ACL data from Cloud Storage. - - :rtype: :class:`Bucket` - :returns: The current bucket. 
- """ - - self.acl = BucketACL(bucket=self) - - for entry in self.get_metadata('acl', []): - entity = self.acl.entity_from_dict(entry) - self.acl.add_entity(entity) - - return self - - def get_acl(self): - """Get ACL metadata as a :class:`gcloud.storage.acl.BucketACL` object. - - :rtype: :class:`gcloud.storage.acl.BucketACL` - :returns: An ACL object for the current bucket. - """ - - if not self.acl: - self.reload_acl() - return self.acl - - def save_acl(self, acl=None): - """Save the ACL data for this bucket. - - If called without arguments, - this will save the ACL currently stored on the Bucket object. - For example, - this will save - the ACL stored in ``some_other_acl``:: - - >>> bucket.acl = some_other_acl - >>> bucket.save_acl() - - You can also provide a specific ACL to save - instead of the one currently set - on the Bucket object:: - - >>> bucket.save_acl(acl=my_other_acl) - - You can use this to set access controls - to be consistent from one bucket to another:: - - >>> bucket1 = connection.get_bucket(bucket1_name) - >>> bucket2 = connection.get_bucket(bucket2_name) - >>> bucket2.save_acl(bucket1.get_acl()) - - If you want to **clear** the ACL for the bucket, - you must save an empty list (``[]``) - rather than using ``None`` - (which is interpreted as wanting to save the current ACL):: - - >>> bucket.save_acl(None) # Saves the current ACL (self.acl). - >>> bucket.save_acl([]) # Clears the current ACL. - - :type acl: :class:`gcloud.storage.acl.ACL` - :param acl: The ACL object to save. - If left blank, this will save the ACL - set locally on the bucket. - """ - - # We do things in this weird way because [] and None - # both evaluate to False, but mean very different things. - if acl is None: - acl = self.acl - - if acl is None: - return self - - self.patch_metadata({'acl': list(acl)}) - self.reload_acl() - return self - - def clear_acl(self): - """Remove all ACL rules from the bucket. - - Note that this won't actually remove *ALL* the rules, - but it will remove all the non-default rules. - In short, - you'll still have access - to a bucket that you created - even after you clear ACL rules - with this method. - - For example, - imagine that you granted access to this bucket - to a bunch of coworkers:: - - >>> from gcloud import storage - >>> connection = storage.get_connection(project, email, - private_key_path) - >>> bucket = connection.get_bucket(bucket_name) - >>> acl = bucket.get_acl() - >>> acl.user('coworker1@example.org').grant_read() - >>> acl.user('coworker2@example.org').grant_read() - >>> acl.save() - - Now they work in another part of the company - and you want to 'start fresh' on who has access:: - - >>> acl.clear_acl() - - At this point all the custom rules you created have been removed. - """ - - return self.save_acl(acl=[]) - def reload_default_object_acl(self): """Reload the Default Object ACL rules for this bucket. @@ -623,9 +454,7 @@ def make_public(self, recursive=False, future=False): :param future: If True, this will make all objects created in the future public as well. 
""" - - self.get_acl().all().grant_read() - self.save_acl() + super(Bucket, self).make_public() if future: self.get_default_object_acl().all().grant_read() diff --git a/gcloud/storage/connection.py b/gcloud/storage/connection.py index 72a93c6fabd7..55b76ce3688d 100644 --- a/gcloud/storage/connection.py +++ b/gcloud/storage/connection.py @@ -238,7 +238,7 @@ def api_request(self, method, path, query_params=None, method=method, url=url, data=data, content_type=content_type) if response.status == 404: - raise exceptions.NotFoundError(response, content) + raise exceptions.NotFoundError(response) elif not 200 <= response.status < 300: raise exceptions.ConnectionError(response, content) @@ -491,7 +491,7 @@ def generate_signed_url(self, resource, expiration, query_params = { 'GoogleAccessId': self.credentials.service_account_name, 'Expires': str(expiration), - 'Signature': signature + 'Signature': signature, } # Return the built URL. diff --git a/gcloud/storage/exceptions.py b/gcloud/storage/exceptions.py index 80785efe45f3..77fed212b354 100644 --- a/gcloud/storage/exceptions.py +++ b/gcloud/storage/exceptions.py @@ -15,8 +15,10 @@ def __init__(self, response, content): self.message = message -class NotFoundError(ConnectionError): +class NotFoundError(StorageError): """Exception corresponding to a 404 not found bad connection.""" - def __init__(self, response, content): - self.message = 'Request returned a 404. Headers: %s' % (response) + def __init__(self, response): + super(NotFoundError, self).__init__('') + # suppress deprecation warning under 2.6.x + self.message = 'Request returned a 404. Headers: %s' % (response,) diff --git a/gcloud/storage/iterator.py b/gcloud/storage/iterator.py index cd134a50ade2..9bc01cb1eb56 100644 --- a/gcloud/storage/iterator.py +++ b/gcloud/storage/iterator.py @@ -201,6 +201,9 @@ class KeyDataIterator(object): def __init__(self, key): self.key = key + # NOTE: These variables will be initialized by reset(). + self._bytes_written = None + self._total_bytes = None self.reset() def __iter__(self): @@ -226,7 +229,7 @@ def has_more_data(self): # If it isn't, something is wrong. raise ValueError('Size of object is unknown.') else: - return (self._bytes_written < self._total_bytes) + return self._bytes_written < self._total_bytes def get_headers(self): """Gets range header(s) for next chunk of data. diff --git a/gcloud/storage/key.py b/gcloud/storage/key.py index 9bf76f820643..d4222649bcaf 100644 --- a/gcloud/storage/key.py +++ b/gcloud/storage/key.py @@ -1,17 +1,26 @@ """Create / interact with gcloud storage keys.""" -import errno import mimetypes import os from StringIO import StringIO +from gcloud.storage._helpers import _MetadataMixin from gcloud.storage.acl import ObjectACL from gcloud.storage.iterator import KeyDataIterator -class Key(object): +class Key(_MetadataMixin): """A wrapper around Cloud Storage's concept of an ``Object``.""" + LOAD_FULL_FIELDS = ('acl',) + """Tuple of metadata fields pertaining to key ACLs.""" + + ACL_CLASS = ObjectACL + """Class which holds ACL data for keys.""" + + ACL_KEYWORD = 'key' + """Keyword for ObjectACL constructor to pass a key in.""" + CHUNK_SIZE = 1024 * 1024 # 1 MB. """The size of a chunk of data whenever iterating (1 MB). @@ -32,6 +41,7 @@ def __init__(self, bucket=None, name=None, metadata=None): :type metadata: dict :param metadata: All the other data provided by Cloud Storage. 
""" + super(Key, self).__init__() self.bucket = bucket self.name = name @@ -170,17 +180,17 @@ def delete(self): return self.bucket.delete_key(self) - def get_contents_to_file(self, fh): + def get_contents_to_file(self, file_obj): """Gets the contents of this key to a file-like object. - :type fh: file - :param fh: A file handle to which to write the key's data. + :type file_obj: file + :param file_obj: A file handle to which to write the key's data. :raises: :class:`gcloud.storage.exceptions.NotFoundError` """ for chunk in KeyDataIterator(self): - fh.write(chunk) + file_obj.write(chunk) def get_contents_to_filename(self, filename): """Get the contents of this key to a file by name. @@ -191,8 +201,8 @@ def get_contents_to_filename(self, filename): :raises: :class:`gcloud.storage.exceptions.NotFoundError` """ - with open(filename, 'wb') as fh: - self.get_contents_to_file(fh) + with open(filename, 'wb') as file_obj: + self.get_contents_to_file(file_obj) def get_contents_as_string(self): """Gets the data stored on this Key as a string. @@ -206,12 +216,12 @@ def get_contents_as_string(self): self.get_contents_to_file(string_buffer) return string_buffer.getvalue() - def set_contents_from_file(self, fh, rewind=False, size=None, + def set_contents_from_file(self, file_obj, rewind=False, size=None, content_type=None): """Set the contents of this key to the contents of a file handle. - :type fh: file - :param fh: A file handle open for reading. + :type file_obj: file + :param file_obj: A file handle open for reading. :type rewind: bool :param rewind: If True, seek to the beginning of the file handle before @@ -225,10 +235,10 @@ def set_contents_from_file(self, fh, rewind=False, size=None, # Rewind the file if desired. if rewind: - fh.seek(0, os.SEEK_SET) + file_obj.seek(0, os.SEEK_SET) # Get the basic stats about the file. - total_bytes = size or os.fstat(fh.fileno()).st_size + total_bytes = size or os.fstat(file_obj.fileno()).st_size bytes_uploaded = 0 # Set up a resumable upload session. @@ -242,7 +252,7 @@ def set_contents_from_file(self, fh, rewind=False, size=None, query_params={'uploadType': 'resumable', 'name': self.name}, api_base_url=self.connection.API_BASE_URL + '/upload') - response, content = self.connection.make_request( + response, _ = self.connection.make_request( method='POST', url=upload_url, headers=headers) @@ -251,7 +261,7 @@ def set_contents_from_file(self, fh, rewind=False, size=None, while bytes_uploaded < total_bytes: # Construct the range header. - data = fh.read(self.CHUNK_SIZE) + data = file_obj.read(self.CHUNK_SIZE) chunk_size = len(data) start = bytes_uploaded @@ -261,7 +271,7 @@ def set_contents_from_file(self, fh, rewind=False, size=None, 'Content-Range': 'bytes %d-%d/%d' % (start, end, total_bytes), } - response, content = self.connection.make_request( + response, _ = self.connection.make_request( content_type='text/plain', method='POST', url=upload_url, headers=headers, data=data) @@ -276,8 +286,8 @@ def set_contents_from_filename(self, filename): content_type, _ = mimetypes.guess_type(filename) - with open(filename, 'rb') as fh: - self.set_contents_from_file(fh, content_type=content_type) + with open(filename, 'rb') as file_obj: + self.set_contents_from_file(file_obj, content_type=content_type) def set_contents_from_string(self, data, content_type='text/plain'): """Sets the contents of this key to the provided string. 
@@ -302,161 +312,7 @@ def set_contents_from_string(self, data, content_type='text/plain'): string_buffer = StringIO() string_buffer.write(data) - self.set_contents_from_file(fh=string_buffer, rewind=True, + self.set_contents_from_file(file_obj=string_buffer, rewind=True, size=string_buffer.len, content_type=content_type) return self - - def has_metadata(self, field=None): - """Check if metadata is available locally. - - :type field: string - :param field: (optional) the particular field to check for. - - :rtype: bool - :returns: Whether metadata is available locally. - """ - - if not self.metadata: - return False - elif field and field not in self.metadata: - return False - else: - return True - - def reload_metadata(self, full=False): - """Reload metadata from Cloud Storage. - - :type full: bool - :param full: If True, loads all data (include ACL data). - - :rtype: :class:`Key` - :returns: The key you just reloaded data for. - """ - - projection = 'full' if full else 'noAcl' - query_params = {'projection': projection} - self.metadata = self.connection.api_request( - method='GET', path=self.path, query_params=query_params) - return self - - def get_metadata(self, field=None, default=None): - """Get all metadata or a specific field. - - If you request a field that isn't available, - and that field can be retrieved by refreshing data - from Cloud Storage, - this method will reload the data using - :func:`Key.reload_metadata`. - - :type field: string - :param field: (optional) A particular field to retrieve from metadata. - - :type default: anything - :param default: The value to return if the field provided wasn't found. - - :rtype: dict or anything - :returns: All metadata or the value of the specific field. - """ - - if not self.has_metadata(field=field): - full = (field and field == 'acl') - self.reload_metadata(full=full) - - if field: - return self.metadata.get(field, default) - else: - return self.metadata - - def patch_metadata(self, metadata): - """Update particular fields of this key's metadata. - - This method will only update the fields provided - and will not touch the other fields. - - It will also reload the metadata locally - based on the servers response. - - :type metadata: dict - :param metadata: The dictionary of values to update. - - :rtype: :class:`Key` - :returns: The current key. - """ - - self.metadata = self.connection.api_request( - method='PATCH', path=self.path, data=metadata, - query_params={'projection': 'full'}) - return self - - def reload_acl(self): - """Reload the ACL data from Cloud Storage. - - :rtype: :class:`Key` - :returns: The current key. - """ - - self.acl = ObjectACL(key=self) - - for entry in self.get_metadata('acl', []): - entity = self.acl.entity_from_dict(entry) - self.acl.add_entity(entity) - - return self - - def get_acl(self): - """Get ACL metadata as a :class:`gcloud.storage.acl.ObjectACL` object. - - :rtype: :class:`gcloud.storage.acl.ObjectACL` - :returns: An ACL object for the current key. - """ - - if not self.acl: - self.reload_acl() - return self.acl - - def save_acl(self, acl=None): - """Save the ACL data for this key. - - :type acl: :class:`gcloud.storage.acl.ACL` - :param acl: The ACL object to save. - If left blank, this will save the ACL - set locally on the key. - """ - - # We do things in this weird way because [] and None - # both evaluate to False, but mean very different things. 
- if acl is None: - acl = self.acl - - if acl is None: - return self - - self.patch_metadata({'acl': list(acl)}) - self.reload_acl() - return self - - def clear_acl(self): - """Remove all ACL rules from the key. - - Note that this won't actually remove *ALL* the rules, - but it will remove all the non-default rules. - In short, - you'll still have access - to a key that you created - even after you clear ACL rules - with this method. - """ - - return self.save_acl(acl=[]) - - def make_public(self): - """Make this key public giving all users read access. - - :rtype: :class:`Key` - :returns: The current key. - """ - - self.get_acl().all().grant_read() - self.save_acl() - return self diff --git a/gcloud/storage/test___init__.py b/gcloud/storage/test___init__.py index bc5e237497a1..40f2ed277f98 100644 --- a/gcloud/storage/test___init__.py +++ b/gcloud/storage/test___init__.py @@ -3,7 +3,8 @@ class Test_get_connection(unittest2.TestCase): - def _callFUT(self, *args, **kw): + @staticmethod + def _callFUT(*args, **kw): from gcloud.storage import get_connection return get_connection(*args, **kw) @@ -26,16 +27,18 @@ def test_it(self): self.assertTrue(isinstance(found, Connection)) self.assertEqual(found.project, PROJECT) self.assertTrue(found._credentials is client._signed) - self.assertEqual(client._called_with, - {'service_account_name': CLIENT_EMAIL, - 'private_key': PRIVATE_KEY, - 'scope': SCOPE, - }) + expected_called_with = { + 'service_account_name': CLIENT_EMAIL, + 'private_key': PRIVATE_KEY, + 'scope': SCOPE, + } + self.assertEqual(client._called_with, expected_called_with) class Test_get_bucket(unittest2.TestCase): - def _callFUT(self, *args, **kw): + @staticmethod + def _callFUT(*args, **kw): from gcloud.storage import get_bucket return get_bucket(*args, **kw) diff --git a/gcloud/storage/test__helpers.py b/gcloud/storage/test__helpers.py new file mode 100644 index 000000000000..6e8ff81799d6 --- /dev/null +++ b/gcloud/storage/test__helpers.py @@ -0,0 +1,19 @@ +import unittest2 + + +class Test_MetadataMixin(unittest2.TestCase): + + @staticmethod + def _getTargetClass(): + from gcloud.storage._helpers import _MetadataMixin + return _MetadataMixin + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_abstract_properties(self): + metadata_object = self._makeOne() + self.assertRaises(NotImplementedError, + lambda: metadata_object.connection) + self.assertRaises(NotImplementedError, + lambda: metadata_object.path) diff --git a/gcloud/storage/test_acl.py b/gcloud/storage/test_acl.py index d72ec0fb782e..3ee5cfe95b29 100644 --- a/gcloud/storage/test_acl.py +++ b/gcloud/storage/test_acl.py @@ -3,7 +3,8 @@ class Test_ACL_Entity(unittest2.TestCase): - def _getTargetClass(self): + @staticmethod + def _getTargetClass(): from gcloud.storage.acl import ACL return ACL.Entity @@ -77,27 +78,27 @@ def test_grant_read(self): TYPE = 'type' entity = self._makeOne(TYPE) entity.grant_read() - self.assertEqual(entity.get_roles(), set([ACL.Role.Reader])) + self.assertEqual(entity.get_roles(), set([ACL.READER_ROLE])) def test_grant_write(self): from gcloud.storage.acl import ACL TYPE = 'type' entity = self._makeOne(TYPE) entity.grant_write() - self.assertEqual(entity.get_roles(), set([ACL.Role.Writer])) + self.assertEqual(entity.get_roles(), set([ACL.WRITER_ROLE])) def test_grant_owner(self): from gcloud.storage.acl import ACL TYPE = 'type' entity = self._makeOne(TYPE) entity.grant_owner() - self.assertEqual(entity.get_roles(), set([ACL.Role.Owner])) + 
self.assertEqual(entity.get_roles(), set([ACL.OWNER_ROLE])) def test_revoke_read(self): from gcloud.storage.acl import ACL TYPE = 'type' entity = self._makeOne(TYPE) - entity.grant(ACL.Role.Reader) + entity.grant(ACL.READER_ROLE) entity.revoke_read() self.assertEqual(entity.get_roles(), set()) @@ -105,7 +106,7 @@ def test_revoke_write(self): from gcloud.storage.acl import ACL TYPE = 'type' entity = self._makeOne(TYPE) - entity.grant(ACL.Role.Writer) + entity.grant(ACL.WRITER_ROLE) entity.revoke_write() self.assertEqual(entity.get_roles(), set()) @@ -113,14 +114,15 @@ def test_revoke_owner(self): from gcloud.storage.acl import ACL TYPE = 'type' entity = self._makeOne(TYPE) - entity.grant(ACL.Role.Owner) + entity.grant(ACL.OWNER_ROLE) entity.revoke_owner() self.assertEqual(entity.get_roles(), set()) class Test_ACL(unittest2.TestCase): - def _getTargetClass(self): + @staticmethod + def _getTargetClass(): from gcloud.storage.acl import ACL return ACL @@ -394,7 +396,8 @@ def test_save_raises_NotImplementedError(self): class Test_BucketACL(unittest2.TestCase): - def _getTargetClass(self): + @staticmethod + def _getTargetClass(): from gcloud.storage.acl import BucketACL return BucketACL @@ -420,7 +423,8 @@ def save_acl(self, acl): class Test_DefaultObjectACL(unittest2.TestCase): - def _getTargetClass(self): + @staticmethod + def _getTargetClass(): from gcloud.storage.acl import DefaultObjectACL return DefaultObjectACL @@ -439,7 +443,8 @@ def save_default_object_acl(self, acl): class Test_ObjectACL(unittest2.TestCase): - def _getTargetClass(self): + @staticmethod + def _getTargetClass(): from gcloud.storage.acl import ObjectACL return ObjectACL diff --git a/gcloud/storage/test_bucket.py b/gcloud/storage/test_bucket.py index 2d57f7778ace..0a3cd358d2e1 100644 --- a/gcloud/storage/test_bucket.py +++ b/gcloud/storage/test_bucket.py @@ -5,7 +5,8 @@ class Test_Bucket(unittest2.TestCase): - def _getTargetClass(self): + @staticmethod + def _getTargetClass(): from gcloud.storage.bucket import Bucket return Bucket @@ -728,10 +729,10 @@ def test_save_default_object_acl_none_set_none_passed(self): def test_save_default_object_acl_existing_set_none_passed(self): NAME = 'name' connection = _Connection({'foo': 'Foo', 'acl': []}) - connection = _Connection({'foo': 'Foo', 'acl': []}, - {'foo': 'Foo', 'acl': [], - 'defaultObjectAcl': []}, - ) + connection = _Connection( + {'foo': 'Foo', 'acl': []}, + {'foo': 'Foo', 'acl': [], 'defaultObjectAcl': []}, + ) metadata = {'defaultObjectAcl': []} bucket = self._makeOne(connection, NAME, metadata) bucket.reload_default_object_acl() @@ -750,10 +751,10 @@ def test_save_default_object_acl_existing_set_new_passed(self): NAME = 'name' ROLE = 'role' new_acl = [{'entity': 'allUsers', 'role': ROLE}] - connection = _Connection({'foo': 'Foo', 'acl': new_acl}, - {'foo': 'Foo', 'acl': new_acl, - 'defaultObjectAcl': new_acl}, - ) + connection = _Connection( + {'foo': 'Foo', 'acl': new_acl}, + {'foo': 'Foo', 'acl': new_acl, 'defaultObjectAcl': new_acl}, + ) metadata = {'defaultObjectAcl': []} bucket = self._makeOne(connection, NAME, metadata) bucket.reload_default_object_acl() @@ -773,10 +774,10 @@ def test_clear_default_object_acl(self): NAME = 'name' ROLE = 'role' old_acl = [{'entity': 'allUsers', 'role': ROLE}] - connection = _Connection({'foo': 'Foo', 'acl': []}, - {'foo': 'Foo', 'acl': [], - 'defaultObjectAcl': []}, - ) + connection = _Connection( + {'foo': 'Foo', 'acl': []}, + {'foo': 'Foo', 'acl': [], 'defaultObjectAcl': []}, + ) metadata = {'defaultObjectAcl': old_acl} bucket = 
self._makeOne(connection, NAME, metadata) bucket.reload_default_object_acl() @@ -796,7 +797,7 @@ def test_make_public_defaults(self): from gcloud.storage.acl import ACL NAME = 'name' before = {'acl': [], 'defaultObjectAcl': []} - permissive = [{'entity': 'allUsers', 'role': ACL.Role.Reader}] + permissive = [{'entity': 'allUsers', 'role': ACL.READER_ROLE}] after = {'acl': permissive, 'defaultObjectAcl': []} connection = _Connection(after) bucket = self._makeOne(connection, NAME, before) @@ -815,7 +816,7 @@ def test_make_public_w_future(self): from gcloud.storage.acl import ACL NAME = 'name' before = {'acl': [], 'defaultObjectAcl': []} - permissive = [{'entity': 'allUsers', 'role': ACL.Role.Reader}] + permissive = [{'entity': 'allUsers', 'role': ACL.READER_ROLE}] after1 = {'acl': permissive, 'defaultObjectAcl': []} after2 = {'acl': permissive, 'defaultObjectAcl': permissive} connection = _Connection(after1, after2) @@ -870,7 +871,7 @@ def get_items_from_response(self, response): NAME = 'name' KEY = 'key' before = {'acl': [], 'defaultObjectAcl': []} - permissive = [{'entity': 'allUsers', 'role': ACL.Role.Reader}] + permissive = [{'entity': 'allUsers', 'role': ACL.READER_ROLE}] after = {'acl': permissive, 'defaultObjectAcl': []} connection = _Connection(after, {'items': [{'name': KEY}]}) bucket = self._makeOne(connection, NAME, before) @@ -906,7 +907,7 @@ def api_request(self, **kw): try: response, self._responses = self._responses[0], self._responses[1:] except: - raise NotFoundError('miss', None) + raise NotFoundError('miss') else: return response @@ -914,7 +915,7 @@ def delete_bucket(self, bucket, force=False): from gcloud.storage.exceptions import NotFoundError self._deleted.append((bucket, force)) if not self._delete_ok: - raise NotFoundError('miss', None) + raise NotFoundError('miss') return True diff --git a/gcloud/storage/test_connection.py b/gcloud/storage/test_connection.py index c3787cbe2c82..590270a713a0 100644 --- a/gcloud/storage/test_connection.py +++ b/gcloud/storage/test_connection.py @@ -3,7 +3,8 @@ class TestConnection(unittest2.TestCase): - def _getTargetClass(self): + @staticmethod + def _getTargetClass(): from gcloud.storage.connection import Connection return Connection @@ -30,13 +31,13 @@ def test_http_w_existing(self): self.assertTrue(conn.http is http) def test_http_wo_creds(self): - from httplib2 import Http + import httplib2 PROJECT = 'project' conn = self._makeOne(PROJECT) - self.assertTrue(isinstance(conn.http, Http)) + self.assertTrue(isinstance(conn.http, httplib2.Http)) def test_http_w_creds(self): - from httplib2 import Http + import httplib2 PROJECT = 'project' authorized = object() @@ -47,20 +48,21 @@ def authorize(self, http): creds = Creds() conn = self._makeOne(PROJECT, creds) self.assertTrue(conn.http is authorized) - self.assertTrue(isinstance(creds._called_with, Http)) + self.assertTrue(isinstance(creds._called_with, httplib2.Http)) def test___iter___empty(self): PROJECT = 'project' conn = self._makeOne(PROJECT) - URI = '/'.join([conn.API_BASE_URL, - 'storage', - conn.API_VERSION, - 'b?project=%s' % PROJECT, - ]) - http = conn._http = Http({'status': '200', - 'content-type': 'application/json', - }, - '{}') + URI = '/'.join([ + conn.API_BASE_URL, + 'storage', + conn.API_VERSION, + 'b?project=%s' % PROJECT, + ]) + http = conn._http = Http( + {'status': '200', 'content-type': 'application/json'}, + '{}', + ) keys = list(conn) self.assertEqual(len(keys), 0) self.assertEqual(http._called_with['method'], 'GET') @@ -70,15 +72,16 @@ def 
test___iter___non_empty(self): PROJECT = 'project' KEY = 'key' conn = self._makeOne(PROJECT) - URI = '/'.join([conn.API_BASE_URL, - 'storage', - conn.API_VERSION, - 'b?project=%s' % PROJECT, - ]) - http = conn._http = Http({'status': '200', - 'content-type': 'application/json', - }, - '{"items": [{"name": "%s"}]}' % KEY) + URI = '/'.join([ + conn.API_BASE_URL, + 'storage', + conn.API_VERSION, + 'b?project=%s' % PROJECT, + ]) + http = conn._http = Http( + {'status': '200', 'content-type': 'application/json'}, + '{"items": [{"name": "%s"}]}' % KEY, + ) keys = list(conn) self.assertEqual(len(keys), 1) self.assertEqual(keys[0].name, KEY) @@ -89,16 +92,17 @@ def test___contains___miss(self): PROJECT = 'project' NONESUCH = 'nonesuch' conn = self._makeOne(PROJECT) - URI = '/'.join([conn.API_BASE_URL, - 'storage', - conn.API_VERSION, - 'b', - 'nonesuch?project=%s' % PROJECT, - ]) - http = conn._http = Http({'status': '404', - 'content-type': 'application/json', - }, - '{}') + URI = '/'.join([ + conn.API_BASE_URL, + 'storage', + conn.API_VERSION, + 'b', + 'nonesuch?project=%s' % PROJECT, + ]) + http = conn._http = Http( + {'status': '404', 'content-type': 'application/json'}, + '{}', + ) self.assertFalse(NONESUCH in conn) self.assertEqual(http._called_with['method'], 'GET') self.assertEqual(http._called_with['uri'], URI) @@ -107,16 +111,17 @@ def test___contains___hit(self): PROJECT = 'project' KEY = 'key' conn = self._makeOne(PROJECT) - URI = '/'.join([conn.API_BASE_URL, - 'storage', - conn.API_VERSION, - 'b', - 'key?project=%s' % PROJECT, - ]) - http = conn._http = Http({'status': '200', - 'content-type': 'application/json', - }, - '{"name": "%s"}' % KEY) + URI = '/'.join([ + conn.API_BASE_URL, + 'storage', + conn.API_VERSION, + 'b', + 'key?project=%s' % PROJECT, + ]) + http = conn._http = Http( + {'status': '200', 'content-type': 'application/json'}, + '{"name": "%s"}' % KEY, + ) self.assertTrue(KEY in conn) self.assertEqual(http._called_with['method'], 'GET') self.assertEqual(http._called_with['uri'], URI) @@ -124,11 +129,12 @@ def test___contains___hit(self): def test_build_api_url_no_extra_query_params(self): PROJECT = 'project' conn = self._makeOne(PROJECT) - URI = '/'.join([conn.API_BASE_URL, - 'storage', - conn.API_VERSION, - 'foo?project=%s' % PROJECT, - ]) + URI = '/'.join([ + conn.API_BASE_URL, + 'storage', + conn.API_VERSION, + 'foo?project=%s' % PROJECT, + ]) self.assertEqual(conn.build_api_url('/foo'), URI) def test_build_api_url_w_extra_query_params(self): @@ -137,7 +143,7 @@ def test_build_api_url_w_extra_query_params(self): PROJECT = 'project' conn = self._makeOne(PROJECT) uri = conn.build_api_url('/foo', {'bar': 'baz'}) - scheme, netloc, path, qs, frag = urlsplit(uri) + scheme, netloc, path, qs, _ = urlsplit(uri) self.assertEqual('%s://%s' % (scheme, netloc), conn.API_BASE_URL) self.assertEqual(path, '/'.join(['', 'storage', conn.API_VERSION, 'foo'])) @@ -149,10 +155,10 @@ def test_make_request_no_data_no_content_type_no_headers(self): PROJECT = 'project' conn = self._makeOne(PROJECT) URI = 'http://example.com/test' - http = conn._http = Http({'status': '200', - 'content-type': 'text/plain', - }, - '') + http = conn._http = Http( + {'status': '200', 'content-type': 'text/plain'}, + '', + ) headers, content = conn.make_request('GET', URI) self.assertEqual(headers['status'], '200') self.assertEqual(headers['content-type'], 'text/plain') @@ -160,89 +166,94 @@ def test_make_request_no_data_no_content_type_no_headers(self): self.assertEqual(http._called_with['method'], 'GET') 
self.assertEqual(http._called_with['uri'], URI) self.assertEqual(http._called_with['body'], None) - self.assertEqual(http._called_with['headers'], - {'Accept-Encoding': 'gzip', - 'Content-Length': 0, - 'User-Agent': conn.USER_AGENT, - }) + expected_headers = { + 'Accept-Encoding': 'gzip', + 'Content-Length': 0, + 'User-Agent': conn.USER_AGENT, + } + self.assertEqual(http._called_with['headers'], expected_headers) def test_make_request_w_data_no_extra_headers(self): PROJECT = 'project' conn = self._makeOne(PROJECT) URI = 'http://example.com/test' - http = conn._http = Http({'status': '200', - 'content-type': 'text/plain', - }, - '') - headers, content = conn.make_request('GET', URI, {}, - 'application/json') + http = conn._http = Http( + {'status': '200', 'content-type': 'text/plain'}, + '', + ) + conn.make_request('GET', URI, {}, 'application/json') self.assertEqual(http._called_with['method'], 'GET') self.assertEqual(http._called_with['uri'], URI) self.assertEqual(http._called_with['body'], {}) - self.assertEqual(http._called_with['headers'], - {'Accept-Encoding': 'gzip', - 'Content-Length': 0, - 'Content-Type': 'application/json', - 'User-Agent': conn.USER_AGENT, - }) + expected_headers = { + 'Accept-Encoding': 'gzip', + 'Content-Length': 0, + 'Content-Type': 'application/json', + 'User-Agent': conn.USER_AGENT, + } + self.assertEqual(http._called_with['headers'], expected_headers) def test_make_request_w_extra_headers(self): PROJECT = 'project' conn = self._makeOne(PROJECT) URI = 'http://example.com/test' - http = conn._http = Http({'status': '200', - 'content-type': 'text/plain', - }, - '') - headers, content = conn.make_request('GET', URI, - headers={'X-Foo': 'foo'}) + http = conn._http = Http( + {'status': '200', 'content-type': 'text/plain'}, + '', + ) + conn.make_request('GET', URI, headers={'X-Foo': 'foo'}) self.assertEqual(http._called_with['method'], 'GET') self.assertEqual(http._called_with['uri'], URI) self.assertEqual(http._called_with['body'], None) - self.assertEqual(http._called_with['headers'], - {'Accept-Encoding': 'gzip', - 'Content-Length': 0, - 'X-Foo': 'foo', - 'User-Agent': conn.USER_AGENT, - }) + expected_headers = { + 'Accept-Encoding': 'gzip', + 'Content-Length': 0, + 'X-Foo': 'foo', + 'User-Agent': conn.USER_AGENT, + } + self.assertEqual(http._called_with['headers'], expected_headers) def test_api_request_defaults(self): PROJECT = 'project' PATH = '/path/required' conn = self._makeOne(PROJECT) - URI = '/'.join([conn.API_BASE_URL, - 'storage', - conn.API_VERSION, - ]) + '%s?project=%s' % (PATH, PROJECT) - http = conn._http = Http({'status': '200', - 'content-type': 'application/json', - }, '{}') + URI = '/'.join([ + conn.API_BASE_URL, + 'storage', + '%s%s?project=%s' % (conn.API_VERSION, PATH, PROJECT), + ]) + http = conn._http = Http( + {'status': '200', 'content-type': 'application/json'}, + '{}', + ) self.assertEqual(conn.api_request('GET', PATH), {}) self.assertEqual(http._called_with['method'], 'GET') self.assertEqual(http._called_with['uri'], URI) self.assertEqual(http._called_with['body'], None) - self.assertEqual(http._called_with['headers'], - {'Accept-Encoding': 'gzip', - 'Content-Length': 0, - 'User-Agent': conn.USER_AGENT, - }) + expected_headers = { + 'Accept-Encoding': 'gzip', + 'Content-Length': 0, + 'User-Agent': conn.USER_AGENT, + } + self.assertEqual(http._called_with['headers'], expected_headers) def test_api_request_w_non_json_response(self): PROJECT = 'project' conn = self._makeOne(PROJECT) - conn._http = Http({'status': '200', - 
'content-type': 'text/plain', - }, - 'CONTENT') + conn._http = Http( + {'status': '200', 'content-type': 'text/plain'}, + 'CONTENT', + ) + self.assertRaises(TypeError, conn.api_request, 'GET', '/') def test_api_request_wo_json_expected(self): PROJECT = 'project' conn = self._makeOne(PROJECT) - conn._http = Http({'status': '200', - 'content-type': 'text/plain', - }, - 'CONTENT') + conn._http = Http( + {'status': '200', 'content-type': 'text/plain'}, + 'CONTENT', + ) self.assertEqual(conn.api_request('GET', '/', expect_json=False), 'CONTENT') @@ -251,14 +262,14 @@ def test_api_request_w_query_params(self): from urlparse import urlsplit PROJECT = 'project' conn = self._makeOne(PROJECT) - http = conn._http = Http({'status': '200', - 'content-type': 'application/json', - }, - '{}') + http = conn._http = Http( + {'status': '200', 'content-type': 'application/json'}, + '{}', + ) self.assertEqual(conn.api_request('GET', '/', {'foo': 'bar'}), {}) self.assertEqual(http._called_with['method'], 'GET') uri = http._called_with['uri'] - scheme, netloc, path, qs, frag = urlsplit(uri) + scheme, netloc, path, qs, _ = urlsplit(uri) self.assertEqual('%s://%s' % (scheme, netloc), conn.API_BASE_URL) self.assertEqual(path, '/'.join(['', 'storage', conn.API_VERSION, ''])) @@ -266,11 +277,12 @@ def test_api_request_w_query_params(self): self.assertEqual(parms['project'], PROJECT) self.assertEqual(parms['foo'], 'bar') self.assertEqual(http._called_with['body'], None) - self.assertEqual(http._called_with['headers'], - {'Accept-Encoding': 'gzip', - 'Content-Length': 0, - 'User-Agent': conn.USER_AGENT, - }) + expected_headers = { + 'Accept-Encoding': 'gzip', + 'Content-Length': 0, + 'User-Agent': conn.USER_AGENT, + } + self.assertEqual(http._called_with['headers'], expected_headers) def test_api_request_w_data(self): import json @@ -278,58 +290,61 @@ def test_api_request_w_data(self): DATA = {'foo': 'bar'} DATAJ = json.dumps(DATA) conn = self._makeOne(PROJECT) - URI = '/'.join([conn.API_BASE_URL, - 'storage', - conn.API_VERSION, - '?project=%s' % PROJECT, - ]) - http = conn._http = Http({'status': '200', - 'content-type': 'application/json', - }, - '{}') + URI = '/'.join([ + conn.API_BASE_URL, + 'storage', + conn.API_VERSION, + '?project=%s' % PROJECT, + ]) + http = conn._http = Http( + {'status': '200', 'content-type': 'application/json'}, + '{}', + ) self.assertEqual(conn.api_request('POST', '/', data=DATA), {}) self.assertEqual(http._called_with['method'], 'POST') self.assertEqual(http._called_with['uri'], URI) self.assertEqual(http._called_with['body'], DATAJ) - self.assertEqual(http._called_with['headers'], - {'Accept-Encoding': 'gzip', - 'Content-Length': len(DATAJ), - 'Content-Type': 'application/json', - 'User-Agent': conn.USER_AGENT, - }) + expected_headers = { + 'Accept-Encoding': 'gzip', + 'Content-Length': len(DATAJ), + 'Content-Type': 'application/json', + 'User-Agent': conn.USER_AGENT, + } + self.assertEqual(http._called_with['headers'], expected_headers) def test_api_request_w_404(self): from gcloud.storage.exceptions import NotFoundError PROJECT = 'project' conn = self._makeOne(PROJECT) - conn._http = Http({'status': '404', - 'content-type': 'text/plain', - }, - '') + conn._http = Http( + {'status': '404', 'content-type': 'text/plain'}, + '', + ) self.assertRaises(NotFoundError, conn.api_request, 'GET', '/') def test_api_request_w_500(self): from gcloud.storage.exceptions import ConnectionError PROJECT = 'project' conn = self._makeOne(PROJECT) - conn._http = Http({'status': '500', - 'content-type': 
'text/plain', - }, - '') + conn._http = Http( + {'status': '500', 'content-type': 'text/plain'}, + '', + ) self.assertRaises(ConnectionError, conn.api_request, 'GET', '/') def test_get_all_buckets_empty(self): PROJECT = 'project' conn = self._makeOne(PROJECT) - URI = '/'.join([conn.API_BASE_URL, - 'storage', - conn.API_VERSION, - 'b?project=%s' % PROJECT, - ]) - http = conn._http = Http({'status': '200', - 'content-type': 'application/json', - }, - '{}') + URI = '/'.join([ + conn.API_BASE_URL, + 'storage', + conn.API_VERSION, + 'b?project=%s' % PROJECT, + ]) + http = conn._http = Http( + {'status': '200', 'content-type': 'application/json'}, + '{}', + ) keys = conn.get_all_buckets() self.assertEqual(len(keys), 0) self.assertEqual(http._called_with['method'], 'GET') @@ -339,15 +354,16 @@ def test_get_all_buckets_non_empty(self): PROJECT = 'project' KEY = 'key' conn = self._makeOne(PROJECT) - URI = '/'.join([conn.API_BASE_URL, - 'storage', - conn.API_VERSION, - 'b?project=%s' % PROJECT, - ]) - http = conn._http = Http({'status': '200', - 'content-type': 'application/json', - }, - '{"items": [{"name": "%s"}]}' % KEY) + URI = '/'.join([ + conn.API_BASE_URL, + 'storage', + conn.API_VERSION, + 'b?project=%s' % PROJECT, + ]) + http = conn._http = Http( + {'status': '200', 'content-type': 'application/json'}, + '{"items": [{"name": "%s"}]}' % KEY, + ) keys = conn.get_all_buckets() self.assertEqual(len(keys), 1) self.assertEqual(keys[0].name, KEY) @@ -359,16 +375,17 @@ def test_get_bucket_miss(self): PROJECT = 'project' NONESUCH = 'nonesuch' conn = self._makeOne(PROJECT) - URI = '/'.join([conn.API_BASE_URL, - 'storage', - conn.API_VERSION, - 'b', - 'nonesuch?project=%s' % PROJECT, - ]) - http = conn._http = Http({'status': '404', - 'content-type': 'application/json', - }, - '{}') + URI = '/'.join([ + conn.API_BASE_URL, + 'storage', + conn.API_VERSION, + 'b', + 'nonesuch?project=%s' % PROJECT, + ]) + http = conn._http = Http( + {'status': '404', 'content-type': 'application/json'}, + '{}', + ) self.assertRaises(NotFoundError, conn.get_bucket, NONESUCH) self.assertEqual(http._called_with['method'], 'GET') self.assertEqual(http._called_with['uri'], URI) @@ -378,16 +395,17 @@ def test_get_bucket_hit(self): PROJECT = 'project' KEY = 'key' conn = self._makeOne(PROJECT) - URI = '/'.join([conn.API_BASE_URL, - 'storage', - conn.API_VERSION, - 'b', - 'key?project=%s' % PROJECT, - ]) - http = conn._http = Http({'status': '200', - 'content-type': 'application/json', - }, - '{"name": "%s"}' % KEY) + URI = '/'.join([ + conn.API_BASE_URL, + 'storage', + conn.API_VERSION, + 'b', + 'key?project=%s' % PROJECT, + ]) + http = conn._http = Http( + {'status': '200', 'content-type': 'application/json'}, + '{"name": "%s"}' % KEY, + ) bucket = conn.get_bucket(KEY) self.assertTrue(isinstance(bucket, Bucket)) self.assertTrue(bucket.connection is conn) @@ -399,16 +417,17 @@ def test_lookup_miss(self): PROJECT = 'project' NONESUCH = 'nonesuch' conn = self._makeOne(PROJECT) - URI = '/'.join([conn.API_BASE_URL, - 'storage', - conn.API_VERSION, - 'b', - 'nonesuch?project=%s' % PROJECT, - ]) - http = conn._http = Http({'status': '404', - 'content-type': 'application/json', - }, - '{}') + URI = '/'.join([ + conn.API_BASE_URL, + 'storage', + conn.API_VERSION, + 'b', + 'nonesuch?project=%s' % PROJECT, + ]) + http = conn._http = Http( + {'status': '404', 'content-type': 'application/json'}, + '{}', + ) self.assertEqual(conn.lookup(NONESUCH), None) self.assertEqual(http._called_with['method'], 'GET') 
self.assertEqual(http._called_with['uri'], URI) @@ -418,16 +437,17 @@ def test_lookup_hit(self): PROJECT = 'project' KEY = 'key' conn = self._makeOne(PROJECT) - URI = '/'.join([conn.API_BASE_URL, - 'storage', - conn.API_VERSION, - 'b', - 'key?project=%s' % PROJECT, - ]) - http = conn._http = Http({'status': '200', - 'content-type': 'application/json', - }, - '{"name": "%s"}' % KEY) + URI = '/'.join([ + conn.API_BASE_URL, + 'storage', + conn.API_VERSION, + 'b', + 'key?project=%s' % PROJECT, + ]) + http = conn._http = Http( + {'status': '200', 'content-type': 'application/json'}, + '{"name": "%s"}' % KEY, + ) bucket = conn.lookup(KEY) self.assertTrue(isinstance(bucket, Bucket)) self.assertTrue(bucket.connection is conn) @@ -440,15 +460,16 @@ def test_create_bucket_ok(self): PROJECT = 'project' KEY = 'key' conn = self._makeOne(PROJECT) - URI = '/'.join([conn.API_BASE_URL, - 'storage', - conn.API_VERSION, - 'b?project=%s' % PROJECT, - ]) - http = conn._http = Http({'status': '200', - 'content-type': 'application/json', - }, - '{"name": "%s"}' % KEY) + URI = '/'.join([ + conn.API_BASE_URL, + 'storage', + conn.API_VERSION, + 'b?project=%s' % PROJECT, + ]) + http = conn._http = Http( + {'status': '200', 'content-type': 'application/json'}, + '{"name": "%s"}' % KEY, + ) bucket = conn.create_bucket(KEY) self.assertTrue(isinstance(bucket, Bucket)) self.assertTrue(bucket.connection is conn) @@ -459,9 +480,6 @@ def test_create_bucket_ok(self): def test_delete_bucket_defaults_miss(self): _deleted_keys = [] - class _Key(object): - pass - class _Bucket(object): def __init__(self, name): @@ -471,16 +489,17 @@ def __init__(self, name): PROJECT = 'project' KEY = 'key' conn = self._makeOne(PROJECT) - URI = '/'.join([conn.API_BASE_URL, - 'storage', - conn.API_VERSION, - 'b', - 'key?project=%s' % PROJECT, - ]) - http = conn._http = Http({'status': '200', - 'content-type': 'application/json', - }, - '{}') + URI = '/'.join([ + conn.API_BASE_URL, + 'storage', + conn.API_VERSION, + 'b', + 'key?project=%s' % PROJECT, + ]) + http = conn._http = Http( + {'status': '200', 'content-type': 'application/json'}, + '{}', + ) def _new_bucket(name): return _Bucket(name) @@ -514,16 +533,17 @@ def __iter__(self): PROJECT = 'project' KEY = 'key' conn = self._makeOne(PROJECT) - URI = '/'.join([conn.API_BASE_URL, - 'storage', - conn.API_VERSION, - 'b', - 'key?project=%s' % PROJECT, - ]) - http = conn._http = Http({'status': '200', - 'content-type': 'application/json', - }, - '{}') + URI = '/'.join([ + conn.API_BASE_URL, + 'storage', + conn.API_VERSION, + 'b', + 'key?project=%s' % PROJECT, + ]) + http = conn._http = Http( + {'status': '200', 'content-type': 'application/json'}, + '{}', + ) def _new_bucket(name): return _Bucket(name) @@ -566,7 +586,6 @@ def test_generate_signed_url_w_expiration_int(self): ENDPOINT = 'http://api.example.com' RESOURCE = '/name/key' PROJECT = 'project' - KEY = 'key' SIGNED = base64.b64encode('DEADBEEF') crypto = _Crypto() rsa = _RSA() @@ -594,12 +613,14 @@ def test_generate_signed_url_w_expiration_int(self): class Test__get_expiration_seconds(unittest2.TestCase): - def _callFUT(self, expiration): + @staticmethod + def _callFUT(expiration): from gcloud.storage.connection import _get_expiration_seconds return _get_expiration_seconds(expiration) - def _utc_seconds(self, when): + @staticmethod + def _utc_seconds(when): import calendar return int(calendar.timegm(when.timetuple())) @@ -690,11 +711,8 @@ def request(self, **kw): class _Credentials(object): service_account_name = 'testing@example.com' - - 
@property - def private_key(self): - import base64 - return base64.b64encode('SEEKRIT') + # Base64 encoded 'SEEKRIT'. + private_key = 'U0VFS1JJVA==' class _Crypto(object): @@ -706,7 +724,8 @@ def load_pkcs12(self, buffer, passphrase): self._loaded = (buffer, passphrase) return self - def get_privatekey(self): + @staticmethod + def get_privatekey(): return '__PKCS12__' def dump_privatekey(self, type, pkey, cipher=None, passphrase=None): diff --git a/gcloud/storage/test_exceptions.py b/gcloud/storage/test_exceptions.py index 85484ae2186c..9e06b689179c 100644 --- a/gcloud/storage/test_exceptions.py +++ b/gcloud/storage/test_exceptions.py @@ -3,7 +3,8 @@ class TestConnectionError(unittest2.TestCase): - def _getTargetClass(self): + @staticmethod + def _getTargetClass(): from gcloud.storage.exceptions import ConnectionError return ConnectionError @@ -18,7 +19,8 @@ def test_no_headers(self): class TestNotFoundError(unittest2.TestCase): - def _getTargetClass(self): + @staticmethod + def _getTargetClass(): from gcloud.storage.exceptions import NotFoundError return NotFoundError @@ -26,6 +28,6 @@ def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) def test_no_headers(self): - e = self._makeOne({}, None) + e = self._makeOne({}) self.assertEqual(str(e), '') self.assertEqual(e.message, 'Request returned a 404. Headers: {}') diff --git a/gcloud/storage/test_iterator.py b/gcloud/storage/test_iterator.py index 07155d00b077..d490393fce6f 100644 --- a/gcloud/storage/test_iterator.py +++ b/gcloud/storage/test_iterator.py @@ -3,7 +3,8 @@ class TestIterator(unittest2.TestCase): - def _getTargetClass(self): + @staticmethod + def _getTargetClass(): from gcloud.storage.iterator import Iterator return Iterator @@ -73,8 +74,7 @@ def test_get_query_params_w_token(self): iterator = self._makeOne(connection, PATH) iterator.next_page_token = TOKEN self.assertEqual(iterator.get_query_params(), - {'pageToken': TOKEN, - }) + {'pageToken': TOKEN}) def test_get_next_page_response_new_no_token_in_response(self): PATH = '/foo' @@ -121,7 +121,8 @@ def test_get_items_from_response_raises_NotImplementedError(self): class TestBucketIterator(unittest2.TestCase): - def _getTargetClass(self): + @staticmethod + def _getTargetClass(): from gcloud.storage.iterator import BucketIterator return BucketIterator @@ -157,7 +158,8 @@ def test_get_items_from_response_non_empty(self): class TestKeyIterator(unittest2.TestCase): - def _getTargetClass(self): + @staticmethod + def _getTargetClass(): from gcloud.storage.iterator import KeyIterator return KeyIterator @@ -197,7 +199,8 @@ def test_get_items_from_response_non_empty(self): class TestKeyDataIterator(unittest2.TestCase): - def _getTargetClass(self): + @staticmethod + def _getTargetClass(): from gcloud.storage.iterator import KeyDataIterator return KeyDataIterator @@ -217,9 +220,10 @@ def test__iter__(self): response1['content-range'] = '0-9/15' response2 = _Response(status=200) response2['content-range'] = '10-14/15' - connection = _Connection((response1, '0123456789'), - (response2, '01234'), - ) + connection = _Connection( + (response1, '0123456789'), + (response2, '01234'), + ) key = _Key(connection) iterator = self._makeOne(key) chunks = list(iterator) @@ -382,7 +386,8 @@ def api_request(self, **kw): response, self._responses = self._responses[0], self._responses[1:] return response - def build_api_url(self, path, query_params=None): + @staticmethod + def build_api_url(path, query_params=None): from urllib import urlencode from urlparse import urlunsplit qs = 
urlencode(query_params or {}) diff --git a/gcloud/storage/test_key.py b/gcloud/storage/test_key.py index 8b41912ecdfc..375a9c4ffb49 100644 --- a/gcloud/storage/test_key.py +++ b/gcloud/storage/test_key.py @@ -3,7 +3,8 @@ class Test_Key(unittest2.TestCase): - def _getTargetClass(self): + @staticmethod + def _getTargetClass(): from gcloud.storage.key import Key return Key @@ -126,7 +127,6 @@ def test_rename(self): bucket = _Bucket(connection) key = self._makeOne(bucket, KEY) bucket._keys[KEY] = 1 - orig_key_path = key.path new_key = key.rename(NEW_NAME) self.assertEqual(key.name, KEY) self.assertEqual(new_key.name, NEW_NAME) @@ -196,10 +196,11 @@ def test_set_contents_from_file(self): loc_response = {'location': UPLOAD_URL} chunk1_response = {} chunk2_response = {} - connection = _Connection((loc_response, ''), - (chunk1_response, ''), - (chunk2_response, ''), - ) + connection = _Connection( + (loc_response, ''), + (chunk1_response, ''), + (chunk2_response, ''), + ) bucket = _Bucket(connection) key = self._makeOne(bucket, KEY) key.CHUNK_SIZE = 5 @@ -211,7 +212,7 @@ def test_set_contents_from_file(self): self.assertEqual(len(rq), 3) self.assertEqual(rq[0]['method'], 'POST') uri = rq[0]['url'] - scheme, netloc, path, qs, frag = urlsplit(uri) + scheme, netloc, path, qs, _ = urlsplit(uri) self.assertEqual(scheme, 'http') self.assertEqual(netloc, 'example.com') self.assertEqual(path, '/b/name/o') @@ -241,10 +242,11 @@ def test_set_contents_from_filename(self): loc_response = {'location': UPLOAD_URL} chunk1_response = {} chunk2_response = {} - connection = _Connection((loc_response, ''), - (chunk1_response, ''), - (chunk2_response, ''), - ) + connection = _Connection( + (loc_response, ''), + (chunk1_response, ''), + (chunk2_response, ''), + ) bucket = _Bucket(connection) key = self._makeOne(bucket, KEY) key.CHUNK_SIZE = 5 @@ -256,7 +258,7 @@ def test_set_contents_from_filename(self): self.assertEqual(len(rq), 3) self.assertEqual(rq[0]['method'], 'POST') uri = rq[0]['url'] - scheme, netloc, path, qs, frag = urlsplit(uri) + scheme, netloc, path, qs, _ = urlsplit(uri) self.assertEqual(scheme, 'http') self.assertEqual(netloc, 'example.com') self.assertEqual(path, '/b/name/o') @@ -285,10 +287,11 @@ def test_set_contents_from_string(self): loc_response = {'location': UPLOAD_URL} chunk1_response = {} chunk2_response = {} - connection = _Connection((loc_response, ''), - (chunk1_response, ''), - (chunk2_response, ''), - ) + connection = _Connection( + (loc_response, ''), + (chunk1_response, ''), + (chunk2_response, ''), + ) bucket = _Bucket(connection) key = self._makeOne(bucket, KEY) key.CHUNK_SIZE = 5 @@ -297,7 +300,7 @@ def test_set_contents_from_string(self): self.assertEqual(len(rq), 3) self.assertEqual(rq[0]['method'], 'POST') uri = rq[0]['url'] - scheme, netloc, path, qs, frag = urlsplit(uri) + scheme, netloc, path, qs, _ = urlsplit(uri) self.assertEqual(scheme, 'http') self.assertEqual(netloc, 'example.com') self.assertEqual(path, '/b/name/o') @@ -573,7 +576,7 @@ def test_make_public(self): from gcloud.storage.acl import ACL KEY = 'key' before = {'acl': []} - permissive = [{'entity': 'allUsers', 'role': ACL.Role.Reader}] + permissive = [{'entity': 'allUsers', 'role': ACL.READER_ROLE}] after = {'acl': permissive} connection = _Connection(after) bucket = _Bucket(connection) @@ -607,7 +610,8 @@ def api_request(self, **kw): response, self._responses = self._responses[0], self._responses[1:] return response - def build_api_url(self, path, query_params=None, + @staticmethod + def build_api_url(path, 
query_params=None, api_base_url=API_BASE_URL): from urllib import urlencode from urlparse import urlsplit @@ -632,12 +636,12 @@ def __init__(self, connection): self._deleted = [] def get_key(self, key): - return self._keys.get(key) # XXX s.b. 'key.name'? + return self._keys.get(key) def copy_key(self, key, destination_bucket, new_name): destination_bucket._keys[new_name] = self._keys[key.name] return key.from_dict({'name': new_name}, bucket=destination_bucket) def delete_key(self, key): - del self._keys[key.name] # XXX s.b. 'key'? + del self._keys[key.name] self._deleted.append(key.name) diff --git a/gcloud/test_connection.py b/gcloud/test_connection.py index 1ea354b89f93..10f61f9c977c 100644 --- a/gcloud/test_connection.py +++ b/gcloud/test_connection.py @@ -3,7 +3,8 @@ class TestConnection(unittest2.TestCase): - def _getTargetClass(self): + @staticmethod + def _getTargetClass(): from gcloud.connection import Connection return Connection @@ -35,7 +36,6 @@ def test_http_w_creds(self): authorized = object() class Creds(object): - def authorize(self, http): self._called_with = http return authorized diff --git a/gcloud/test_credentials.py b/gcloud/test_credentials.py index ce5cdeea4fbd..15c8b8b4a0b3 100644 --- a/gcloud/test_credentials.py +++ b/gcloud/test_credentials.py @@ -11,17 +11,18 @@ def test_get_for_service_account_wo_scope(self): PRIVATE_KEY = 'SEEkR1t' client = _Client() with _Monkey(credentials, client=client): - with NamedTemporaryFile() as f: - f.write(PRIVATE_KEY) - f.flush() - found = credentials.get_for_service_account( - CLIENT_EMAIL, f.name) + with NamedTemporaryFile() as file_obj: + file_obj.write(PRIVATE_KEY) + file_obj.flush() + found = credentials.get_for_service_account(CLIENT_EMAIL, + file_obj.name) self.assertTrue(found is client._signed) - self.assertEqual(client._called_with, - {'service_account_name': CLIENT_EMAIL, - 'private_key': PRIVATE_KEY, - 'scope': None, - }) + expected_called_with = { + 'service_account_name': CLIENT_EMAIL, + 'private_key': PRIVATE_KEY, + 'scope': None, + } + self.assertEqual(client._called_with, expected_called_with) def test_get_for_service_account_w_scope(self): from tempfile import NamedTemporaryFile @@ -32,21 +33,21 @@ def test_get_for_service_account_w_scope(self): SCOPE = 'SCOPE' client = _Client() with _Monkey(credentials, client=client): - with NamedTemporaryFile() as f: - f.write(PRIVATE_KEY) - f.flush() + with NamedTemporaryFile() as file_obj: + file_obj.write(PRIVATE_KEY) + file_obj.flush() found = credentials.get_for_service_account( - CLIENT_EMAIL, f.name, SCOPE) + CLIENT_EMAIL, file_obj.name, SCOPE) self.assertTrue(found is client._signed) - self.assertEqual(client._called_with, - {'service_account_name': CLIENT_EMAIL, - 'private_key': PRIVATE_KEY, - 'scope': SCOPE, - }) + expected_called_with = { + 'service_account_name': CLIENT_EMAIL, + 'private_key': PRIVATE_KEY, + 'scope': SCOPE, + } + self.assertEqual(client._called_with, expected_called_with) class _Client(object): - def __init__(self): self._signed = object() diff --git a/pylintrc_default b/pylintrc_default new file mode 100644 index 000000000000..d393e36a2b9b --- /dev/null +++ b/pylintrc_default @@ -0,0 +1,37 @@ +# This config is intended to be used for library +# and other production code. 
+
+[BASIC]
+good-names = i, j, k, ex, Run, _, pb, id,
+             _get_protobuf_attribute_and_value
+
+[DESIGN]
+max-args = 10
+max-public-methods = 30
+
+[FORMAT]
+# NOTE: By default pylint ignores the "dict-separator" check
+# for cases like {1:2} and the "trailing-comma" check for
+# cases like {1: 2, }. By setting "no-space-check" to empty
+# both of these checks will occur.
+no-space-check =
+
+[MASTER]
+# NOTE: This path must be relative due to the use of
+# os.walk in astroid.modutils.get_module_files.
+ignore = datastore_v1_pb2.py
+
+[MESSAGES CONTROL]
+disable = I, protected-access, maybe-no-member, no-member,
+          redefined-builtin, star-args, missing-format-attribute,
+          similarities, cyclic-import, arguments-differ,
+
+
+
+
+
+[REPORTS]
+reports = no
+
+[VARIABLES]
+dummy-variables-rgx = _$|dummy|^unused_
diff --git a/pylintrc_reduced b/pylintrc_reduced
new file mode 100644
index 000000000000..fda04876c77a
--- /dev/null
+++ b/pylintrc_reduced
@@ -0,0 +1,37 @@
+# This config is intended to be used for test code
+# and other non-production code, like demos.
+
+[BASIC]
+good-names = i, j, k, ex, Run, _, pb, id,
+             _get_protobuf_attribute_and_value
+
+[DESIGN]
+max-args = 10
+max-public-methods = 30
+
+[FORMAT]
+# NOTE: By default pylint ignores the "dict-separator" check
+# for cases like {1:2} and the "trailing-comma" check for
+# cases like {1: 2, }. By setting "no-space-check" to empty
+# both of these checks will occur.
+no-space-check =
+
+[MASTER]
+# NOTE: This path must be relative due to the use of
+# os.walk in astroid.modutils.get_module_files.
+ignore = datastore_v1_pb2.py
+
+[MESSAGES CONTROL]
+disable = I, protected-access, maybe-no-member, no-member,
+          redefined-builtin, star-args, missing-format-attribute,
+          similarities, cyclic-import, arguments-differ,
+          invalid-name, missing-docstring, too-many-public-methods,
+          too-few-public-methods, attribute-defined-outside-init,
+          unbalanced-tuple-unpacking, too-many-locals, exec-used,
+          no-init
+
+[REPORTS]
+reports = no
+
+[VARIABLES]
+dummy-variables-rgx = _$|dummy|^unused_
diff --git a/run_pylint.py b/run_pylint.py
new file mode 100644
index 000000000000..c3e6f2d1f84d
--- /dev/null
+++ b/run_pylint.py
@@ -0,0 +1,82 @@
+"""Custom script to run PyLint on gcloud codebase.
+
+This runs pylint as a script via subprocess, in two separate
+invocations. The first lints the production/library code
+using the default rc file (PRODUCTION_RC). The second lints the
+demo/test code using an rc file (TEST_RC) which allows more style
+violations (hence it has a reduced number of style checks).
+"""
+
+import subprocess
+import sys
+
+
+IGNORED_FILES = [
+    'gcloud/datastore/datastore_v1_pb2.py',
+    'docs/conf.py',
+    'setup.py',
+]
+PRODUCTION_RC = 'pylintrc_default'
+TEST_RC = 'pylintrc_reduced'
+
+
+def valid_filename(filename):
+    """Checks if a file is a Python file and is not ignored."""
+    return (filename.endswith('.py') and
+            filename not in IGNORED_FILES)
+
+
+def is_production_filename(filename):
+    """Checks if the file contains production code.
+
+    :rtype: `bool`
+    :returns: Boolean indicating production status.
+    """
+    return not ('demo' in filename or 'test' in filename)
+
+
+def get_python_files():
+    """Gets a list of all Python files in the repository.
+
+    NOTE: This requires `git` to be installed and requires that it
+          be run within the `git` repository.
+
+    :rtype: `tuple`
+    :returns: A tuple containing two lists. The first list contains
+              all production files and the next all test/demo files.
+    """
+    all_files = subprocess.check_output(['git', 'ls-files'])
+
+    library_files = []
+    non_library_files = []
+    for filename in all_files.split('\n'):
+        if valid_filename(filename):
+            if is_production_filename(filename):
+                library_files.append(filename)
+            else:
+                non_library_files.append(filename)
+
+    return library_files, non_library_files
+
+
+def lint_fileset(filenames, rcfile, description):
+    """Lints a group of files using a given rcfile."""
+    rc_flag = '--rcfile=%s' % (rcfile,)
+    pylint_shell_command = ['pylint', rc_flag] + filenames
+    status_code = subprocess.call(pylint_shell_command)
+    if status_code != 0:
+        error_message = ('Pylint failed on %s with '
+                         'status %d.' % (description, status_code))
+        print >> sys.stderr, error_message
+        sys.exit(status_code)
+
+
+def main():
+    """Script entry point. Lints both sets of files."""
+    library_files, non_library_files = get_python_files()
+    lint_fileset(library_files, PRODUCTION_RC, 'library code')
+    lint_fileset(non_library_files, TEST_RC, 'test and demo code')
+
+
+if __name__ == '__main__':
+    main()
diff --git a/tox.ini b/tox.ini
index dad00b1bb22d..80d0dac3edd1 100644
--- a/tox.ini
+++ b/tox.ini
@@ -38,5 +38,8 @@ basepython =
     python2.7
 commands =
     pep8
+    python run_pylint.py
 deps =
     pep8
+    pylint
+    unittest2

From d609071d0abb07722d1926a7824a8afe7afb1bf9 Mon Sep 17 00:00:00 2001
From: Danny Hermes
Date: Fri, 17 Oct 2014 15:18:19 -0700
Subject: [PATCH 2/2] Addressing feedback in review for main pylint changes.

- Reverting refactoring changes of code re-use between storage.Key
  and storage.Bucket. These will be committed in a separate PR.
- Turning off the `no-self-use` warning for test code and reverting
  all the copious `@staticmethod` decorations in test and demo code.
- Removing a refactoring note about storage.bucket.Bucket.from_dict
  from the code.
- Fixing indent on a module attribute docstring in storage.key.Key.
- Updating note in pylintrc files to make the intent clearer.
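
  To illustrate the `@staticmethod` revert (a sketch of the pattern,
  not a hunk from this patch), a test helper that had been converted to

      @staticmethod
      def _getTargetClass():
          from gcloud.storage.key import Key
          return Key

  goes back to the conventional bound-method form

      def _getTargetClass(self):
          from gcloud.storage.key import Key
          return Key

  with pylint's `no-self-use` warning disabled for test code via the
  reduced rc file, so the unused `self` no longer trips the linter.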
--- gcloud/datastore/test___init__.py | 6 +- gcloud/datastore/test__helpers.py | 18 +- gcloud/datastore/test_connection.py | 30 ++-- gcloud/datastore/test_dataset.py | 3 +- gcloud/datastore/test_entity.py | 6 +- gcloud/datastore/test_key.py | 6 +- gcloud/datastore/test_query.py | 3 +- gcloud/datastore/test_transaction.py | 3 +- gcloud/demo.py | 6 +- gcloud/storage/_helpers.py | 241 --------------------------- gcloud/storage/bucket.py | 223 ++++++++++++++++++++++--- gcloud/storage/key.py | 171 +++++++++++++++++-- gcloud/storage/test___init__.py | 6 +- gcloud/storage/test__helpers.py | 19 --- gcloud/storage/test_acl.py | 15 +- gcloud/storage/test_bucket.py | 3 +- gcloud/storage/test_connection.py | 19 +-- gcloud/storage/test_exceptions.py | 6 +- gcloud/storage/test_iterator.py | 15 +- gcloud/storage/test_key.py | 6 +- gcloud/test_connection.py | 3 +- pylintrc_default | 8 +- pylintrc_reduced | 10 +- 23 files changed, 418 insertions(+), 408 deletions(-) delete mode 100644 gcloud/storage/_helpers.py delete mode 100644 gcloud/storage/test__helpers.py diff --git a/gcloud/datastore/test___init__.py b/gcloud/datastore/test___init__.py index 95c5bd374c5e..147563109066 100644 --- a/gcloud/datastore/test___init__.py +++ b/gcloud/datastore/test___init__.py @@ -3,8 +3,7 @@ class Test_get_connection(unittest2.TestCase): - @staticmethod - def _callFUT(client_email, private_key_path): + def _callFUT(self, client_email, private_key_path): from gcloud.datastore import get_connection return get_connection(client_email, private_key_path) @@ -36,8 +35,7 @@ def test_it(self): class Test_get_dataset(unittest2.TestCase): - @staticmethod - def _callFUT(dataset_id, client_email, private_key_path): + def _callFUT(self, dataset_id, client_email, private_key_path): from gcloud.datastore import get_dataset return get_dataset(dataset_id, client_email, private_key_path) diff --git a/gcloud/datastore/test__helpers.py b/gcloud/datastore/test__helpers.py index 74e5878d548f..5a095f11df78 100644 --- a/gcloud/datastore/test__helpers.py +++ b/gcloud/datastore/test__helpers.py @@ -3,8 +3,7 @@ class Test__get_protobuf_attribute_and_value(unittest2.TestCase): - @staticmethod - def _callFUT(val): + def _callFUT(self, val): from gcloud.datastore._helpers import _get_protobuf_attribute_and_value return _get_protobuf_attribute_and_value(val) @@ -97,14 +96,12 @@ def test_object(self): class Test__get_value_from_value_pb(unittest2.TestCase): - @staticmethod - def _callFUT(pb): + def _callFUT(self, pb): from gcloud.datastore._helpers import _get_value_from_value_pb return _get_value_from_value_pb(pb) - @staticmethod - def _makePB(attr_name, value): + def _makePB(self, attr_name, value): from gcloud.datastore.datastore_v1_pb2 import Value pb = Value() @@ -190,8 +187,7 @@ def test_unknown(self): class Test__get_value_from_property_pb(unittest2.TestCase): - @staticmethod - def _callFUT(pb): + def _callFUT(self, pb): from gcloud.datastore._helpers import _get_value_from_property_pb return _get_value_from_property_pb(pb) @@ -206,14 +202,12 @@ def test_it(self): class Test_set_protobuf_value(unittest2.TestCase): - @staticmethod - def _callFUT(value_pb, val): + def _callFUT(self, value_pb, val): from gcloud.datastore._helpers import _set_protobuf_value return _set_protobuf_value(value_pb, val) - @staticmethod - def _makePB(): + def _makePB(self): from gcloud.datastore.datastore_v1_pb2 import Value return Value() diff --git a/gcloud/datastore/test_connection.py b/gcloud/datastore/test_connection.py index ce028594bfc8..9ad853bf3a56 100644 --- 
a/gcloud/datastore/test_connection.py +++ b/gcloud/datastore/test_connection.py @@ -3,8 +3,7 @@ class TestConnection(unittest2.TestCase): - @staticmethod - def _getTargetClass(): + def _getTargetClass(self): from gcloud.datastore.connection import Connection return Connection @@ -89,8 +88,7 @@ def test__rpc(self): class ReqPB(object): - @staticmethod - def SerializeToString(): + def SerializeToString(self): return b'REQPB' class RspPB(object): @@ -187,8 +185,7 @@ class Mutation(object): pass class Xact(object): - @staticmethod - def mutation(): + def mutation(self): return Mutation() conn = self._makeOne() conn.transaction(Xact()) @@ -282,8 +279,7 @@ def test_rollback_transaction_w_existing_transaction_no_id(self): class Xact(object): - @staticmethod - def id(): + def id(self): return None DATASET_ID = 'DATASET' conn = self._makeOne() @@ -298,8 +294,7 @@ def test_rollback_transaction_ok(self): class Xact(object): - @staticmethod - def id(): + def id(self): return TRANSACTION rsp_pb = datastore_pb.RollbackResponse() conn = self._makeOne() @@ -571,8 +566,7 @@ def test_commit_w_transaction(self): from gcloud.datastore.key import Key class Xact(object): - @staticmethod - def id(): + def id(self): return 'xact' DATASET_ID = 'DATASET' key_pb = Key(dataset=Dataset(DATASET_ID), @@ -723,8 +717,7 @@ def test_save_entity_w_transaction(self): mutation = datastore_pb.Mutation() class Xact(object): - @staticmethod - def mutation(): + def mutation(self): return mutation DATASET_ID = 'DATASET' key_pb = Key(dataset=Dataset(DATASET_ID), @@ -748,8 +741,7 @@ def test_save_entity_w_transaction_nested_entity(self): mutation = datastore_pb.Mutation() class Xact(object): - @staticmethod - def mutation(): + def mutation(self): return mutation DATASET_ID = 'DATASET' nested = Entity() @@ -818,8 +810,7 @@ def test_delete_entities_w_transaction(self): mutation = datastore_pb.Mutation() class Xact(object): - @staticmethod - def mutation(): + def mutation(self): return mutation DATASET_ID = 'DATASET' key_pb = Key(dataset=Dataset(DATASET_ID), @@ -886,8 +877,7 @@ def test_delete_entity_w_transaction(self): mutation = datastore_pb.Mutation() class Xact(object): - @staticmethod - def mutation(): + def mutation(self): return mutation DATASET_ID = 'DATASET' key_pb = Key(dataset=Dataset(DATASET_ID), diff --git a/gcloud/datastore/test_dataset.py b/gcloud/datastore/test_dataset.py index 8db28d748dea..a734512119d0 100644 --- a/gcloud/datastore/test_dataset.py +++ b/gcloud/datastore/test_dataset.py @@ -3,8 +3,7 @@ class TestDataset(unittest2.TestCase): - @staticmethod - def _getTargetClass(): + def _getTargetClass(self): from gcloud.datastore.dataset import Dataset return Dataset diff --git a/gcloud/datastore/test_entity.py b/gcloud/datastore/test_entity.py index b60582d32b1a..89e56e813e20 100644 --- a/gcloud/datastore/test_entity.py +++ b/gcloud/datastore/test_entity.py @@ -8,8 +8,7 @@ class TestEntity(unittest2.TestCase): - @staticmethod - def _getTargetClass(): + def _getTargetClass(self): from gcloud.datastore.entity import Entity return Entity @@ -244,8 +243,7 @@ def __init__(self, connection=None): super(_Dataset, self).__init__() self._connection = connection - @staticmethod - def id(): + def id(self): return _DATASET_ID def connection(self): diff --git a/gcloud/datastore/test_key.py b/gcloud/datastore/test_key.py index fe239b04aa67..9e778758df41 100644 --- a/gcloud/datastore/test_key.py +++ b/gcloud/datastore/test_key.py @@ -3,16 +3,14 @@ class TestKey(unittest2.TestCase): - @staticmethod - def _getTargetClass(): + def 
_getTargetClass(self): from gcloud.datastore.key import Key return Key def _makeOne(self, dataset=None, namespace=None, path=None): return self._getTargetClass()(dataset, namespace, path) - @staticmethod - def _makePB(dataset_id=None, namespace=None, path=()): + def _makePB(self, dataset_id=None, namespace=None, path=()): from gcloud.datastore.datastore_v1_pb2 import Key pb = Key() if dataset_id is not None: diff --git a/gcloud/datastore/test_query.py b/gcloud/datastore/test_query.py index 82f3d01b4a3e..ed5c9a2b54c8 100644 --- a/gcloud/datastore/test_query.py +++ b/gcloud/datastore/test_query.py @@ -3,8 +3,7 @@ class TestQuery(unittest2.TestCase): - @staticmethod - def _getTargetClass(): + def _getTargetClass(self): from gcloud.datastore.query import Query return Query diff --git a/gcloud/datastore/test_transaction.py b/gcloud/datastore/test_transaction.py index e1393f9125f9..f6f9bc088dc3 100644 --- a/gcloud/datastore/test_transaction.py +++ b/gcloud/datastore/test_transaction.py @@ -3,8 +3,7 @@ class TestTransaction(unittest2.TestCase): - @staticmethod - def _getTargetClass(): + def _getTargetClass(self): from gcloud.datastore.transaction import Transaction return Transaction diff --git a/gcloud/demo.py b/gcloud/demo.py index d80328ee8a0b..5dbd8476ae12 100644 --- a/gcloud/demo.py +++ b/gcloud/demo.py @@ -35,8 +35,7 @@ def run(self): interact('(Hit CTRL-D to exit...)', local=self.LOCALS) - @staticmethod - def wait(): + def wait(self): raw_input() @classmethod @@ -46,8 +45,7 @@ def get_line_type(cls, line): else: return cls.CODE - @staticmethod - def get_indent_level(line): + def get_indent_level(self, line): if not line.strip(): return None return len(line) - len(line.lstrip()) diff --git a/gcloud/storage/_helpers.py b/gcloud/storage/_helpers.py deleted file mode 100644 index b4400fe6b93b..000000000000 --- a/gcloud/storage/_helpers.py +++ /dev/null @@ -1,241 +0,0 @@ -"""Helper functions for Cloud Storage utility classes. - -These are *not* part of the API. -""" - - -class _MetadataMixin(object): - """Abstract mixin for cloud storage classes with associated metadata. - - Expected to be subclasses by :class:`gcloud.storage.bucket.Bucket` - and :class:`gcloud.storage.key.Key` and both of those classes - will implemented the abstract parts: - - LOAD_FULL_FIELDS - - ACL_CLASS - - ACL_KEYWORD - - connection - - path - """ - - LOAD_FULL_FIELDS = None - """Tuple of fields which pertain to metadata. - - Expected to be set by subclasses. Fields in this tuple will cause - `get_metadata()` to do a full reload of all metadata before - returning. - """ - - ACL_CLASS = type(None) - """Class which holds ACL data for a given type. - - Expected to be set by subclasses. - """ - - ACL_KEYWORD = None - """Keyword for ACL_CLASS constructor to pass an object in. - - Expected to be set by subclasses. - """ - - def __init__(self): - # These should be set by the superclass. - self.metadata = None - self.acl = None - - @property - def connection(self): - """Abstract getter for the connection to use.""" - raise NotImplementedError - - @property - def path(self): - """Abstract getter for the object path.""" - raise NotImplementedError - - def has_metadata(self, field=None): - """Check if metadata is available. - - :type field: string - :param field: (optional) the particular field to check for. - - :rtype: bool - :returns: Whether metadata is available locally. 
- """ - if not self.metadata: - return False - elif field and field not in self.metadata: - return False - else: - return True - - def reload_metadata(self, full=False): - """Reload metadata. - - :type full: bool - :param full: If True, loads all data (include ACL data). - - :rtype: :class:`_MetadataMixin` - :returns: The object you just reloaded data for. - """ - - projection = 'full' if full else 'noAcl' - query_params = {'projection': projection} - self.metadata = self.connection.api_request( - method='GET', path=self.path, query_params=query_params) - return self - - def get_metadata(self, field=None, default=None): - """Get all metadata or a specific field. - - If you request a field that isn't available, - and that field can be retrieved by refreshing data, - this method will reload the data using - :func:`_MetadataMixin.reload_metadata`. - - :type field: string - :param field: (optional) A particular field to retrieve from metadata. - - :type default: anything - :param default: The value to return if the field provided wasn't found. - - :rtype: dict or anything - :returns: All metadata or the value of the specific field. - """ - - if not self.has_metadata(field=field): - full = (field and field in self.LOAD_FULL_FIELDS) - self.reload_metadata(full=full) - - if field: - return self.metadata.get(field, default) - else: - return self.metadata - - def patch_metadata(self, metadata): - """Update particular fields of this object's metadata. - - This method will only update the fields provided - and will not touch the other fields. - - It will also reload the metadata locally - based on the servers response. - - :type metadata: dict - :param metadata: The dictionary of values to update. - - :rtype: :class:`_MetadataMixin` - :returns: The current object. - """ - self.metadata = self.connection.api_request( - method='PATCH', path=self.path, data=metadata, - query_params={'projection': 'full'}) - return self - - def reload_acl(self): - """Reload the ACL data. - - :rtype: :class:`_MetadataMixin` - :returns: The current object. - """ - self.acl = self.ACL_CLASS(**{self.ACL_KEYWORD: self}) - - for entry in self.get_metadata('acl', []): - entity = self.acl.entity_from_dict(entry) - self.acl.add_entity(entity) - - return self - - def get_acl(self): - """Get ACL metadata as an object of type `ACL_CLASS`. - - :returns: An ACL object for the current object. - """ - if not self.acl: - self.reload_acl() - return self.acl - - def save_acl(self, acl=None): - """Save the ACL data for this object. - - If called without arguments, this will save the ACL currently - stored on the current object. - - For example, for a `metadata_object` this will save the ACL - stored in ``some_other_acl``:: - - >>> metadata_object.acl = some_other_acl - >>> metadata_object.save_acl() - - You can also provide a specific ACL to save instead of the one - currently set on the object:: - - >>> metadata_object.save_acl(acl=my_other_acl) - - You can use this to set access controls to be consistent from - one object to another:: - - >>> metadata_object1 = get_object(object1_name) - >>> metadata_object2 = get_object(object2_name) - >>> metadata_object2.save_acl(metadata_object1.get_acl()) - - If you want to **clear** the ACL for the object, you must save - an empty list (``[]``) rather than using ``None`` (which is - interpreted as wanting to save the current ACL):: - - >>> metadata_object.save_acl(None) # Saves current ACL (self.acl). - >>> metadata_object.save_acl([]) # Clears current ACL. - - :param acl: The ACL object to save. 
- If left blank, this will save the ACL - set locally on the object. - """ - # NOTE: If acl is [], it is False-y but the acl can be set to an - # empty list, so we only override a null input. - if acl is None: - acl = self.acl - - if acl is None: - return self - - self.patch_metadata({'acl': list(acl)}) - self.reload_acl() - return self - - def clear_acl(self): - """Remove all ACL rules from the object. - - Note that this won't actually remove *ALL* the rules, but it - will remove all the non-default rules. In short, you'll still - have access to the object that you created even after you - clear ACL rules with this method. - - For example, imagine that you granted access to a Bucket - (inheriting from this class) to a bunch of coworkers:: - - >>> from gcloud import storage - >>> connection = storage.get_connection(project, email, - private_key_path) - >>> bucket = connection.get_bucket(bucket_name) - >>> acl = bucket.get_acl() - >>> acl.user('coworker1@example.org').grant_read() - >>> acl.user('coworker2@example.org').grant_read() - >>> acl.save() - - Now they work in another part of the company - and you want to 'start fresh' on who has access:: - - >>> acl.clear_acl() - - At this point all the custom rules you created have been removed. - """ - return self.save_acl(acl=[]) - - def make_public(self): - """Make this object public giving all users read access. - - :returns: The current object. - """ - - self.get_acl().all().grant_read() - self.save_acl() - return self diff --git a/gcloud/storage/bucket.py b/gcloud/storage/bucket.py index 859e9a131a60..f6c97ceef109 100644 --- a/gcloud/storage/bucket.py +++ b/gcloud/storage/bucket.py @@ -2,7 +2,6 @@ import os -from gcloud.storage._helpers import _MetadataMixin from gcloud.storage import exceptions from gcloud.storage.acl import BucketACL from gcloud.storage.acl import DefaultObjectACL @@ -10,7 +9,7 @@ from gcloud.storage.key import Key -class Bucket(_MetadataMixin): +class Bucket(object): """A class representing a Bucket on Cloud Storage. :type connection: :class:`gcloud.storage.connection.Connection` @@ -20,19 +19,8 @@ class Bucket(_MetadataMixin): :param name: The name of the bucket. """ - LOAD_FULL_FIELDS = ('acl', 'defaultObjectAcl') - """Tuple of metadata fields pertaining to bucket ACLs.""" - - ACL_CLASS = BucketACL - """Class which holds ACL data for buckets.""" - - ACL_KEYWORD = 'bucket' - """Keyword for BucketACL constructor to pass a bucket in.""" - def __init__(self, connection=None, name=None, metadata=None): - super(Bucket, self).__init__() - - self._connection = connection + self.connection = connection self.name = name self.metadata = metadata @@ -40,7 +28,6 @@ def __init__(self, connection=None, name=None, metadata=None): self.acl = None self.default_object_acl = None - # NOTE: Could also put this in _MetadataMixin. @classmethod def from_dict(cls, bucket_dict, connection=None): """Construct a new bucket from a dictionary of data from Cloud Storage. @@ -64,15 +51,6 @@ def __iter__(self): def __contains__(self, key): return self.get_key(key) is not None - @property - def connection(self): - """Getter property for the connection to use with this Bucket. - - :rtype: :class:`gcloud.storage.connection.Connection` - :returns: The connection to use. 
- """ - return self._connection - @property def path(self): """The URL path to this bucket.""" @@ -332,6 +310,88 @@ def upload_file_object(self, file_obj, key=None): key = self.new_key(os.path.basename(file_obj.name)) return key.set_contents_from_file(file_obj) + def has_metadata(self, field=None): + """Check if metadata is available locally. + + :type field: string + :param field: (optional) the particular field to check for. + + :rtype: bool + :returns: Whether metadata is available locally. + """ + + if not self.metadata: + return False + elif field and field not in self.metadata: + return False + else: + return True + + def reload_metadata(self, full=False): + """Reload metadata from Cloud Storage. + + :type full: bool + :param full: If True, loads all data (include ACL data). + + :rtype: :class:`Bucket` + :returns: The bucket you just reloaded data for. + """ + + projection = 'full' if full else 'noAcl' + query_params = {'projection': projection} + self.metadata = self.connection.api_request( + method='GET', path=self.path, query_params=query_params) + return self + + def get_metadata(self, field=None, default=None): + """Get all metadata or a specific field. + + If you request a field that isn't available, + and that field can be retrieved by refreshing data + from Cloud Storage, + this method will reload the data using + :func:`Bucket.reload_metadata`. + + :type field: string + :param field: (optional) A particular field to retrieve from metadata. + + :type default: anything + :param default: The value to return if the field provided wasn't found. + + :rtype: dict or anything + :returns: All metadata or the value of the specific field. + """ + + if not self.has_metadata(field=field): + full = (field and field in ('acl', 'defaultObjectAcl')) + self.reload_metadata(full=full) + + if field: + return self.metadata.get(field, default) + else: + return self.metadata + + def patch_metadata(self, metadata): + """Update particular fields of this bucket's metadata. + + This method will only update the fields provided + and will not touch the other fields. + + It will also reload the metadata locally + based on the servers response. + + :type metadata: dict + :param metadata: The dictionary of values to update. + + :rtype: :class:`Bucket` + :returns: The current bucket. + """ + + self.metadata = self.connection.api_request( + method='PATCH', path=self.path, data=metadata, + query_params={'projection': 'full'}) + return self + def configure_website(self, main_page_suffix=None, not_found_page=None): """Configure website-related metadata. @@ -389,6 +449,117 @@ def disable_website(self): return self.configure_website(None, None) + def reload_acl(self): + """Reload the ACL data from Cloud Storage. + + :rtype: :class:`Bucket` + :returns: The current bucket. + """ + + self.acl = BucketACL(bucket=self) + + for entry in self.get_metadata('acl', []): + entity = self.acl.entity_from_dict(entry) + self.acl.add_entity(entity) + + return self + + def get_acl(self): + """Get ACL metadata as a :class:`gcloud.storage.acl.BucketACL` object. + + :rtype: :class:`gcloud.storage.acl.BucketACL` + :returns: An ACL object for the current bucket. + """ + + if not self.acl: + self.reload_acl() + return self.acl + + def save_acl(self, acl=None): + """Save the ACL data for this bucket. + + If called without arguments, + this will save the ACL currently stored on the Bucket object. 
+ For example, + this will save + the ACL stored in ``some_other_acl``:: + + >>> bucket.acl = some_other_acl + >>> bucket.save_acl() + + You can also provide a specific ACL to save + instead of the one currently set + on the Bucket object:: + + >>> bucket.save_acl(acl=my_other_acl) + + You can use this to set access controls + to be consistent from one bucket to another:: + + >>> bucket1 = connection.get_bucket(bucket1_name) + >>> bucket2 = connection.get_bucket(bucket2_name) + >>> bucket2.save_acl(bucket1.get_acl()) + + If you want to **clear** the ACL for the bucket, + you must save an empty list (``[]``) + rather than using ``None`` + (which is interpreted as wanting to save the current ACL):: + + >>> bucket.save_acl(None) # Saves the current ACL (self.acl). + >>> bucket.save_acl([]) # Clears the current ACL. + + :type acl: :class:`gcloud.storage.acl.ACL` + :param acl: The ACL object to save. + If left blank, this will save the ACL + set locally on the bucket. + """ + + # We do things in this weird way because [] and None + # both evaluate to False, but mean very different things. + if acl is None: + acl = self.acl + + if acl is None: + return self + + self.patch_metadata({'acl': list(acl)}) + self.reload_acl() + return self + + def clear_acl(self): + """Remove all ACL rules from the bucket. + + Note that this won't actually remove *ALL* the rules, + but it will remove all the non-default rules. + In short, + you'll still have access + to a bucket that you created + even after you clear ACL rules + with this method. + + For example, + imagine that you granted access to this bucket + to a bunch of coworkers:: + + >>> from gcloud import storage + >>> connection = storage.get_connection(project, email, + private_key_path) + >>> bucket = connection.get_bucket(bucket_name) + >>> acl = bucket.get_acl() + >>> acl.user('coworker1@example.org').grant_read() + >>> acl.user('coworker2@example.org').grant_read() + >>> acl.save() + + Now they work in another part of the company + and you want to 'start fresh' on who has access:: + + >>> acl.clear_acl() + + At this point all the custom rules you created have been removed. + """ + + return self.save_acl(acl=[]) + def reload_default_object_acl(self): """Reload the Default Object ACL rules for this bucket. @@ -454,7 +625,9 @@ def make_public(self, recursive=False, future=False): :param future: If True, this will make all objects created in the future public as well. """ - super(Bucket, self).make_public() + + self.get_acl().all().grant_read() + self.save_acl() if future: self.get_default_object_acl().all().grant_read() diff --git a/gcloud/storage/key.py b/gcloud/storage/key.py index d4222649bcaf..fa19a9afc05f 100644 --- a/gcloud/storage/key.py +++ b/gcloud/storage/key.py @@ -4,28 +4,18 @@ import os from StringIO import StringIO -from gcloud.storage._helpers import _MetadataMixin from gcloud.storage.acl import ObjectACL from gcloud.storage.iterator import KeyDataIterator -class Key(_MetadataMixin): +class Key(object): """A wrapper around Cloud Storage's concept of an ``Object``.""" - LOAD_FULL_FIELDS = ('acl',) - """Tuple of metadata fields pertaining to key ACLs.""" - - ACL_CLASS = ObjectACL - """Class which holds ACL data for keys.""" - - ACL_KEYWORD = 'key' - """Keyword for ObjectACL constructor to pass a key in.""" - CHUNK_SIZE = 1024 * 1024 # 1 MB. """The size of a chunk of data whenever iterating (1 MB). - This must be a multiple of 256 KB per the API specification. - """ + This must be a multiple of 256 KB per the API specification. 
+ """ def __init__(self, bucket=None, name=None, metadata=None): """Key constructor. @@ -41,7 +31,6 @@ def __init__(self, bucket=None, name=None, metadata=None): :type metadata: dict :param metadata: All the other data provided by Cloud Storage. """ - super(Key, self).__init__() self.bucket = bucket self.name = name @@ -316,3 +305,157 @@ def set_contents_from_string(self, data, content_type='text/plain'): size=string_buffer.len, content_type=content_type) return self + + def has_metadata(self, field=None): + """Check if metadata is available locally. + + :type field: string + :param field: (optional) the particular field to check for. + + :rtype: bool + :returns: Whether metadata is available locally. + """ + + if not self.metadata: + return False + elif field and field not in self.metadata: + return False + else: + return True + + def reload_metadata(self, full=False): + """Reload metadata from Cloud Storage. + + :type full: bool + :param full: If True, loads all data (include ACL data). + + :rtype: :class:`Key` + :returns: The key you just reloaded data for. + """ + + projection = 'full' if full else 'noAcl' + query_params = {'projection': projection} + self.metadata = self.connection.api_request( + method='GET', path=self.path, query_params=query_params) + return self + + def get_metadata(self, field=None, default=None): + """Get all metadata or a specific field. + + If you request a field that isn't available, + and that field can be retrieved by refreshing data + from Cloud Storage, + this method will reload the data using + :func:`Key.reload_metadata`. + + :type field: string + :param field: (optional) A particular field to retrieve from metadata. + + :type default: anything + :param default: The value to return if the field provided wasn't found. + + :rtype: dict or anything + :returns: All metadata or the value of the specific field. + """ + + if not self.has_metadata(field=field): + full = (field and field == 'acl') + self.reload_metadata(full=full) + + if field: + return self.metadata.get(field, default) + else: + return self.metadata + + def patch_metadata(self, metadata): + """Update particular fields of this key's metadata. + + This method will only update the fields provided + and will not touch the other fields. + + It will also reload the metadata locally + based on the servers response. + + :type metadata: dict + :param metadata: The dictionary of values to update. + + :rtype: :class:`Key` + :returns: The current key. + """ + + self.metadata = self.connection.api_request( + method='PATCH', path=self.path, data=metadata, + query_params={'projection': 'full'}) + return self + + def reload_acl(self): + """Reload the ACL data from Cloud Storage. + + :rtype: :class:`Key` + :returns: The current key. + """ + + self.acl = ObjectACL(key=self) + + for entry in self.get_metadata('acl', []): + entity = self.acl.entity_from_dict(entry) + self.acl.add_entity(entity) + + return self + + def get_acl(self): + """Get ACL metadata as a :class:`gcloud.storage.acl.ObjectACL` object. + + :rtype: :class:`gcloud.storage.acl.ObjectACL` + :returns: An ACL object for the current key. + """ + + if not self.acl: + self.reload_acl() + return self.acl + + def save_acl(self, acl=None): + """Save the ACL data for this key. + + :type acl: :class:`gcloud.storage.acl.ACL` + :param acl: The ACL object to save. + If left blank, this will save the ACL + set locally on the key. + """ + + # We do things in this weird way because [] and None + # both evaluate to False, but mean very different things. 
+ if acl is None: + acl = self.acl + + if acl is None: + return self + + self.patch_metadata({'acl': list(acl)}) + self.reload_acl() + return self + + def clear_acl(self): + """Remove all ACL rules from the key. + + Note that this won't actually remove *ALL* the rules, + but it will remove all the non-default rules. + In short, + you'll still have access + to a key that you created + even after you clear ACL rules + with this method. + """ + + return self.save_acl(acl=[]) + + def make_public(self): + """Make this key public giving all users read access. + + :rtype: :class:`Key` + :returns: The current key. + """ + + self.get_acl().all().grant_read() + self.save_acl() + return self diff --git a/gcloud/storage/test___init__.py b/gcloud/storage/test___init__.py index 40f2ed277f98..13d296b73d98 100644 --- a/gcloud/storage/test___init__.py +++ b/gcloud/storage/test___init__.py @@ -3,8 +3,7 @@ class Test_get_connection(unittest2.TestCase): - @staticmethod - def _callFUT(*args, **kw): + def _callFUT(self, *args, **kw): from gcloud.storage import get_connection return get_connection(*args, **kw) @@ -37,8 +36,7 @@ def test_it(self): class Test_get_bucket(unittest2.TestCase): - @staticmethod - def _callFUT(*args, **kw): + def _callFUT(self, *args, **kw): from gcloud.storage import get_bucket return get_bucket(*args, **kw) diff --git a/gcloud/storage/test__helpers.py b/gcloud/storage/test__helpers.py deleted file mode 100644 index 6e8ff81799d6..000000000000 --- a/gcloud/storage/test__helpers.py +++ /dev/null @@ -1,19 +0,0 @@ -import unittest2 - - -class Test_MetadataMixin(unittest2.TestCase): - - @staticmethod - def _getTargetClass(): - from gcloud.storage._helpers import _MetadataMixin - return _MetadataMixin - - def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) - - def test_abstract_properties(self): - metadata_object = self._makeOne() - self.assertRaises(NotImplementedError, - lambda: metadata_object.connection) - self.assertRaises(NotImplementedError, - lambda: metadata_object.path) diff --git a/gcloud/storage/test_acl.py b/gcloud/storage/test_acl.py index 3ee5cfe95b29..b47a1538b143 100644 --- a/gcloud/storage/test_acl.py +++ b/gcloud/storage/test_acl.py @@ -3,8 +3,7 @@ class Test_ACL_Entity(unittest2.TestCase): - @staticmethod - def _getTargetClass(): + def _getTargetClass(self): from gcloud.storage.acl import ACL return ACL.Entity @@ -121,8 +120,7 @@ def test_revoke_owner(self): class Test_ACL(unittest2.TestCase): - @staticmethod - def _getTargetClass(): + def _getTargetClass(self): from gcloud.storage.acl import ACL return ACL @@ -396,8 +394,7 @@ def test_save_raises_NotImplementedError(self): class Test_BucketACL(unittest2.TestCase): - @staticmethod - def _getTargetClass(): + def _getTargetClass(self): from gcloud.storage.acl import BucketACL return BucketACL @@ -423,8 +420,7 @@ def save_acl(self, acl): class Test_DefaultObjectACL(unittest2.TestCase): - @staticmethod - def _getTargetClass(): + def _getTargetClass(self): from gcloud.storage.acl import DefaultObjectACL return DefaultObjectACL @@ -443,8 +439,7 @@ def save_default_object_acl(self, acl): class Test_ObjectACL(unittest2.TestCase): - @staticmethod - def _getTargetClass(): + def _getTargetClass(self): from gcloud.storage.acl import ObjectACL return ObjectACL diff --git a/gcloud/storage/test_bucket.py b/gcloud/storage/test_bucket.py index 0a3cd358d2e1..cd289dc17fa2 100644 --- a/gcloud/storage/test_bucket.py +++ b/gcloud/storage/test_bucket.py @@ -5,8 +5,7 @@ class Test_Bucket(unittest2.TestCase): - 
@staticmethod - def _getTargetClass(): + def _getTargetClass(self): from gcloud.storage.bucket import Bucket return Bucket diff --git a/gcloud/storage/test_connection.py b/gcloud/storage/test_connection.py index 590270a713a0..036d8f36da00 100644 --- a/gcloud/storage/test_connection.py +++ b/gcloud/storage/test_connection.py @@ -3,8 +3,7 @@ class TestConnection(unittest2.TestCase): - @staticmethod - def _getTargetClass(): + def _getTargetClass(self): from gcloud.storage.connection import Connection return Connection @@ -613,14 +612,12 @@ def test_generate_signed_url_w_expiration_int(self): class Test__get_expiration_seconds(unittest2.TestCase): - @staticmethod - def _callFUT(expiration): + def _callFUT(self, expiration): from gcloud.storage.connection import _get_expiration_seconds return _get_expiration_seconds(expiration) - @staticmethod - def _utc_seconds(when): + def _utc_seconds(self, when): import calendar return int(calendar.timegm(when.timetuple())) @@ -711,8 +708,11 @@ def request(self, **kw): class _Credentials(object): service_account_name = 'testing@example.com' - # Base64 encoded 'SEEKRIT'. - private_key = 'U0VFS1JJVA==' + + @property + def private_key(self): + import base64 + return base64.b64encode('SEEKRIT') class _Crypto(object): @@ -724,8 +724,7 @@ def load_pkcs12(self, buffer, passphrase): self._loaded = (buffer, passphrase) return self - @staticmethod - def get_privatekey(): + def get_privatekey(self): return '__PKCS12__' def dump_privatekey(self, type, pkey, cipher=None, passphrase=None): diff --git a/gcloud/storage/test_exceptions.py b/gcloud/storage/test_exceptions.py index 9e06b689179c..03d67a1ed968 100644 --- a/gcloud/storage/test_exceptions.py +++ b/gcloud/storage/test_exceptions.py @@ -3,8 +3,7 @@ class TestConnectionError(unittest2.TestCase): - @staticmethod - def _getTargetClass(): + def _getTargetClass(self): from gcloud.storage.exceptions import ConnectionError return ConnectionError @@ -19,8 +18,7 @@ def test_no_headers(self): class TestNotFoundError(unittest2.TestCase): - @staticmethod - def _getTargetClass(): + def _getTargetClass(self): from gcloud.storage.exceptions import NotFoundError return NotFoundError diff --git a/gcloud/storage/test_iterator.py b/gcloud/storage/test_iterator.py index d490393fce6f..6dc1c9005bbc 100644 --- a/gcloud/storage/test_iterator.py +++ b/gcloud/storage/test_iterator.py @@ -3,8 +3,7 @@ class TestIterator(unittest2.TestCase): - @staticmethod - def _getTargetClass(): + def _getTargetClass(self): from gcloud.storage.iterator import Iterator return Iterator @@ -121,8 +120,7 @@ def test_get_items_from_response_raises_NotImplementedError(self): class TestBucketIterator(unittest2.TestCase): - @staticmethod - def _getTargetClass(): + def _getTargetClass(self): from gcloud.storage.iterator import BucketIterator return BucketIterator @@ -158,8 +156,7 @@ def test_get_items_from_response_non_empty(self): class TestKeyIterator(unittest2.TestCase): - @staticmethod - def _getTargetClass(): + def _getTargetClass(self): from gcloud.storage.iterator import KeyIterator return KeyIterator @@ -199,8 +196,7 @@ def test_get_items_from_response_non_empty(self): class TestKeyDataIterator(unittest2.TestCase): - @staticmethod - def _getTargetClass(): + def _getTargetClass(self): from gcloud.storage.iterator import KeyDataIterator return KeyDataIterator @@ -386,8 +382,7 @@ def api_request(self, **kw): response, self._responses = self._responses[0], self._responses[1:] return response - @staticmethod - def build_api_url(path, query_params=None): + def 
build_api_url(self, path, query_params=None):
         from urllib import urlencode
         from urlparse import urlunsplit
         qs = urlencode(query_params or {})
diff --git a/gcloud/storage/test_key.py b/gcloud/storage/test_key.py
index 375a9c4ffb49..ac48e15abbff 100644
--- a/gcloud/storage/test_key.py
+++ b/gcloud/storage/test_key.py
@@ -3,8 +3,7 @@
 
 class Test_Key(unittest2.TestCase):
 
-    @staticmethod
-    def _getTargetClass():
+    def _getTargetClass(self):
         from gcloud.storage.key import Key
         return Key
 
@@ -610,8 +609,7 @@ def api_request(self, **kw):
         response, self._responses = self._responses[0], self._responses[1:]
         return response
 
-    @staticmethod
-    def build_api_url(path, query_params=None,
+    def build_api_url(self, path, query_params=None,
                       api_base_url=API_BASE_URL):
         from urllib import urlencode
         from urlparse import urlsplit
diff --git a/gcloud/test_connection.py b/gcloud/test_connection.py
index 10f61f9c977c..27db46ba187f 100644
--- a/gcloud/test_connection.py
+++ b/gcloud/test_connection.py
@@ -3,8 +3,7 @@
 
 class TestConnection(unittest2.TestCase):
 
-    @staticmethod
-    def _getTargetClass():
+    def _getTargetClass(self):
         from gcloud.connection import Connection
         return Connection
 
diff --git a/pylintrc_default b/pylintrc_default
index d393e36a2b9b..e2c61f529e52 100644
--- a/pylintrc_default
+++ b/pylintrc_default
@@ -10,10 +10,10 @@ max-args = 10
 max-public-methods = 30
 
 [FORMAT]
-# NOTE: By default pylint ignores the "dict-separator" check
-# for cases like {1:2} and the "trailing-comma" check for
-# cases like {1: 2, }. By setting "no-space-check" to empty
-# both of these checks with occur.
+# NOTE: By default pylint ignores whitespace checks around the
+# constructs "dict-separator" (cases like {1:2}) and "trailing-comma"
+# (cases like {1: 2, }). By setting "no-space-check" to empty,
+# whitespace checks will be enforced around both constructs.
no-space-check =

[MASTER]
diff --git a/pylintrc_reduced b/pylintrc_reduced
index fda04876c77a..3342430ab6f2 100644
--- a/pylintrc_reduced
+++ b/pylintrc_reduced
@@ -10,10 +10,10 @@ max-args = 10
 max-public-methods = 30
 
 [FORMAT]
-# NOTE: By default pylint ignores the "dict-separator" check
-# for cases like {1:2} and the "trailing-comma" check for
-# cases like {1: 2, }. By setting "no-space-check" to empty
-# both of these checks with occur.
+# NOTE: By default pylint ignores whitespace checks around the
+# constructs "dict-separator" (cases like {1:2}) and "trailing-comma"
+# (cases like {1: 2, }). By setting "no-space-check" to empty,
+# whitespace checks will be enforced around both constructs.
 no-space-check =
 
 [MASTER]
@@ -28,7 +28,7 @@ disable = I, protected-access, maybe-no-member, no-member,
     invalid-name, missing-docstring, too-many-public-methods,
     too-few-public-methods, attribute-defined-outside-init,
     unbalanced-tuple-unpacking, too-many-locals, exec-used,
-    no-init
+    no-init, no-self-use
 
 [REPORTS]
 reports = no
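
For reference, once "no-space-check" is emptied as in the pylintrc
files above, pylint's C0326 whitespace checker covers both constructs
named in the NOTE. An illustrative snippet (not part of this patch;
message wording is approximate):

    d1 = {1:2}     # flagged: exactly one space required after ':'
    d2 = {1: 2, }  # flagged: no space allowed before '}'

The preferred spelling under the enforced checks is {1: 2}, with a
single space after the colon and no stray space before the closing
brace.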