19 changes: 10 additions & 9 deletions _gcloud_vendor/apitools/base/py/http_wrapper.py
@@ -6,13 +6,14 @@
"""

import collections
-import httplib
import logging
import socket
import time
import urlparse

import httplib2
+from six.moves import http_client
+from six.moves import range

from _gcloud_vendor.apitools.base.py import exceptions
from _gcloud_vendor.apitools.base.py import util
@@ -28,10 +29,10 @@
RESUME_INCOMPLETE = 308
TOO_MANY_REQUESTS = 429
_REDIRECT_STATUS_CODES = (
-httplib.MOVED_PERMANENTLY,
-httplib.FOUND,
-httplib.SEE_OTHER,
-httplib.TEMPORARY_REDIRECT,
+http_client.MOVED_PERMANENTLY,
+http_client.FOUND,
+http_client.SEE_OTHER,
+http_client.TEMPORARY_REDIRECT,
RESUME_INCOMPLETE,
)

@@ -129,7 +130,7 @@ def MakeRequest(http, http_request, retries=5, redirections=5):
url_scheme = urlparse.urlsplit(http_request.url).scheme
if url_scheme and url_scheme in http.connections:
connection_type = http.connections[url_scheme]
-for retry in xrange(retries + 1):
+for retry in range(retries + 1):
# Note that the str() calls here are important for working around
# some funny business with message construction and unicode in
# httplib itself. See, eg,
@@ -140,15 +141,15 @@ def MakeRequest(http, http_request, retries=5, redirections=5):
str(http_request.url), method=str(http_request.http_method),
body=http_request.body, headers=http_request.headers,
redirections=redirections, connection_type=connection_type)
-except httplib.BadStatusLine as e:
+except http_client.BadStatusLine as e:
logging.error('Caught BadStatusLine from httplib, retrying: %s', e)
exc = e
except socket.error as e:
if http_request.http_method != 'GET':
raise
logging.error('Caught socket error, retrying: %s', e)
exc = e
-except httplib.IncompleteRead as e:
+except http_client.IncompleteRead as e:
if http_request.http_method != 'GET':
raise
logging.error('Caught IncompleteRead error, retrying: %s', e)
@@ -161,7 +162,7 @@ def MakeRequest(http, http_request, retries=5, redirections=5):
break
logging.info('Retrying request to url <%s> after status code %s.',
response.request_url, response.status_code)
-elif isinstance(exc, httplib.IncompleteRead):
+elif isinstance(exc, http_client.IncompleteRead):
logging.info('Retrying request to url <%s> after incomplete read.',
str(http_request.url))
else:
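The http_wrapper.py changes above replace Python 2's httplib module and the xrange builtin with their six.moves aliases, which resolve to httplib and xrange on Python 2 and to http.client and range on Python 3. A minimal standalone sketch of that pattern (not part of the vendored module; the retry_request helper and its make_request argument are invented for illustration):

    from six.moves import http_client
    from six.moves import range

    # The same status constants exist in Python 2's httplib and Python 3's http.client.
    REDIRECT_STATUS_CODES = (
        http_client.MOVED_PERMANENTLY,   # 301
        http_client.FOUND,               # 302
        http_client.SEE_OTHER,           # 303
        http_client.TEMPORARY_REDIRECT,  # 307
    )

    def retry_request(make_request, retries=5):
        """Call make_request() up to retries + 1 times, retrying on BadStatusLine."""
        last_exc = None
        for _ in range(retries + 1):  # lazy like xrange on Python 2, plain range on Python 3
            try:
                return make_request()
            except http_client.BadStatusLine as exc:  # same class as httplib.BadStatusLine on Python 2
                last_exc = exc
        raise last_exc

Because six.moves.http_client is the same module object as httplib (or http.client), exception classes such as BadStatusLine and IncompleteRead are unchanged, so the except clauses in the diff keep their behavior on both interpreters.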
40 changes: 21 additions & 19 deletions _gcloud_vendor/apitools/base/py/transfer.py
@@ -1,17 +1,19 @@
#!/usr/bin/env python
"""Upload and download support for apitools."""
+from __future__ import print_function

import email.generator as email_generator
import email.mime.multipart as mime_multipart
import email.mime.nonmultipart as mime_nonmultipart
-import httplib
import io
import json
import mimetypes
import os
-import StringIO
import threading

+from six.moves import http_client

from _gcloud_vendor.apitools.base.py import exceptions
from _gcloud_vendor.apitools.base.py import http_wrapper
from _gcloud_vendor.apitools.base.py import util
@@ -38,7 +40,7 @@ def __init__(self, stream, close_stream=False, chunksize=None,
self.__url = None

self.auto_transfer = auto_transfer
-self.chunksize = chunksize or 1048576L
+self.chunksize = chunksize or 1048576

def __repr__(self):
return str(self)
@@ -121,10 +123,10 @@ class Download(_Transfer):
chunksize: default chunksize to use for transfers.
"""
_ACCEPTABLE_STATUSES = set((
-httplib.OK,
-httplib.NO_CONTENT,
-httplib.PARTIAL_CONTENT,
-httplib.REQUESTED_RANGE_NOT_SATISFIABLE,
+http_client.OK,
+http_client.NO_CONTENT,
+http_client.PARTIAL_CONTENT,
+http_client.REQUESTED_RANGE_NOT_SATISFIABLE,
))
_REQUIRED_SERIALIZATION_KEYS = set((
'auto_transfer', 'progress', 'total_size', 'url'))
@@ -242,13 +244,13 @@ def InitializeDownload(self, http_request, http=None, client=None):
@staticmethod
def _ArgPrinter(response, unused_download):
if 'content-range' in response.info:
-print 'Received %s' % response.info['content-range']
+print('Received %s' % response.info['content-range'])
else:
-print 'Received %d bytes' % len(response)
+print('Received %d bytes' % len(response))

@staticmethod
def _CompletePrinter(*unused_args):
-print 'Download complete'
+print('Download complete')

def __NormalizeStartEnd(self, start, end=None):
if end is not None:
@@ -290,10 +292,10 @@ def __ProcessResponse(self, response):
"""Process this response (by updating self and writing to self.stream)."""
if response.status_code not in self._ACCEPTABLE_STATUSES:
raise exceptions.TransferInvalidError(response.content)
-if response.status_code in (httplib.OK, httplib.PARTIAL_CONTENT):
+if response.status_code in (http_client.OK, http_client.PARTIAL_CONTENT):
self.stream.write(response.content)
self.__progress += len(response)
-elif response.status_code == httplib.NO_CONTENT:
+elif response.status_code == http_client.NO_CONTENT:
# It's important to write something to the stream for the case
# of a 0-byte download to a file, as otherwise python won't
# create the file.
@@ -348,7 +350,7 @@ def StreamInChunks(self, callback=None, finish_callback=None,
additional_headers=additional_headers)
response = self.__ProcessResponse(response)
self._ExecuteCallback(callback, response)
-if (response.status_code == httplib.OK or
+if (response.status_code == http_client.OK or
self.progress >= self.total_size):
break
self._ExecuteCallback(finish_callback, response)
@@ -591,7 +593,7 @@ def _RefreshResumableUploadState(self):
self.http, refresh_request, redirections=0)
range_header = refresh_response.info.get(
'Range', refresh_response.info.get('range'))
-if refresh_response.status_code in (httplib.OK, httplib.CREATED):
+if refresh_response.status_code in (http_client.OK, http_client.CREATED):
self.__complete = True
elif refresh_response.status_code == http_wrapper.RESUME_INCOMPLETE:
if range_header is None:
@@ -619,7 +621,7 @@ def InitializeUpload(self, http_request, http=None, client=None):
http_request.url = client.FinalizeTransferUrl(http_request.url)
self.EnsureUninitialized()
http_response = http_wrapper.MakeRequest(http, http_request)
-if http_response.status_code != httplib.OK:
+if http_response.status_code != http_client.OK:
raise exceptions.HttpError.FromResponse(http_response)

self.__server_chunk_granularity = http_response.info.get(
@@ -651,11 +653,11 @@ def __ValidateChunksize(self, chunksize=None):

@staticmethod
def _ArgPrinter(response, unused_upload):
-print 'Sent %s' % response.info['range']
+print('Sent %s' % response.info['range'])

@staticmethod
def _CompletePrinter(*unused_args):
-print 'Upload complete'
+print('Upload complete')

def StreamInChunks(self, callback=None, finish_callback=None,
additional_headers=None):
@@ -674,7 +676,7 @@ def StreamInChunks(self, callback=None, finish_callback=None,
while not self.complete:
response = self.__SendChunk(self.stream.tell(),
additional_headers=additional_headers)
-if response.status_code in (httplib.OK, httplib.CREATED):
+if response.status_code in (http_client.OK, http_client.CREATED):
self.__complete = True
break
self.__progress = self.__GetLastByte(response.info['range'])
@@ -703,10 +705,10 @@ def __SendChunk(self, start, additional_headers=None, data=None):
request.headers.update(additional_headers)

response = http_wrapper.MakeRequest(self.bytes_http, request)
-if response.status_code not in (httplib.OK, httplib.CREATED,
+if response.status_code not in (http_client.OK, http_client.CREATED,
http_wrapper.RESUME_INCOMPLETE):
raise exceptions.HttpError.FromResponse(response)
-if response.status_code in (httplib.OK, httplib.CREATED):
+if response.status_code in (http_client.OK, http_client.CREATED):
return response
# TODO(craigcitro): Add retries on no progress?
last_byte = self.__GetLastByte(response.info['range'])
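transfer.py applies the same httplib-to-http_client substitution and additionally switches to function-style print via the __future__ import, drops the Python 2-only long suffix on the 1 MiB default chunk size, and removes the Python 2-only StringIO import. A standalone sketch of those idioms (illustrative only; report_progress is not a function in the vendored code):

    from __future__ import print_function

    import io

    from six.moves import http_client

    DEFAULT_CHUNKSIZE = 1048576  # 1 MiB; a plain int, no 'L' suffix needed on Python 3

    def report_progress(status_code, payload):
        """Print how much of a transfer arrived, using print() on both interpreters."""
        if status_code in (http_client.OK, http_client.PARTIAL_CONTENT):
            print('Received %d bytes' % len(payload))
        elif status_code == http_client.NO_CONTENT:
            print('Received empty response')

    stream = io.BytesIO()  # byte stream with identical behavior on Python 2 and 3
    stream.write(b'example chunk')
    report_progress(http_client.OK, stream.getvalue())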
11 changes: 6 additions & 5 deletions _gcloud_vendor/apitools/base/py/util.py
@@ -2,13 +2,14 @@
"""Assorted utilities shared between parts of apitools."""

import collections
-import httplib
import os
import random
-import types
import urllib
import urllib2

+import six
+from six.moves import http_client

from _gcloud_vendor.apitools.base.py import exceptions

__all__ = [
@@ -46,13 +47,13 @@ def DetectGce():
o = urllib2.urlopen('http://metadata.google.internal')
except urllib2.URLError:
return False
-return (o.getcode() == httplib.OK and
+return (o.getcode() == http_client.OK and
o.headers.get('metadata-flavor') == 'Google')


def NormalizeScopes(scope_spec):
"""Normalize scope_spec to a set of strings."""
-if isinstance(scope_spec, types.StringTypes):
+if isinstance(scope_spec, six.string_types):
return set(scope_spec.split(' '))
elif isinstance(scope_spec, collections.Iterable):
return set(scope_spec)
@@ -99,7 +100,7 @@ def ExpandRelativePath(method_config, params, relative_path=None):
raise exceptions.InvalidUserInputError(
'Request missing required parameter %s' % param)
try:
-if not isinstance(value, basestring):
+if not isinstance(value, six.string_types):
value = str(value)
path = path.replace(param_template,
urllib.quote(value.encode('utf_8'), reserved_chars))
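util.py rounds out the port: status codes come from six.moves.http_client, and the string-type checks use six.string_types in place of types.StringTypes and basestring, both of which are gone in Python 3. A standalone, simplified sketch mirroring the NormalizeScopes change above (it omits the real function's explicit iterable check and error handling):

    import six

    def normalize_scopes(scope_spec):
        """Return a set of scopes from a space-separated string or an iterable of strings."""
        if isinstance(scope_spec, six.string_types):  # str on Python 3; str and unicode on Python 2
            return set(scope_spec.split(' '))
        return set(scope_spec)

    assert normalize_scopes('email profile') == {'email', 'profile'}
    assert normalize_scopes(['email', 'profile']) == {'email', 'profile'}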