Diffstat (limited to 'python3/httplib2/__init__.py')
-rw-r--r--  python3/httplib2/__init__.py | 294
 1 file changed, 147 insertions(+), 147 deletions(-)
diff --git a/python3/httplib2/__init__.py b/python3/httplib2/__init__.py
index fb63594..bf6c2e9 100644
--- a/python3/httplib2/__init__.py
+++ b/python3/httplib2/__init__.py
@@ -3,7 +3,7 @@
httplib2
A caching http interface that supports ETags and gzip
-to conserve bandwidth.
+to conserve bandwidth.
Requires Python 3.0 or later
@@ -26,8 +26,8 @@ __contributors__ = ["Thomas Broyer (t.broyer@ltgt.net)",
__license__ = "MIT"
__version__ = "0.7.7"
-import re
-import sys
+import re
+import sys
import email
import email.utils
import email.message
@@ -64,9 +64,10 @@ def has_timeout(timeout):
return (timeout is not None)
__all__ = ['Http', 'Response', 'ProxyInfo', 'HttpLib2Error',
- 'RedirectMissingLocation', 'RedirectLimit', 'FailedToDecompressContent',
- 'UnimplementedDigestAuthOptionError', 'UnimplementedHmacDigestAuthOptionError',
- 'debuglevel', 'RETRIES']
+ 'RedirectMissingLocation', 'RedirectLimit',
+ 'FailedToDecompressContent', 'UnimplementedDigestAuthOptionError',
+ 'UnimplementedHmacDigestAuthOptionError',
+ 'debuglevel', 'RETRIES']
# The httplib debug level, set to a non-zero value to get debug output
@@ -78,8 +79,8 @@ RETRIES = 2
# All exceptions raised here derive from HttpLib2Error
class HttpLib2Error(Exception): pass
-# Some exceptions can be caught and optionally
-# be turned back into responses.
+# Some exceptions can be caught and optionally
+# be turned back into responses.
class HttpLib2ErrorWithResponse(HttpLib2Error):
def __init__(self, desc, response, content):
self.response = response
@@ -147,7 +148,7 @@ def urlnorm(uri):
raise RelativeURIError("Only absolute URIs are allowed. uri = %s" % uri)
authority = authority.lower()
scheme = scheme.lower()
- if not path:
+ if not path:
path = "/"
# Could do syntax based normalization of the URI before
# computing the digest. See Section 6.2.2 of Std 66.
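Aside (not part of the diff): urlnorm lowercases the scheme and authority, defaults an empty path to "/", and returns the normalized pieces plus a defragmented URI. A minimal sketch, assuming the module is imported as httplib2 and using a made-up URL:

    import httplib2
    httplib2.urlnorm("HTTP://Example.COM")
    # -> ('http', 'example.com', '/', 'http://example.com/')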
@@ -200,7 +201,7 @@ def _parse_cache_control(headers):
parts_with_args = [tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=")]
parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")]
retval = dict(parts_with_args + parts_wo_args)
- return retval
+ return retval
# Whether to use a strict mode to parse WWW-Authenticate headers
# Might lead to bad results in case of ill-formed header value,
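For orientation (not part of the diff): _parse_cache_control maps valued directives to their lowercased string values and bare directives to 1. A small sketch with a hypothetical header:

    import httplib2
    httplib2._parse_cache_control({'cache-control': 'max-age=3600, no-transform'})
    # -> {'max-age': '3600', 'no-transform': 1}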
@@ -222,27 +223,27 @@ def _parse_www_authenticate(headers, headername='www-authenticate'):
retval = {}
if headername in headers:
try:
- authenticate = headers[headername].strip()
- www_auth = USE_WWW_AUTH_STRICT_PARSING and WWW_AUTH_STRICT or WWW_AUTH_RELAXED
- while authenticate:
- # Break off the scheme at the beginning of the line
- if headername == 'authentication-info':
- (auth_scheme, the_rest) = ('digest', authenticate)
- else:
- (auth_scheme, the_rest) = authenticate.split(" ", 1)
- # Now loop over all the key value pairs that come after the scheme,
- # being careful not to roll into the next scheme
- match = www_auth.search(the_rest)
- auth_params = {}
- while match:
- if match and len(match.groups()) == 3:
- (key, value, the_rest) = match.groups()
- auth_params[key.lower()] = UNQUOTE_PAIRS.sub(r'\1', value) # '\\'.join([x.replace('\\', '') for x in value.split('\\\\')])
- match = www_auth.search(the_rest)
- retval[auth_scheme.lower()] = auth_params
- authenticate = the_rest.strip()
+ authenticate = headers[headername].strip()
+ www_auth = USE_WWW_AUTH_STRICT_PARSING and WWW_AUTH_STRICT or WWW_AUTH_RELAXED
+ while authenticate:
+ # Break off the scheme at the beginning of the line
+ if headername == 'authentication-info':
+ (auth_scheme, the_rest) = ('digest', authenticate)
+ else:
+ (auth_scheme, the_rest) = authenticate.split(" ", 1)
+ # Now loop over all the key value pairs that come after the scheme,
+ # being careful not to roll into the next scheme
+ match = www_auth.search(the_rest)
+ auth_params = {}
+ while match:
+ if match and len(match.groups()) == 3:
+ (key, value, the_rest) = match.groups()
+ auth_params[key.lower()] = UNQUOTE_PAIRS.sub(r'\1', value) # '\\'.join([x.replace('\\', '') for x in value.split('\\\\')])
+ match = www_auth.search(the_rest)
+ retval[auth_scheme.lower()] = auth_params
+ authenticate = the_rest.strip()
except ValueError:
- raise MalformedHeader("WWW-Authenticate")
+ raise MalformedHeader("WWW-Authenticate")
return retval
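To make the re-indented loop above easier to follow (illustrative, not part of the diff): the parser returns a dict keyed by lowercased auth scheme, each value holding that scheme's parameters with surrounding quotes stripped. With a hypothetical challenge header:

    import httplib2
    hdrs = {'www-authenticate': 'Digest realm="me@example.com", nonce="abc123", qop="auth"'}
    httplib2._parse_www_authenticate(hdrs)
    # -> {'digest': {'realm': 'me@example.com', 'nonce': 'abc123', 'qop': 'auth'}}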
@@ -254,17 +255,17 @@ def _entry_disposition(response_headers, request_headers):
1. Cache-Control: max-stale
2. Age: headers are not used in the calculations.
- Not that this algorithm is simpler than you might think
+ Not that this algorithm is simpler than you might think
because we are operating as a private (non-shared) cache.
This lets us ignore 's-maxage'. We can also ignore
'proxy-invalidate' since we aren't a proxy.
- We will never return a stale document as
- fresh as a design decision, and thus the non-implementation
- of 'max-stale'. This also lets us safely ignore 'must-revalidate'
+ We will never return a stale document as
+ fresh as a design decision, and thus the non-implementation
+ of 'max-stale'. This also lets us safely ignore 'must-revalidate'
since we operate as if every server has sent 'must-revalidate'.
Since we are private we get to ignore both 'public' and
'private' parameters. We also ignore 'no-transform' since
- we don't do any transformations.
+ we don't do any transformations.
The 'no-store' parameter is handled at a higher level.
So the only Cache-Control parameters we look at are:
@@ -273,7 +274,7 @@ def _entry_disposition(response_headers, request_headers):
max-age
min-fresh
"""
-
+
retval = "STALE"
cc = _parse_cache_control(request_headers)
cc_response = _parse_cache_control(response_headers)
@@ -315,10 +316,10 @@ def _entry_disposition(response_headers, request_headers):
min_fresh = int(cc['min-fresh'])
except ValueError:
min_fresh = 0
- current_age += min_fresh
+ current_age += min_fresh
if freshness_lifetime > current_age:
retval = "FRESH"
- return retval
+ return retval
def _decompressContent(response, new_content):
content = new_content
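A quick illustration of the disposition logic above (not part of the diff; header values are hypothetical): a response dated now with max-age=3600 is still fresh, while a request carrying no-cache bypasses the cache entirely.

    import email.utils, httplib2
    resp = {'date': email.utils.formatdate(usegmt=True), 'cache-control': 'max-age=3600'}
    httplib2._entry_disposition(resp, {})                             # -> "FRESH"
    httplib2._entry_disposition(resp, {'cache-control': 'no-cache'})  # -> "TRANSPARENT"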
@@ -386,10 +387,10 @@ def _updateCache(request_headers, response_headers, content, cache, cachekey):
status_header = 'status: %d\r\n' % status
try:
- header_str = info.as_string()
+ header_str = info.as_string()
except UnicodeEncodeError:
- setattr(info, '_write_headers', _bind_write_headers(info))
- header_str = info.as_string()
+ setattr(info, '_write_headers', _bind_write_headers(info))
+ header_str = info.as_string()
header_str = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", header_str)
text = b"".join([status_header.encode('utf-8'), header_str.encode('utf-8'), content])
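For reference (illustrative, not part of the diff): the bytes written to the cache are the synthesized status line, the remaining end-to-end headers, a blank line, and the entity body. Roughly, with hypothetical values:

    cached_entry = (b'status: 200\r\n'
                    b'content-type: text/plain\r\n'
                    b'etag: "abc123"\r\n'
                    b'\r\n'
                    b'hello world')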
@@ -404,10 +405,10 @@ def _wsse_username_token(cnonce, iso_now, password):
return base64.b64encode(_sha(("%s%s%s" % (cnonce, iso_now, password)).encode('utf-8')).digest()).strip()
-# For credentials we need two things, first
+# For credentials we need two things, first
# a pool of credential to try (not necesarily tied to BAsic, Digest, etc.)
# Then we also need a list of URIs that have already demanded authentication
-# That list is tricky since sub-URIs can take the same auth, or the
+# That list is tricky since sub-URIs can take the same auth, or the
# auth scheme may change as you descend the tree.
# So we also need each Auth instance to be able to tell us
# how close to the 'top' it is.
@@ -439,7 +440,7 @@ class Authentication(object):
or such returned from the last authorized response.
Over-rise this in sub-classes if necessary.
- Return TRUE is the request is to be retried, for
+ Return TRUE is the request is to be retried, for
example Digest may return stale=true.
"""
return False
@@ -477,7 +478,7 @@ class BasicAuthentication(Authentication):
class DigestAuthentication(Authentication):
- """Only do qop='auth' and MD5, since that
+ """Only do qop='auth' and MD5, since that
is all Apache currently implements"""
def __init__(self, credentials, host, request_uri, headers, response, content, http):
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
@@ -490,7 +491,7 @@ class DigestAuthentication(Authentication):
self.challenge['algorithm'] = self.challenge.get('algorithm', 'MD5').upper()
if self.challenge['algorithm'] != 'MD5':
raise UnimplementedDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
- self.A1 = "".join([self.credentials[0], ":", self.challenge['realm'], ":", self.credentials[1]])
+ self.A1 = "".join([self.credentials[0], ":", self.challenge['realm'], ":", self.credentials[1]])
self.challenge['nc'] = 1
def request(self, method, request_uri, headers, content, cnonce = None):
@@ -498,23 +499,22 @@ class DigestAuthentication(Authentication):
H = lambda x: _md5(x.encode('utf-8')).hexdigest()
KD = lambda s, d: H("%s:%s" % (s, d))
A2 = "".join([method, ":", request_uri])
- self.challenge['cnonce'] = cnonce or _cnonce()
- request_digest = '"%s"' % KD(H(self.A1), "%s:%s:%s:%s:%s" % (self.challenge['nonce'],
- '%08x' % self.challenge['nc'],
- self.challenge['cnonce'],
- self.challenge['qop'], H(A2)
- ))
+ self.challenge['cnonce'] = cnonce or _cnonce()
+ request_digest = '"%s"' % KD(H(self.A1), "%s:%s:%s:%s:%s" % (
+ self.challenge['nonce'],
+ '%08x' % self.challenge['nc'],
+ self.challenge['cnonce'],
+ self.challenge['qop'], H(A2)))
headers['authorization'] = 'Digest username="%s", realm="%s", nonce="%s", uri="%s", algorithm=%s, response=%s, qop=%s, nc=%08x, cnonce="%s"' % (
- self.credentials[0],
+ self.credentials[0],
self.challenge['realm'],
self.challenge['nonce'],
- request_uri,
+ request_uri,
self.challenge['algorithm'],
request_digest,
self.challenge['qop'],
self.challenge['nc'],
- self.challenge['cnonce'],
- )
+ self.challenge['cnonce'])
if self.challenge.get('opaque'):
headers['authorization'] += ', opaque="%s"' % self.challenge['opaque']
self.challenge['nc'] += 1
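The re-wrapped computation above is the standard RFC 2617 response for qop=auth with MD5; the same arithmetic as a standalone sketch (all values hypothetical):

    from hashlib import md5
    H = lambda s: md5(s.encode('utf-8')).hexdigest()
    A1 = 'user:realm:secret'    # username:realm:password
    A2 = 'GET:/protected'       # method:request_uri
    request_digest = H('%s:%s' % (H(A1), 'nonce:00000001:cnonce:auth:' + H(A2)))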
@@ -524,14 +524,14 @@ class DigestAuthentication(Authentication):
challenge = _parse_www_authenticate(response, 'www-authenticate').get('digest', {})
if 'true' == challenge.get('stale'):
self.challenge['nonce'] = challenge['nonce']
- self.challenge['nc'] = 1
+ self.challenge['nc'] = 1
return True
else:
updated_challenge = _parse_www_authenticate(response, 'authentication-info').get('digest', {})
if 'nextnonce' in updated_challenge:
self.challenge['nonce'] = updated_challenge['nextnonce']
- self.challenge['nc'] = 1
+ self.challenge['nc'] = 1
return False
@@ -565,9 +565,8 @@ class HmacDigestAuthentication(Authentication):
else:
self.pwhashmod = _sha
self.key = "".join([self.credentials[0], ":",
- self.pwhashmod.new("".join([self.credentials[1], self.challenge['salt']])).hexdigest().lower(),
- ":", self.challenge['realm']
- ])
+ self.pwhashmod.new("".join([self.credentials[1], self.challenge['salt']])).hexdigest().lower(),
+ ":", self.challenge['realm']])
self.key = self.pwhashmod.new(self.key).hexdigest().lower()
def request(self, method, request_uri, headers, content):
@@ -580,15 +579,14 @@ class HmacDigestAuthentication(Authentication):
request_digest = "%s:%s:%s:%s:%s" % (method, request_uri, cnonce, self.challenge['snonce'], headers_val)
request_digest = hmac.new(self.key, request_digest, self.hashmod).hexdigest().lower()
headers['authorization'] = 'HMACDigest username="%s", realm="%s", snonce="%s", cnonce="%s", uri="%s", created="%s", response="%s", headers="%s"' % (
- self.credentials[0],
+ self.credentials[0],
self.challenge['realm'],
self.challenge['snonce'],
cnonce,
- request_uri,
+ request_uri,
created,
request_digest,
- keylist,
- )
+ keylist)
def response(self, response, content):
challenge = _parse_www_authenticate(response, 'www-authenticate').get('hmacdigest', {})
@@ -601,7 +599,7 @@ class WsseAuthentication(Authentication):
"""This is thinly tested and should not be relied upon.
At this time there isn't any third party server to test against.
Blogger and TypePad implemented this algorithm at one point
- but Blogger has since switched to Basic over HTTPS and
+ but Blogger has since switched to Basic over HTTPS and
TypePad has implemented it wrong, by never issuing a 401
challenge but instead requiring your client to telepathically know that
their endpoint is expecting WSSE profile="UsernameToken"."""
@@ -647,7 +645,7 @@ class GoogleLoginAuthentication(Authentication):
def request(self, method, request_uri, headers, content):
"""Modify the request headers to add the appropriate
Authorization header."""
- headers['authorization'] = 'GoogleLogin Auth=' + self.Auth
+ headers['authorization'] = 'GoogleLogin Auth=' + self.Auth
AUTH_SCHEME_CLASSES = {
@@ -662,13 +660,13 @@ AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"]
class FileCache(object):
"""Uses a local directory as a store for cached files.
- Not really safe to use if multiple threads or processes are going to
+ Not really safe to use if multiple threads or processes are going to
be running on the same cache.
"""
def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior
self.cache = cache
self.safe = safe
- if not os.path.exists(cache):
+ if not os.path.exists(cache):
os.makedirs(self.cache)
def get(self, key):
@@ -706,7 +704,7 @@ class Credentials(object):
def iter(self, domain):
for (cdomain, name, password) in self.credentials:
if cdomain == "" or domain == cdomain:
- yield (name, password)
+ yield (name, password)
class KeyCerts(Credentials):
"""Identical to Credentials except that
@@ -726,7 +724,7 @@ p = ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP, proxy_host='localhost', proxy_po
def astuple(self):
return (self.proxy_type, self.proxy_host, self.proxy_port, self.proxy_rdns,
- self.proxy_user, self.proxy_pass)
+ self.proxy_user, self.proxy_pass)
def isgood(self):
return socks and (self.proxy_host != None) and (self.proxy_port != None)
@@ -737,12 +735,12 @@ def proxy_info_from_environment(method='http'):
Read proxy info from the environment variables.
"""
if method not in ('http', 'https'):
- return
+ return
env_var = method + '_proxy'
url = os.environ.get(env_var, os.environ.get(env_var.upper()))
if not url:
- return
+ return
return proxy_info_from_url(url, method)
@@ -755,17 +753,17 @@ def proxy_info_from_url(url, method='http'):
password = None
port = None
if '@' in url[1]:
- ident, host_port = url[1].split('@', 1)
- if ':' in ident:
- username, password = ident.split(':', 1)
- else:
- password = ident
+ ident, host_port = url[1].split('@', 1)
+ if ':' in ident:
+ username, password = ident.split(':', 1)
+ else:
+ password = ident
else:
- host_port = url[1]
+ host_port = url[1]
if ':' in host_port:
- host, port = host_port.split(':', 1)
+ host, port = host_port.split(':', 1)
else:
- host = host_port
+ host = host_port
if port:
port = int(port)
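Illustrative only (not part of the diff; the URL is hypothetical and the optional socks module that isgood() checks for must be importable): the parsing above turns a proxy URL into a ProxyInfo, and a bare credential before the '@' is treated as the password.

    import httplib2
    pi = httplib2.proxy_info_from_url('http://joe:secret@proxy.example.com:3128')
    # proxy_host='proxy.example.com', proxy_port=3128, proxy_user='joe', proxy_pass='secret'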
@@ -815,38 +813,40 @@ class HTTPSConnectionWithTimeout(http.client.HTTPSConnection):
self.proxy_info = proxy_info
context = None
if ca_certs is None:
- ca_certs = CA_CERTS
+ ca_certs = CA_CERTS
if (cert_file or ca_certs) and not disable_ssl_certificate_validation:
- if not hasattr(ssl, 'SSLContext'):
- raise CertificateValidationUnsupportedInPython31()
- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
- context.verify_mode = ssl.CERT_REQUIRED
- if cert_file:
- context.load_cert_chain(cert_file, key_file)
- if ca_certs:
- context.load_verify_locations(ca_certs)
- http.client.HTTPSConnection.__init__(self, host, port=port, key_file=key_file,
+ if not hasattr(ssl, 'SSLContext'):
+ raise CertificateValidationUnsupportedInPython31()
+ context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
+ context.verify_mode = ssl.CERT_REQUIRED
+ if cert_file:
+ context.load_cert_chain(cert_file, key_file)
+ if ca_certs:
+ context.load_verify_locations(ca_certs)
+ http.client.HTTPSConnection.__init__(
+ self, host, port=port, key_file=key_file,
cert_file=cert_file, timeout=timeout, context=context,
- check_hostname=True)
+ check_hostname=True)
SCHEME_TO_CONNECTION = {
'http': HTTPConnectionWithTimeout,
- 'https': HTTPSConnectionWithTimeout
- }
+ 'https': HTTPSConnectionWithTimeout,
+}
class Http(object):
"""An HTTP client that handles:
-- all methods
-- caching
-- ETags
-- compression,
-- HTTPS
-- Basic
-- Digest
-- WSSE
-
-and more.
+
+ - all methods
+ - caching
+ - ETags
+ - compression,
+ - HTTPS
+ - Basic
+ - Digest
+ - WSSE
+
+ and more.
"""
def __init__(self, cache=None, timeout=None,
proxy_info=proxy_info_from_environment,
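Not part of the diff, but for context on the SSL branch above: certificate checking is driven from the Http constructor. A hedged sketch, assuming the 0.7.7 keyword names ca_certs and disable_ssl_certificate_validation (the __init__ signature is truncated in this hunk) and a hypothetical bundle path:

    import httplib2
    h = httplib2.Http(ca_certs='/etc/ssl/certs/ca-bundle.crt')
    h_insecure = httplib2.Http(disable_ssl_certificate_validation=True)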
@@ -960,7 +960,7 @@ and more.
for i in range(RETRIES):
try:
if conn.sock is None:
- conn.connect()
+ conn.connect()
conn.request(method, request_uri, body, headers)
except socket.timeout:
conn.close()
@@ -1020,12 +1020,12 @@ and more.
auths = [(auth.depth(request_uri), auth) for auth in self.authorizations if auth.inscope(host, request_uri)]
auth = auths and sorted(auths)[0][1] or None
- if auth:
+ if auth:
auth.request(method, request_uri, headers, body)
(response, content) = self._conn_request(conn, request_uri, method, body, headers)
- if auth:
+ if auth:
if auth.response(response, body):
auth.request(method, request_uri, headers, body)
(response, content) = self._conn_request(conn, request_uri, method, body, headers )
@@ -1033,7 +1033,7 @@ and more.
if response.status == 401:
for authorization in self._auth_from_challenge(host, request_uri, headers, response, content):
- authorization.request(method, request_uri, headers, body)
+ authorization.request(method, request_uri, headers, body)
(response, content) = self._conn_request(conn, request_uri, method, body, headers, )
if response.status != 401:
self.authorizations.append(authorization)
@@ -1056,7 +1056,7 @@ and more.
if response.status == 301 and method in ["GET", "HEAD"]:
response['-x-permanent-redirect-url'] = response['location']
if 'content-location' not in response:
- response['content-location'] = absolute_uri
+ response['content-location'] = absolute_uri
_updateCache(headers, response, content, self.cache, cachekey)
if 'if-none-match' in headers:
del headers['if-none-match']
@@ -1068,7 +1068,7 @@ and more.
location = response['location']
old_response = copy.deepcopy(response)
if 'content-location' not in old_response:
- old_response['content-location'] = absolute_uri
+ old_response['content-location'] = absolute_uri
redirect_method = method
if response.status in [302, 303]:
redirect_method = "GET"
@@ -1080,7 +1080,7 @@ and more.
elif response.status in [200, 203] and method in ["GET", "HEAD"]:
# Don't cache 206's since we aren't going to handle byte range requests
if 'content-location' not in response:
- response['content-location'] = absolute_uri
+ response['content-location'] = absolute_uri
_updateCache(headers, response, content, self.cache, cachekey)
return (response, content)
@@ -1095,10 +1095,10 @@ and more.
def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None):
""" Performs a single HTTP request.
-The 'uri' is the URI of the HTTP resource and can begin
+The 'uri' is the URI of the HTTP resource and can begin
with either 'http' or 'https'. The value of 'uri' must be an absolute URI.
-The 'method' is the HTTP method to perform, such as GET, POST, DELETE, etc.
+The 'method' is the HTTP method to perform, such as GET, POST, DELETE, etc.
There is no restriction on the methods allowed.
The 'body' is the entity body to be sent with the request. It is a string
@@ -1107,11 +1107,11 @@ object.
Any extra headers that are to be sent with the request should be provided in the
'headers' dictionary.
-The maximum number of redirect to follow before raising an
+The maximum number of redirect to follow before raising an
exception is 'redirections. The default is 5.
-The return value is a tuple of (response, content), the first
-being and instance of the 'Response' class, the second being
+The return value is a tuple of (response, content), the first
+being and instance of the 'Response' class, the second being
a string that contains the response entity body.
"""
try:
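A minimal usage sketch of the interface documented above (not part of the diff; the URI is a placeholder):

    import httplib2
    h = httplib2.Http('.cache')   # directory-backed FileCache
    response, content = h.request('http://example.org/', 'GET')
    # response is a Response instance; on Python 3, content is bytes.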
@@ -1215,13 +1215,13 @@ a string that contains the response entity body.
# Determine our course of action:
# Is the cached entry fresh or stale?
# Has the client requested a non-cached response?
- #
- # There seems to be three possible answers:
+ #
+ # There seems to be three possible answers:
# 1. [FRESH] Return the cache entry w/o doing a GET
# 2. [STALE] Do the GET (but add in cache validators if available)
# 3. [TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request
- entry_disposition = _entry_disposition(info, headers)
-
+ entry_disposition = _entry_disposition(info, headers)
+
if entry_disposition == "FRESH":
if not cached_value:
info['status'] = '504'
@@ -1243,7 +1243,7 @@ a string that contains the response entity body.
if response.status == 304 and method == "GET":
# Rewrite the cache entry with the new end-to-end headers
- # Take all headers that are in response
+ # Take all headers that are in response
# and overwrite their values in info.
# unless they are hop-by-hop, or are listed in the connection header.
@@ -1255,14 +1255,14 @@ a string that contains the response entity body.
_updateCache(headers, merged_response, content, self.cache, cachekey)
response = merged_response
response.status = 200
- response.fromcache = True
+ response.fromcache = True
elif response.status == 200:
content = new_content
else:
self.cache.delete(cachekey)
- content = new_content
- else:
+ content = new_content
+ else:
cc = _parse_cache_control(headers)
if 'only-if-cached'in cc:
info['status'] = '504'
@@ -1276,34 +1276,34 @@ a string that contains the response entity body.
response = e.response
content = e.content
response.status = 500
- response.reason = str(e)
+ response.reason = str(e)
elif isinstance(e, socket.timeout):
content = b"Request Timeout"
- response = Response( {
- "content-type": "text/plain",
- "status": "408",
- "content-length": len(content)
- })
+ response = Response({
+ "content-type": "text/plain",
+ "status": "408",
+ "content-length": len(content)
+ })
response.reason = "Request Timeout"
else:
content = str(e).encode('utf-8')
- response = Response( {
- "content-type": "text/plain",
- "status": "400",
- "content-length": len(content)
- })
- response.reason = "Bad Request"
+ response = Response({
+ "content-type": "text/plain",
+ "status": "400",
+ "content-length": len(content)
+ })
+ response.reason = "Bad Request"
else:
raise
-
+
return (response, content)
-
+
class Response(dict):
"""An object more like email.message than httplib.HTTPResponse."""
-
+
"""Is this response from our local cache"""
fromcache = False
@@ -1319,7 +1319,7 @@ class Response(dict):
previous = None
def __init__(self, info):
- # info is either an email.message or
+ # info is either an email.message or
# an httplib.HTTPResponse object.
if isinstance(info, http.client.HTTPResponse):
for key, value in info.getheaders():
@@ -1333,17 +1333,17 @@ class Response(dict):
self.reason = info.reason
self.version = info.version
elif isinstance(info, email.message.Message):
- for key, value in list(info.items()):
- self[key.lower()] = value
+ for key, value in list(info.items()):
+ self[key.lower()] = value
self.status = int(self['status'])
else:
- for key, value in info.items():
- self[key.lower()] = value
+ for key, value in info.items():
+ self[key.lower()] = value
self.status = int(self.get('status', self.status))
def __getattr__(self, name):
if name == 'dict':
- return self
- else:
- raise AttributeError(name)
+ return self
+ else:
+ raise AttributeError(name)
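Purely illustrative (not part of the diff): Response keeps lowercased headers as dict items, exposes status/reason/version as attributes, and the __getattr__ above aliases .dict to the instance itself.

    import httplib2
    r = httplib2.Response({'status': '200', 'Content-Type': 'text/plain'})
    r.status            # -> 200
    r['content-type']   # -> 'text/plain'
    r.dict is r         # -> True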