feat(SublimeText2.GitPackages): cache packages
@@ -0,0 +1,34 @@
# -*- coding: utf-8 -*-

#   __
#  /__)  _  _     _   _ _/   _
# / (   (- (/ (/ (- _)  /  _)
#          /

"""
requests
~~~~~~~~

:copyright: (c) 2012 by Kenneth Reitz.
:license: ISC, see LICENSE for more details.

"""

__title__ = 'requests'
__version__ = '0.10.2'
__build__ = 0x001002
__author__ = 'Kenneth Reitz'
__license__ = 'ISC'
__copyright__ = 'Copyright 2012 Kenneth Reitz'


from . import utils
from .models import Request, Response
from .api import request, get, head, post, patch, put, delete, options
from .sessions import session, Session
from .status_codes import codes
from .exceptions import (
    RequestException, Timeout, URLRequired,
    TooManyRedirects, HTTPError, ConnectionError
)
@@ -0,0 +1,116 @@
# -*- coding: utf-8 -*-

"""
requests.api
~~~~~~~~~~~~

This module implements the Requests API.

:copyright: (c) 2012 by Kenneth Reitz.
:license: ISC, see LICENSE for more details.

"""

from . import sessions


def request(method, url, **kwargs):
    """Constructs and sends a :class:`Request <Request>`.
    Returns :class:`Response <Response>` object.

    :param method: method for the new :class:`Request` object.
    :param url: URL for the new :class:`Request` object.
    :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
    :param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`.
    :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
    :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
    :param files: (optional) Dictionary of 'name': file-like-objects (or {'name': ('filename', fileobj)}) for multipart encoding upload.
    :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
    :param timeout: (optional) Float describing the timeout of the request.
    :param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
    :param return_response: (optional) If False, an un-sent Request object will be returned.
    :param session: (optional) A :class:`Session` object to be used for the request.
    :param config: (optional) A configuration dictionary.
    :param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    :param prefetch: (optional) if ``True``, the response content will be immediately downloaded.
    """

    s = kwargs.pop('session') if 'session' in kwargs else sessions.session()
    return s.request(method=method, url=url, **kwargs)


def get(url, **kwargs):
    """Sends a GET request. Returns :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param **kwargs: Optional arguments that ``request`` takes.
    """

    kwargs.setdefault('allow_redirects', True)
    return request('get', url, **kwargs)


def options(url, **kwargs):
    """Sends an OPTIONS request. Returns :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param **kwargs: Optional arguments that ``request`` takes.
    """

    kwargs.setdefault('allow_redirects', True)
    return request('options', url, **kwargs)


def head(url, **kwargs):
    """Sends a HEAD request. Returns :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param **kwargs: Optional arguments that ``request`` takes.
    """

    kwargs.setdefault('allow_redirects', True)
    return request('head', url, **kwargs)


def post(url, data=None, **kwargs):
    """Sends a POST request. Returns :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`.
    :param **kwargs: Optional arguments that ``request`` takes.
    """

    return request('post', url, data=data, **kwargs)


def put(url, data=None, **kwargs):
    """Sends a PUT request. Returns :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`.
    :param **kwargs: Optional arguments that ``request`` takes.
    """

    return request('put', url, data=data, **kwargs)


def patch(url, data=None, **kwargs):
    """Sends a PATCH request. Returns :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`.
    :param **kwargs: Optional arguments that ``request`` takes.
    """

    return request('patch', url, data=data, **kwargs)


def delete(url, **kwargs):
    """Sends a DELETE request. Returns :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param **kwargs: Optional arguments that ``request`` takes.
    """

    return request('delete', url, **kwargs)
@@ -0,0 +1,85 @@
# -*- coding: utf-8 -*-

"""
requests.async
~~~~~~~~~~~~~~

This module contains an asynchronous replica of ``requests.api``, powered
by gevent. All API methods return a ``Request`` instance (as opposed to
``Response``). A list of requests can be sent with ``map()``.
"""

try:
    import gevent
    from gevent import monkey as curious_george
    from gevent.pool import Pool
except ImportError:
    raise RuntimeError('Gevent is required for requests.async.')

# Monkey-patch.
curious_george.patch_all(thread=False)

from . import api


__all__ = (
    'map',
    'get', 'options', 'head', 'post', 'put', 'patch', 'delete', 'request'
)


def patched(f):
    """Patches a given API function to not send."""

    def wrapped(*args, **kwargs):

        kwargs['return_response'] = False
        kwargs['prefetch'] = True

        config = kwargs.get('config', {})
        config.update(safe_mode=True)

        kwargs['config'] = config

        return f(*args, **kwargs)

    return wrapped


def send(r, pool=None, prefetch=False):
    """Sends the request object using the specified pool. If a pool isn't
    specified this method blocks. Pools are useful because you can specify size
    and can hence limit concurrency."""

    if pool is not None:
        return pool.spawn(r.send, prefetch=prefetch)

    return gevent.spawn(r.send, prefetch=prefetch)


# Patched requests.api functions.
get = patched(api.get)
options = patched(api.options)
head = patched(api.head)
post = patched(api.post)
put = patched(api.put)
patch = patched(api.patch)
delete = patched(api.delete)
request = patched(api.request)


def map(requests, prefetch=True, size=None):
    """Concurrently converts a list of Requests to Responses.

    :param requests: a collection of Request objects.
    :param prefetch: If False, the content will not be downloaded immediately.
    :param size: Specifies the number of requests to make at a time. If None, no throttling occurs.
    """

    requests = list(requests)

    pool = Pool(size) if size else None
    jobs = [send(r, pool, prefetch=prefetch) for r in requests]
    gevent.joinall(jobs)

    return [r.response for r in requests]
@@ -0,0 +1,150 @@
# -*- coding: utf-8 -*-

"""
requests.auth
~~~~~~~~~~~~~

This module contains the authentication handlers for Requests.
"""

from __future__ import unicode_literals

import time
import hashlib

from base64 import b64encode
from .compat import urlparse, str, bytes
from .utils import randombytes, parse_dict_header


def _basic_auth_str(username, password):
    """Returns a Basic Auth string."""

    return 'Basic ' + b64encode(("%s:%s" % (username, password)).encode('utf-8')).strip().decode('utf-8')


class AuthBase(object):
    """Base class that all auth implementations derive from"""

    def __call__(self, r):
        raise NotImplementedError('Auth hooks must be callable.')


class HTTPBasicAuth(AuthBase):
    """Attaches HTTP Basic Authentication to the given Request object."""
    def __init__(self, username, password):
        self.username = username
        self.password = password

    def __call__(self, r):
        r.headers['Authorization'] = _basic_auth_str(self.username, self.password)
        return r


class HTTPProxyAuth(HTTPBasicAuth):
    """Attaches HTTP Proxy Authentication to a given Request object."""
    def __call__(self, r):
        r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password)
        return r


class HTTPDigestAuth(AuthBase):
    """Attaches HTTP Digest Authentication to the given Request object."""
    def __init__(self, username, password):
        self.username = username
        self.password = password

    def handle_401(self, r):
        """Takes the given response and tries digest-auth, if needed."""

        s_auth = r.headers.get('www-authenticate', '')

        if 'digest' in s_auth.lower():

            last_nonce = ''
            nonce_count = 0

            chal = parse_dict_header(s_auth.replace('Digest ', ''))

            realm = chal['realm']
            nonce = chal['nonce']
            qop = chal.get('qop')
            algorithm = chal.get('algorithm', 'MD5')
            opaque = chal.get('opaque', None)

            algorithm = algorithm.upper()
            # lambdas assume digest modules are imported at the top level
            if algorithm == 'MD5':
                def h(x):
                    if isinstance(x, str):
                        x = x.encode('utf-8')
                    return hashlib.md5(x).hexdigest()
                H = h
            elif algorithm == 'SHA':
                def h(x):
                    if isinstance(x, str):
                        x = x.encode('utf-8')
                    return hashlib.sha1(x).hexdigest()
                H = h
            # XXX MD5-sess
            KD = lambda s, d: H("%s:%s" % (s, d))

            if H is None:
                return None

            # XXX not implemented yet
            entdig = None
            p_parsed = urlparse(r.request.url)
            path = p_parsed.path
            if p_parsed.query:
                path += '?' + p_parsed.query

            A1 = '%s:%s:%s' % (self.username, realm, self.password)
            A2 = '%s:%s' % (r.request.method, path)

            if qop == 'auth':
                if nonce == last_nonce:
                    nonce_count += 1
                else:
                    nonce_count = 1
                    last_nonce = nonce

                ncvalue = '%08x' % nonce_count
                s = str(nonce_count).encode('utf-8')
                s += nonce.encode('utf-8')
                s += time.ctime().encode('utf-8')
                s += randombytes(8)

                cnonce = (hashlib.sha1(s).hexdigest()[:16])
                noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, H(A2))
                respdig = KD(H(A1), noncebit)
            elif qop is None:
                respdig = KD(H(A1), "%s:%s" % (nonce, H(A2)))
            else:
                # XXX handle auth-int.
                return None

            # XXX should the partial digests be encoded too?
            base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
                   'response="%s"' % (self.username, realm, nonce, path, respdig)
            if opaque:
                base += ', opaque="%s"' % opaque
            if entdig:
                base += ', digest="%s"' % entdig
            base += ', algorithm="%s"' % algorithm
            if qop:
                base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce)

            r.request.headers['Authorization'] = 'Digest %s' % (base)
            r.request.send(anyway=True)
            _r = r.request.response
            _r.history.append(r)

            return _r

        return r

    def __call__(self, r):
        r.register_hook('response', self.handle_401)
        return r
@@ -0,0 +1,105 @@
# -*- coding: utf-8 -*-

"""
pythoncompat
"""


import sys

# -------
# Pythons
# -------

# Syntax sugar.
_ver = sys.version_info

#: Python 2.x?
is_py2 = (_ver[0] == 2)

#: Python 3.x?
is_py3 = (_ver[0] == 3)

#: Python 3.0.x
is_py30 = (is_py3 and _ver[1] == 0)

#: Python 3.1.x
is_py31 = (is_py3 and _ver[1] == 1)

#: Python 3.2.x
is_py32 = (is_py3 and _ver[1] == 2)

#: Python 3.3.x
is_py33 = (is_py3 and _ver[1] == 3)

#: Python 3.4.x
is_py34 = (is_py3 and _ver[1] == 4)

#: Python 2.7.x
is_py27 = (is_py2 and _ver[1] == 7)

#: Python 2.6.x
is_py26 = (is_py2 and _ver[1] == 6)

#: Python 2.5.x
is_py25 = (is_py2 and _ver[1] == 5)

#: Python 2.4.x
is_py24 = (is_py2 and _ver[1] == 4)  # I'm assuming this is not by choice.


# ---------
# Platforms
# ---------


# Syntax sugar.
_ver = sys.version.lower()

is_pypy = ('pypy' in _ver)
is_jython = ('jython' in _ver)
is_ironpython = ('iron' in _ver)

# Assume CPython, if nothing else.
is_cpython = not any((is_pypy, is_jython, is_ironpython))

# Windows-based system.
is_windows = 'win32' in str(sys.platform).lower()

# Standard Linux 2+ system.
is_linux = ('linux' in str(sys.platform).lower())
is_osx = ('darwin' in str(sys.platform).lower())
is_hpux = ('hpux' in str(sys.platform).lower())  # Complete guess.
is_solaris = ('solar==' in str(sys.platform).lower())  # Complete guess.


# ---------
# Specifics
# ---------


if is_py2:
    from urllib import quote, unquote, urlencode
    from urlparse import urlparse, urlunparse, urljoin, urlsplit
    from urllib2 import parse_http_list
    import cookielib
    from .packages.oreos.monkeys import SimpleCookie
    from StringIO import StringIO

    bytes = str
    str = unicode
    basestring = basestring


elif is_py3:
    from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote
    from urllib.request import parse_http_list
    from http import cookiejar as cookielib
    from http.cookies import SimpleCookie
    from io import StringIO

    str = str
    bytes = bytes
    basestring = (str, bytes)
@@ -0,0 +1,40 @@
# -*- coding: utf-8 -*-

"""
requests.defaults
~~~~~~~~~~~~~~~~~

This module provides the Requests configuration defaults.

Configurations:

:base_headers: Default HTTP headers.
:verbose: Stream to write request logging to.
:max_redirects: Maximum number of redirects allowed within a request.
:keep_alive: Reuse HTTP Connections?
:max_retries: The number of times a request should be retried in the event of a connection failure.
:danger_mode: If true, Requests will raise errors immediately.
:safe_mode: If true, Requests will catch all errors.
:pool_maxsize: The maximum size of an HTTP connection pool.
:pool_connections: The number of active HTTP connection pools to use.
"""

from . import __version__

defaults = dict()


defaults['base_headers'] = {
    'User-Agent': 'python-requests/%s' % __version__,
    'Accept-Encoding': ', '.join(('identity', 'deflate', 'compress', 'gzip')),
    'Accept': '*/*'
}

defaults['verbose'] = None
defaults['max_redirects'] = 30
defaults['pool_connections'] = 10
defaults['pool_maxsize'] = 10
defaults['max_retries'] = 0
defaults['danger_mode'] = False
defaults['safe_mode'] = False
defaults['keep_alive'] = True
@@ -0,0 +1,31 @@
# -*- coding: utf-8 -*-

"""
requests.exceptions
~~~~~~~~~~~~~~~~~~~

This module contains the set of Requests' exceptions.

"""

class RequestException(Exception):
    """There was an ambiguous exception that occurred while handling your
    request."""

class HTTPError(RequestException):
    """An HTTP error occurred."""

class ConnectionError(RequestException):
    """A Connection error occurred."""

class SSLError(ConnectionError):
    """An SSL error occurred."""

class Timeout(RequestException):
    """The request timed out."""

class URLRequired(RequestException):
    """A valid URL is required to make a request."""

class TooManyRedirects(RequestException):
    """Too many redirects."""
@@ -0,0 +1,48 @@
# -*- coding: utf-8 -*-

"""
requests.hooks
~~~~~~~~~~~~~~

This module provides the capabilities for the Requests hooks system.

Available hooks:

``args``:
    A dictionary of the arguments being sent to Request().

``pre_request``:
    The Request object, directly before being sent.

``post_request``:
    The Request object, directly after being sent.

``response``:
    The response generated from a Request.

"""

import traceback


HOOKS = ('args', 'pre_request', 'post_request', 'response')


def dispatch_hook(key, hooks, hook_data):
    """Dispatches a hook dictionary on a given piece of data."""

    hooks = hooks or dict()

    if key in hooks:
        hooks = hooks.get(key)

        if hasattr(hooks, '__call__'):
            hooks = [hooks]

        for hook in hooks:
            try:
                hook_data = hook(hook_data) or hook_data
            except Exception:
                traceback.print_exc()

    return hook_data
@@ -0,0 +1,796 @@
# -*- coding: utf-8 -*-

"""
requests.models
~~~~~~~~~~~~~~~

This module contains the primary objects that power Requests.
"""

import os
from datetime import datetime

from .hooks import dispatch_hook, HOOKS
from .structures import CaseInsensitiveDict
from .status_codes import codes

from .auth import HTTPBasicAuth, HTTPProxyAuth
from .packages.urllib3.response import HTTPResponse
from .packages.urllib3.exceptions import MaxRetryError
from .packages.urllib3.exceptions import SSLError as _SSLError
from .packages.urllib3.exceptions import HTTPError as _HTTPError
from .packages.urllib3 import connectionpool, poolmanager
from .packages.urllib3.filepost import encode_multipart_formdata
from .exceptions import (
    ConnectionError, HTTPError, RequestException, Timeout, TooManyRedirects,
    URLRequired, SSLError)
from .utils import (
    get_encoding_from_headers, stream_decode_response_unicode,
    stream_decompress, guess_filename, requote_path, dict_from_string)

from .compat import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, str, bytes, SimpleCookie, is_py3, is_py2

# Import chardet if it is available.
try:
    import chardet
except ImportError:
    pass

REDIRECT_STATI = (codes.moved, codes.found, codes.other, codes.temporary_moved)


class Request(object):
    """The :class:`Request <Request>` object. It carries out all functionality of
    Requests. Recommended interface is with the Requests functions.
    """

    def __init__(self,
        url=None,
        headers=dict(),
        files=None,
        method=None,
        data=dict(),
        params=dict(),
        auth=None,
        cookies=None,
        timeout=None,
        redirect=False,
        allow_redirects=False,
        proxies=None,
        hooks=None,
        config=None,
        _poolmanager=None,
        verify=None,
        session=None):

        #: Float describes the timeout of the request.
        #  (Use socket.setdefaulttimeout() as fallback)
        self.timeout = timeout

        #: Request URL.

        # if isinstance(url, str):
        #     url = url.encode('utf-8')
        #     print(dir(url))

        self.url = url

        #: Dictionary of HTTP Headers to attach to the :class:`Request <Request>`.
        self.headers = dict(headers or [])

        #: Dictionary of files to multipart upload (``{filename: content}``).
        self.files = files

        #: HTTP Method to use.
        self.method = method

        #: Dictionary or byte of request body data to attach to the
        #: :class:`Request <Request>`.
        self.data = None

        #: Dictionary or byte of querystring data to attach to the
        #: :class:`Request <Request>`.
        self.params = None

        #: True if :class:`Request <Request>` is part of a redirect chain (disables history
        #: and HTTPError storage).
        self.redirect = redirect

        #: Set to True if full redirects are allowed (e.g. re-POST-ing of data at new ``Location``)
        self.allow_redirects = allow_redirects

        # Dictionary mapping protocol to the URL of the proxy (e.g. {'http': 'foo.bar:3128'})
        self.proxies = dict(proxies or [])

        self.data, self._enc_data = self._encode_params(data)
        self.params, self._enc_params = self._encode_params(params)

        #: :class:`Response <Response>` instance, containing
        #: content and metadata of HTTP Response, once :attr:`sent <send>`.
        self.response = Response()

        #: Authentication tuple or object to attach to :class:`Request <Request>`.
        self.auth = auth

        #: CookieJar to attach to :class:`Request <Request>`.
        self.cookies = dict(cookies or [])

        #: Dictionary of configurations for this request.
        self.config = dict(config or [])

        #: True if Request has been sent.
        self.sent = False

        #: Event-handling hooks.
        self.hooks = {}

        for event in HOOKS:
            self.hooks[event] = []

        hooks = hooks or {}

        for (k, v) in list(hooks.items()):
            self.register_hook(event=k, hook=v)

        #: Session.
        self.session = session

        #: SSL Verification.
        self.verify = verify

        if headers:
            headers = CaseInsensitiveDict(self.headers)
        else:
            headers = CaseInsensitiveDict()

        # Add configured base headers.
        for (k, v) in list(self.config.get('base_headers', {}).items()):
            if k not in headers:
                headers[k] = v

        self.headers = headers
        self._poolmanager = _poolmanager


    def __repr__(self):
        return '<Request [%s]>' % (self.method)


    def _build_response(self, resp):
        """Build internal :class:`Response <Response>` object
        from given response.
        """

        def build(resp):

            response = Response()

            # Pass settings over.
            response.config = self.config

            if resp:

                # Fallback to None if there's no status_code, for whatever reason.
                response.status_code = getattr(resp, 'status', None)

                # Make headers case-insensitive.
                response.headers = CaseInsensitiveDict(getattr(resp, 'headers', None))

                # Set encoding.
                response.encoding = get_encoding_from_headers(response.headers)

                # Start off with our local cookies.
                cookies = self.cookies or dict()

                # Add new cookies from the server.
                if 'set-cookie' in response.headers:
                    cookie_header = response.headers['set-cookie']
                    cookies = dict_from_string(cookie_header)

                # Save cookies in Response.
                response.cookies = cookies

                # No exceptions were harmed in the making of this request.
                response.error = getattr(resp, 'error', None)

                # Save original response for later.
                response.raw = resp
                response.url = self.full_url

            return response

        history = []

        r = build(resp)

        self.cookies.update(r.cookies)

        if r.status_code in REDIRECT_STATI and not self.redirect:
            while (('location' in r.headers) and
                   ((r.status_code is codes.see_other) or (self.allow_redirects))):

                r.content  # Consume socket so it can be released

                if not len(history) < self.config.get('max_redirects'):
                    raise TooManyRedirects()

                # Release the connection back into the pool.
                r.raw.release_conn()

                history.append(r)

                url = r.headers['location']

                # Handle redirection without scheme (see: RFC 1808 Section 4)
                if url.startswith('//'):
                    parsed_rurl = urlparse(r.url)
                    url = '%s:%s' % (parsed_rurl.scheme, url)

                # Facilitate non-RFC2616-compliant 'location' headers
                # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
                if not urlparse(url).netloc:
                    url = urljoin(r.url, url)

                # http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.4
                if r.status_code is codes.see_other:
                    method = 'GET'
                else:
                    method = self.method

                # Remove the cookie headers that were sent.
                headers = self.headers
                try:
                    del headers['Cookie']
                except KeyError:
                    pass

                request = Request(
                    url=url,
                    headers=headers,
                    files=self.files,
                    method=method,
                    params=self.session.params,
                    auth=self.auth,
                    cookies=self.cookies,
                    redirect=True,
                    config=self.config,
                    timeout=self.timeout,
                    _poolmanager=self._poolmanager,
                    proxies=self.proxies,
                    verify=self.verify,
                    session=self.session
                )

                request.send()
                r = request.response
                self.cookies.update(r.cookies)

        r.history = history

        self.response = r
        self.response.request = self
        self.response.cookies.update(self.cookies)


    @staticmethod
    def _encode_params(data):
        """Encode parameters in a piece of data.

        If the data supplied is a dictionary, encodes each parameter in it, and
        returns a list of tuples containing the encoded parameters, and a urlencoded
        version of that.

        Otherwise, assumes the data is already encoded appropriately, and
        returns it twice.
        """

        if hasattr(data, '__iter__') and not isinstance(data, str):
            data = dict(data)

        if hasattr(data, 'items'):
            result = []
            for k, vs in list(data.items()):
                for v in isinstance(vs, list) and vs or [vs]:
                    result.append((k.encode('utf-8') if isinstance(k, str) else k,
                                   v.encode('utf-8') if isinstance(v, str) else v))
            return result, urlencode(result, doseq=True)
        else:
            return data, data

    @property
    def full_url(self):
        """Build the actual URL to use."""

        if not self.url:
            raise URLRequired()

        url = self.url

        # Support for unicode domain names and paths.
        scheme, netloc, path, params, query, fragment = urlparse(url)

        if not scheme:
            raise ValueError("Invalid URL %r: No schema supplied" % url)

        netloc = netloc.encode('idna').decode('utf-8')

        if not path:
            path = '/'

        if is_py2:
            if isinstance(path, str):
                path = path.encode('utf-8')

            path = requote_path(path)

        url = (urlunparse([scheme, netloc, path, params, query, fragment]))

        if self._enc_params:
            if urlparse(url).query:
                return '%s&%s' % (url, self._enc_params)
            else:
                return '%s?%s' % (url, self._enc_params)
        else:
            return url

    @property
    def path_url(self):
        """Build the path URL to use."""

        url = []

        p = urlsplit(self.full_url)

        # Proxies use full URLs.
        if p.scheme in self.proxies:
            return self.full_url

        path = p.path
        if not path:
            path = '/'

        if is_py3:
            path = quote(path.encode('utf-8'))

        url.append(path)

        query = p.query
        if query:
            url.append('?')
            url.append(query)

        return ''.join(url)


    def register_hook(self, event, hook):
        """Properly register a hook."""

        return self.hooks[event].append(hook)


    def send(self, anyway=False, prefetch=False):
        """Sends the request. Returns True if successful, False if not.
        If there was an HTTPError during transmission,
        self.response.status_code will contain the HTTPError code.

        Once a request is successfully sent, `sent` will equal True.

        :param anyway: If True, request will be sent, even if it has
        already been sent.
        """

        # Build the URL
        url = self.full_url

        # Logging
        if self.config.get('verbose'):
            self.config.get('verbose').write('%s %s %s\n' % (
                datetime.now().isoformat(), self.method, url
            ))

        # Nottin' on you.
        body = None
        content_type = None

        # Multi-part file uploads.
        if self.files:
            if not isinstance(self.data, str):

                try:
                    fields = self.data.copy()
                except AttributeError:
                    fields = dict(self.data)

                for (k, v) in list(self.files.items()):
                    # support for explicit filename
                    if isinstance(v, (tuple, list)):
                        fn, fp = v
                    else:
                        fn = guess_filename(v) or k
                        fp = v
                    fields.update({k: (fn, fp.read())})

                (body, content_type) = encode_multipart_formdata(fields)
            else:
                pass
                # TODO: Conflict?
        else:
            if self.data:

                body = self._enc_data
                if isinstance(self.data, str):
                    content_type = None
                else:
                    content_type = 'application/x-www-form-urlencoded'

        # Add content-type if it wasn't explicitly provided.
        if (content_type) and (not 'content-type' in self.headers):
            self.headers['Content-Type'] = content_type

        if self.auth:
            if isinstance(self.auth, tuple) and len(self.auth) == 2:
                # special-case basic HTTP auth
                self.auth = HTTPBasicAuth(*self.auth)

            # Allow auth to make its changes.
            r = self.auth(self)

            # Update self to reflect the auth changes.
            self.__dict__.update(r.__dict__)

        _p = urlparse(url)
        proxy = self.proxies.get(_p.scheme)

        if proxy:
            conn = poolmanager.proxy_from_url(proxy)
            _proxy = urlparse(proxy)
            if '@' in _proxy.netloc:
                auth, url = _proxy.netloc.split('@', 1)
                self.proxy_auth = HTTPProxyAuth(*auth.split(':', 1))
                r = self.proxy_auth(self)
                self.__dict__.update(r.__dict__)
        else:
            # Check to see if keep_alive is allowed.
            if self.config.get('keep_alive'):
                conn = self._poolmanager.connection_from_url(url)
            else:
                conn = connectionpool.connection_from_url(url)

        if url.startswith('https') and self.verify:

            cert_loc = None

            # Allow self-specified cert location.
            if self.verify is not True:
                cert_loc = self.verify

            # Look for configuration.
            if not cert_loc:
                cert_loc = os.environ.get('REQUESTS_CA_BUNDLE')

            # Curl compatibility.
            if not cert_loc:
                cert_loc = os.environ.get('CURL_CA_BUNDLE')

            # Use the awesome certifi list.
            if not cert_loc:
                cert_loc = __import__('certifi').where()

            conn.cert_reqs = 'CERT_REQUIRED'
            conn.ca_certs = cert_loc
        else:
            conn.cert_reqs = 'CERT_NONE'
            conn.ca_certs = None

        if not self.sent or anyway:

            if self.cookies:

                # Skip if 'cookie' header is explicitly set.
                if 'cookie' not in self.headers:

                    # Simple cookie with our dict.
                    c = SimpleCookie()
                    for (k, v) in list(self.cookies.items()):
                        c[k] = v

                    # Turn it into a header.
                    cookie_header = c.output(header='', sep='; ').strip()

                    # Attach Cookie header to request.
                    self.headers['Cookie'] = cookie_header

            # Pre-request hook.
            r = dispatch_hook('pre_request', self.hooks, self)
            self.__dict__.update(r.__dict__)

            try:
                # The inner try .. except re-raises certain exceptions as
                # internal exception types; the outer suppresses exceptions
                # when safe mode is set.
                try:
                    # Send the request.
                    r = conn.urlopen(
                        method=self.method,
                        url=self.path_url,
                        body=body,
                        headers=self.headers,
                        redirect=False,
                        assert_same_host=False,
                        preload_content=False,
                        decode_content=True,
                        retries=self.config.get('max_retries', 0),
                        timeout=self.timeout,
                    )
                    self.sent = True

                except MaxRetryError as e:
                    raise ConnectionError(e)

                except (_SSLError, _HTTPError) as e:
                    if self.verify and isinstance(e, _SSLError):
                        raise SSLError(e)

                    raise Timeout('Request timed out.')

            except RequestException as e:
                if self.config.get('safe_mode', False):
                    # In safe mode, catch the exception and attach it to
                    # a blank urllib3.HTTPResponse object.
                    r = HTTPResponse()
                    r.error = e
                else:
                    raise

            self._build_response(r)

            # Response manipulation hook.
            self.response = dispatch_hook('response', self.hooks, self.response)

            # Post-request hook.
            r = dispatch_hook('post_request', self.hooks, self)
            self.__dict__.update(r.__dict__)

            # If prefetch is True, mark content as consumed.
            if prefetch:
                # Save the response.
                self.response.content

            if self.config.get('danger_mode'):
                self.response.raise_for_status()

        return self.sent


class Response(object):
    """The core :class:`Response <Response>` object. All
    :class:`Request <Request>` objects contain a
    :class:`response <Response>` attribute, which is an instance
    of this class.
    """

    def __init__(self):

        self._content = None
        self._content_consumed = False

        #: Integer Code of responded HTTP Status.
        self.status_code = None

        #: Case-insensitive Dictionary of Response Headers.
        #: For example, ``headers['content-encoding']`` will return the
        #: value of a ``'Content-Encoding'`` response header.
        self.headers = CaseInsensitiveDict()

        #: File-like object representation of response (for advanced usage).
        self.raw = None

        #: Final URL location of Response.
        self.url = None

        #: Resulting :class:`HTTPError` of request, if one occurred.
        self.error = None

        #: Encoding to decode with when accessing r.content.
        self.encoding = None

        #: A list of :class:`Response <Response>` objects from
        #: the history of the Request. Any redirect responses will end
        #: up here.
        self.history = []

        #: The :class:`Request <Request>` that created the Response.
        self.request = None

        #: A dictionary of Cookies the server sent back.
        self.cookies = {}

        #: Dictionary of configurations for this request.
        self.config = {}


    def __repr__(self):
        return '<Response [%s]>' % (self.status_code)

    def __bool__(self):
        """Returns true if :attr:`status_code` is 'OK'."""
        return self.ok

    def __nonzero__(self):
        """Returns true if :attr:`status_code` is 'OK'."""
        return self.ok

    @property
    def ok(self):
        try:
            self.raise_for_status()
        except HTTPError:
            return False
        return True


    def iter_content(self, chunk_size=10 * 1024, decode_unicode=False):
        """Iterates over the response data. This avoids reading the content
        at once into memory for large responses. The chunk size is the number
        of bytes it should read into memory. This is not necessarily the
        length of each item returned, as decoding can take place.
        """
        if self._content_consumed:
            raise RuntimeError(
                'The content for this response was already consumed'
            )

        def generate():
            while 1:
                chunk = self.raw.read(chunk_size)
                if not chunk:
                    break
                yield chunk
            self._content_consumed = True

        def generate_chunked():
            resp = self.raw._original_response
            fp = resp.fp
            if resp.chunk_left is not None:
                pending_bytes = resp.chunk_left
                while pending_bytes:
                    chunk = fp.read(min(chunk_size, pending_bytes))
                    pending_bytes -= len(chunk)
                    yield chunk
                fp.read(2)  # throw away crlf
            while 1:
                # XXX correct line size? (httplib has 64kb, seems insane)
                pending_bytes = fp.readline(40).strip()
                pending_bytes = int(pending_bytes, 16)
                if pending_bytes == 0:
                    break
                while pending_bytes:
                    chunk = fp.read(min(chunk_size, pending_bytes))
                    pending_bytes -= len(chunk)
                    yield chunk
                fp.read(2)  # throw away crlf
            self._content_consumed = True
            fp.close()


        if getattr(getattr(self.raw, '_original_response', None), 'chunked', False):
            gen = generate_chunked()
        else:
            gen = generate()

        if 'gzip' in self.headers.get('content-encoding', ''):
            gen = stream_decompress(gen, mode='gzip')
        elif 'deflate' in self.headers.get('content-encoding', ''):
            gen = stream_decompress(gen, mode='deflate')

        if decode_unicode:
            gen = stream_decode_response_unicode(gen, self)

        return gen


    def iter_lines(self, chunk_size=10 * 1024, decode_unicode=None):
        """Iterates over the response data, one line at a time. This
        avoids reading the content at once into memory for large
        responses.
        """

        # TODO: why rstrip by default
        pending = None

        for chunk in self.iter_content(chunk_size, decode_unicode=decode_unicode):

            if pending is not None:
                chunk = pending + chunk
            lines = chunk.splitlines(True)

            for line in lines[:-1]:
                yield line.rstrip()

            # Save the last part of the chunk for next iteration, to keep full line together
            # lines may be empty for the last chunk of a chunked response

            if lines:
                pending = lines[-1]
                # if pending is a complete line, give it back
                if pending[-1] == '\n':
                    yield pending.rstrip()
                    pending = None
            else:
                pending = None

        # Yield the last line
        if pending is not None:
            yield pending.rstrip()


    @property
    def content(self):
        """Content of the response, in bytes."""

        if self._content is None:
            # Read the contents.
            try:
                if self._content_consumed:
                    raise RuntimeError(
                        'The content for this response was already consumed')

                self._content = self.raw.read()
            except AttributeError:
                self._content = None

        self._content_consumed = True
        return self._content


    @property
    def text(self):
        """Content of the response, in unicode.

        If Response.encoding is None and the chardet module is available, the
        encoding will be guessed.
        """

        # Try charset from content-type
        content = None
        encoding = self.encoding

        # Fall back to auto-detected encoding if chardet is available.
        if self.encoding is None:
            try:
                detected = chardet.detect(self.content) or {}
                encoding = detected.get('encoding')

            # Trust that chardet isn't available or something went terribly wrong.
            except Exception:
                pass

        # Decode unicode from given encoding.
        try:
            content = str(self.content, encoding, errors='replace')
        except (UnicodeError, TypeError):
            pass

        return content


    def raise_for_status(self):
        """Raises stored :class:`HTTPError` or :class:`URLError`, if one occurred."""

        if self.error:
            raise self.error

        if (self.status_code >= 300) and (self.status_code < 400):
            raise HTTPError('%s Redirection' % self.status_code)

        elif (self.status_code >= 400) and (self.status_code < 500):
            raise HTTPError('%s Client Error' % self.status_code)

        elif (self.status_code >= 500) and (self.status_code < 600):
            raise HTTPError('%s Server Error' % self.status_code)
@@ -0,0 +1,288 @@
# -*- coding: utf-8 -*-

"""
requests.session
~~~~~~~~~~~~~~~~

This module provides a Session object to manage and persist settings across
requests (cookies, auth, proxies).

"""

from .defaults import defaults
from .models import Request
from .hooks import dispatch_hook
from .utils import header_expand
from .packages.urllib3.poolmanager import PoolManager


def merge_kwargs(local_kwarg, default_kwarg):
    """Merges kwarg dictionaries.

    If a local key in the dictionary is set to None, it will be removed.
    """

    if default_kwarg is None:
        return local_kwarg

    if isinstance(local_kwarg, str):
        return local_kwarg

    if local_kwarg is None:
        return default_kwarg

    # Bypass if not a dictionary (e.g. timeout)
    if not hasattr(default_kwarg, 'items'):
        return local_kwarg

    # Update new values.
    kwargs = default_kwarg.copy()
    kwargs.update(local_kwarg)

    # Remove keys that are set to None.
    for (k, v) in list(local_kwarg.items()):
        if v is None:
            del kwargs[k]

    return kwargs


class Session(object):
    """A Requests session."""

    __attrs__ = [
        'headers', 'cookies', 'auth', 'timeout', 'proxies', 'hooks',
        'params', 'config']


    def __init__(self,
        headers=None,
        cookies=None,
        auth=None,
        timeout=None,
        proxies=None,
        hooks=None,
        params=None,
        config=None,
        verify=True):

        self.headers = headers or {}
        self.cookies = cookies or {}
        self.auth = auth
        self.timeout = timeout
        self.proxies = proxies or {}
        self.hooks = hooks or {}
        self.params = params or {}
        self.config = config or {}
        self.verify = verify

        for (k, v) in list(defaults.items()):
            self.config.setdefault(k, v)

        self.poolmanager = PoolManager(
            num_pools=self.config.get('pool_connections'),
            maxsize=self.config.get('pool_maxsize')
        )

        # Set up a CookieJar to be used by default
        self.cookies = {}

        # Add passed cookies in.
        if cookies is not None:
            self.cookies.update(cookies)

    def __repr__(self):
        return '<requests-client at 0x%x>' % (id(self))

    def __enter__(self):
        return self

    def __exit__(self, *args):
        pass

    def request(self, method, url,
        params=None,
        data=None,
        headers=None,
        cookies=None,
        files=None,
        auth=None,
        timeout=None,
        allow_redirects=False,
        proxies=None,
        hooks=None,
        return_response=True,
        config=None,
        prefetch=False,
        verify=None):

        """Constructs and sends a :class:`Request <Request>`.
        Returns :class:`Response <Response>` object.

        :param method: method for the new :class:`Request` object.
        :param url: URL for the new :class:`Request` object.
        :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
        :param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`.
        :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
        :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
        :param files: (optional) Dictionary of 'filename': file-like-objects for multipart encoding upload.
        :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
        :param timeout: (optional) Float describing the timeout of the request.
        :param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
        :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
        :param return_response: (optional) If False, an un-sent Request object will be returned.
        :param config: (optional) A configuration dictionary.
        :param prefetch: (optional) if ``True``, the response content will be immediately downloaded.
        :param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided.
        """

        method = str(method).upper()

        # Default empty dicts for dict params.
        cookies = {} if cookies is None else cookies
        data = {} if data is None else data
        files = {} if files is None else files
        headers = {} if headers is None else headers
        params = {} if params is None else params
        hooks = {} if hooks is None else hooks

        if verify is None:
            verify = self.verify

        # use session's hooks as defaults
        for key, cb in list(self.hooks.items()):
            hooks.setdefault(key, cb)

        # Expand header values.
        if headers:
            for k, v in list(headers.items()) or {}:
                headers[k] = header_expand(v)

        args = dict(
            method=method,
            url=url,
            data=data,
            params=params,
            headers=headers,
            cookies=cookies,
            files=files,
            auth=auth,
            hooks=hooks,
            timeout=timeout,
            allow_redirects=allow_redirects,
            proxies=proxies,
            config=config,
            verify=verify,
            _poolmanager=self.poolmanager
        )

        # Merge local kwargs with session kwargs.
        for attr in self.__attrs__:
            session_val = getattr(self, attr, None)
            local_val = args.get(attr)

            args[attr] = merge_kwargs(local_val, session_val)

        # Arguments manipulation hook.
        args = dispatch_hook('args', args['hooks'], args)

        # Create the (empty) response.
        r = Request(**args)

        # Give the response some context.
        r.session = self

        # Don't send if asked nicely.
        if not return_response:
            return r

        # Send the HTTP Request.
        r.send(prefetch=prefetch)

        # Send any cookies back up to the session.
        self.cookies.update(r.response.cookies)

        # Return the response.
        return r.response


    def get(self, url, **kwargs):
        """Sends a GET request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param **kwargs: Optional arguments that ``request`` takes.
        """

        kwargs.setdefault('allow_redirects', True)
        return self.request('get', url, **kwargs)


    def options(self, url, **kwargs):
        """Sends an OPTIONS request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param **kwargs: Optional arguments that ``request`` takes.
        """

        kwargs.setdefault('allow_redirects', True)
        return self.request('options', url, **kwargs)


    def head(self, url, **kwargs):
        """Sends a HEAD request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param **kwargs: Optional arguments that ``request`` takes.
        """

        kwargs.setdefault('allow_redirects', True)
        return self.request('head', url, **kwargs)


    def post(self, url, data=None, **kwargs):
        """Sends a POST request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`.
        :param **kwargs: Optional arguments that ``request`` takes.
        """

        return self.request('post', url, data=data, **kwargs)


    def put(self, url, data=None, **kwargs):
        """Sends a PUT request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`.
        :param **kwargs: Optional arguments that ``request`` takes.
        """

        return self.request('put', url, data=data, **kwargs)


    def patch(self, url, data=None, **kwargs):
        """Sends a PATCH request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`.
        :param **kwargs: Optional arguments that ``request`` takes.
        """

        return self.request('patch', url, data=data, **kwargs)


    def delete(self, url, **kwargs):
        """Sends a DELETE request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param **kwargs: Optional arguments that ``request`` takes.
        """

        return self.request('delete', url, **kwargs)


def session(**kwargs):
    """Returns a :class:`Session` for context-management."""

    return Session(**kwargs)
@@ -0,0 +1,86 @@
# -*- coding: utf-8 -*-

from .structures import LookupDict

_codes = {

    # Informational.
    100: ('continue',),
    101: ('switching_protocols',),
    102: ('processing',),
    103: ('checkpoint',),
    122: ('uri_too_long', 'request_uri_too_long'),
    200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/'),
    201: ('created',),
    202: ('accepted',),
    203: ('non_authoritative_info', 'non_authoritative_information'),
    204: ('no_content',),
    205: ('reset_content', 'reset'),
    206: ('partial_content', 'partial'),
    207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'),
    208: ('im_used',),

    # Redirection.
    300: ('multiple_choices',),
    301: ('moved_permanently', 'moved', '\\o-'),
    302: ('found',),
    303: ('see_other', 'other'),
    304: ('not_modified',),
    305: ('use_proxy',),
    306: ('switch_proxy',),
    307: ('temporary_redirect', 'temporary_moved', 'temporary'),
    308: ('resume_incomplete', 'resume'),

    # Client Error.
    400: ('bad_request', 'bad'),
    401: ('unauthorized',),
    402: ('payment_required', 'payment'),
    403: ('forbidden',),
    404: ('not_found', '-o-'),
    405: ('method_not_allowed', 'not_allowed'),
    406: ('not_acceptable',),
    407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'),
    408: ('request_timeout', 'timeout'),
    409: ('conflict',),
    410: ('gone',),
    411: ('length_required',),
    412: ('precondition_failed', 'precondition'),
    413: ('request_entity_too_large',),
    414: ('request_uri_too_large',),
    415: ('unsupported_media_type', 'unsupported_media', 'media_type'),
    416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'),
    417: ('expectation_failed',),
    418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'),
    422: ('unprocessable_entity', 'unprocessable'),
    423: ('locked',),
    424: ('failed_dependency', 'dependency'),
    425: ('unordered_collection', 'unordered'),
    426: ('upgrade_required', 'upgrade'),
    428: ('precondition_required', 'precondition'),
    429: ('too_many_requests', 'too_many'),
    431: ('header_fields_too_large', 'fields_too_large'),
    444: ('no_response', 'none'),
    449: ('retry_with', 'retry'),
    450: ('blocked_by_windows_parental_controls', 'parental_controls'),
    499: ('client_closed_request',),

    # Server Error.
    500: ('internal_server_error', 'server_error', '/o\\'),
    501: ('not_implemented',),
    502: ('bad_gateway',),
    503: ('service_unavailable', 'unavailable'),
    504: ('gateway_timeout',),
    505: ('http_version_not_supported', 'http_version'),
    506: ('variant_also_negotiates',),
    507: ('insufficient_storage',),
    509: ('bandwidth_limit_exceeded', 'bandwidth'),
    510: ('not_extended',),
}

codes = LookupDict(name='status_codes')

for (code, titles) in list(_codes.items()):
    for title in titles:
        setattr(codes, title, code)
        if not title.startswith('\\'):
            setattr(codes, title.upper(), code)
@@ -0,0 +1,66 @@
# -*- coding: utf-8 -*-

"""
requests.structures
~~~~~~~~~~~~~~~~~~~

Data structures that power Requests.

"""


class CaseInsensitiveDict(dict):
    """Case-insensitive Dictionary

    For example, ``headers['content-encoding']`` will return the
    value of a ``'Content-Encoding'`` response header."""

    @property
    def lower_keys(self):
        if not hasattr(self, '_lower_keys') or not self._lower_keys:
            self._lower_keys = dict((k.lower(), k) for k in list(self.keys()))
        return self._lower_keys

    def _clear_lower_keys(self):
        if hasattr(self, '_lower_keys'):
            self._lower_keys.clear()

    def __setitem__(self, key, value):
        dict.__setitem__(self, key, value)
        self._clear_lower_keys()

    def __delitem__(self, key):
        dict.__delitem__(self, key)
        self._lower_keys.clear()

    def __contains__(self, key):
        return key.lower() in self.lower_keys

    def __getitem__(self, key):
        # We allow fall-through here, so values default to None
        if key in self:
            return dict.__getitem__(self, self.lower_keys[key.lower()])

    def get(self, key, default=None):
        if key in self:
            return self[key]
        else:
            return default

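
# A minimal sketch of ``CaseInsensitiveDict``'s case-insensitive lookup
# (illustrative; lookups go through the ``lower_keys`` mapping defined above):
#
#   >>> headers = CaseInsensitiveDict()
#   >>> headers['Content-Encoding'] = 'gzip'
#   >>> headers['content-encoding']
#   'gzip'
#   >>> headers.get('X-Missing') is None
#   True
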
class LookupDict(dict):
    """Dictionary lookup object."""

    def __init__(self, name=None):
        self.name = name
        super(LookupDict, self).__init__()

    def __repr__(self):
        return '<lookup \'%s\'>' % (self.name)

    def __getitem__(self, key):
        # We allow fall-through here, so values default to None

        return self.__dict__.get(key, None)

    def get(self, key, default=None):
        return self.__dict__.get(key, default)
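
# A minimal sketch of the attribute-based lookup above (illustrative only):
if __name__ == '__main__':
    statuses = LookupDict(name='example')
    statuses.ok = 200                    # values live in __dict__, not in the dict itself
    assert statuses['ok'] == 200         # __getitem__ reads from __dict__
    assert statuses['missing'] is None   # unknown keys fall through to None
    print(statuses)                      # <lookup 'example'>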
@@ -0,0 +1,408 @@
# -*- coding: utf-8 -*-

"""
requests.utils
~~~~~~~~~~~~~~

This module provides utility functions that are used within Requests
that are also useful for external consumption.

"""

import cgi
import codecs
import os
import random
import re
import zlib

from .compat import parse_http_list as _parse_list_header
from .compat import quote, unquote, cookielib, SimpleCookie, is_py2
from .compat import basestring


def dict_from_string(s):
    """Returns a dict of cookies parsed from a cookie string."""

    cookies = dict()

    c = SimpleCookie()
    c.load(s)

    for k, v in list(c.items()):
        cookies.update({k: v.value})

    return cookies


def guess_filename(obj):
    """Tries to guess the filename of the given object."""
    name = getattr(obj, 'name', None)
    if name and name[0] != '<' and name[-1] != '>':
        return name

# From mitsuhiko/werkzeug (used with permission).
def parse_list_header(value):
    """Parse lists as described by RFC 2068 Section 2.

    In particular, parse comma-separated lists where the elements of
    the list may include quoted-strings. A quoted-string could
    contain a comma. A non-quoted string could have quotes in the
    middle. Quotes are removed automatically after parsing.

    It basically works like :func:`parse_set_header` just that items
    may appear multiple times and case sensitivity is preserved.

    The return value is a standard :class:`list`:

    >>> parse_list_header('token, "quoted value"')
    ['token', 'quoted value']

    To create a header from the :class:`list` again, use the
    :func:`dump_header` function.

    :param value: a string with a list header.
    :return: :class:`list`
    """
    result = []
    for item in _parse_list_header(value):
        if item[:1] == item[-1:] == '"':
            item = unquote_header_value(item[1:-1])
        result.append(item)
    return result


# From mitsuhiko/werkzeug (used with permission).
def parse_dict_header(value):
    """Parse lists of key, value pairs as described by RFC 2068 Section 2 and
    convert them into a python dict:

    >>> d = parse_dict_header('foo="is a fish", bar="as well"')
    >>> type(d) is dict
    True
    >>> sorted(d.items())
    [('bar', 'as well'), ('foo', 'is a fish')]

    If there is no value for a key it will be `None`:

    >>> parse_dict_header('key_without_value')
    {'key_without_value': None}

    To create a header from the :class:`dict` again, use the
    :func:`dump_header` function.

    :param value: a string with a dict header.
    :return: :class:`dict`
    """
    result = {}
    for item in _parse_list_header(value):
        if '=' not in item:
            result[item] = None
            continue
        name, value = item.split('=', 1)
        if value[:1] == value[-1:] == '"':
            value = unquote_header_value(value[1:-1])
        result[name] = value
    return result


# From mitsuhiko/werkzeug (used with permission).
def unquote_header_value(value, is_filename=False):
    r"""Unquotes a header value. (Reversal of :func:`quote_header_value`).
    This does not use the real unquoting but what browsers are actually
    using for quoting.

    :param value: the header value to unquote.
    """
    if value and value[0] == value[-1] == '"':
        # this is not the real unquoting, but fixing this so that the
        # RFC is met will result in bugs with internet explorer and
        # probably some other browsers as well. IE for example is
        # uploading files with "C:\foo\bar.txt" as filename
        value = value[1:-1]

        # if this is a filename and the starting characters look like
        # a UNC path, then just return the value without quotes. Using the
        # replace sequence below on a UNC path has the effect of turning
        # the leading double slash into a single slash and then
        # _fix_ie_filename() doesn't work correctly. See #458.
        if not is_filename or value[:2] != '\\\\':
            return value.replace('\\\\', '\\').replace('\\"', '"')
    return value

def header_expand(headers):
    """Returns an HTTP Header value string from a dictionary.

    Example expansion::

        {'text/x-dvi': {'q': '.8', 'mxb': '100000', 'mxt': '5.0'}, 'text/x-c': {}}
        # Accept: text/x-dvi; q=.8; mxb=100000; mxt=5.0, text/x-c

        (('text/x-dvi', {'q': '.8', 'mxb': '100000', 'mxt': '5.0'}), ('text/x-c', {}))
        # Accept: text/x-dvi; q=.8; mxb=100000; mxt=5.0, text/x-c
    """

    collector = []

    if isinstance(headers, dict):
        headers = list(headers.items())

    elif isinstance(headers, basestring):
        return headers

    for i, (value, params) in enumerate(headers):

        _params = []

        for (p_k, p_v) in list(params.items()):

            _params.append('%s=%s' % (p_k, p_v))

        collector.append(value)
        collector.append('; ')

        if len(params):

            collector.append('; '.join(_params))

            if not len(headers) == i + 1:
                collector.append(', ')

    # Remove trailing separators.
    if collector[-1] in (', ', '; '):
        del collector[-1]

    return ''.join(collector)

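
# A minimal usage sketch of the expansion above (illustrative). The tuple form
# keeps the item order deterministic, which the plain-dict form does not:
#
#   >>> header_expand((('text/x-dvi', {'q': '.8'}), ('text/x-c', {})))
#   'text/x-dvi; q=.8, text/x-c'
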
def randombytes(n):
    """Return n random bytes."""
    if is_py2:
        L = [chr(random.randrange(0, 256)) for i in range(n)]
    else:
        L = [chr(random.randrange(0, 256)).encode('utf-8') for i in range(n)]
    return b"".join(L)


def dict_from_cookiejar(cj):
    """Returns a key/value dictionary from a CookieJar.

    :param cj: CookieJar object to extract cookies from.
    """

    cookie_dict = {}

    for _, cookies in list(cj._cookies.items()):
        for _, cookies in list(cookies.items()):
            for cookie in list(cookies.values()):
                # print cookie
                cookie_dict[cookie.name] = cookie.value

    return cookie_dict


def cookiejar_from_dict(cookie_dict):
    """Returns a CookieJar from a key/value dictionary.

    :param cookie_dict: Dict of key/values to insert into CookieJar.
    """

    # return cookiejar if one was passed in
    if isinstance(cookie_dict, cookielib.CookieJar):
        return cookie_dict

    # create cookiejar
    cj = cookielib.CookieJar()

    cj = add_dict_to_cookiejar(cj, cookie_dict)

    return cj

def add_dict_to_cookiejar(cj, cookie_dict):
    """Returns a CookieJar from a key/value dictionary.

    :param cj: CookieJar to insert cookies into.
    :param cookie_dict: Dict of key/values to insert into CookieJar.
    """

    for k, v in list(cookie_dict.items()):

        cookie = cookielib.Cookie(
            version=0,
            name=k,
            value=v,
            port=None,
            port_specified=False,
            domain='',
            domain_specified=False,
            domain_initial_dot=False,
            path='/',
            path_specified=True,
            secure=False,
            expires=None,
            discard=True,
            comment=None,
            comment_url=None,
            rest={'HttpOnly': None},
            rfc2109=False
        )

        # add cookie to cookiejar
        cj.set_cookie(cookie)

    return cj

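
# A minimal round-trip sketch for the three cookie helpers above (illustrative):
#
#   >>> jar = cookiejar_from_dict({'session': 'abc123'})
#   >>> dict_from_cookiejar(jar)
#   {'session': 'abc123'}
#
# Passing an existing CookieJar to cookiejar_from_dict returns it unchanged.
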
def get_encodings_from_content(content):
    """Returns encodings from given content string.

    :param content: bytestring to extract encodings from.
    """

    charset_re = re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I)

    return charset_re.findall(content)


def get_encoding_from_headers(headers):
    """Returns encodings from given HTTP Header Dict.

    :param headers: dictionary to extract encoding from.
    """

    content_type = headers.get('content-type')

    if not content_type:
        return None

    content_type, params = cgi.parse_header(content_type)

    if 'charset' in params:
        return params['charset'].strip("'\"")

    if 'text' in content_type:
        return 'ISO-8859-1'

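
# A minimal sketch of the two detection helpers above (illustrative):
#
#   >>> get_encoding_from_headers({'content-type': 'text/html; charset=utf-8'})
#   'utf-8'
#   >>> get_encoding_from_headers({'content-type': 'text/html'})
#   'ISO-8859-1'
#   >>> get_encodings_from_content('<meta charset="utf-8">')
#   ['utf-8']
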
def unicode_from_html(content):
    """Attempts to decode an HTML string into unicode.
    If unsuccessful, the original content is returned.
    """

    encodings = get_encodings_from_content(content)

    for encoding in encodings:

        try:
            return str(content, encoding)
        except (UnicodeError, TypeError):
            pass

    return content


def stream_decode_response_unicode(iterator, r):
    """Stream decodes an iterator."""

    if r.encoding is None:
        for item in iterator:
            yield item
        return

    decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace')
    for chunk in iterator:
        rv = decoder.decode(chunk)
        if rv:
            yield rv
    rv = decoder.decode('', final=True)
    if rv:
        yield rv

def get_unicode_from_response(r):
    """Returns the requested content back in unicode.

    :param r: Response object to get unicode content from.

    Tried:

    1. charset from content-type

    2. every encoding from ``<meta ... charset=XXX>``

    3. fall back and replace all unicode characters

    """

    tried_encodings = []

    # Try charset from content-type
    encoding = get_encoding_from_headers(r.headers)

    if encoding:
        try:
            return str(r.content, encoding)
        except UnicodeError:
            tried_encodings.append(encoding)

    # Fall back:
    try:
        return str(r.content, encoding, errors='replace')
    except TypeError:
        return r.content


def decode_gzip(content):
    """Return gzip-decoded string.

    :param content: bytestring to gzip-decode.
    """

    return zlib.decompress(content, 16 + zlib.MAX_WBITS)

def stream_decompress(iterator, mode='gzip'):
    """
    Stream decodes an iterator over compressed data

    :param iterator: An iterator over compressed data
    :param mode: 'gzip' or 'deflate'
    :return: An iterator over decompressed data
    """

    if mode not in ['gzip', 'deflate']:
        raise ValueError('stream_decompress mode must be gzip or deflate')

    zlib_mode = 16 + zlib.MAX_WBITS if mode == 'gzip' else -zlib.MAX_WBITS
    dec = zlib.decompressobj(zlib_mode)
    try:
        for chunk in iterator:
            rv = dec.decompress(chunk)
            if rv:
                yield rv
    except zlib.error:
        # If there was an error decompressing, just return the raw chunk
        yield chunk
        # Continue to return the rest of the raw data
        for chunk in iterator:
            yield chunk
    else:
        # Make sure everything has been returned from the decompression object
        buf = dec.decompress('')
        rv = buf + dec.flush()
        if rv:
            yield rv

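
# A minimal sketch of streaming gzip decompression (illustrative; assumes a
# Python 2 runtime such as the one embedded in Sublime Text 2, since the final
# dec.decompress('') call above passes a str rather than bytes):
#
#   >>> co = zlib.compressobj(9, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
#   >>> data = co.compress('hello world') + co.flush()
#   >>> ''.join(stream_decompress(iter([data[:5], data[5:]])))
#   'hello world'
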
def requote_path(path):
    """Re-quote the given URL path component.

    This function passes the given path through an unquote/quote cycle to
    ensure that it is fully and consistently quoted.
    """
    parts = path.split(b"/")
    parts = (quote(unquote(part), safe=b"") for part in parts)
    return b"/".join(parts)
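
# A minimal sketch of the unquote/quote round trip above (illustrative; byte
# paths, matching the Python 2 runtime this vendored copy targets):
if __name__ == '__main__':
    # A raw space and an already-quoted 'i' (%69) both come out consistently
    # percent-encoded / decoded after the cycle.
    assert requote_path(b'/over there/th%69ng') == b'/over%20there/thing'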