feat(SublimeText2.GitPackages): cache packages
This commit is contained in:
1
EthanBrown.SublimeText2.GitPackages/tools/PackageCache/sublime-github/.gitignore
vendored
Normal file
1
EthanBrown.SublimeText2.GitPackages/tools/PackageCache/sublime-github/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
*.pyc
|
@@ -0,0 +1,6 @@
|
||||
[
|
||||
{ "keys": ["super+g", "super+n"], "command": "public_gist_from_selection" },
|
||||
{ "keys": ["super+g", "super+p","super+n"], "command": "private_gist_from_selection" },
|
||||
{ "keys": ["super+g", "super+o"], "command": "open_gist_in_editor" },
|
||||
{ "keys": ["super+g", "super+c"], "command": "open_gist_url" }
|
||||
]
|
@@ -0,0 +1,37 @@
|
||||
/* Sublime GitHub default settings */
|
||||
{
|
||||
"accounts": {
|
||||
"GitHub": {
|
||||
"base_uri": "https://api.github.com",
|
||||
"github_token": ""
|
||||
}
|
||||
},
|
||||
|
||||
// You can add support for a private GitHub installation by adding another entry
|
||||
// to the accounts entry in the User settings file (Preferences ->
|
||||
// Package Settings -> GitHub -> Settings - User). You can then switch between
|
||||
// accounts via the GitHub: Switch Accounts command
|
||||
//
|
||||
// "YourCo": {
|
||||
// "base_uri": "https://github.yourco.com/api/v3",
|
||||
// "github_token": ""
|
||||
// }
|
||||
|
||||
// The format of the each line in the list of gists.
|
||||
// The value is either a Python format string, or a list of Python format
|
||||
// strings. In the latter case, each element of the list will be a separate
|
||||
// line in the select list.
|
||||
// Valid parameters:
|
||||
// filename - filename of [first file in] gist
|
||||
// description - description of gist
|
||||
// index - 1-based index of gist in the list
|
||||
//
|
||||
// Some things to try:
|
||||
// "gist_list_format": "%(index)d. %(filename)s: %(description)s",
|
||||
// "gist_list_format": ["%(filename)s", "%(description)s"],
|
||||
"gist_list_format": "%(filename)s: %(description)s",
|
||||
// If true, display the list of gists in a monospace font
|
||||
"gist_list_monospace": false,
|
||||
// output debug info to the console
|
||||
"debug": false
|
||||
}
|
@@ -0,0 +1,15 @@
|
||||
[
|
||||
{ "caption": "GitHub: Private Gist from Selection", "command": "private_gist_from_selection" },
|
||||
{ "caption": "GitHub: Public Gist from Selection", "command": "public_gist_from_selection" },
|
||||
{ "caption": "GitHub: Copy Gist to Clipboard", "command": "open_gist" },
|
||||
{ "caption": "GitHub: Copy Starred Gist to Clipboard", "command": "open_starred_gist" },
|
||||
{ "caption": "GitHub: Open Gist in Editor", "command": "open_gist_in_editor" },
|
||||
{ "caption": "GitHub: Open Starred Gist in Editor", "command": "open_starred_gist_in_editor" },
|
||||
{ "caption": "GitHub: Copy Gist URL to Clipboard", "command": "open_gist_url" },
|
||||
{ "caption": "GitHub: Open Gist in Browser", "command": "open_gist_in_browser" },
|
||||
{ "caption": "GitHub: Open Starred Gist in Browser", "command": "open_starred_gist_in_browser" },
|
||||
{ "caption": "GitHub: Update Gist", "command": "update_gist" },
|
||||
{ "caption": "GitHub: Switch Accounts", "command": "switch_accounts" },
|
||||
{ "caption": "GitHub: Copy Remote URL to Clipboard", "command": "copy_remote_url" },
|
||||
{ "caption": "GitHub: Open Remote URL in Browser", "command": "open_remote_url" }
|
||||
]
|
@@ -0,0 +1,20 @@
|
||||
Copyright (c) 2011 Brad Greenlee
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
"Software"), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
||||
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
||||
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
@@ -0,0 +1,34 @@
|
||||
[
|
||||
{
|
||||
"caption": "Preferences",
|
||||
"mnemonic": "n",
|
||||
"id": "preferences",
|
||||
"children":
|
||||
[
|
||||
{
|
||||
"caption": "Package Settings",
|
||||
"mnemonic": "P",
|
||||
"id": "package-settings",
|
||||
"children":
|
||||
[
|
||||
{
|
||||
"caption": "GitHub",
|
||||
"children":
|
||||
[
|
||||
{
|
||||
"command": "open_file",
|
||||
"args": {"file": "${packages}/sublime-github/GitHub.sublime-settings"},
|
||||
"caption": "Settings – Default"
|
||||
},
|
||||
{
|
||||
"command": "open_file",
|
||||
"args": {"file": "${packages}/User/GitHub.sublime-settings"},
|
||||
"caption": "Settings – User"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
@@ -0,0 +1,18 @@
|
||||
Sublime GitHub includes some external libraries to simplify installation.
|
||||
|
||||
Requests License
|
||||
================
|
||||
|
||||
Copyright (c) 2012 Kenneth Reitz.
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
@@ -0,0 +1,169 @@
|
||||
# Sublime GitHub
|
||||
|
||||
This is a plugin for the [Sublime Text 2](http://www.sublimetext.com/) text
|
||||
editor that allows you to create and browse your [GitHub Gists](http://gist.github.com).
|
||||
|
||||
## Installation
|
||||
|
||||
**The easiest way to install is via the** [**Sublime Package Control**](http://wbond.net/sublime_packages/package_control) **plugin.**
|
||||
Just open "Package Control: Install Package" in your Command Palette and search for
|
||||
"sublime-github" (or, if you already have it installed, select "Package Control: Upgrade Package"
|
||||
to upgrade).
|
||||
|
||||
To install it manually in a shell/Terminal (on OS X, Linux or Cygwin), via git:
|
||||
|
||||
cd ~/"Library/Application Support/Sublime Text 2/Packages/" # location on OS X; will be different on Linux & Windows
|
||||
git clone https://github.com/bgreenlee/sublime-github.git
|
||||
|
||||
or, if you don't have git installed:
|
||||
|
||||
cd ~/"Library/Application Support/Sublime Text 2/Packages/"
|
||||
rm -rf bgreenlee-sublime-github* # remove any old versions
|
||||
curl -L https://github.com/bgreenlee/sublime-github/tarball/master | tar xf -
|
||||
|
||||
The plugin should be picked up automatically. If not, restart Sublime Text.
|
||||
|
||||
## Usage
|
||||
|
||||
The first time you run one of the commands, it will ask you for your GitHub
|
||||
username and password in order to create a GitHub API access token, which gets saved
|
||||
in the Sublime GitHub user settings file. Your username and password are not
|
||||
stored anywhere, but if you would rather generate the access token yourself, see
|
||||
the "Generating Your Own Access Token" section below.
|
||||
|
||||
The following commands are available in the Command Palette:
|
||||
|
||||
* **GitHub: Private Gist from Selection**
|
||||
|
||||
Create a private gist from the currently selected text (or, if nothing is selected,
|
||||
the contents of the active editor).
|
||||
|
||||
* **GitHub: Public Gist from Selection**
|
||||
|
||||
Create a public gist from the currently selected text (or, if nothing is selected,
|
||||
the contents of the active editor).
|
||||
|
||||
* **GitHub: Copy Gist to Clipboard**
|
||||
|
||||
Displays a quick select panel listing all of your gists, and selecting one will
|
||||
copy the contents of that gist to your clipboard.
|
||||
|
||||
* **GitHub: Copy Starred Gist to Clipboard**
|
||||
|
||||
Displays a quick select panel listing only your starred gists, and selecting one will
|
||||
copy the contents of that gist to your clipboard.
|
||||
|
||||
* **GitHub: Open Gist in Editor**
|
||||
|
||||
Displays a quick select panel listing all of your gists, and selecting one will
|
||||
open a new editor tab with the contents of that gist.
|
||||
|
||||
* **GitHub: Open Starred Gist in Editor**
|
||||
|
||||
Displays a quick select panel listing only your starred gists, and selecting one will
|
||||
open a new editor tab with the contents of that gist.
|
||||
|
||||
* **GitHub: Open Gist in Browser**
|
||||
|
||||
Displays a quick select panel listing all of your gists, and selecting one will
|
||||
open that gist in your default web browser.
|
||||
|
||||
* **GitHub: Open Starred Gist in Browser**
|
||||
|
||||
Displays a quick select panel listing only your starred gists, and selecting one will
|
||||
open that gist in your default web browser.
|
||||
|
||||
* **GitHub: Update Gist**
|
||||
|
||||
Update the gist open in the current editor.
|
||||
|
||||
* **GitHub: Switch Accounts**
|
||||
|
||||
Switch to another GitHub account (see Adding Additional Accounts below)
|
||||
|
||||
* **GitHub: Open Remote URL in Browser**
|
||||
|
||||
Open the current file's location in the repository in the browser. *Note:* Requires
|
||||
the Git plugin, available through the Package Manager. After installing, restart
|
||||
Sublime Text.
|
||||
|
||||
* **GitHub: Copy Remote URL to Clipboard**
|
||||
|
||||
Put the url of the current file's location in the repository into the clipboard.
|
||||
*Note:* Requires the Git plugin, available through the Package Manager. After
|
||||
installing, restart Sublime Text.
|
||||
|
||||
## Adding Additional Accounts
|
||||
|
||||
If you have multiple GitHub accounts, or have a private GitHub installation, you can add the other
|
||||
accounts and switch between them whenever you like.
|
||||
|
||||
Go to the GitHub user settings file (Preferences -> Package Settings -> GitHub -> Settings - User),
|
||||
and add another entry to the `accounts` dictionary. If it is another GitHub account, copy the
|
||||
`base_uri` for the default GitHub entry (if you don't see it, you can get it from Preferences ->
|
||||
Package Settings -> GitHub -> Settings - Default, or in the example below), and just give the
|
||||
account a different name. If you're adding a private GitHub installation, the `base_uri` will be
|
||||
whatever the base url is for your private GitHub, plus "/api/v3". For example:
|
||||
|
||||
"accounts":
|
||||
{
|
||||
"GitHub":
|
||||
{
|
||||
"base_uri": "https://api.github.com",
|
||||
"github_token": "..."
|
||||
},
|
||||
"YourCo":
|
||||
{
|
||||
"base_uri": "https://github.yourco.com/api/v3",
|
||||
"github_token": ""
|
||||
},
|
||||
|
||||
}
|
||||
|
||||
Don't worry about setting the `github_token`--that will be set for you automatically, after you
|
||||
switch accounts (Shift-Cmd-P, "GitHub: Switch Accounts").
|
||||
|
||||
## Issues
|
||||
|
||||
* Linux requires the [curl](http://curl.haxx.se/) binary to be installed on your system (in one of:
|
||||
`/usr/local/sbin`, `/usr/local/bin`, `/usr/sbin`, `/usr/bin`, `/sbin`, or `/bin`).
|
||||
|
||||
* Depending on the number of gists you have, there can be a considerable delay the first time
|
||||
your list of gists is fetched. Subsequent requests will be cached and should be a bit faster
|
||||
(although the GitHub API's ETags are currently not correct; once they fix that, it should speed
|
||||
things up). In the meantime, if there are gists that you open frequently, open them on GitHub and
|
||||
"Star" them, then access them via the Open/Copy Starred Gist commands.
|
||||
|
||||
* Setting the file type for syntax highlighting when opening a gist in the editor does not work
|
||||
in Linux. I could probably get it to work with significant effort, so if you desperately want it, open
|
||||
an issue.
|
||||
|
||||
## Generating Your Own Access Token
|
||||
|
||||
If you feel uncomfortable giving your GitHub username and password to the
|
||||
plugin, you can generate a GitHub API access token yourself. Just open up
|
||||
a Terminal window/shell (on OS X, Linux or Cygwin), and run:
|
||||
|
||||
curl -u username -d '{"scopes":["gist"]}' https://api.github.com/authorizations
|
||||
|
||||
where `username` is your GitHub username. You'll be prompted for your password first. Then you'll get back
|
||||
a response that includes a 40-digit "token" value (e.g. `6423ba8429a152ff4a7279d1e8f4674029d3ef87`).
|
||||
Go to Sublime Text 2 -> Preferences -> Package Settings -> GitHub -> Settings - User,
|
||||
and insert the token there. It should look like:
|
||||
|
||||
{
|
||||
"github_token": "6423ba8429a152ff4a7279d1e8f4674029d3ef87"
|
||||
}
|
||||
|
||||
Restart Sublime.
|
||||
|
||||
That's it!
|
||||
|
||||
## Bugs and Feature Requests
|
||||
|
||||
<http://github.com/bgreenlee/sublime-github/issues>
|
||||
|
||||
## Copyright
|
||||
|
||||
Copyright © 2011+ Brad Greenlee. See LICENSE for details.
|
||||
|
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,28 @@
|
||||
# adapted from https://github.com/wbond/sublime_package_control/blob/master/Package%20Control.py
|
||||
import os.path
|
||||
import subprocess
|
||||
|
||||
|
||||
class BinaryNotFoundError(Exception):
    """Raised when a required executable cannot be located on this system."""
|
||||
|
||||
|
||||
def find_binary(name):
    """Return the absolute path to the executable *name*.

    Searches the standard system binary directories in order and returns
    the first match.

    :param name: bare binary name, e.g. ``"curl"``.
    :raises BinaryNotFoundError: if *name* is not found in any searched dir.
    """
    # renamed loop variable: `dir` shadowed the builtin
    search_dirs = ['/usr/local/sbin', '/usr/local/bin', '/usr/sbin', '/usr/bin',
                   '/sbin', '/bin']
    for directory in search_dirs:
        path = os.path.join(directory, name)
        if os.path.exists(path):
            return path

    raise BinaryNotFoundError('The binary %s could not be located' % name)
|
||||
|
||||
|
||||
def execute(args):
    """Run *args* as a subprocess and return its combined stdout/stderr.

    :param args: argv-style list, e.g. ``['git', 'status']``.
    :returns: everything the process wrote to stdout (stderr is merged in).
    """
    proc = subprocess.Popen(args, stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    # communicate() reads the pipe and waits in one step; reading then
    # waiting separately can deadlock if the child blocks on its stdin pipe.
    output, _ = proc.communicate()
    return output
|
@@ -0,0 +1,131 @@
|
||||
import sublime
|
||||
import os.path
|
||||
import json
|
||||
import sublime_requests as requests
|
||||
import sys
|
||||
import logging
|
||||
|
||||
logging.basicConfig(format='%(asctime)s %(message)s')
|
||||
logger = logging.getLogger()
|
||||
|
||||
|
||||
class GitHubApi(object):
    """Encapsulates the GitHub API.

    The class-level ``etags``/``cache`` dicts implement a simple
    conditional-GET cache shared by all instances: each GET response body is
    stored under its ETag, the ETag is remembered per URL and replayed as
    ``If-None-Match``, and a 304 Not Modified is answered from the cache.
    """
    PER_PAGE = 100
    etags = {}  # url -> etag of the last successful GET
    cache = {}  # etag -> parsed response body

    class UnauthorizedException(Exception):
        "Raised if we get a 401 from GitHub"
        pass

    class UnknownException(Exception):
        "Raised if we get a response code we don't recognize from GitHub"
        pass

    def __init__(self, base_uri="https://api.github.com", token=None, debug=False):
        """Create an API client.

        :param base_uri: API root; override for GitHub Enterprise installs.
        :param token: OAuth token (see :meth:`get_token`).
        :param debug: if True, enable DEBUG logging and verbose HTTP output.
        """
        self.base_uri = base_uri
        self.token = token
        self.debug = debug
        if debug:
            logger.setLevel(logging.DEBUG)

        # set up requests session with the root CA cert bundle; fall back to
        # unverified requests (with a warning) if the bundle is missing
        cert_path = os.path.join(sublime.packages_path(), "sublime-github", "ca-bundle.crt")
        if not os.path.isfile(cert_path):
            logger.warning("Root CA cert bundle not found at %s! Not verifying requests." % cert_path)
            cert_path = None
        self.rsession = requests.session(verify=cert_path,
                                         config={'verbose': sys.stderr if self.debug else None})

    def get_token(self, username, password):
        """Create a "gist"-scoped OAuth token via HTTP basic auth.

        :returns: the token string on a 201 Created response.
        :raises UnauthorizedException: on a 401 (bad credentials).
        :raises UnknownException: on any other status code.
        """
        auth_data = {
            "scopes": ["gist"],
            "note": "Sublime GitHub",
            "note_url": "https://github.com/bgreenlee/sublime-github"
        }
        resp = self.rsession.post(self.base_uri + "/authorizations",
                                  auth=(username, password),
                                  data=json.dumps(auth_data))
        if resp.status_code == requests.codes.CREATED:
            data = json.loads(resp.text)
            return data["token"]
        elif resp.status_code == requests.codes.UNAUTHORIZED:
            raise self.UnauthorizedException()
        else:
            raise self.UnknownException("%d %s" % (resp.status_code, resp.text))

    def post(self, endpoint, data=None, content_type='application/json'):
        "POST *data* (JSON-encoded) to *endpoint*."
        return self.request('post', endpoint, data=data, content_type=content_type)

    def patch(self, endpoint, data=None, content_type='application/json'):
        "PATCH *data* (JSON-encoded) to *endpoint*."
        return self.request('patch', endpoint, data=data, content_type=content_type)

    def get(self, endpoint, params=None):
        "GET *endpoint* with optional query *params*."
        return self.request('get', endpoint, params=params)

    def request(self, method, url, params=None, data=None, content_type=None):
        """Issue an authenticated request and return the parsed body.

        JSON responses are decoded; anything else is returned as text.
        GET responses are cached by ETag (see class docstring).

        :raises UnauthorizedException: on a 401 response.
        :raises UnknownException: on unrecognized status codes.
        """
        if not url.startswith("http"):
            url = self.base_uri + url
        if data:
            data = json.dumps(data)

        headers = {"Authorization": "token %s" % self.token}

        if content_type:
            headers["Content-Type"] = content_type

        # add an etag to the header if we have one
        if method == 'get' and url in self.etags:
            headers["If-None-Match"] = self.etags[url]
        logger.debug("request: %s %s %s %s" % (method, url, headers, params))
        resp = self.rsession.request(method, url,
                                     headers=headers,
                                     params=params,
                                     data=data,
                                     allow_redirects=True)
        # cache under the final URL so redirected GETs hit the cache next time
        full_url = resp.url
        logger.debug("response: %s" % resp.headers)
        if resp.status_code in [requests.codes.OK,
                                requests.codes.CREATED,
                                requests.codes.FOUND,
                                requests.codes.CONTINUE]:
            if 'application/json' in resp.headers['content-type']:
                resp_data = json.loads(resp.text)
            else:
                resp_data = resp.text
            if method == 'get':  # cache the response
                # fix: not every response carries an ETag header; indexing
                # unconditionally raised KeyError. Only cache when present.
                etag = resp.headers.get('etag')
                if etag:
                    self.etags[full_url] = etag
                    self.cache[etag] = resp_data
            return resp_data
        elif resp.status_code == requests.codes.NOT_MODIFIED:
            return self.cache[resp.headers['etag']]
        elif resp.status_code == requests.codes.UNAUTHORIZED:
            raise self.UnauthorizedException()
        else:
            raise self.UnknownException("%d %s" % (resp.status_code, resp.text))

    def create_gist(self, description="", filename="", content="", public=False):
        "Create a gist with a single file and return the parsed response."
        return self.post("/gists", {"description": description,
                                    "public": public,
                                    "files": {filename: {"content": content}}})

    def update_gist(self, gist, content):
        "Replace the content of *gist*'s first file, keeping its description."
        # fix: dict.keys()[0] is Python-2-only (keys() is a view on py3);
        # next(iter(...)) takes the first key on both versions.
        filename = next(iter(gist["files"]))
        return self.patch("/gists/" + gist["id"],
                          {"description": gist["description"],
                           "files": {filename: {"content": content}}})

    def list_gists(self, starred=False):
        "Return all gists (or only starred ones), following pagination."
        page = 1
        data = []
        # fetch all pages
        while True:
            endpoint = "/gists" + ("/starred" if starred else "")
            page_data = self.get(endpoint, params={'page': page, 'per_page': self.PER_PAGE})
            data.extend(page_data)
            # a short page means we've reached the end
            if len(page_data) < self.PER_PAGE:
                break
            page += 1
        return data
|
@@ -0,0 +1,34 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# __
|
||||
# /__) _ _ _ _ _/ _
|
||||
# / ( (- (/ (/ (- _) / _)
|
||||
# /
|
||||
|
||||
"""
|
||||
requests
|
||||
~~~~~~~~
|
||||
|
||||
:copyright: (c) 2012 by Kenneth Reitz.
|
||||
:license: ISC, see LICENSE for more details.
|
||||
|
||||
"""
|
||||
|
||||
__title__ = 'requests'
|
||||
__version__ = '0.10.2'
|
||||
__build__ = 0x001002
|
||||
__author__ = 'Kenneth Reitz'
|
||||
__license__ = 'ISC'
|
||||
__copyright__ = 'Copyright 2012 Kenneth Reitz'
|
||||
|
||||
|
||||
|
||||
from . import utils
|
||||
from .models import Request, Response
|
||||
from .api import request, get, head, post, patch, put, delete, options
|
||||
from .sessions import session, Session
|
||||
from .status_codes import codes
|
||||
from .exceptions import (
|
||||
RequestException, Timeout, URLRequired,
|
||||
TooManyRedirects, HTTPError, ConnectionError
|
||||
)
|
@@ -0,0 +1,116 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
requests.api
|
||||
~~~~~~~~~~~~
|
||||
|
||||
This module implements the Requests API.
|
||||
|
||||
:copyright: (c) 2012 by Kenneth Reitz.
|
||||
:license: ISC, see LICENSE for more details.
|
||||
|
||||
"""
|
||||
|
||||
from . import sessions
|
||||
|
||||
def request(method, url, **kwargs):
    """Construct and send a :class:`Request <Request>`; return the
    :class:`Response <Response>` object.

    :param method: method for the new :class:`Request` object.
    :param url: URL for the new :class:`Request` object.
    :param params: (optional) Dictionary or bytes sent in the query string.
    :param data: (optional) Dictionary or bytes sent in the request body.
    :param headers: (optional) Dictionary of HTTP headers to send.
    :param cookies: (optional) Dict or CookieJar object to send.
    :param files: (optional) Dictionary of 'name': file-like-objects (or
        {'name': ('filename', fileobj)}) for multipart encoding upload.
    :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
    :param timeout: (optional) Float describing the timeout of the request.
    :param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE
        redirect following is allowed.
    :param proxies: (optional) Dictionary mapping protocol to the proxy URL.
    :param return_response: (optional) If False, an un-sent Request object
        will be returned.
    :param session: (optional) A :class:`Session` object to use for the request.
    :param config: (optional) A configuration dictionary.
    :param verify: (optional) if ``True``, the SSL cert will be verified.
        A CA_BUNDLE path can also be provided.
    :param prefetch: (optional) if ``True``, the response content will be
        immediately downloaded.
    """

    # A caller-supplied session always wins, whatever its value; only build
    # a fresh one when the keyword was not passed at all.
    try:
        sess = kwargs.pop('session')
    except KeyError:
        sess = sessions.session()
    return sess.request(method=method, url=url, **kwargs)
|
||||
|
||||
|
||||
|
||||
def get(url, **kwargs):
    """Send a GET request and return the :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param **kwargs: Optional arguments that ``request`` takes.
    """

    # GETs follow redirects unless the caller says otherwise.
    if 'allow_redirects' not in kwargs:
        kwargs['allow_redirects'] = True
    return request('get', url, **kwargs)
|
||||
|
||||
|
||||
def options(url, **kwargs):
    """Send an OPTIONS request and return the :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param **kwargs: Optional arguments that ``request`` takes.
    """

    # Follow redirects unless the caller says otherwise.
    if 'allow_redirects' not in kwargs:
        kwargs['allow_redirects'] = True
    return request('options', url, **kwargs)
|
||||
|
||||
|
||||
def head(url, **kwargs):
    """Send a HEAD request and return the :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param **kwargs: Optional arguments that ``request`` takes.
    """

    # Follow redirects unless the caller says otherwise.
    if 'allow_redirects' not in kwargs:
        kwargs['allow_redirects'] = True
    return request('head', url, **kwargs)
|
||||
|
||||
|
||||
def post(url, data=None, **kwargs):
    """Send a POST request and return the :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary or bytes to send in the request body.
    :param **kwargs: Optional arguments that ``request`` takes.
    """

    # Thin convenience wrapper; all work happens in ``request``.
    return request('post', url, data=data, **kwargs)
|
||||
|
||||
|
||||
def put(url, data=None, **kwargs):
    """Send a PUT request and return the :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary or bytes to send in the request body.
    :param **kwargs: Optional arguments that ``request`` takes.
    """

    # Thin convenience wrapper; all work happens in ``request``.
    return request('put', url, data=data, **kwargs)
|
||||
|
||||
|
||||
def patch(url, data=None, **kwargs):
    """Send a PATCH request and return the :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary or bytes to send in the request body.
    :param **kwargs: Optional arguments that ``request`` takes.
    """

    # Thin convenience wrapper; all work happens in ``request``.
    return request('patch', url, data=data, **kwargs)
|
||||
|
||||
|
||||
def delete(url, **kwargs):
    """Send a DELETE request and return the :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param **kwargs: Optional arguments that ``request`` takes.
    """

    # Thin convenience wrapper; all work happens in ``request``.
    return request('delete', url, **kwargs)
|
@@ -0,0 +1,85 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
requests.async
|
||||
~~~~~~~~~~~~~~
|
||||
|
||||
This module contains an asynchronous replica of ``requests.api``, powered
|
||||
by gevent. All API methods return a ``Request`` instance (as opposed to
|
||||
``Response``). A list of requests can be sent with ``map()``.
|
||||
"""
|
||||
|
||||
try:
|
||||
import gevent
|
||||
from gevent import monkey as curious_george
|
||||
from gevent.pool import Pool
|
||||
except ImportError:
|
||||
raise RuntimeError('Gevent is required for requests.async.')
|
||||
|
||||
# Monkey-patch.
|
||||
curious_george.patch_all(thread=False)
|
||||
|
||||
from . import api
|
||||
|
||||
|
||||
__all__ = (
|
||||
'map',
|
||||
'get', 'options', 'head', 'post', 'put', 'patch', 'delete', 'request'
|
||||
)
|
||||
|
||||
|
||||
def patched(f):
    """Wrap API function *f* so its calls build a Request without sending it.

    The wrapper forces ``return_response=False`` and ``prefetch=True`` and
    turns on ``safe_mode`` in the config, then delegates to *f*.
    """

    def inner(*args, **kwargs):
        cfg = kwargs.get('config', {})
        cfg.update(safe_mode=True)
        kwargs.update(return_response=False, prefetch=True, config=cfg)
        return f(*args, **kwargs)

    return inner
|
||||
|
||||
|
||||
def send(r, pool=None, prefetch=False):
    """Sends the request object using the specified pool. If a pool isn't
    specified this method blocks. Pools are useful because you can specify size
    and can hence limit concurrency.

    :param r: the Request to send.
    :param pool: (optional) a gevent Pool used to spawn the greenlet.
    :param prefetch: passed through to ``r.send``.
    :returns: the spawned greenlet.
    """

    # fix: `!= None` replaced with the identity check `is not None` (PEP 8);
    # behavior is identical for real Pool objects but no longer depends on
    # how the pool implements equality.
    if pool is not None:
        return pool.spawn(r.send, prefetch=prefetch)

    return gevent.spawn(r.send, prefetch=prefetch)
|
||||
|
||||
|
||||
# Patched requests.api functions.
|
||||
get = patched(api.get)
|
||||
options = patched(api.options)
|
||||
head = patched(api.head)
|
||||
post = patched(api.post)
|
||||
put = patched(api.put)
|
||||
patch = patched(api.patch)
|
||||
delete = patched(api.delete)
|
||||
request = patched(api.request)
|
||||
|
||||
|
||||
def map(requests, prefetch=True, size=None):
    """Concurrently convert a collection of Requests to Responses.

    :param requests: a collection of Request objects.
    :param prefetch: If False, the content will not be downloaded immediately.
    :param size: number of requests to run at a time; None means no throttling.
    """

    # Materialize so we can iterate twice (spawn, then collect responses).
    reqs = list(requests)

    if size:
        pool = Pool(size)
    else:
        pool = None

    jobs = []
    for req in reqs:
        jobs.append(send(req, pool, prefetch=prefetch))
    gevent.joinall(jobs)

    return [req.response for req in reqs]
|
@@ -0,0 +1,150 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
requests.auth
|
||||
~~~~~~~~~~~~~
|
||||
|
||||
This module contains the authentication handlers for Requests.
|
||||
"""
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import time
|
||||
import hashlib
|
||||
|
||||
from base64 import b64encode
|
||||
from .compat import urlparse, str, bytes
|
||||
from .utils import randombytes, parse_dict_header
|
||||
|
||||
|
||||
|
||||
def _basic_auth_str(username, password):
|
||||
"""Returns a Basic Auth string."""
|
||||
|
||||
return 'Basic ' + b64encode(("%s:%s" % (username, password)).encode('utf-8')).strip().decode('utf-8')
|
||||
|
||||
|
||||
class AuthBase(object):
    """Base class that all auth implementations derive from.

    Subclasses attach credentials to the request ``r`` and return it.
    """

    def __call__(self, r):
        raise NotImplementedError('Auth hooks must be callable.')
|
||||
|
||||
|
||||
class HTTPBasicAuth(AuthBase):
    """Auth hook that adds an HTTP Basic ``Authorization`` header."""

    def __init__(self, username, password):
        # Credentials are stored as given; encoding happens at call time.
        self.username = username
        self.password = password

    def __call__(self, r):
        header = _basic_auth_str(self.username, self.password)
        r.headers['Authorization'] = header
        return r
|
||||
|
||||
|
||||
class HTTPProxyAuth(HTTPBasicAuth):
    """Auth hook that adds an HTTP ``Proxy-Authorization`` header."""

    def __call__(self, r):
        header = _basic_auth_str(self.username, self.password)
        r.headers['Proxy-Authorization'] = header
        return r
|
||||
|
||||
|
||||
class HTTPDigestAuth(AuthBase):
|
||||
"""Attaches HTTP Digest Authentication to the given Request object."""
|
||||
def __init__(self, username, password):
|
||||
self.username = username
|
||||
self.password = password
|
||||
|
||||
def handle_401(self, r):
|
||||
"""Takes the given response and tries digest-auth, if needed."""
|
||||
|
||||
s_auth = r.headers.get('www-authenticate', '')
|
||||
|
||||
if 'digest' in s_auth.lower():
|
||||
|
||||
last_nonce = ''
|
||||
nonce_count = 0
|
||||
|
||||
chal = parse_dict_header(s_auth.replace('Digest ', ''))
|
||||
|
||||
realm = chal['realm']
|
||||
nonce = chal['nonce']
|
||||
qop = chal.get('qop')
|
||||
algorithm = chal.get('algorithm', 'MD5')
|
||||
opaque = chal.get('opaque', None)
|
||||
|
||||
algorithm = algorithm.upper()
|
||||
# lambdas assume digest modules are imported at the top level
|
||||
if algorithm == 'MD5':
|
||||
def h(x):
|
||||
if isinstance(x, str):
|
||||
x = x.encode('utf-8')
|
||||
return hashlib.md5(x).hexdigest()
|
||||
H = h
|
||||
elif algorithm == 'SHA':
|
||||
def h(x):
|
||||
if isinstance(x, str):
|
||||
x = x.encode('utf-8')
|
||||
return hashlib.sha1(x).hexdigest()
|
||||
H = h
|
||||
# XXX MD5-sess
|
||||
KD = lambda s, d: H("%s:%s" % (s, d))
|
||||
|
||||
if H is None:
|
||||
return None
|
||||
|
||||
# XXX not implemented yet
|
||||
entdig = None
|
||||
p_parsed = urlparse(r.request.url)
|
||||
path = p_parsed.path
|
||||
if p_parsed.query:
|
||||
path += '?' + p_parsed.query
|
||||
|
||||
A1 = '%s:%s:%s' % (self.username, realm, self.password)
|
||||
A2 = '%s:%s' % (r.request.method, path)
|
||||
|
||||
if qop == 'auth':
|
||||
if nonce == last_nonce:
|
||||
nonce_count += 1
|
||||
else:
|
||||
nonce_count = 1
|
||||
last_nonce = nonce
|
||||
|
||||
ncvalue = '%08x' % nonce_count
|
||||
s = str(nonce_count).encode('utf-8')
|
||||
s += nonce.encode('utf-8')
|
||||
s += time.ctime().encode('utf-8')
|
||||
s += randombytes(8)
|
||||
|
||||
cnonce = (hashlib.sha1(s).hexdigest()[:16])
|
||||
noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, H(A2))
|
||||
respdig = KD(H(A1), noncebit)
|
||||
elif qop is None:
|
||||
respdig = KD(H(A1), "%s:%s" % (nonce, H(A2)))
|
||||
else:
|
||||
# XXX handle auth-int.
|
||||
return None
|
||||
|
||||
# XXX should the partial digests be encoded too?
|
||||
base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
|
||||
'response="%s"' % (self.username, realm, nonce, path, respdig)
|
||||
if opaque:
|
||||
base += ', opaque="%s"' % opaque
|
||||
if entdig:
|
||||
base += ', digest="%s"' % entdig
|
||||
base += ', algorithm="%s"' % algorithm
|
||||
if qop:
|
||||
base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce)
|
||||
|
||||
r.request.headers['Authorization'] = 'Digest %s' % (base)
|
||||
r.request.send(anyway=True)
|
||||
_r = r.request.response
|
||||
_r.history.append(r)
|
||||
|
||||
return _r
|
||||
|
||||
return r
|
||||
|
||||
def __call__(self, r):
    """Attach this digest-auth handler to request *r*.

    Registers ``handle_401`` as a response hook so a 401 challenge can be
    answered with an authenticated retry; returns *r* for chaining.
    """
    r.register_hook('response', self.handle_401)
    return r
|
@@ -0,0 +1,105 @@
|
||||
# -*- coding: utf-8 -*-

"""
pythoncompat
~~~~~~~~~~~~

Interpreter, version and platform detection for the vendored ``requests``
package.  Sibling modules import names such as ``urlparse``, ``str``,
``bytes`` and ``basestring`` from here instead of branching on the Python
version themselves.
"""


import sys

# -------
# Pythons
# -------

# Syntax sugar.
_ver = sys.version_info

#: Python 2.x?
is_py2 = (_ver[0] == 2)

#: Python 3.x?
is_py3 = (_ver[0] == 3)

#: Python 3.0.x
is_py30 = (is_py3 and _ver[1] == 0)

#: Python 3.1.x
is_py31 = (is_py3 and _ver[1] == 1)

#: Python 3.2.x
is_py32 = (is_py3 and _ver[1] == 2)

#: Python 3.3.x
is_py33 = (is_py3 and _ver[1] == 3)

#: Python 3.4.x
is_py34 = (is_py3 and _ver[1] == 4)

#: Python 2.7.x
is_py27 = (is_py2 and _ver[1] == 7)

#: Python 2.6.x
is_py26 = (is_py2 and _ver[1] == 6)

#: Python 2.5.x
is_py25 = (is_py2 and _ver[1] == 5)

#: Python 2.4.x
is_py24 = (is_py2 and _ver[1] == 4)   # I'm assuming this is not by choice.


# ---------
# Platforms
# ---------


# Syntax sugar.
_ver = sys.version.lower()

is_pypy = ('pypy' in _ver)
is_jython = ('jython' in _ver)
is_ironpython = ('iron' in _ver)

# Assume CPython, if nothing else.
is_cpython = not any((is_pypy, is_jython, is_ironpython))

# Windows-based system.
is_windows = 'win32' in str(sys.platform).lower()

# Standard Linux 2+ system.
is_linux = ('linux' in str(sys.platform).lower())
is_osx = ('darwin' in str(sys.platform).lower())
is_hpux = ('hpux' in str(sys.platform).lower())   # Complete guess.
# BUGFIX: sys.platform on Solaris is 'sunos5'; the previous test looked for
# the impossible substring 'solar==', so is_solaris was always False.
is_solaris = ('sunos' in str(sys.platform).lower())


# ---------
# Specifics
# ---------


if is_py2:
    from urllib import quote, unquote, urlencode
    from urlparse import urlparse, urlunparse, urljoin, urlsplit
    from urllib2 import parse_http_list
    import cookielib
    from .packages.oreos.monkeys import SimpleCookie
    from StringIO import StringIO

    bytes = str
    str = unicode
    basestring = basestring


elif is_py3:
    from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote
    from urllib.request import parse_http_list
    from http import cookiejar as cookielib
    from http.cookies import SimpleCookie
    from io import StringIO

    str = str
    bytes = bytes
    basestring = (str, bytes)
|
||||
|
@@ -0,0 +1,40 @@
|
||||
# -*- coding: utf-8 -*-

"""
requests.defaults
~~~~~~~~~~~~~~~~~

This module provides the Requests configuration defaults.

Configurations:

:base_headers: Default HTTP headers.
:verbose: Stream to write request logging to.
:max_redirects: Maximum number of redirects allowed within a request.
:keep_alive: Reuse HTTP Connections?
:max_retries: The number of times a request should be retried in the event of a connection failure.
:danger_mode: If true, Requests will raise errors immediately.
:safe_mode: If true, Requests will catch all errors.
:pool_maxsize: The maximum size of an HTTP connection pool.
:pool_connections: The number of active HTTP connection pools to use.
"""

from . import __version__

# Single shared dict of default settings; Session.__init__ copies these
# into the per-session config via setdefault().
defaults = dict()


# Headers attached to every request unless explicitly overridden.
defaults['base_headers'] = {
    'User-Agent': 'python-requests/%s' % __version__,
    'Accept-Encoding': ', '.join(('identity', 'deflate', 'compress', 'gzip')),
    'Accept': '*/*'
}

defaults['verbose'] = None            # file-like log stream, or None for silence
defaults['max_redirects'] = 30        # redirect-chain cap before TooManyRedirects
defaults['pool_connections'] = 10     # number of urllib3 connection pools
defaults['pool_maxsize'] = 10         # connections kept per pool
defaults['max_retries'] = 0           # connection-failure retries per request
defaults['danger_mode'] = False       # raise_for_status() after every request
defaults['safe_mode'] = False         # swallow send errors into blank responses
defaults['keep_alive'] = True         # reuse connections via the pool manager
|
@@ -0,0 +1,31 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
requests.exceptions
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
This module contains the set of Requests' exceptions.
|
||||
|
||||
"""
|
||||
|
||||
class RequestException(Exception):
    """There was an ambiguous exception that occurred while handling your
    request.

    Root of this package's exception hierarchy; catching it traps every
    error Requests raises deliberately.
    """
|
||||
|
||||
class HTTPError(RequestException):
    """An HTTP error occurred.

    Raised by ``Response.raise_for_status`` for 3xx/4xx/5xx status codes.
    """
|
||||
|
||||
class ConnectionError(RequestException):
    """A Connection error occurred.

    Wraps urllib3's ``MaxRetryError`` raised while sending a request.
    """
|
||||
|
||||
class SSLError(ConnectionError):
    """An SSL error occurred.

    Raised during ``send`` when certificate verification is enabled and the
    underlying SSL layer reports an error.
    """
|
||||
|
||||
class Timeout(RequestException):
    """The request timed out.

    Raised by ``Request.send`` when the underlying HTTP layer reports a
    timeout-class error.
    """
|
||||
|
||||
class URLRequired(RequestException):
    """A valid URL is required to make a request.

    Raised by ``Request.full_url`` when the request has no URL.
    """
|
||||
|
||||
class TooManyRedirects(RequestException):
    """Too many redirects.

    Raised once a redirect chain exceeds the ``max_redirects`` setting.
    """
|
@@ -0,0 +1,48 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
requests.hooks
|
||||
~~~~~~~~~~~~~~
|
||||
|
||||
This module provides the capabilities for the Requests hooks system.
|
||||
|
||||
Available hooks:
|
||||
|
||||
``args``:
|
||||
A dictionary of the arguments being sent to Request().
|
||||
|
||||
``pre_request``:
|
||||
The Request object, directly before being sent.
|
||||
|
||||
``post_request``:
|
||||
The Request object, directly after being sent.
|
||||
|
||||
``response``:
|
||||
The response generated from a Request.
|
||||
|
||||
"""
|
||||
|
||||
import traceback
|
||||
|
||||
|
||||
HOOKS = ('args', 'pre_request', 'post_request', 'response')
|
||||
|
||||
|
||||
def dispatch_hook(key, hooks, hook_data):
    """Run every hook registered under *key*, threading *hook_data* through.

    ``hooks`` maps hook names to either a single callable or a list of
    callables.  Each hook may return a replacement for *hook_data*; a falsy
    return keeps the current value.  Hook errors are printed and swallowed
    so a misbehaving hook never kills the request.
    """

    registry = hooks or dict()

    if key not in registry:
        return hook_data

    entries = registry.get(key)

    # A single callable is treated as a one-element list.
    if hasattr(entries, '__call__'):
        entries = [entries]

    for entry in entries:
        try:
            hook_data = entry(hook_data) or hook_data
        except Exception:
            # Deliberate best-effort: report the traceback and move on.
            traceback.print_exc()

    return hook_data
|
@@ -0,0 +1,796 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
requests.models
|
||||
~~~~~~~~~~~~~~~
|
||||
|
||||
This module contains the primary objects that power Requests.
|
||||
"""
|
||||
|
||||
import os
|
||||
from datetime import datetime
|
||||
|
||||
from .hooks import dispatch_hook, HOOKS
|
||||
from .structures import CaseInsensitiveDict
|
||||
from .status_codes import codes
|
||||
|
||||
from .auth import HTTPBasicAuth, HTTPProxyAuth
|
||||
from .packages.urllib3.response import HTTPResponse
|
||||
from .packages.urllib3.exceptions import MaxRetryError
|
||||
from .packages.urllib3.exceptions import SSLError as _SSLError
|
||||
from .packages.urllib3.exceptions import HTTPError as _HTTPError
|
||||
from .packages.urllib3 import connectionpool, poolmanager
|
||||
from .packages.urllib3.filepost import encode_multipart_formdata
|
||||
from .exceptions import (
|
||||
ConnectionError, HTTPError, RequestException, Timeout, TooManyRedirects,
|
||||
URLRequired, SSLError)
|
||||
from .utils import (
|
||||
get_encoding_from_headers, stream_decode_response_unicode,
|
||||
stream_decompress, guess_filename, requote_path, dict_from_string)
|
||||
|
||||
from .compat import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, str, bytes, SimpleCookie, is_py3, is_py2
|
||||
|
||||
# Import chardet if it is available.
|
||||
try:
|
||||
import chardet
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
REDIRECT_STATI = (codes.moved, codes.found, codes.other, codes.temporary_moved)
|
||||
|
||||
|
||||
|
||||
class Request(object):
|
||||
"""The :class:`Request <Request>` object. It carries out all functionality of
|
||||
Requests. Recommended interface is with the Requests functions.
|
||||
"""
|
||||
|
||||
def __init__(self,
    url=None,
    headers=dict(),
    files=None,
    method=None,
    data=dict(),
    params=dict(),
    auth=None,
    cookies=None,
    timeout=None,
    redirect=False,
    allow_redirects=False,
    proxies=None,
    hooks=None,
    config=None,
    _poolmanager=None,
    verify=None,
    session=None):
    """Build a :class:`Request`.

    :param url: URL to send the request to.
    :param headers: dict of HTTP headers; merged with config['base_headers'].
    :param files: ``{name: file-like}`` mapping for multipart upload.
    :param method: HTTP method to use.
    :param data: dict or bytes for the request body.
    :param params: dict or bytes for the query string.
    :param auth: auth tuple or callable.
    :param cookies: dict of cookies to send.
    :param timeout: float timeout for the request.
    :param redirect: True when this request is itself part of a redirect chain.
    :param allow_redirects: allow full re-POST-ing redirects.
    :param proxies: scheme -> proxy URL mapping.
    :param hooks: event-name -> hook(s) mapping.
    :param config: per-request configuration dict.
    :param _poolmanager: shared urllib3 pool manager (from the Session).
    :param verify: SSL verification flag or CA bundle path.
    :param session: owning :class:`Session`, if any.

    NOTE(review): the ``dict()`` defaults for headers/data/params are shared
    mutable defaults; they are only read here (copied via ``dict(...)`` /
    ``_encode_params``), so this is safe as written but fragile.
    """

    #: Float describes the timeout of the request.
    # (Use socket.setdefaulttimeout() as fallback)
    self.timeout = timeout

    #: Request URL.
    self.url = url

    #: Dictionary of HTTP Headers to attach to the :class:`Request <Request>`.
    self.headers = dict(headers or [])

    #: Dictionary of files to multipart upload (``{filename: content}``).
    self.files = files

    #: HTTP Method to use.
    self.method = method

    #: Dictionary or byte of request body data to attach to the
    #: :class:`Request <Request>`.  Filled in below by _encode_params.
    self.data = None

    #: Dictionary or byte of querystring data to attach to the
    #: :class:`Request <Request>`.  Filled in below by _encode_params.
    self.params = None

    #: True if :class:`Request <Request>` is part of a redirect chain (disables history
    #: and HTTPError storage).
    self.redirect = redirect

    #: Set to True if full redirects are allowed (e.g. re-POST-ing of data at new ``Location``)
    self.allow_redirects = allow_redirects

    # Dictionary mapping protocol to the URL of the proxy (e.g. {'http': 'foo.bar:3128'})
    self.proxies = dict(proxies or [])

    # Keep both the structured pairs and the urlencoded form of body/query.
    self.data, self._enc_data = self._encode_params(data)
    self.params, self._enc_params = self._encode_params(params)

    #: :class:`Response <Response>` instance, containing
    #: content and metadata of HTTP Response, once :attr:`sent <send>`.
    self.response = Response()

    #: Authentication tuple or object to attach to :class:`Request <Request>`.
    self.auth = auth

    #: CookieJar to attach to :class:`Request <Request>`.
    self.cookies = dict(cookies or [])

    #: Dictionary of configurations for this request.
    self.config = dict(config or [])

    #: True if Request has been sent.
    self.sent = False

    #: Event-handling hooks; every known event starts with an empty list.
    self.hooks = {}

    for event in HOOKS:
        self.hooks[event] = []

    hooks = hooks or {}

    for (k, v) in list(hooks.items()):
        self.register_hook(event=k, hook=v)

    #: Session.
    self.session = session

    #: SSL Verification.
    self.verify = verify

    # Rebuild headers as a case-insensitive mapping.
    if headers:
        headers = CaseInsensitiveDict(self.headers)
    else:
        headers = CaseInsensitiveDict()

    # Add configured base headers without clobbering caller-supplied ones.
    for (k, v) in list(self.config.get('base_headers', {}).items()):
        if k not in headers:
            headers[k] = v

    self.headers = headers
    self._poolmanager = _poolmanager
|
||||
|
||||
|
||||
def __repr__(self):
|
||||
return '<Request [%s]>' % (self.method)
|
||||
|
||||
|
||||
def _build_response(self, resp):
    """Build internal :class:`Response <Response>` object
    from given response, following redirects when allowed, and store the
    final result on ``self.response``.
    """

    def build(resp):
        # Wrap a raw urllib3 response in our Response type.

        response = Response()

        # Pass settings over.
        response.config = self.config

        if resp:

            # Fallback to None if there's no status_code, for whatever reason.
            response.status_code = getattr(resp, 'status', None)

            # Make headers case-insensitive.
            response.headers = CaseInsensitiveDict(getattr(resp, 'headers', None))

            # Set encoding.
            response.encoding = get_encoding_from_headers(response.headers)

            # Start off with our local cookies.
            cookies = self.cookies or dict()

            # Add new cookies from the server.
            if 'set-cookie' in response.headers:
                cookie_header = response.headers['set-cookie']
                cookies = dict_from_string(cookie_header)

            # Save cookies in Response.
            response.cookies = cookies

            # No exceptions were harmed in the making of this request.
            response.error = getattr(resp, 'error', None)

            # Save original response for later.
            response.raw = resp
            response.url = self.full_url

        return response

    history = []

    r = build(resp)

    self.cookies.update(r.cookies)

    if r.status_code in REDIRECT_STATI and not self.redirect:
        # NOTE(review): `is codes.see_other` compares status ints by
        # identity; it works only because CPython caches small ints —
        # `==` would be the robust comparison.
        while (('location' in r.headers) and
            ((r.status_code is codes.see_other) or (self.allow_redirects))):

            r.content  # Consume socket so it can be released

            if not len(history) < self.config.get('max_redirects'):
                raise TooManyRedirects()

            # Release the connection back into the pool.
            r.raw.release_conn()

            history.append(r)

            url = r.headers['location']

            # Handle redirection without scheme (see: RFC 1808 Section 4)
            if url.startswith('//'):
                parsed_rurl = urlparse(r.url)
                url = '%s:%s' % (parsed_rurl.scheme, url)

            # Facilitate non-RFC2616-compliant 'location' headers
            # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
            if not urlparse(url).netloc:
                url = urljoin(r.url, url)

            # http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.4
            if r.status_code is codes.see_other:
                method = 'GET'
            else:
                method = self.method

            # Remove the cookie headers that were sent.
            headers = self.headers
            try:
                del headers['Cookie']
            except KeyError:
                pass

            # Re-issue the request at the new location, marked as part of
            # a redirect chain so it does not recurse into history itself.
            request = Request(
                url=url,
                headers=headers,
                files=self.files,
                method=method,
                params=self.session.params,
                auth=self.auth,
                cookies=self.cookies,
                redirect=True,
                config=self.config,
                timeout=self.timeout,
                _poolmanager=self._poolmanager,
                proxies = self.proxies,
                verify = self.verify,
                session = self.session
            )

            request.send()
            r = request.response
            self.cookies.update(r.cookies)

        r.history = history

    self.response = r
    self.response.request = self
    self.response.cookies.update(self.cookies)
|
||||
|
||||
|
||||
@staticmethod
|
||||
def _encode_params(data):
|
||||
"""Encode parameters in a piece of data.
|
||||
|
||||
If the data supplied is a dictionary, encodes each parameter in it, and
|
||||
returns a list of tuples containing the encoded parameters, and a urlencoded
|
||||
version of that.
|
||||
|
||||
Otherwise, assumes the data is already encoded appropriately, and
|
||||
returns it twice.
|
||||
"""
|
||||
|
||||
if hasattr(data, '__iter__') and not isinstance(data, str):
|
||||
data = dict(data)
|
||||
|
||||
|
||||
if hasattr(data, 'items'):
|
||||
result = []
|
||||
for k, vs in list(data.items()):
|
||||
for v in isinstance(vs, list) and vs or [vs]:
|
||||
result.append((k.encode('utf-8') if isinstance(k, str) else k,
|
||||
v.encode('utf-8') if isinstance(v, str) else v))
|
||||
return result, urlencode(result, doseq=True)
|
||||
else:
|
||||
return data, data
|
||||
|
||||
@property
def full_url(self):
    """Build the actual URL to use: idna-encoded host, defaulted path,
    and the encoded query parameters appended.

    :raises URLRequired: when the request has no URL.
    :raises ValueError: when the URL has no scheme.
    """

    if not self.url:
        raise URLRequired()

    url = self.url

    # Support for unicode domain names and paths.
    scheme, netloc, path, params, query, fragment = urlparse(url)

    if not scheme:
        raise ValueError("Invalid URL %r: No schema supplied" % url)

    # Internationalized hostnames become punycode.
    netloc = netloc.encode('idna').decode('utf-8')

    if not path:
        path = '/'

    if is_py2:
        if isinstance(path, str):
            path = path.encode('utf-8')

        path = requote_path(path)

    url = (urlunparse([ scheme, netloc, path, params, query, fragment ]))

    # Append encoded params, respecting any query already in the URL.
    if self._enc_params:
        if urlparse(url).query:
            return '%s&%s' % (url, self._enc_params)
        else:
            return '%s?%s' % (url, self._enc_params)
    else:
        return url
|
||||
|
||||
@property
def path_url(self):
    """Build the path URL to use on the request line.

    Proxied schemes get the absolute URL; everything else gets path plus
    query string.
    """

    split = urlsplit(self.full_url)

    # Proxies use full URLs.
    if split.scheme in self.proxies:
        return self.full_url

    path = split.path or '/'

    if is_py3:
        path = quote(path.encode('utf-8'))

    pieces = [path]
    if split.query:
        pieces.append('?')
        pieces.append(split.query)

    return ''.join(pieces)
|
||||
|
||||
|
||||
def register_hook(self, event, hook):
|
||||
"""Properly register a hook."""
|
||||
|
||||
return self.hooks[event].append(hook)
|
||||
|
||||
|
||||
def send(self, anyway=False, prefetch=False):
    """Sends the request. Returns True if successful, False if not.
    If there was an HTTPError during transmission,
    self.response.status_code will contain the HTTPError code.

    Once a request is successfully sent, `sent` will equal True.

    :param anyway: If True, request will be sent, even if it has
        already been sent.
    :param prefetch: If True, the response body is consumed immediately
        rather than streamed lazily.
    """

    # Build the URL
    url = self.full_url

    # Logging
    if self.config.get('verbose'):
        self.config.get('verbose').write('%s %s %s\n' % (
            datetime.now().isoformat(), self.method, url
        ))

    # Nottin' on you.
    body = None
    content_type = None

    # Multi-part file uploads.
    if self.files:
        if not isinstance(self.data, str):

            try:
                fields = self.data.copy()
            except AttributeError:
                fields = dict(self.data)

            for (k, v) in list(self.files.items()):
                # support for explicit filename
                if isinstance(v, (tuple, list)):
                    fn, fp = v
                else:
                    fn = guess_filename(v) or k
                    fp = v
                fields.update({k: (fn, fp.read())})

            (body, content_type) = encode_multipart_formdata(fields)
        else:
            pass
            # TODO: Conflict?  (string body plus files is silently ignored)
    else:
        if self.data:

            body = self._enc_data
            # A raw string body carries no implied content type.
            if isinstance(self.data, str):
                content_type = None
            else:
                content_type = 'application/x-www-form-urlencoded'

    # Add content-type if it wasn't explicitly provided.
    if (content_type) and (not 'content-type' in self.headers):
        self.headers['Content-Type'] = content_type

    if self.auth:
        if isinstance(self.auth, tuple) and len(self.auth) == 2:
            # special-case basic HTTP auth
            self.auth = HTTPBasicAuth(*self.auth)

        # Allow auth to make its changes.
        r = self.auth(self)

        # Update self to reflect the auth changes.
        self.__dict__.update(r.__dict__)

    # Pick a connection: proxy pool, shared keep-alive pool, or ad-hoc.
    _p = urlparse(url)
    proxy = self.proxies.get(_p.scheme)

    if proxy:
        conn = poolmanager.proxy_from_url(proxy)
        _proxy = urlparse(proxy)
        # Credentials embedded in the proxy URL become Proxy-Authorization.
        if '@' in _proxy.netloc:
            auth, url = _proxy.netloc.split('@', 1)
            self.proxy_auth = HTTPProxyAuth(*auth.split(':', 1))
            r = self.proxy_auth(self)
            self.__dict__.update(r.__dict__)
    else:
        # Check to see if keep_alive is allowed.
        if self.config.get('keep_alive'):
            conn = self._poolmanager.connection_from_url(url)
        else:
            conn = connectionpool.connection_from_url(url)

    # Configure certificate verification for HTTPS.
    if url.startswith('https') and self.verify:

        cert_loc = None

        # Allow self-specified cert location.
        if self.verify is not True:
            cert_loc = self.verify

        # Look for configuration.
        if not cert_loc:
            cert_loc = os.environ.get('REQUESTS_CA_BUNDLE')

        # Curl compatiblity.
        if not cert_loc:
            cert_loc = os.environ.get('CURL_CA_BUNDLE')

        # Use the awesome certifi list.
        if not cert_loc:
            cert_loc = __import__('certifi').where()

        conn.cert_reqs = 'CERT_REQUIRED'
        conn.ca_certs = cert_loc
    else:
        conn.cert_reqs = 'CERT_NONE'
        conn.ca_certs = None

    if not self.sent or anyway:

        if self.cookies:

            # Skip if 'cookie' header is explicitly set.
            if 'cookie' not in self.headers:

                # Simple cookie with our dict.
                c = SimpleCookie()
                for (k, v) in list(self.cookies.items()):
                    c[k] = v

                # Turn it into a header.
                cookie_header = c.output(header='', sep='; ').strip()

                # Attach Cookie header to request.
                self.headers['Cookie'] = cookie_header

        # Pre-request hook.
        r = dispatch_hook('pre_request', self.hooks, self)
        self.__dict__.update(r.__dict__)

        try:
            # The inner try .. except re-raises certain exceptions as
            # internal exception types; the outer suppresses exceptions
            # when safe mode is set.
            try:
                # Send the request.
                r = conn.urlopen(
                    method=self.method,
                    url=self.path_url,
                    body=body,
                    headers=self.headers,
                    redirect=False,
                    assert_same_host=False,
                    preload_content=False,
                    decode_content=True,
                    retries=self.config.get('max_retries', 0),
                    timeout=self.timeout,
                )
                self.sent = True

            except MaxRetryError as e:
                raise ConnectionError(e)

            except (_SSLError, _HTTPError) as e:
                if self.verify and isinstance(e, _SSLError):
                    raise SSLError(e)

                raise Timeout('Request timed out.')

        except RequestException as e:
            if self.config.get('safe_mode', False):
                # In safe mode, catch the exception and attach it to
                # a blank urllib3.HTTPResponse object.
                r = HTTPResponse()
                r.error = e
            else:
                raise

        self._build_response(r)

        # Response manipulation hook.
        self.response = dispatch_hook('response', self.hooks, self.response)

        # Post-request hook.
        r = dispatch_hook('post_request', self.hooks, self)
        self.__dict__.update(r.__dict__)

        # If prefetch is True, mark content as consumed.
        if prefetch:
            # Save the response.
            self.response.content

        if self.config.get('danger_mode'):
            self.response.raise_for_status()

    return self.sent
|
||||
|
||||
|
||||
class Response(object):
|
||||
"""The core :class:`Response <Response>` object. All
|
||||
:class:`Request <Request>` objects contain a
|
||||
:class:`response <Response>` attribute, which is an instance
|
||||
of this class.
|
||||
"""
|
||||
|
||||
def __init__(self):
    """Create an empty Response; fields are populated later by
    ``Request._build_response``."""

    # Raw body cache and its one-shot consumption flag.
    self._content = None
    self._content_consumed = False

    #: Integer Code of responded HTTP Status.
    self.status_code = None

    #: Case-insensitive dictionary of response headers; e.g.
    #: ``headers['content-encoding']`` reads the ``Content-Encoding`` header.
    self.headers = CaseInsensitiveDict()

    #: File-like object representation of response (for advanced usage).
    self.raw = None

    #: Final URL location of Response.
    self.url = None

    #: Resulting :class:`HTTPError` of request, if one occurred.
    self.error = None

    #: Encoding to decode with when accessing r.content.
    self.encoding = None

    #: Prior :class:`Response <Response>` objects from any redirects.
    self.history = []

    #: The :class:`Request <Request>` that created the Response.
    self.request = None

    #: Cookies the server sent back.
    self.cookies = {}

    #: Configuration dictionary, inherited from the originating request.
    self.config = {}
|
||||
|
||||
|
||||
def __repr__(self):
|
||||
return '<Response [%s]>' % (self.status_code)
|
||||
|
||||
def __bool__(self):
    """Returns true if :attr:`status_code` is 'OK' (Python 3 truth protocol;
    mirrors :attr:`ok`)."""
    return self.ok
|
||||
|
||||
def __nonzero__(self):
    """Returns true if :attr:`status_code` is 'OK' (Python 2 truth protocol;
    mirrors :attr:`ok`)."""
    return self.ok
|
||||
|
||||
@property
|
||||
def ok(self):
|
||||
try:
|
||||
self.raise_for_status()
|
||||
except HTTPError:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def iter_content(self, chunk_size=10 * 1024, decode_unicode=False):
    """Iterates over the response data. This avoids reading the content
    at once into memory for large responses. The chunk size is the number
    of bytes it should read into memory. This is not necessarily the
    length of each item returned as decoding can take place.

    :param chunk_size: bytes pulled from the wire per read.
    :param decode_unicode: when truthy, chunks are decoded to ``str``.
    :raises RuntimeError: if the content was already consumed.
    """
    if self._content_consumed:
        raise RuntimeError(
            'The content for this response was already consumed'
        )

    def generate():
        # Plain (non-chunked) body: stream from the urllib3 raw object.
        while 1:
            chunk = self.raw.read(chunk_size)
            if not chunk:
                break
            yield chunk
        self._content_consumed = True

    def generate_chunked():
        # Chunked transfer-encoding: parse the chunk framing straight off
        # the underlying httplib socket file.
        resp = self.raw._original_response
        fp = resp.fp
        # Finish any chunk httplib has already started reading.
        if resp.chunk_left is not None:
            pending_bytes = resp.chunk_left
            while pending_bytes:
                chunk = fp.read(min(chunk_size, pending_bytes))
                pending_bytes -= len(chunk)
                yield chunk
            fp.read(2)  # throw away crlf
        while 1:
            #XXX correct line size? (httplib has 64kb, seems insane)
            pending_bytes = fp.readline(40).strip()
            pending_bytes = int(pending_bytes, 16)
            if pending_bytes == 0:
                break
            while pending_bytes:
                chunk = fp.read(min(chunk_size, pending_bytes))
                pending_bytes -= len(chunk)
                yield chunk
            fp.read(2)  # throw away crlf
        self._content_consumed = True
        fp.close()

    # Pick the framing-aware generator only for chunked responses.
    if getattr(getattr(self.raw, '_original_response', None), 'chunked', False):
        gen = generate_chunked()
    else:
        gen = generate()

    # Layer on transparent decompression when the server compressed.
    if 'gzip' in self.headers.get('content-encoding', ''):
        gen = stream_decompress(gen, mode='gzip')
    elif 'deflate' in self.headers.get('content-encoding', ''):
        gen = stream_decompress(gen, mode='deflate')

    if decode_unicode:
        gen = stream_decode_response_unicode(gen, self)

    return gen
|
||||
|
||||
|
||||
def iter_lines(self, chunk_size=10 * 1024, decode_unicode=None):
|
||||
"""Iterates over the response data, one line at a time. This
|
||||
avoids reading the content at once into memory for large
|
||||
responses.
|
||||
"""
|
||||
|
||||
#TODO: why rstrip by default
|
||||
pending = None
|
||||
|
||||
for chunk in self.iter_content(chunk_size, decode_unicode=decode_unicode):
|
||||
|
||||
if pending is not None:
|
||||
chunk = pending + chunk
|
||||
lines = chunk.splitlines(True)
|
||||
|
||||
for line in lines[:-1]:
|
||||
yield line.rstrip()
|
||||
|
||||
# Save the last part of the chunk for next iteration, to keep full line together
|
||||
# lines may be empty for the last chunk of a chunked response
|
||||
|
||||
if lines:
|
||||
pending = lines[-1]
|
||||
#if pending is a complete line, give it baack
|
||||
if pending[-1] == '\n':
|
||||
yield pending.rstrip()
|
||||
pending = None
|
||||
else:
|
||||
pending = None
|
||||
|
||||
# Yield the last line
|
||||
if pending is not None:
|
||||
yield pending.rstrip()
|
||||
|
||||
|
||||
@property
|
||||
def content(self):
|
||||
"""Content of the response, in bytes."""
|
||||
|
||||
if self._content is None:
|
||||
# Read the contents.
|
||||
try:
|
||||
if self._content_consumed:
|
||||
raise RuntimeError(
|
||||
'The content for this response was already consumed')
|
||||
|
||||
self._content = self.raw.read()
|
||||
except AttributeError:
|
||||
self._content = None
|
||||
|
||||
self._content_consumed = True
|
||||
return self._content
|
||||
|
||||
|
||||
@property
|
||||
def text(self):
|
||||
"""Content of the response, in unicode.
|
||||
|
||||
if Response.encoding is None and chardet module is available, encoding
|
||||
will be guessed.
|
||||
"""
|
||||
|
||||
# Try charset from content-type
|
||||
content = None
|
||||
encoding = self.encoding
|
||||
|
||||
# Fallback to auto-detected encoding if chardet is available.
|
||||
if self.encoding is None:
|
||||
try:
|
||||
detected = chardet.detect(self.content) or {}
|
||||
encoding = detected.get('encoding')
|
||||
|
||||
# Trust that chardet isn't available or something went terribly wrong.
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Decode unicode from given encoding.
|
||||
try:
|
||||
content = str(self.content, encoding, errors='replace')
|
||||
except (UnicodeError, TypeError):
|
||||
pass
|
||||
|
||||
return content
|
||||
|
||||
|
||||
def raise_for_status(self):
|
||||
"""Raises stored :class:`HTTPError` or :class:`URLError`, if one occurred."""
|
||||
|
||||
if self.error:
|
||||
raise self.error
|
||||
|
||||
if (self.status_code >= 300) and (self.status_code < 400):
|
||||
raise HTTPError('%s Redirection' % self.status_code)
|
||||
|
||||
elif (self.status_code >= 400) and (self.status_code < 500):
|
||||
raise HTTPError('%s Client Error' % self.status_code)
|
||||
|
||||
elif (self.status_code >= 500) and (self.status_code < 600):
|
||||
raise HTTPError('%s Server Error' % self.status_code)
|
||||
|
||||
|
@@ -0,0 +1,288 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
requests.session
|
||||
~~~~~~~~~~~~~~~~
|
||||
|
||||
This module provides a Session object to manage and persist settings across
|
||||
requests (cookies, auth, proxies).
|
||||
|
||||
"""
|
||||
|
||||
from .defaults import defaults
|
||||
from .models import Request
|
||||
from .hooks import dispatch_hook
|
||||
from .utils import header_expand
|
||||
from .packages.urllib3.poolmanager import PoolManager
|
||||
|
||||
|
||||
def merge_kwargs(local_kwarg, default_kwarg):
    """Merges kwarg dictionaries.

    If a local key in the dictionary is set to None, it will be removed.
    """

    # No defaults to merge against.
    if default_kwarg is None:
        return local_kwarg

    # Strings always win outright.
    if isinstance(local_kwarg, str):
        return local_kwarg

    # Nothing local: fall back to the defaults wholesale.
    if local_kwarg is None:
        return default_kwarg

    # Bypass if not a dictionary (e.g. timeout)
    if not hasattr(default_kwarg, 'items'):
        return local_kwarg

    # Defaults first, local overrides layered on top.
    merged = default_kwarg.copy()
    merged.update(local_kwarg)

    # A local value of None means "drop this key entirely".
    for key, value in list(local_kwarg.items()):
        if value is None:
            del merged[key]

    return merged
|
||||
|
||||
|
||||
class Session(object):
|
||||
"""A Requests session."""
|
||||
|
||||
__attrs__ = [
|
||||
'headers', 'cookies', 'auth', 'timeout', 'proxies', 'hooks',
|
||||
'params', 'config']
|
||||
|
||||
|
||||
def __init__(self,
    headers=None,
    cookies=None,
    auth=None,
    timeout=None,
    proxies=None,
    hooks=None,
    params=None,
    config=None,
    verify=True):
    """Create a session that persists settings across requests.

    :param headers: headers merged into every request.
    :param cookies: initial cookies shared by all requests.
    :param auth: default auth tuple or callable.
    :param timeout: default per-request timeout.
    :param proxies: scheme -> proxy URL mapping.
    :param hooks: default event hooks.
    :param params: query parameters merged into every request.
    :param config: configuration overrides; unset keys fall back to the
        module-level ``defaults``.
    :param verify: default SSL certificate verification flag.
    """

    self.headers = headers or {}
    self.cookies = cookies or {}
    self.auth = auth
    self.timeout = timeout
    self.proxies = proxies or {}
    self.hooks = hooks or {}
    self.params = params or {}
    self.config = config or {}
    self.verify = verify

    # Backfill any unset configuration keys from the package defaults.
    for (k, v) in list(defaults.items()):
        self.config.setdefault(k, v)

    # Shared urllib3 pool manager, sized from the (now complete) config.
    self.poolmanager = PoolManager(
        num_pools=self.config.get('pool_connections'),
        maxsize=self.config.get('pool_maxsize')
    )

    # Set up a CookieJar to be used by default
    # NOTE(review): this overwrites the ``cookies or {}`` assignment above;
    # the net effect is simply a fresh dict seeded from *cookies*.
    self.cookies = {}

    # Add passed cookies in.
    if cookies is not None:
        self.cookies.update(cookies)
|
||||
|
||||
def __repr__(self):
|
||||
return '<requests-client at 0x%x>' % (id(self))
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, *args):
|
||||
pass
|
||||
|
||||
def request(self, method, url,
|
||||
params=None,
|
||||
data=None,
|
||||
headers=None,
|
||||
cookies=None,
|
||||
files=None,
|
||||
auth=None,
|
||||
timeout=None,
|
||||
allow_redirects=False,
|
||||
proxies=None,
|
||||
hooks=None,
|
||||
return_response=True,
|
||||
config=None,
|
||||
prefetch=False,
|
||||
verify=None):
|
||||
|
||||
"""Constructs and sends a :class:`Request <Request>`.
|
||||
Returns :class:`Response <Response>` object.
|
||||
|
||||
:param method: method for the new :class:`Request` object.
|
||||
:param url: URL for the new :class:`Request` object.
|
||||
:param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
|
||||
:param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`.
|
||||
:param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
|
||||
:param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
|
||||
:param files: (optional) Dictionary of 'filename': file-like-objects for multipart encoding upload.
|
||||
:param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
|
||||
:param timeout: (optional) Float describing the timeout of the request.
|
||||
:param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
|
||||
:param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
|
||||
:param return_response: (optional) If False, an un-sent Request object will returned.
|
||||
:param config: (optional) A configuration dictionary.
|
||||
:param prefetch: (optional) if ``True``, the response content will be immediately downloaded.
|
||||
:param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided.
|
||||
"""
|
||||
|
||||
method = str(method).upper()
|
||||
|
||||
# Default empty dicts for dict params.
|
||||
cookies = {} if cookies is None else cookies
|
||||
data = {} if data is None else data
|
||||
files = {} if files is None else files
|
||||
headers = {} if headers is None else headers
|
||||
params = {} if params is None else params
|
||||
hooks = {} if hooks is None else hooks
|
||||
|
||||
if verify is None:
|
||||
verify = self.verify
|
||||
|
||||
# use session's hooks as defaults
|
||||
for key, cb in list(self.hooks.items()):
|
||||
hooks.setdefault(key, cb)
|
||||
|
||||
# Expand header values.
|
||||
if headers:
|
||||
for k, v in list(headers.items()) or {}:
|
||||
headers[k] = header_expand(v)
|
||||
|
||||
args = dict(
|
||||
method=method,
|
||||
url=url,
|
||||
data=data,
|
||||
params=params,
|
||||
headers=headers,
|
||||
cookies=cookies,
|
||||
files=files,
|
||||
auth=auth,
|
||||
hooks=hooks,
|
||||
timeout=timeout,
|
||||
allow_redirects=allow_redirects,
|
||||
proxies=proxies,
|
||||
config=config,
|
||||
verify=verify,
|
||||
_poolmanager=self.poolmanager
|
||||
)
|
||||
|
||||
# Merge local kwargs with session kwargs.
|
||||
for attr in self.__attrs__:
|
||||
session_val = getattr(self, attr, None)
|
||||
local_val = args.get(attr)
|
||||
|
||||
args[attr] = merge_kwargs(local_val, session_val)
|
||||
|
||||
# Arguments manipulation hook.
|
||||
args = dispatch_hook('args', args['hooks'], args)
|
||||
|
||||
# Create the (empty) response.
|
||||
r = Request(**args)
|
||||
|
||||
# Give the response some context.
|
||||
r.session = self
|
||||
|
||||
# Don't send if asked nicely.
|
||||
if not return_response:
|
||||
return r
|
||||
|
||||
# Send the HTTP Request.
|
||||
r.send(prefetch=prefetch)
|
||||
|
||||
# Send any cookies back up the to the session.
|
||||
self.cookies.update(r.response.cookies)
|
||||
|
||||
# Return the response.
|
||||
return r.response
|
||||
|
||||
|
||||
def get(self, url, **kwargs):
|
||||
"""Sends a GET request. Returns :class:`Response` object.
|
||||
|
||||
:param url: URL for the new :class:`Request` object.
|
||||
:param **kwargs: Optional arguments that ``request`` takes.
|
||||
"""
|
||||
|
||||
kwargs.setdefault('allow_redirects', True)
|
||||
return self.request('get', url, **kwargs)
|
||||
|
||||
|
||||
def options(self, url, **kwargs):
|
||||
"""Sends a OPTIONS request. Returns :class:`Response` object.
|
||||
|
||||
:param url: URL for the new :class:`Request` object.
|
||||
:param **kwargs: Optional arguments that ``request`` takes.
|
||||
"""
|
||||
|
||||
kwargs.setdefault('allow_redirects', True)
|
||||
return self.request('options', url, **kwargs)
|
||||
|
||||
|
||||
def head(self, url, **kwargs):
|
||||
"""Sends a HEAD request. Returns :class:`Response` object.
|
||||
|
||||
:param url: URL for the new :class:`Request` object.
|
||||
:param **kwargs: Optional arguments that ``request`` takes.
|
||||
"""
|
||||
|
||||
kwargs.setdefault('allow_redirects', True)
|
||||
return self.request('head', url, **kwargs)
|
||||
|
||||
|
||||
def post(self, url, data=None, **kwargs):
|
||||
"""Sends a POST request. Returns :class:`Response` object.
|
||||
|
||||
:param url: URL for the new :class:`Request` object.
|
||||
:param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`.
|
||||
:param **kwargs: Optional arguments that ``request`` takes.
|
||||
"""
|
||||
|
||||
return self.request('post', url, data=data, **kwargs)
|
||||
|
||||
|
||||
def put(self, url, data=None, **kwargs):
|
||||
"""Sends a PUT request. Returns :class:`Response` object.
|
||||
|
||||
:param url: URL for the new :class:`Request` object.
|
||||
:param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`.
|
||||
:param **kwargs: Optional arguments that ``request`` takes.
|
||||
"""
|
||||
|
||||
return self.request('put', url, data=data, **kwargs)
|
||||
|
||||
|
||||
def patch(self, url, data=None, **kwargs):
|
||||
"""Sends a PATCH request. Returns :class:`Response` object.
|
||||
|
||||
:param url: URL for the new :class:`Request` object.
|
||||
:param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`.
|
||||
:param **kwargs: Optional arguments that ``request`` takes.
|
||||
"""
|
||||
|
||||
return self.request('patch', url, data=data, **kwargs)
|
||||
|
||||
|
||||
def delete(self, url, **kwargs):
|
||||
"""Sends a DELETE request. Returns :class:`Response` object.
|
||||
|
||||
:param url: URL for the new :class:`Request` object.
|
||||
:param **kwargs: Optional arguments that ``request`` takes.
|
||||
"""
|
||||
|
||||
return self.request('delete', url, **kwargs)
|
||||
|
||||
|
||||
def session(**kwargs):
    """Returns a :class:`Session` for context-management."""
    # Thin factory so callers can write ``with session() as s: ...``.
    new_session = Session(**kwargs)
    return new_session
|
@@ -0,0 +1,86 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from .structures import LookupDict
|
||||
|
||||
# Map of HTTP status code -> tuple of attribute aliases.  Every alias is
# attached to the ``codes`` lookup object below, so callers can write
# ``codes.ok == 200`` or ``codes.not_found == 404``.
_codes = {

    # Informational.
    100: ('continue',),
    101: ('switching_protocols',),
    102: ('processing',),
    103: ('checkpoint',),
    122: ('uri_too_long', 'request_uri_too_long'),
    200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/'),
    201: ('created',),
    202: ('accepted',),
    203: ('non_authoritative_info', 'non_authoritative_information'),
    204: ('no_content',),
    205: ('reset_content', 'reset'),
    206: ('partial_content', 'partial'),
    207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'),
    208: ('im_used',),

    # Redirection.
    300: ('multiple_choices',),
    301: ('moved_permanently', 'moved', '\\o-'),
    302: ('found',),
    303: ('see_other', 'other'),
    304: ('not_modified',),
    305: ('use_proxy',),
    306: ('switch_proxy',),
    307: ('temporary_redirect', 'temporary_moved', 'temporary'),
    308: ('resume_incomplete', 'resume'),

    # Client Error.
    400: ('bad_request', 'bad'),
    401: ('unauthorized',),
    402: ('payment_required', 'payment'),
    403: ('forbidden',),
    404: ('not_found', '-o-'),
    405: ('method_not_allowed', 'not_allowed'),
    406: ('not_acceptable',),
    407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'),
    408: ('request_timeout', 'timeout'),
    409: ('conflict',),
    410: ('gone',),
    411: ('length_required',),
    # NOTE(review): the alias 'precondition' appears under both 412 and
    # 428; whichever entry the dict yields last wins, so the value of
    # ``codes.precondition`` is ambiguous -- confirm before relying on it.
    412: ('precondition_failed', 'precondition'),
    413: ('request_entity_too_large',),
    414: ('request_uri_too_large',),
    415: ('unsupported_media_type', 'unsupported_media', 'media_type'),
    416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'),
    417: ('expectation_failed',),
    418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'),
    422: ('unprocessable_entity', 'unprocessable'),
    423: ('locked',),
    424: ('failed_dependency', 'dependency'),
    425: ('unordered_collection', 'unordered'),
    426: ('upgrade_required', 'upgrade'),
    428: ('precondition_required', 'precondition'),
    429: ('too_many_requests', 'too_many'),
    431: ('header_fields_too_large', 'fields_too_large'),
    444: ('no_response', 'none'),
    449: ('retry_with', 'retry'),
    450: ('blocked_by_windows_parental_controls', 'parental_controls'),
    499: ('client_closed_request',),

    # Server Error.
    500: ('internal_server_error', 'server_error', '/o\\'),
    501: ('not_implemented',),
    502: ('bad_gateway',),
    503: ('service_unavailable', 'unavailable'),
    504: ('gateway_timeout',),
    505: ('http_version_not_supported', 'http_version'),
    506: ('variant_also_negotiates',),
    507: ('insufficient_storage',),
    509: ('bandwidth_limit_exceeded', 'bandwidth'),
    510: ('not_extended',),
}

codes = LookupDict(name='status_codes')

# Attach every alias, plus an upper-case twin for normal identifiers
# (the ASCII-art aliases such as '\\o/' are skipped for the upper-case
# form because upper-casing them is meaningless).
for (code, titles) in list(_codes.items()):
    for title in titles:
        setattr(codes, title, code)
        if not title.startswith('\\'):
            setattr(codes, title.upper(), code)
|
@@ -0,0 +1,66 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
requests.structures
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Data structures that power Requests.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
class CaseInsensitiveDict(dict):
    """Case-insensitive Dictionary

    For example, ``headers['content-encoding']`` will return the
    value of a ``'Content-Encoding'`` response header.

    Lookups, ``in`` tests and deletions are case-insensitive; keys keep
    the case they were first stored with.  A lazily-built cache mapping
    lowercased key -> stored key backs the case folding.
    """

    @property
    def lower_keys(self):
        # Build (and cache) the lowercase -> original key map on demand.
        if not hasattr(self, '_lower_keys') or not self._lower_keys:
            self._lower_keys = dict((k.lower(), k) for k in list(self.keys()))
        return self._lower_keys

    def _clear_lower_keys(self):
        # Invalidate the cached map; it is rebuilt on next access.
        if hasattr(self, '_lower_keys'):
            self._lower_keys.clear()

    def __setitem__(self, key, value):
        dict.__setitem__(self, key, value)
        self._clear_lower_keys()

    def __delitem__(self, key):
        # Resolve the key through the case-folding map so deletion is
        # case-insensitive, consistent with __getitem__/__contains__;
        # an unknown key falls through unchanged and raises KeyError.
        # Bug fix: the old code called self._lower_keys.clear() directly,
        # which raised AttributeError when a key was deleted before any
        # lookup had built the cache; use _clear_lower_keys() instead.
        dict.__delitem__(self, self.lower_keys.get(key.lower(), key))
        self._clear_lower_keys()

    def __contains__(self, key):
        return key.lower() in self.lower_keys

    def __getitem__(self, key):
        # We allow fall-through here, so values default to None
        if key in self:
            return dict.__getitem__(self, self.lower_keys[key.lower()])

    def get(self, key, default=None):
        if key in self:
            return self[key]
        else:
            return default
|
||||
|
||||
class LookupDict(dict):
    """Dictionary lookup object.

    Values are stored as instance attributes (in ``__dict__``), and item
    access falls back to ``None`` for unknown keys instead of raising
    ``KeyError``, so callers can probe freely.
    """

    def __init__(self, name=None):
        super(LookupDict, self).__init__()
        self.name = name

    def __repr__(self):
        return '<lookup \'%s\'>' % (self.name)

    def __getitem__(self, key):
        # Attribute-backed lookup; missing keys yield None rather than
        # raising.
        return self.__dict__.get(key, None)

    def get(self, key, default=None):
        return self.__dict__.get(key, default)
|
@@ -0,0 +1,408 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
requests.utils
|
||||
~~~~~~~~~~~~~~
|
||||
|
||||
This module provides utility functions that are used within Requests
|
||||
that are also useful for external consumption.
|
||||
|
||||
"""
|
||||
|
||||
import cgi
|
||||
import codecs
|
||||
import os
|
||||
import random
|
||||
import re
|
||||
import zlib
|
||||
|
||||
from .compat import parse_http_list as _parse_list_header
|
||||
from .compat import quote, unquote, cookielib, SimpleCookie, is_py2
|
||||
from .compat import basestring
|
||||
|
||||
|
||||
def dict_from_string(s):
    """Returns a MultiDict with Cookies."""
    # Let SimpleCookie do the RFC parsing, then flatten each morsel
    # down to its plain string value.
    parsed = SimpleCookie()
    parsed.load(s)
    return dict((name, morsel.value) for name, morsel in parsed.items())
|
||||
|
||||
def guess_filename(obj):
    """Tries to guess the filename of the given object."""
    # File-like objects usually expose .name; synthetic streams use
    # bracketed pseudo-names such as '<stdin>', which are rejected.
    # Returns None when no usable name is found.
    candidate = getattr(obj, 'name', None)
    if not candidate:
        return None
    if candidate[0] == '<' or candidate[-1] == '>':
        return None
    return candidate
|
||||
|
||||
# From mitsuhiko/werkzeug (used with permission).
|
||||
def parse_list_header(value):
|
||||
"""Parse lists as described by RFC 2068 Section 2.
|
||||
|
||||
In particular, parse comma-separated lists where the elements of
|
||||
the list may include quoted-strings. A quoted-string could
|
||||
contain a comma. A non-quoted string could have quotes in the
|
||||
middle. Quotes are removed automatically after parsing.
|
||||
|
||||
It basically works like :func:`parse_set_header` just that items
|
||||
may appear multiple times and case sensitivity is preserved.
|
||||
|
||||
The return value is a standard :class:`list`:
|
||||
|
||||
>>> parse_list_header('token, "quoted value"')
|
||||
['token', 'quoted value']
|
||||
|
||||
To create a header from the :class:`list` again, use the
|
||||
:func:`dump_header` function.
|
||||
|
||||
:param value: a string with a list header.
|
||||
:return: :class:`list`
|
||||
"""
|
||||
result = []
|
||||
for item in _parse_list_header(value):
|
||||
if item[:1] == item[-1:] == '"':
|
||||
item = unquote_header_value(item[1:-1])
|
||||
result.append(item)
|
||||
return result
|
||||
|
||||
|
||||
# From mitsuhiko/werkzeug (used with permission).
|
||||
def parse_dict_header(value):
|
||||
"""Parse lists of key, value pairs as described by RFC 2068 Section 2 and
|
||||
convert them into a python dict:
|
||||
|
||||
>>> d = parse_dict_header('foo="is a fish", bar="as well"')
|
||||
>>> type(d) is dict
|
||||
True
|
||||
>>> sorted(d.items())
|
||||
[('bar', 'as well'), ('foo', 'is a fish')]
|
||||
|
||||
If there is no value for a key it will be `None`:
|
||||
|
||||
>>> parse_dict_header('key_without_value')
|
||||
{'key_without_value': None}
|
||||
|
||||
To create a header from the :class:`dict` again, use the
|
||||
:func:`dump_header` function.
|
||||
|
||||
:param value: a string with a dict header.
|
||||
:return: :class:`dict`
|
||||
"""
|
||||
result = {}
|
||||
for item in _parse_list_header(value):
|
||||
if '=' not in item:
|
||||
result[item] = None
|
||||
continue
|
||||
name, value = item.split('=', 1)
|
||||
if value[:1] == value[-1:] == '"':
|
||||
value = unquote_header_value(value[1:-1])
|
||||
result[name] = value
|
||||
return result
|
||||
|
||||
|
||||
# From mitsuhiko/werkzeug (used with permission).
|
||||
def unquote_header_value(value, is_filename=False):
|
||||
r"""Unquotes a header value. (Reversal of :func:`quote_header_value`).
|
||||
This does not use the real unquoting but what browsers are actually
|
||||
using for quoting.
|
||||
|
||||
:param value: the header value to unquote.
|
||||
"""
|
||||
if value and value[0] == value[-1] == '"':
|
||||
# this is not the real unquoting, but fixing this so that the
|
||||
# RFC is met will result in bugs with internet explorer and
|
||||
# probably some other browsers as well. IE for example is
|
||||
# uploading files with "C:\foo\bar.txt" as filename
|
||||
value = value[1:-1]
|
||||
|
||||
# if this is a filename and the starting characters look like
|
||||
# a UNC path, then just return the value without quotes. Using the
|
||||
# replace sequence below on a UNC path has the effect of turning
|
||||
# the leading double slash into a single slash and then
|
||||
# _fix_ie_filename() doesn't work correctly. See #458.
|
||||
if not is_filename or value[:2] != '\\\\':
|
||||
return value.replace('\\\\', '\\').replace('\\"', '"')
|
||||
return value
|
||||
|
||||
|
||||
def header_expand(headers):
    """Returns an HTTP Header value string from a dictionary.

    Example expansion::

        {'text/x-dvi': {'q': '.8', 'mxb': '100000', 'mxt': '5.0'}, 'text/x-c': {}}
        # Accept: text/x-dvi; q=.8; mxb=100000; mxt=5.0, text/x-c

        (('text/x-dvi', {'q': '.8', 'mxb': '100000', 'mxt': '5.0'}), ('text/x-c', {}))
        # Accept: text/x-dvi; q=.8; mxb=100000; mxt=5.0, text/x-c
    """

    collector = []

    # Normalise a mapping to (value, params) pairs; a plain string is
    # already a finished header value and is returned untouched.
    if isinstance(headers, dict):
        headers = list(headers.items())

    elif isinstance(headers, basestring):
        return headers

    for i, (value, params) in enumerate(headers):

        _params = []

        for (p_k, p_v) in list(params.items()):

            _params.append('%s=%s' % (p_k, p_v))

        collector.append(value)
        collector.append('; ')

        if len(params):

            collector.append('; '.join(_params))

            # NOTE(review): the ', ' item separator is only appended on
            # this branch (when the entry had params) -- entries without
            # params are separated by '; ' instead, matching the
            # docstring example above.  Confirm this asymmetry is wanted.
            if not len(headers) == i+1:
                collector.append(', ')

    # Remove trailing separators.
    if collector[-1] in (', ', '; '):
        del collector[-1]

    return ''.join(collector)
|
||||
|
||||
|
||||
|
||||
def randombytes(n):
    """Return n random bytes.

    Bug fix: the old Python 3 branch built ``chr(i).encode('utf-8')``,
    which yields *two* bytes for every code point >= 128, so the result
    was longer than ``n`` and its byte values were not uniform.  Byte
    values are now emitted directly.
    """
    import sys  # stdlib; local import keeps the module imports untouched
    if sys.version_info[0] == 2:
        # On Python 2, str is bytes, so chr() already yields one byte.
        L = [chr(random.randrange(0, 256)) for i in range(n)]
        return b"".join(L)
    return bytes(random.randrange(0, 256) for _ in range(n))
|
||||
|
||||
|
||||
def dict_from_cookiejar(cj):
    """Returns a key/value dictionary from a CookieJar.

    :param cj: CookieJar object to extract cookies from.
    """
    # CookieJar stores cookies as _cookies[domain][path][name] -> Cookie;
    # walk all three levels and keep just name -> value.
    cookie_dict = {}
    for path_map in cj._cookies.values():
        for name_map in path_map.values():
            for cookie in name_map.values():
                cookie_dict[cookie.name] = cookie.value
    return cookie_dict
|
||||
|
||||
|
||||
def cookiejar_from_dict(cookie_dict):
    """Returns a CookieJar from a key/value dictionary.

    :param cookie_dict: Dict of key/values to insert into CookieJar.
    """
    # Already a jar?  Hand it straight back.
    if isinstance(cookie_dict, cookielib.CookieJar):
        return cookie_dict

    # Otherwise start from an empty jar and pour the dict in.
    return add_dict_to_cookiejar(cookielib.CookieJar(), cookie_dict)
|
||||
|
||||
|
||||
def add_dict_to_cookiejar(cj, cookie_dict):
    """Returns a CookieJar from a key/value dictionary.

    :param cj: CookieJar to insert cookies into.
    :param cookie_dict: Dict of key/values to insert into CookieJar.
    """
    for name, value in list(cookie_dict.items()):
        # Build a minimal session cookie: no domain/port restrictions,
        # path '/', not secure, discarded when the session ends.
        new_cookie = cookielib.Cookie(
            version=0,
            name=name,
            value=value,
            port=None,
            port_specified=False,
            domain='',
            domain_specified=False,
            domain_initial_dot=False,
            path='/',
            path_specified=True,
            secure=False,
            expires=None,
            discard=True,
            comment=None,
            comment_url=None,
            rest={'HttpOnly': None},
            rfc2109=False
        )
        # add cookie to cookiejar
        cj.set_cookie(new_cookie)

    return cj
|
||||
|
||||
|
||||
def get_encodings_from_content(content):
    """Returns encodings from given content string.

    :param content: bytestring to extract encodings from.
    """
    # Collect every <meta ... charset=...> declaration, in document order.
    meta_charset = re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I)
    return meta_charset.findall(content)
|
||||
|
||||
|
||||
def get_encoding_from_headers(headers):
    """Returns encodings from given HTTP Header Dict.

    :param headers: dictionary to extract encoding from.
    """
    content_type = headers.get('content-type')
    if not content_type:
        return None

    # Split e.g. "text/html; charset=UTF-8" into media type + params.
    media_type, params = cgi.parse_header(content_type)

    if 'charset' in params:
        return params['charset'].strip("'\"")

    # RFC 2616 default for text/* responses with no declared charset.
    if 'text' in media_type:
        return 'ISO-8859-1'
|
||||
|
||||
|
||||
def unicode_from_html(content):
    """Attempts to decode an HTML string into unicode.
    If unsuccessful, the original content is returned.
    """
    # Try each declared <meta charset> in document order; the first
    # clean decode wins.  TypeError covers content that is already text.
    for candidate in get_encodings_from_content(content):
        try:
            return str(content, candidate)
        except (UnicodeError, TypeError):
            continue
    return content
|
||||
|
||||
|
||||
def stream_decode_response_unicode(iterator, r):
    """Stream decodes a iterator.

    Yields the chunks of *iterator* decoded with ``r.encoding``; when
    the response declares no encoding, the raw chunks pass through
    untouched.  Multi-byte sequences split across chunk boundaries are
    buffered by the incremental decoder.
    """

    if r.encoding is None:
        for item in iterator:
            yield item
        return

    decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace')
    for chunk in iterator:
        rv = decoder.decode(chunk)
        if rv:
            yield rv
    # Flush any partial multi-byte sequence left in the decoder.
    # Bug fix: must flush with b'', not '' -- passing str to a byte
    # decoder raises TypeError on Python 3 when the stream ends.
    rv = decoder.decode(b'', final=True)
    if rv:
        yield rv
|
||||
|
||||
|
||||
def get_unicode_from_response(r):
    """Returns the requested content back in unicode.

    :param r: Response object to get unicode content from.

    Tried:

    1. charset from content-type

    2. every encodings from ``<meta ... charset=XXX>``

    3. fall back and replace all unicode characters

    """

    tried_encodings = []

    # First choice: whatever the Content-Type header declares.
    encoding = get_encoding_from_headers(r.headers)

    if encoding:
        try:
            return str(r.content, encoding)
        except UnicodeError:
            tried_encodings.append(encoding)

    # Last resort: same encoding, but replace undecodable bytes.
    # TypeError covers encoding=None and already-decoded content.
    try:
        return str(r.content, encoding, errors='replace')
    except TypeError:
        return r.content
|
||||
|
||||
|
||||
def decode_gzip(content):
    """Return gzip-decoded string.

    :param content: bytestring to gzip-decode.
    """
    # wbits = 16 + MAX_WBITS tells zlib to expect a gzip header/trailer.
    gzip_wbits = 16 + zlib.MAX_WBITS
    return zlib.decompress(content, gzip_wbits)
|
||||
|
||||
|
||||
def stream_decompress(iterator, mode='gzip'):
    """
    Stream decodes an iterator over compressed data

    :param iterator: An iterator over compressed data
    :param mode: 'gzip' or 'deflate'
    :return: An iterator over decompressed data

    On a decompression error the raw chunks are passed through
    unchanged (best-effort behaviour for servers that mislabel their
    content encoding).
    """

    if mode not in ['gzip', 'deflate']:
        raise ValueError('stream_decompress mode must be gzip or deflate')

    # Positive wbits + 16 selects the gzip wrapper; negative selects raw
    # deflate.
    zlib_mode = 16 + zlib.MAX_WBITS if mode == 'gzip' else -zlib.MAX_WBITS
    dec = zlib.decompressobj(zlib_mode)
    try:
        for chunk in iterator:
            rv = dec.decompress(chunk)
            if rv:
                yield rv
    except zlib.error:
        # If there was an error decompressing, just return the raw chunk
        yield chunk
        # Continue to return the rest of the raw data
        for chunk in iterator:
            yield chunk
    else:
        # Make sure everything has been returned from the decompression object.
        # Bug fix: flush with b'', not '' -- passing str to a zlib
        # decompressobj raises TypeError on Python 3, which broke every
        # successful decompression at end-of-stream.
        buf = dec.decompress(b'')
        rv = buf + dec.flush()
        if rv:
            yield rv
|
||||
|
||||
|
||||
def requote_path(path):
    """Re-quote the given URL path component.

    This function passes the given path through an unquote/quote cycle to
    ensure that it is fully and consistently quoted.
    """
    # NOTE(review): the b"/" split implies *path* is a byte string here;
    # on Python 3 urllib's unquote() accepts only str, so this helper
    # appears to target the Python 2 code path -- confirm before reuse.
    parts = path.split(b"/")
    # safe=b"" means every reserved character inside a segment gets
    # percent-encoded; the '/' separators are re-added by the join.
    parts = (quote(unquote(part), safe=b"") for part in parts)
    return b"/".join(parts)
|
@@ -0,0 +1 @@
|
||||
{"url": "https://github.com/bgreenlee/sublime-github", "version": "2012.12.19.02.19.29", "description": "Github Gist plugin for Sublime Text 2"}
|
@@ -0,0 +1,412 @@
|
||||
import os
|
||||
import sys
|
||||
import os.path
|
||||
import re
|
||||
import sublime
|
||||
import sublime_plugin
|
||||
import webbrowser
|
||||
import plistlib
|
||||
from github import GitHubApi
|
||||
import logging as logger
|
||||
try:
    # expat backs plistlib's XML parsing (used for .tmLanguage files);
    # it is missing from some Linux builds of Sublime Text 2, so treat
    # it as optional.
    import xml.parsers.expat as expat
except ImportError:
    expat = None

try:
    # Borrow the Git plugin's module if it is installed; the package
    # path is only extended for the duration of the import.
    sys.path.append(os.path.join(sublime.packages_path(), 'Git'))
    git = __import__("git")
    sys.path.remove(os.path.join(sublime.packages_path(), 'Git'))
except ImportError:
    git = None


# Tag every log line from this plugin so it is easy to spot in the
# Sublime console.
logger.basicConfig(format='[sublime-github] %(levelname)s: %(message)s')
|
||||
|
||||
|
||||
class BaseGitHubCommand(sublime_plugin.TextCommand):
    """
    Base class for all GitHub commands. Handles getting an auth token.

    Subclasses call run() first (to load settings and build the API
    client), then either proceed directly or stash a callback on
    self.callback and call get_token() to walk the user through
    username/password -> token generation.
    """
    MSG_USERNAME = "GitHub username:"
    MSG_PASSWORD = "GitHub password:"
    MSG_TOKEN_SUCCESS = "Your access token has been saved. We'll now resume your command."
    ERR_NO_USER_TOKEN = "Your GitHub Gist access token needs to be configured.\n\n"\
        "Click OK and then enter your GitHub username and password below (neither will "\
        "be stored; they are only used to generate an access token)."
    ERR_UNAUTHORIZED = "Your Github username or password appears to be incorrect. "\
        "Please try again."
    ERR_UNAUTHORIZED_TOKEN = "Your Github token appears to be incorrect. Please re-enter your "\
        "username and password to generate a new token."

    def run(self, edit):
        # Reload settings on every invocation so edits to the settings
        # file take effect without restarting the editor.
        self.settings = sublime.load_settings("GitHub.sublime-settings")
        self.github_user = None
        self.accounts = self.settings.get("accounts")
        self.active_account = self.settings.get("active_account")
        if not self.active_account:
            # No account selected yet: default to the first configured one.
            self.active_account = self.accounts.keys()[0]
        self.github_token = self.accounts[self.active_account]["github_token"]
        if not self.github_token:
            # Legacy single-account config kept the token at top level;
            # migrate it into the accounts structure once.
            self.github_token = self.settings.get("github_token")
            if self.github_token:
                # migrate to new structure
                self.settings.set("accounts", {"GitHub": {"base_uri": "https://api.github.com", "github_token": self.github_token}})
                self.settings.set("active_account", "GitHub")
                self.active_account = self.settings.get("active_account")
                self.settings.erase("github_token")
                sublime.save_settings("GitHub.sublime-settings")
        self.base_uri = self.accounts[self.active_account]["base_uri"]
        self.debug = self.settings.get('debug')
        self.gistapi = GitHubApi(self.base_uri, self.github_token, debug=self.debug)

    def get_token(self):
        # Explain why credentials are needed, then start the prompts.
        sublime.error_message(self.ERR_NO_USER_TOKEN)
        self.get_username()

    def get_username(self):
        self.view.window().show_input_panel(self.MSG_USERNAME, self.github_user or "", self.on_done_username, None, None)

    def get_password(self):
        self.view.window().show_input_panel(self.MSG_PASSWORD, "", self.on_done_password, None, None)

    def on_done_username(self, value):
        "Callback for the username show_input_panel."
        self.github_user = value
        # need to do this or the input panel doesn't show
        sublime.set_timeout(self.get_password, 50)

    def on_done_password(self, value):
        "Callback for the password show_input_panel"
        try:
            # Exchange username/password for a token (credentials are
            # not stored), persist the token, and rebuild the client.
            self.github_token = GitHubApi(self.base_uri, debug=self.debug).get_token(self.github_user, value)
            self.accounts[self.active_account]["github_token"] = self.github_token
            self.settings.set("accounts", self.accounts)
            sublime.save_settings("GitHub.sublime-settings")
            self.gistapi = GitHubApi(self.base_uri, self.github_token, debug=self.debug)
            try:
                # Resume the command that originally triggered the token
                # flow, if one was stashed on self.callback.
                if self.callback:
                    sublime.error_message(self.MSG_TOKEN_SUCCESS)
                    callback = self.callback
                    self.callback = None
                    sublime.set_timeout(callback, 50)
            except AttributeError:
                # No callback was ever set; nothing to resume.
                pass
        except GitHubApi.UnauthorizedException:
            sublime.error_message(self.ERR_UNAUTHORIZED)
            sublime.set_timeout(self.get_username, 50)
        except GitHubApi.UnknownException, e:
            sublime.error_message(e.message)
|
||||
|
||||
|
||||
class OpenGistCommand(BaseGitHubCommand):
    """
    Open a gist.
    Defaults to all gists and copying it to the clipboard.

    The class attributes below act as behaviour switches for the
    subclasses: ``starred`` limits the listing to starred gists,
    ``open_in_editor`` loads the gist into a new view, and
    ``copy_gist_id`` copies the gist's HTML URL instead of its contents.
    """
    MSG_SUCCESS = "Contents of '%s' copied to the clipboard."
    starred = False
    open_in_editor = False
    syntax_file_map = None  # lazily-built {extension: .tmLanguage path} cache
    copy_gist_id = False

    def run(self, edit):
        super(OpenGistCommand, self).run(edit)
        if self.github_token:
            self.get_gists()
        else:
            # No token yet: resume get_gists() after the auth flow.
            self.callback = self.get_gists
            self.get_token()

    def get_gists(self):
        # Fetch the gist list and show it in a quick panel, formatted
        # per the user's gist_list_format setting (a single format
        # string -> one line per gist; a list -> multi-line entries).
        try:
            self.gists = self.gistapi.list_gists(starred=self.starred)
            format = self.settings.get("gist_list_format")
            packed_gists = []
            for idx, gist in enumerate(self.gists):
                attribs = {"index": idx + 1,
                           "filename": gist["files"].keys()[0],
                           "description": gist["description"] or ''}
                if isinstance(format, basestring):
                    item = format % attribs
                else:
                    item = [(format_str % attribs) for format_str in format]
                packed_gists.append(item)

            args = [packed_gists, self.on_done]
            if self.settings.get("gist_list_monospace"):
                args.append(sublime.MONOSPACE_FONT)
            self.view.window().show_quick_panel(*args)
        except GitHubApi.UnauthorizedException:
            # Stored token rejected: re-run the credential flow.
            sublime.error_message(self.ERR_UNAUTHORIZED_TOKEN)
            sublime.set_timeout(self.get_username, 50)
        except GitHubApi.UnknownException, e:
            sublime.error_message(e.message)

    def on_done(self, idx):
        # Quick-panel selection callback; -1 means the user cancelled.
        if idx == -1:
            return
        gist = self.gists[idx]
        filename = gist["files"].keys()[0]
        filedata = gist["files"][filename]
        content = self.gistapi.get(filedata["raw_url"])
        if self.open_in_editor:
            new_view = self.view.window().new_file()
            if expat:  # not present in Linux
                # set syntax file
                if not self.syntax_file_map:
                    self.syntax_file_map = self._generate_syntax_file_map()
                try:
                    extension = os.path.splitext(filename)[1][1:].lower()
                    syntax_file = self.syntax_file_map[extension]
                    new_view.set_syntax_file(syntax_file)
                except KeyError:
                    logger.warn("no mapping for '%s'" % extension)
                    pass
            # insert the gist
            edit = new_view.begin_edit('gist')
            new_view.insert(edit, 0, content)
            new_view.end_edit(edit)
            new_view.set_name(filename)
            # Remember which gist this view holds (used on re-save).
            new_view.settings().set('gist', gist)
        elif self.copy_gist_id:
            sublime.set_clipboard(gist["html_url"])
        else:
            sublime.set_clipboard(content)
            sublime.status_message(self.MSG_SUCCESS % filename)

    @staticmethod
    def _generate_syntax_file_map():
        """
        Generate a map of all file types to their syntax files.

        Scans every installed package for .tmLanguage plists and indexes
        their declared fileTypes (lowercased) -> syntax file path.
        """
        syntax_file_map = {}
        packages_path = sublime.packages_path()
        packages = [f for f in os.listdir(packages_path) if os.path.isdir(os.path.join(packages_path, f))]
        for package in packages:
            package_dir = os.path.join(packages_path, package)
            syntax_files = [os.path.join(package_dir, f) for f in os.listdir(package_dir) if f.endswith(".tmLanguage")]
            for syntax_file in syntax_files:
                try:
                    plist = plistlib.readPlist(syntax_file)
                    if plist:
                        for file_type in plist['fileTypes']:
                            syntax_file_map[file_type.lower()] = syntax_file
                except expat.ExpatError:  # can't parse
                    logger.warn("could not parse '%s'" % syntax_file)
                except KeyError:  # no file types
                    pass

        return syntax_file_map
|
||||
|
||||
|
||||
class OpenStarredGistCommand(OpenGistCommand):
    """Show the gist picker restricted to the user's starred gists."""
    starred = True
|
||||
|
||||
|
||||
class OpenGistInEditorCommand(OpenGistCommand):
    """Pick a gist and load its contents into a new editor view."""
    open_in_editor = True
|
||||
|
||||
|
||||
class OpenGistUrlCommand(OpenGistCommand):
    """
    Copy the selected gist's HTML URL to the clipboard.

    NOTE: despite the flag's name, the base command's copy_gist_id branch
    copies the gist's html_url, not its id.
    """
    copy_gist_id = True
|
||||
|
||||
|
||||
class OpenStarredGistInEditorCommand(OpenGistCommand):
    """Pick one of the user's starred gists and open it in a new view."""
    starred = True
    open_in_editor = True
|
||||
|
||||
|
||||
class OpenGistInBrowserCommand(OpenGistCommand):
    """Pick a gist and open its HTML page in the default web browser."""

    def on_done(self, idx):
        """Quick-panel callback; idx is -1 when the panel was cancelled."""
        if idx == -1:
            return
        webbrowser.open(self.gists[idx]["html_url"])
|
||||
|
||||
|
||||
class OpenStarredGistInBrowserCommand(OpenGistInBrowserCommand):
    """
    Open a starred gist in a browser.
    """
    starred = True
|
||||
|
||||
|
||||
class GistFromSelectionCommand(BaseGitHubCommand):
    """
    Base class for creating a Github Gist from the current selection.

    Subclasses set `public` to choose gist visibility. The command prompts
    for a description, then a filename, then posts the selected text (or
    the whole buffer when nothing is selected) and copies the resulting
    gist URL to the clipboard.
    """
    MSG_DESCRIPTION = "Gist description:"
    MSG_FILENAME = "Gist filename:"
    MSG_SUCCESS = "Gist created and url copied to the clipboard."

    def run(self, edit):
        self.description = None
        self.filename = None
        super(GistFromSelectionCommand, self).run(edit)
        if self.github_token:
            self.get_description()
        else:
            # no token yet: acquire one first, then resume at get_description
            self.callback = self.get_description
            self.get_token()

    def get_description(self):
        """Prompt for the gist description."""
        self.view.window().show_input_panel(self.MSG_DESCRIPTION, "", self.on_done_description, None, None)

    def get_filename(self):
        """Prompt for the gist filename."""
        # use the current filename as the default
        current_filename = self.view.file_name() or "snippet.txt"
        filename = os.path.basename(current_filename)
        self.view.window().show_input_panel(self.MSG_FILENAME, filename, self.on_done_filename, None, None)

    def on_done_description(self, value):
        "Callback for description show_input_panel."
        self.description = value
        # need to do this or the input panel doesn't show
        sublime.set_timeout(self.get_filename, 50)

    def on_done_filename(self, value):
        """Callback for filename show_input_panel; creates the gist."""
        self.filename = value
        # get selected text, or the whole file if nothing selected
        if all([region.empty() for region in self.view.sel()]):
            text = self.view.substr(sublime.Region(0, self.view.size()))
        else:
            text = "\n".join([self.view.substr(region) for region in self.view.sel()])

        try:
            gist = self.gistapi.create_gist(description=self.description,
                                            filename=self.filename,
                                            content=text,
                                            public=self.public)
            # stamp the gist onto the view so UpdateGistCommand can find it
            self.view.settings().set('gist', gist)
            sublime.set_clipboard(gist["html_url"])
            sublime.status_message(self.MSG_SUCCESS)
        except GitHubApi.UnauthorizedException:
            # clear out the bad token so we can reset it
            self.settings.set("github_token", "")
            sublime.save_settings("GitHub.sublime-settings")
            sublime.error_message(self.ERR_UNAUTHORIZED_TOKEN)
            sublime.set_timeout(self.get_username, 50)
        except GitHubApi.UnknownException as e:  # was `except X, e:` -- removed in Py3, `as` is valid since 2.6
            sublime.error_message(e.message)
|
||||
|
||||
|
||||
class PrivateGistFromSelectionCommand(GistFromSelectionCommand):
    """Create a private Github gist from the current selection."""
    public = False
|
||||
|
||||
|
||||
class PublicGistFromSelectionCommand(GistFromSelectionCommand):
    """Create a public Github gist from the current selection."""
    public = True
|
||||
|
||||
|
||||
class UpdateGistCommand(BaseGitHubCommand):
    """
    Push the current view's contents back to the gist it came from.

    Requires the 'gist' view setting stamped on the buffer when the gist
    was opened or created; errors out otherwise.
    """
    MSG_SUCCESS = "Gist updated and url copied to the clipboard."

    def run(self, edit):
        super(UpdateGistCommand, self).run(edit)
        self.gist = self.view.settings().get('gist')
        if not self.gist:
            sublime.error_message("Can't update: this doesn't appear to be a valid gist.")
            return
        if self.github_token:
            self.update()
        else:
            # no token yet: acquire one first, then resume at update
            self.callback = self.update
            self.get_token()

    def update(self):
        """Send the whole buffer to the gist API and copy the new url."""
        text = self.view.substr(sublime.Region(0, self.view.size()))
        try:
            updated_gist = self.gistapi.update_gist(self.gist, text)
            sublime.set_clipboard(updated_gist["html_url"])
            sublime.status_message(self.MSG_SUCCESS)
        except GitHubApi.UnauthorizedException:
            # clear out the bad token so we can reset it
            self.settings.set("github_token", "")
            sublime.save_settings("GitHub.sublime-settings")
            sublime.error_message(self.ERR_UNAUTHORIZED_TOKEN)
            sublime.set_timeout(self.get_username, 50)
        except GitHubApi.UnknownException as e:  # was `except X, e:` -- removed in Py3, `as` is valid since 2.6
            sublime.error_message(e.message)
|
||||
|
||||
|
||||
class SwitchAccountsCommand(BaseGitHubCommand):
    """Switch the active GitHub account among those configured in settings."""

    def run(self, edit):
        super(SwitchAccountsCommand, self).run(edit)
        # Snapshot the account names once so the quick-panel index maps back
        # to the same account; the original re-evaluated accounts.keys() in
        # the callback and silently relied on identical dict ordering.
        self.account_names = list(self.accounts.keys())
        self.view.window().show_quick_panel(self.account_names, self.account_selected)

    def account_selected(self, index):
        if index == -1:
            return  # canceled
        self.active_account = self.account_names[index]
        self.settings.set("active_account", self.active_account)
        sublime.save_settings("GitHub.sublime-settings")
        self.base_uri = self.accounts[self.active_account]["base_uri"]
        self.github_token = self.accounts[self.active_account]["github_token"]
|
||||
|
||||
if git:
    class RemoteUrlCommand(git.GitTextCommand):
        """
        Derive the GitHub web URL for the current file on its current
        branch, then hand it to on_done() (supplied by subclasses).
        """
        def run(self, edit):
            self.run_command("git remote -v".split(), self.done_remote)

        def done_remote(self, result):
            # take the first remote containing "origin" and rewrite its
            # ssh-style location (git@host.com:user/repo.git) as https
            remote_origin = [r for r in result.split("\n") if "origin" in r][0]
            # raw strings: the original used '\s+' / '\.com:' / '\.git$'
            # as plain literals, which only worked by accident
            remote_loc = re.split(r'\s+', remote_origin)[1]
            repo_url = re.sub(r'^git@', 'https://', remote_loc)
            repo_url = re.sub(r'\.com:', '.com/', repo_url)
            repo_url = re.sub(r'\.git$', '', repo_url)
            self.repo_url = repo_url
            self.run_command("git rev-parse --abbrev-ref HEAD".split(), self.done_rev_parse)

        def done_rev_parse(self, result):
            # get current branch
            current_branch = result.strip()
            # get file path within repo
            repo_name = self.repo_url.split("/").pop()
            relative_path = self.view.file_name().split(repo_name).pop()
            self.url = "%s/blob/%s%s" % (self.repo_url, current_branch, relative_path)
            self.on_done()
else:
    class RemoteUrlCommand(sublime_plugin.TextCommand):
        """Fallback command shown when the Git plugin is unavailable."""
        def run(self, edit):
            sublime.error_message("I couldn't find the Git plugin. Please install it, restart Sublime Text, and try again.")
|
||||
|
||||
|
||||
class OpenRemoteUrlCommand(RemoteUrlCommand):
    """Open the current file's GitHub page in the default browser."""

    def run(self, edit):
        # base class computes self.url asynchronously, then calls on_done
        super(OpenRemoteUrlCommand, self).run(edit)

    def on_done(self):
        webbrowser.open(self.url)
|
||||
|
||||
|
||||
class CopyRemoteUrlCommand(RemoteUrlCommand):
    """Copy the current file's GitHub page URL to the clipboard."""

    def run(self, edit):
        # base class computes self.url asynchronously, then calls on_done
        super(CopyRemoteUrlCommand, self).run(edit)

    def on_done(self):
        sublime.set_clipboard(self.url)
        sublime.status_message("Remote URL copied to clipboard")
|
@@ -0,0 +1,91 @@
|
||||
import sys
|
||||
import os.path
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), "lib"))
|
||||
import re
|
||||
import requests
|
||||
from requests.status_codes import codes
|
||||
import httplib
|
||||
import commandline
|
||||
import sublime
|
||||
from StringIO import StringIO
|
||||
from httplib import HTTPResponse
|
||||
import logging
|
||||
|
||||
# Module-level logger used by CurlSession below.
# NOTE(review): basicConfig here configures the *root* logger at import
# time, which can interfere with other plugins' logging — consider
# logging.getLogger(__name__) instead; confirm before changing.
logging.basicConfig(format='%(asctime)s %(message)s')
logger = logging.getLogger()
|
||||
|
||||
|
||||
class CurlSession(object):
    """
    Minimal stand-in for a `requests` session that shells out to the
    `curl` binary instead of using Python's HTTP stack. Selected by
    session() below when httplib lacks HTTPSConnection (no SSL support).
    """

    class FakeSocket(StringIO):
        # HTTPResponse expects a socket it can call makefile() on;
        # returning self lets a StringIO of raw response text serve.
        def makefile(self, *args, **kw):
            return self

    def __init__(self, verify=None):
        # verify: optional CA cert bundle path, passed to `curl --cacert`
        self.verify = verify

    def _parse_http(self, text):
        """Parse raw `curl -i` output into an httplib HTTPResponse."""
        # if the response text starts with a 302, skip to the next non-302 header
        if re.match(r'^HTTP/.*?\s302 Found', text):
            m = re.search(r'(HTTP/\d+\.\d+\s(?!302 Found).*$)', text, re.S)
            if not m:
                raise Exception("Unrecognized response: %s" % text)
            else:
                text = m.group(1)
        # remove Transfer-Encoding: chunked header, as it causes reading the response to fail
        # first do a quick check for it, so we can avoid doing the expensive negative-lookbehind
        # regex if we don't need it
        if "Transfer-Encoding: chunked" in text:
            # we do the negative-lookbehind to make sure we only strip the Transfer-Encoding
            # string in the header
            text = re.sub(r'(?<!\r\n\r\n).*?Transfer-Encoding: chunked\r\n', '', text, count=1)
        socket = self.FakeSocket(text)
        response = HTTPResponse(socket)
        response.begin()
        return response

    def _build_response(self, text):
        """Convert raw HTTP response text into a requests.models.Response."""
        raw_response = self._parse_http(text)
        response = requests.models.Response()
        response.encoding = 'utf-8'
        response.status_code = raw_response.status
        response.headers = dict(raw_response.getheaders())
        response._content = raw_response.read()
        return response

    def request(self, method, url, headers=None, params=None, data=None, auth=None, allow_redirects=False, config=None):
        """
        Issue an HTTP request via curl and return a requests-style
        Response; returns None if the curl binary cannot be found.
        """
        try:
            curl = commandline.find_binary('curl')
        except commandline.BinaryNotFoundError:
            sublime.error_message("I couldn't find \"curl\" on your system. Curl is required on Linux. Please install it and try again.")
            return

        # -i: include headers (needed by _parse_http), -L: follow redirects,
        # -s: silent (no progress output)
        curl_options = ['-i', '-L', '--user-agent', 'Sublime Github', '-s']
        if auth:
            curl_options.extend(['--user', "%s:%s" % auth])
        if self.verify:
            curl_options.extend(['--cacert', self.verify])
        if headers:
            for k, v in headers.iteritems():
                curl_options.extend(['-H', "%s: %s" % (k, v)])
        if method in ('post', 'patch'):
            curl_options.extend(['-d', data])
        if method == 'patch':
            curl_options.extend(['-X', 'PATCH'])
        if params:
            # NOTE(review): param values are not url-encoded here —
            # presumably callers only pass url-safe values; verify.
            url += '?' + '&'.join(['='.join([k, str(v)]) for k, v in params.iteritems()])

        command = [curl] + curl_options + [url]

        response = self._build_response(commandline.execute(command))
        response.url = url
        return response

    def post(self, *args, **kwargs):
        """Convenience wrapper: request() with method='post'."""
        return self.request("post", *args, **kwargs)
|
||||
|
||||
|
||||
def session(verify=None, config=None):
    """Return a requests session when SSL is available, else a CurlSession.

    httplib only exposes HTTPSConnection when Python was built with SSL;
    without it we fall back to shelling out to curl.
    """
    if not hasattr(httplib, "HTTPSConnection"):
        return CurlSession(verify=verify)  # no SSL support: use curl
    return requests.session(verify=verify, config=config)
|
Reference in New Issue
Block a user