feat(ST2.GitPackages): bump up all packages

- Refresh PackageCache with latest versions of everything
Iristyle
2013-09-16 22:32:31 -04:00
parent fad58909f7
commit 3a0c5ce9e2
43 changed files with 6367 additions and 1395 deletions

View File

@@ -7,3 +7,6 @@ end_of_line = lf
 charset = utf-8
 trim_trailing_whitespace = true
 insert_final_newline = true
+[*.md]
+trim_trailing_whitespace = false

View File

@@ -1,4 +1,12 @@
 import sublime_plugin
+try:
+    import os, sys
+    # stupid python module system
+    sys.path.append(os.path.dirname(os.path.realpath(__file__)))
+    from .editorconfig import get_properties, EditorConfigError
+except:
+    # Python 2
-from editorconfig import get_properties, EditorConfigError
+    from editorconfig import get_properties, EditorConfigError
@@ -30,7 +38,7 @@ class EditorConfig(sublime_plugin.EventListener):
         try:
             config = get_properties(path)
         except EditorConfigError:
-            print 'Error occurred while getting EditorConfig properties'
+            print('Error occurred while getting EditorConfig properties')
         else:
             if config:
                 if pre_save:

View File

@@ -2,7 +2,7 @@
 from editorconfig.versiontools import join_version
-VERSION = (0, 11, 1, "final")
+VERSION = (0, 11, 3, "final")
 __all__ = ['get_properties', 'EditorConfigError', 'exceptions']

View File

@@ -40,7 +40,8 @@ class EditorConfigHandler(object):
""" """
def __init__(self, filepath, conf_filename='.editorconfig', version=None): def __init__(self, filepath, conf_filename='.editorconfig',
version=VERSION):
"""Create EditorConfigHandler for matching given filepath""" """Create EditorConfigHandler for matching given filepath"""
self.filepath = filepath self.filepath = filepath
self.conf_filename = conf_filename self.conf_filename = conf_filename
@@ -110,7 +111,7 @@ class EditorConfigHandler(object):
         # Set indent_size to "tab" if indent_size is unspecified and
         # indent_style is set to "tab".
         if (opts.get("indent_style") == "tab" and
-                not "indent_size" in opts and self.version >= VERSION[:3]):
+                not "indent_size" in opts and self.version >= (0, 10, 0)):
             opts["indent_size"] = "tab"
         # Set tab_width to indent_size if indent_size is specified and
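The version gate above relies on Python's element-by-element tuple comparison; a quick standalone illustration (values mirror the hunk, nothing plugin-specific):

    VERSION = (0, 11, 3, "final")

    # Tuple comparison is lexicographic, so version checks read naturally.
    print(VERSION[:3] >= (0, 10, 0))   # True  -> core is new enough, default indent_size to "tab"
    print((0, 9, 0) >= (0, 10, 0))     # False -> an older core would skip that default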

View File

@@ -14,7 +14,7 @@ from editorconfig.exceptions import ParsingError, PathError, VersionError
 def version():
-    print("Version %s" % __version__)
+    print("EditorConfig Python Core Version %s" % __version__)
 def usage(command, error=False):

View File

@@ -1 +1 @@
{"url": "http://sindresorhus.com", "version": "2013.03.18.18.13.22", "description": "Sublime Text plugin for EditorConfig - helps developers define and maintain consistent coding styles between different editors and IDEs"} {"url": "https://github.com/sindresorhus/editorconfig-sublime", "version": "2013.08.26.21.50.33", "description": "EditorConfig - helps developers define and maintain consistent coding styles between different editors and IDEs - Sublime plugin"}

View File

@@ -5,16 +5,7 @@
 ## Install
-### Sublime Text 2
-Install with [Package Control](http://wbond.net/sublime_packages/package_control)
-### Sublime Text 3
-[Download](https://github.com/sindresorhus/editorconfig-sublime/archive/st3.zip), unzip, and put the contents in `~/Library/Application Support/Sublime Text 3/Packages/EditorConfig`.
-Will be easier when Package Control is fully compatible.
+Install `EditorConfig` with [Package Control](https://sublime.wbond.net) and restart Sublime.
 ## Getting started
@@ -66,8 +57,7 @@ This plugin does its changes transparently in the background. I would recommend
 ## License
-[MIT License](http://en.wikipedia.org/wiki/MIT_License)
-(c) [Sindre Sorhus](http://sindresorhus.com)
+MIT License • © [Sindre Sorhus](http://sindresorhus.com)

View File

@@ -1,3 +1,4 @@
 {
-    "default_remote": "origin"
+    "default_remote": "origin",
+    "default_host": "github.com"
 }

View File

@@ -21,10 +21,11 @@ The plugin should be picked up automatically. If not, restart Sublime Text.
 ## Configuration
-The defaults should work for most setups, but if you have a different remote name, you can configure it in the `Githubinator.sublime-settings` file:
+The defaults should work for most setups, but if you have a different remote name or use GitHub Enterprise, you can configure remote and host in the `Githubinator.sublime-settings` file:
     {
-        "default_remote": "origin"
+        "default_remote": "origin",
+        "default_host": "github.com"
     }

View File

@@ -11,7 +11,14 @@ class GithubinatorCommand(sublime_plugin.TextCommand):
     def load_config(self):
         s = sublime.load_settings("Githubinator.sublime-settings")
-        global DEFAULT_GIT_REMOTE; DEFAULT_GIT_REMOTE = s.get("default_remote")
+        global DEFAULT_GIT_REMOTE, DEFAULT_GITHUB_HOST
+        DEFAULT_GIT_REMOTE = s.get("default_remote")
+        if not isinstance(DEFAULT_GIT_REMOTE, list):
+            DEFAULT_GIT_REMOTE = [DEFAULT_GIT_REMOTE]
+        DEFAULT_GITHUB_HOST = s.get("default_host")
+        if DEFAULT_GITHUB_HOST is None:
+            DEFAULT_GITHUB_HOST = "github.com"
     def run(self, edit, permalink = False, mode = 'blob'):
         self.load_config()
@@ -41,8 +48,9 @@ class GithubinatorCommand(sublime_plugin.TextCommand):
         else:
             lines = '%s-%s' % (begin_line, end_line)
-        for remote in [DEFAULT_GIT_REMOTE]:
-            regex = r'.*\s.*(?:https://github\.com/|github\.com:|git://github\.com/)(.*)/(.*?)(?:\.git)?\r?\n'
+        re_host = re.escape(DEFAULT_GITHUB_HOST)
+        for remote in DEFAULT_GIT_REMOTE:
+            regex = r'.*\s.*(?:https://%s/|%s:|git://%s/)(.*)/(.*?)(?:\.git)?\r?\n' % (re_host, re_host, re_host)
             result = re.search(remote + regex, config)
             if not result:
                 continue
@@ -53,8 +61,8 @@ class GithubinatorCommand(sublime_plugin.TextCommand):
             sha = open(os.path.join(git_path, '.git', ref_path), "r").read()[:-1]
             target = sha if permalink else branch
-            full_link = 'https://github.com/%s/%s/%s/%s%s/%s#L%s' % \
-                (matches[0], matches[1], mode, target, new_git_path, file_name, lines)
+            full_link = 'https://%s/%s/%s/%s/%s%s/%s#L%s' % \
+                (DEFAULT_GITHUB_HOST, matches[0], matches[1], mode, target, new_git_path, file_name, lines)
             sublime.set_clipboard(full_link)
             sublime.status_message('Copied %s to clipboard.' % full_link)
             print('Copied %s to clipboard.' % full_link)
@@ -73,4 +81,7 @@ class GithubinatorCommand(sublime_plugin.TextCommand):
     def is_enabled(self):
-        return self.view.file_name() and len(self.view.file_name()) > 0
+        if self.view.file_name() and len(self.view.file_name()) > 0:
+            return True
+        else:
+            return False
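A rough standalone check of the host-aware remote matching introduced above; the host, repository, and config text below are made-up stand-ins for what the plugin reads out of .git/config:

    import re

    DEFAULT_GITHUB_HOST = "github.example.com"   # hypothetical GitHub Enterprise host
    config = '[remote "origin"]\n\turl = git@github.example.com:acme/widgets.git\n'

    re_host = re.escape(DEFAULT_GITHUB_HOST)
    regex = r'.*\s.*(?:https://%s/|%s:|git://%s/)(.*)/(.*?)(?:\.git)?\r?\n' % (re_host, re_host, re_host)

    result = re.search('origin' + regex, config)
    print(result.group(1), result.group(2))  # acme widgets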

View File

@@ -1 +1 @@
{"url": "https://github.com/ehamiter/ST2-GitHubinator", "version": "2013.03.02.08.48.58", "description": "Sublime Text 2 plugin that shows selected ST2 text on GitHub"} {"url": "https://github.com/ehamiter/ST2-GitHubinator", "version": "2013.09.09.18.14.32", "description": "Sublime Text 2 plugin that shows selected ST2 text on GitHub"}

View File

@@ -268,23 +268,32 @@ class DiffCommand(VcsCommand):
         pass
     def git_diff_command(self, file_name):
-        return [self.get_user_command('git') or 'git', 'diff', '--no-color', '--no-ext-diff', '--', file_name]
+        vcs_options = self.settings.get('vcs_options', {}).get('git') or ['--no-color', '--no-ext-diff']
+        return [self.get_user_command('git') or 'git', 'diff'] + vcs_options + ['--', file_name]
     def svn_diff_command(self, file_name):
         params = [self.get_user_command('svn') or 'svn', 'diff']
-        if self.settings.get('svn_use_internal_diff', True):
+        params.extend(self.settings.get('vcs_options', {}).get('svn', []))
+        if '--internal-diff' not in params and self.settings.get('svn_use_internal_diff', True):
             params.append('--internal-diff')
+        # if file starts with @, use `--revision HEAD` option
+        # https://github.com/gornostal/Modific/issues/17
         if file_name.find('@') != -1:
             file_name += '@'
             params.extend(['--revision', 'HEAD'])
-        params.extend([file_name])
+        params.append(file_name)
         return params
     def bzr_diff_command(self, file_name):
-        return [self.get_user_command('bzr') or 'bzr', 'diff', file_name]
+        vcs_options = self.settings.get('vcs_options', {}).get('bzr', [])
+        return [self.get_user_command('bzr') or 'bzr', 'diff'] + vcs_options + [file_name]
     def hg_diff_command(self, file_name):
-        return [self.get_user_command('hg') or 'hg', 'diff', file_name]
+        vcs_options = self.settings.get('vcs_options', {}).get('hg', [])
+        return [self.get_user_command('hg') or 'hg', 'diff'] + vcs_options + [file_name]
 class ShowDiffCommand(DiffCommand, sublime_plugin.TextCommand):
@@ -411,7 +420,7 @@ class DiffParser(object):
 class HlChangesCommand(DiffCommand, sublime_plugin.TextCommand):
     def hl_lines(self, lines, hl_key):
-        if (not len(lines)):
+        if (not len(lines) or not self.settings.get('highlight_changes')):
             self.view.erase_regions(hl_key)
             return
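A condensed sketch of how the new `vcs_options` setting flows into the git diff command line; the settings dict is hand-rolled here in place of `sublime.load_settings`, and `get_user_command` is stubbed out:

    settings = {"vcs_options": {"git": ["--no-color", "--no-ext-diff"]}}

    def get_user_command(name):
        return None  # stand-in: no user-configured binary path

    def git_diff_command(file_name):
        vcs_options = settings.get('vcs_options', {}).get('git') or ['--no-color', '--no-ext-diff']
        return [get_user_command('git') or 'git', 'diff'] + vcs_options + ['--', file_name]

    print(git_diff_command('README.md'))
    # ['git', 'diff', '--no-color', '--no-ext-diff', '--', 'README.md']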

View File

@@ -1,5 +1,8 @@
 // Modific default settings
 {
+    // Highlight changes
+    "highlight_changes": true,
     // Name of a region icon
     // Valid icon names are: modific, dot, circle, bookmark and cross
     // WARNING: if you set value different than 'modific',
@@ -16,6 +19,11 @@
["hg" , "hg"] ["hg" , "hg"]
], ],
// default list of options for a diff command for a certain VCS
"vcs_options": {
"git": ["--no-color", "--no-ext-diff"]
},
//if you have some weird OS, that has non-unicode console //if you have some weird OS, that has non-unicode console
//place its console encoding here //place its console encoding here
"console_encoding" : "", "console_encoding" : "",
@@ -26,7 +34,10 @@
     // set to false to disable automatic saving
     "autosave": true,
-    "svn_use_internal_diff": true,
+    // Turn this option on if you're using SVN 1.7 or higher
+    // this instructs Subversion to use its built-in differencing engine
+    // despite any external differencing mechanism that may be specified for use in the user's runtime configuration.
+    "svn_use_internal_diff": false,
     // File size limit (in KB) for drawing icons on the gutter
     "max_file_size": 1024

View File

@@ -36,13 +36,13 @@ The "Packages" directory is located at:
 Please, make sure your VCS binaries is in the PATH (**especially if you are on Windows**).
-To do that on Windows, open `Controll Panel -> System -> Advanced system settings -> Environment variables -> System Variables`, find PATH, click "Edit" and append `;C:\path\to\VCS\binaries` for every VCS you will use (or make sure it's already there).
+To do that on Windows, open `Control Panel -> System -> Advanced system settings -> Environment variables -> System Variables`, find PATH, click "Edit" and append `;C:\path\to\VCS\binaries` for every VCS you will use (or make sure it's already there).
 Features / Usage
 ----------------
 **Highlight changes** *(automatically: on save or when window gets focus)*
-[![Highlight changes](http://i.imgur.com/FgpyRl.jpg)](http://i.imgur.com/FgpyR.jpg)
+[![Highlight changes](http://i.imgur.com/DX8TeJTl.jpg)](http://i.imgur.com/DX8TeJT.jpg)
 **Show diff** `Ctrl+Alt+D` on Linux/Windows and OS X
 [![Show diff](http://i.imgur.com/csCw7l.jpg)](http://i.imgur.com/csCw7.jpg)
@@ -76,6 +76,11 @@ If some sacred punishment has been bestowed upon you, and you have no other choi
 If you use different than the default theme, you can customize colors of bullets on the gutter by adding [this](https://gist.github.com/3692073) chunk of code to your theme.
+### SVN users
+If you are using SVN 1.7 you may want to turn on option `svn_use_internal_diff`.
+This instructs Subversion to use its built-in differencing engine
+despite any external differencing mechanism that may be specified for use in the user's runtime configuration.
 Thanks to
 ---------

View File

@@ -1 +1 @@
{"url": "https://github.com/gornostal/Modific", "version": "2013.03.01.06.02.08", "description": "Highlight lines changed since the last commit (supports Git, SVN, Bazaar and Mercurial) / ST2(3) plugin"} {"url": "https://github.com/gornostal/Modific", "version": "2013.08.23.12.05.13", "description": "Highlight lines changed since the last commit (supports Git, SVN, Bazaar and Mercurial) / ST2(3) plugin"}

View File

@@ -0,0 +1,34 @@
[
// Git Chords - https://github.com/kemayo/sublime-text-2-git
{ "keys": ["ctrl+shift+g", "ctrl+shift+a"], "command": "git_add_choice" },
{ "keys": ["ctrl+shift+g", "ctrl+shift+b"], "command": "git_branch" },
{ "keys": ["ctrl+shift+g", "ctrl+shift+c"], "command": "git_commit" },
{ "keys": ["ctrl+shift+g", "ctrl+shift+d"], "command": "git_diff" },
{ "keys": ["ctrl+shift+g", "ctrl+shift+f"], "command": "git_fetch" },
{ "keys": ["ctrl+shift+g", "ctrl+shift+g"], "command": "git_graph" },
{ "keys": ["ctrl+shift+g", "ctrl+shift+h"], "command": "git_commit_history" },
{ "keys": ["ctrl+shift+g", "ctrl+shift+l"], "command": "git_log" },
{ "keys": ["ctrl+shift+g", "ctrl+shift+m"], "command": "git_merge" },
{ "keys": ["ctrl+shift+g", "ctrl+shift+n"], "command": "git_new_branch" },
{ "keys": ["ctrl+shift+g", "ctrl+shift+p"], "command": "git_pull_current_branch" },
{ "keys": ["ctrl+shift+g", "ctrl+shift+q"], "command": "git_quick_commit" },
// dangerous
// { "keys": ["ctrl+shift+g", "ctrl+shift+r"], "command": "git_reset_hard_head" },
{ "keys": ["ctrl+shift+g", "ctrl+shift+s"], "command": "git_status" },
{ "keys": ["ctrl+shift+g", "ctrl+shift+t"], "command": "git_new_tag" },
{ "keys": ["ctrl+shift+g", "ctrl+shift+z"], "command": "git_commit_amend" },
{ "keys": ["ctrl+shift+g", "ctrl+shift+up"], "command": "git_push" },
{ "keys": ["ctrl+shift+g", "ctrl+shift+down"], "command": "git_pull" },
{ "keys": ["ctrl+shift+g", "ctrl+shift+right"], "command": "git_stash" },
{ "keys": ["ctrl+shift+g", "ctrl+shift+left"], "command": "git_stash_pop" },
//Modific - https://github.com/gornostal/Modific
{ "keys": ["ctrl+super+c"], "command": "show_original_part" },
//using this binding with https://github.com/braindamageinc/SublimeHttpRequester
//{ "keys": ["ctrl+super+r"], "command": "replace_modified_part" },
//Git plugin does this already
//{ "keys": ["ctrl+alt+d"], "command": "show_diff" },
{ "keys": ["ctrl+super+u"], "command": "uncommitted_files" }
//{ "keys": ["ctrl+shift+pageup"], "command": "jump_between_changes", "args": {"direction": "prev"} },
//{ "keys": ["ctrl+shift+pagedown"], "command": "jump_between_changes", "args": {"direction": "next"} }
]

View File

@@ -1 +1 @@
{"url": "https://github.com/Iristyle/SublimeKeyMap.Git", "version": "2013.03.17.19.57.57", "description": "A simple repository used to host / share my customized Sublime Text 2 key bindings for Git plugins"} {"url": "https://github.com/Iristyle/SublimeKeyMap.Git", "version": "2013.09.17.01.37.21", "description": "A simple repository used to host / share my customized Sublime Text 2 key bindings for Git plugins"}

View File

@@ -1,6 +0,0 @@
[
{ "keys": ["super+g", "super+n"], "command": "public_gist_from_selection" },
{ "keys": ["super+g", "super+p","super+n"], "command": "private_gist_from_selection" },
{ "keys": ["super+g", "super+o"], "command": "open_gist_in_editor" },
{ "keys": ["super+g", "super+c"], "command": "open_gist_url" }
]

View File

@@ -11,5 +11,8 @@
{ "caption": "GitHub: Update Gist", "command": "update_gist" }, { "caption": "GitHub: Update Gist", "command": "update_gist" },
{ "caption": "GitHub: Switch Accounts", "command": "switch_accounts" }, { "caption": "GitHub: Switch Accounts", "command": "switch_accounts" },
{ "caption": "GitHub: Copy Remote URL to Clipboard", "command": "copy_remote_url" }, { "caption": "GitHub: Copy Remote URL to Clipboard", "command": "copy_remote_url" },
{ "caption": "GitHub: Open Remote URL in Browser", "command": "open_remote_url" } { "caption": "GitHub: Open Remote URL in Browser", "command": "open_remote_url" },
{ "caption": "GitHub: Blame", "command": "blame"},
{ "caption": "GitHub: History", "command": "history"},
{ "caption": "GitHub: Edit", "command": "edit"}
] ]

View File

@@ -1,38 +1,24 @@
 # Sublime GitHub
-This is a plugin for the [Sublime Text 2](http://www.sublimetext.com/) text
-editor that allows you to create and browse your [GitHub Gists](http://gist.github.com).
+This is a plugin for the [Sublime Text](http://www.sublimetext.com/) text
+editor (both versions 2 and 3) that provides a number of useful commands for GitHub, including creating and browsing gists,
+opening and editing files on GitHub, and bringing up the blame and commit history views.
 ## Installation
-**The easiest way to install is via the** [**Sublime Package Control**](http://wbond.net/sublime_packages/package_control) **plugin.**
-Just open "Package Control: Install Package" in your Command Palette and search for
-"sublime-github" (or, if you already have it installed, select "Package Control: Upgrade Package"
-to upgrade).
-To install it manually in a shell/Terminal (on OS X, Linux or Cygwin), via git:
-    cd ~/"Library/Application Support/Sublime Text 2/Packages/" # location on OS X; will be different on Linux & Windows
-    git clone https://github.com/bgreenlee/sublime-github.git
-or, if you don't have git installed:
-    cd ~/"Library/Application Support/Sublime Text 2/Packages/"
-    rm -rf bgreenlee-sublime-github* # remove any old versions
-    curl -L https://github.com/bgreenlee/sublime-github/tarball/master | tar xf -
-The plugin should be picked up automatically. If not, restart Sublime Text.
+You can install via [**Sublime Package Control**](http://wbond.net/sublime_packages/package_control) plugin.
+Just open "Package Control: Install Package" in your Command Palette and search for "sublime-github". The plugin should be picked up automatically. If not, restart Sublime Text.
 ## Usage
-The first time you run one of the commands, it will ask you for your GitHub
-username and password in order to create a GitHub API access token, which gets saved
-in the Sublime GitHub user settings file. Your username and password are not
-stored anywhere, but if you would rather generate the access token yourself, see
-the "Generating Your Own Access Token" section below.
+The first time you run one of the commands, it will ask you for your GitHub username and password in order to create a GitHub API access token, which gets saved in the Sublime GitHub user settings file. Your username and password are not stored anywhere, but if you would rather generate the access token yourself, see the "Generating Your Own Access Token" section below.
 The following commands are available in the Command Palette:
+* **GitHub: Switch Accounts**
+  Switch to another GitHub account (see Adding Additional Accounts below)
 * **GitHub: Private Gist from Selection**
   Create a private gist from the currently selected text (or, if nothing is selected,
@@ -77,21 +63,27 @@ The following commands are available in the Command Palette:
   Update the gist open in the current editor.
-* **GitHub: Switch Accounts**
-  Switch to another GitHub account (see Adding Additional Accounts below)
+**The following commands require the Git plugin, available through the Package Manager. After installing, restart Sublime Text.**
 * **GitHub: Open Remote URL in Browser**
-  Open the current file's location in the repository in the browser. *Note:* Requires
-  the Git plugin, available through the Package Manager. After installing, restart
-  Sublime Text.
+  Open the current file's location in the repository in the browser.
 * **GitHub: Copy Remote URL to Clipboard**
   Put the url of the current file's location in the repository into the clipboard.
-  *Note:* Requires the Git plugin, available through the Package Manager. After
-  installing, restart Sublime Text.
+* **GitHub: Blame**
+  Open the GitHub blame view of the current file in the browser
+* **GitHub: History**
+  Open the GitHub commit history view of the current file in the browser.
+* **GitHub: Edit**
+  Open the current file for editing on GitHub. I'm not sure why you'd want to do that, but it was easy enough to add.
 ## Adding Additional Accounts
@@ -116,40 +108,42 @@ whatever the base url is for your private GitHub, plus "/api/v3". For example:
     {
         "base_uri": "https://github.yourco.com/api/v3",
         "github_token": ""
-    },
+    }
 }
 Don't worry about setting the `github_token`--that will be set for you automatically, after you
 switch accounts (Shift-Cmd-P, "GitHub: Switch Accounts").
+## Key Bindings
+You can add your own keyboard shortcuts in Preferences -> Key Bindings - User. For example:
+    [
+        { "keys": ["ctrl+super+g", "ctrl+super+n"], "command": "public_gist_from_selection" },
+        { "keys": ["ctrl+super+g", "ctrl+super+p","super+n"], "command": "private_gist_from_selection" },
+        { "keys": ["ctrl+super+g", "ctrl+super+o"], "command": "open_gist_in_editor" },
+        { "keys": ["ctrl+super+g", "ctrl+super+c"], "command": "open_gist_url" }
+    ]
+(Note that `ctrl+super+g` (^⌘G) conflicts with Sublime Text's Quick Find All, so adjust accordingly.)
+Available commands can be seen in <https://github.com/bgreenlee/sublime-github/blob/master/Github.sublime-commands>.
 ## Issues
 * Linux requires the [curl](http://curl.haxx.se/) binary to be installed on your system (in one of:
   `/usr/local/sbin`, `/usr/local/bin`, `/usr/sbin`, `/usr/bin`, `/sbin`, or `/bin`).
-* Depending on the number of gists you have, there can be a considerable delay the first time
-  your list of gists is fetched. Subsequent requests will be cached and should be a bit faster
-  (although the GitHub API's ETags are currently not correct; once that fix that, it should speed
-  things up). In the meantime, if there are gists that you open frequently, open them on GitHub and
-  "Star" them, then access them via the Open/Copy Starred Gist commands.
+* Depending on the number of gists you have, there can be a considerable delay the first time your list of gists is fetched. Subsequent requests will be cached and should be a bit faster (although the GitHub API's ETags are currently not correct; once they fix that, it should speed things up). In the meantime, if there are gists that you open frequently, open them on GitHub and "Star" them, then access them via the Open/Copy Starred Gist commands.
-* Setting the file type for syntax highlighting when opening a gist in the editor does not work
-  in Linux. I could get it to work with significant effort, so if you desperately want it, open
-  an issue.
+* Setting the file type for syntax highlighting when opening a gist in the editor does not work in Linux. I could get it to work with significant effort, so if you desperately want it, open an issue.
 ## Generating Your Own Access Token
-If you feel uncomfortable giving your GitHub username and password to the
-plugin, you can generate a GitHub API access token yourself. Just open up
-a Terminal window/shell (on OS X, Linux or Cygwin), and run:
+If you feel uncomfortable giving your GitHub username and password to the plugin, you can generate a GitHub API access token yourself. Just open up a Terminal window/shell (on OS X, Linux or Cygwin), and run:
     curl -u username -d '{"scopes":["gist"]}' https://api.github.com/authorizations
-where `username` is your GitHub username. You'll be prompt for your password first. Then you'll get back
-a response that includes a 40-digit "token" value (e.g. `6423ba8429a152ff4a7279d1e8f4674029d3ef87`).
-Go to Sublime Text 2 -> Preferences -> Package Settings -> GitHub -> Settings - User,
-and insert the token there. It should look like:
+where `username` is your GitHub username. You'll be prompt for your password first. Then you'll get back a response that includes a 40-digit "token" value (e.g. `6423ba8429a152ff4a7279d1e8f4674029d3ef87`). Go to Sublime Text 2 -> Preferences -> Package Settings -> GitHub -> Settings - User, and insert the token there. It should look like:
     {
         "github_token": "6423ba8429a152ff4a7279d1e8f4674029d3ef87"
@@ -159,6 +153,24 @@ Restart Sublime.
 That's it!
+## Configuring a proxy
+If you are behind a proxy you can configure it for each account.
+Note that until a [bug](https://github.com/shazow/urllib3/pull/170) in urllib3 is fixed, in order to use a proxy you also have to force curl mode (Curl is required obviously).
+For example:
+    "accounts":
+    {
+        "GitHub":
+        {
+            "base_uri": "https://api.github.com",
+            "https_proxy": "...",
+            "force_curl": true
+        }
+    }
 ## Bugs and Feature Requests
 <http://github.com/bgreenlee/sublime-github/issues>

View File

@@ -6,6 +6,12 @@ import subprocess
 class BinaryNotFoundError(Exception):
     pass
+class CommandExecutionError(Exception):
+    def __init__(self, errorcode):
+        self.errorcode = errorcode
+    def __str__(self):
+        return repr('An error has occurred while executing the command')
 def find_binary(name):
     dirs = ['/usr/local/sbin', '/usr/local/bin', '/usr/sbin', '/usr/bin',
@@ -24,5 +30,8 @@ def execute(args):
                             stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
     output = proc.stdout.read()
-    proc.wait()
-    return output
+    if proc.wait() == 0:
+        return output
+    raise CommandExecutionError(proc.returncode)
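The new non-zero exit handling can be exercised roughly like this (assumes a `git` binary on PATH; the bogus subcommand is intentional so the error path fires):

    import subprocess

    class CommandExecutionError(Exception):
        def __init__(self, errorcode):
            self.errorcode = errorcode

    def execute(args):
        proc = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        output = proc.stdout.read()
        if proc.wait() == 0:
            return output
        raise CommandExecutionError(proc.returncode)

    try:
        execute(['git', 'definitely-not-a-subcommand'])
    except CommandExecutionError as e:
        print('git exited with code', e.errorcode)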

View File

@@ -1,9 +1,12 @@
-import sublime
+import sys
 import os.path
+sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
+import sublime
 import json
 import sublime_requests as requests
-import sys
 import logging
+from requests.exceptions import ConnectionError
+import pprint
 logging.basicConfig(format='%(asctime)s %(message)s')
 logger = logging.getLogger()
@@ -19,16 +22,38 @@ class GitHubApi(object):
"Raised if we get a 401 from GitHub" "Raised if we get a 401 from GitHub"
pass pass
class OTPNeededException(Exception):
"Raised if 2FA is configured and we need a one-time password"
pass
class UnknownException(Exception): class UnknownException(Exception):
"Raised if we get a response code we don't recognize from GitHub" "Raised if we get a response code we don't recognize from GitHub"
pass pass
def __init__(self, base_uri="https://api.github.com", token=None, debug=False): class ConnectionException(Exception):
"Raised if we get a ConnectionError"
pass
class NullResponseException(Exception):
"Raised if we get an empty response (i.e., CurlSession failure)"
pass
def __init__(self, base_uri="https://api.github.com", token=None, debug=False, proxies=None, force_curl=False):
self.base_uri = base_uri self.base_uri = base_uri
self.token = token self.token = token
self.debug = debug self.debug = debug
self.proxies = proxies
if debug: if debug:
try:
import http.client as httplib
except ImportError:
import httplib
httplib.HTTPConnection.debuglevel = 1
logger.setLevel(logging.DEBUG) logger.setLevel(logging.DEBUG)
requests_log = logging.getLogger("requests.packages.urllib3")
requests_log.setLevel(logging.DEBUG)
requests_log.propagate = True
# set up requests session with the root CA cert bundle # set up requests session with the root CA cert bundle
cert_path = os.path.join(sublime.packages_path(), "sublime-github", "ca-bundle.crt") cert_path = os.path.join(sublime.packages_path(), "sublime-github", "ca-bundle.crt")
@@ -36,21 +61,28 @@ class GitHubApi(object):
             logger.warning("Root CA cert bundle not found at %s! Not verifying requests." % cert_path)
             cert_path = None
         self.rsession = requests.session(verify=cert_path,
-                                         config={'verbose': sys.stderr if self.debug else None})
+                                         force_curl=force_curl)
-    def get_token(self, username, password):
+    def get_token(self, username, password, one_time_password=None):
         auth_data = {
             "scopes": ["gist"],
             "note": "Sublime GitHub",
             "note_url": "https://github.com/bgreenlee/sublime-github"
         }
+        headers = {'X-GitHub-OTP': one_time_password} if one_time_password else {}
         resp = self.rsession.post(self.base_uri + "/authorizations",
+                                  headers=headers,
                                   auth=(username, password),
+                                  proxies=self.proxies,
                                   data=json.dumps(auth_data))
         if resp.status_code == requests.codes.CREATED:
+            logger.debug(pprint.saferepr(resp))
             data = json.loads(resp.text)
             return data["token"]
         elif resp.status_code == requests.codes.UNAUTHORIZED:
+            if resp.headers['X-GitHub-OTP'].startswith('required'):
+                raise self.OTPNeededException()
+            else:
-            raise self.UnauthorizedException()
+                raise self.UnauthorizedException()
         else:
             raise self.UnknownException("%d %s" % (resp.status_code, resp.text))
@@ -79,11 +111,20 @@ class GitHubApi(object):
         if method == 'get' and url in self.etags:
             headers["If-None-Match"] = self.etags[url]
         logger.debug("request: %s %s %s %s" % (method, url, headers, params))
+        try:
-        resp = self.rsession.request(method, url,
-                                     headers=headers,
-                                     params=params,
-                                     data=data,
-                                     allow_redirects=True)
+            resp = self.rsession.request(method, url,
+                                         headers=headers,
+                                         params=params,
+                                         data=data,
+                                         proxies=self.proxies,
+                                         allow_redirects=True)
+            if not resp:
+                raise self.NullResponseException("Empty response received.")
+        except ConnectionError as e:
+            raise self.ConnectionException("Connection error, "
+                "please verify your internet connection: %s" % e)
         full_url = resp.url
         logger.debug("response: %s" % resp.headers)
         if resp.status_code in [requests.codes.OK,
@@ -112,7 +153,7 @@ class GitHubApi(object):
"files": {filename: {"content": content}}}) "files": {filename: {"content": content}}})
def update_gist(self, gist, content): def update_gist(self, gist, content):
filename = gist["files"].keys()[0] filename = list(gist["files"].keys())[0]
return self.patch("/gists/" + gist["id"], return self.patch("/gists/" + gist["id"],
{"description": gist["description"], {"description": gist["description"],
"files": {filename: {"content": content}}}) "files": {filename: {"content": content}}})

View File

@@ -0,0 +1,390 @@
# python3-compatible git library from https://github.com/kemayo/sublime-text-2-git
# including this temporarily until a Package Control installable version of Git
# is available
import os
import sublime
import sublime_plugin
import threading
import subprocess
import functools
import os.path
import time
# In a complete inversion from ST2, in ST3 when a plugin is loaded we
# actually can trust __file__.
# Goal is to get: "Packages/Git", allowing for people who rename things
FULL_PLUGIN_DIRECTORY = os.path.dirname(os.path.realpath(__file__))
PLUGIN_DIRECTORY = FULL_PLUGIN_DIRECTORY.replace(os.path.normpath(os.path.join(FULL_PLUGIN_DIRECTORY, '..', '..')) + os.path.sep, '').replace(os.path.sep, '/')
git_root_cache = {}
def main_thread(callback, *args, **kwargs):
# sublime.set_timeout gets used to send things onto the main thread
# most sublime.[something] calls need to be on the main thread
sublime.set_timeout(functools.partial(callback, *args, **kwargs), 0)
def open_url(url):
sublime.active_window().run_command('open_url', {"url": url})
def git_root(directory):
global git_root_cache
retval = False
leaf_dir = directory
if leaf_dir in git_root_cache and git_root_cache[leaf_dir]['expires'] > time.time():
return git_root_cache[leaf_dir]['retval']
while directory:
if os.path.exists(os.path.join(directory, '.git')):
retval = directory
break
parent = os.path.realpath(os.path.join(directory, os.path.pardir))
if parent == directory:
# /.. == /
retval = False
break
directory = parent
git_root_cache[leaf_dir] = {
'retval': retval,
'expires': time.time() + 5
}
return retval
# for readability code
def git_root_exist(directory):
return git_root(directory)
def view_contents(view):
region = sublime.Region(0, view.size())
return view.substr(region)
def plugin_file(name):
return os.path.join(PLUGIN_DIRECTORY, name)
def do_when(conditional, callback, *args, **kwargs):
if conditional():
return callback(*args, **kwargs)
sublime.set_timeout(functools.partial(do_when, conditional, callback, *args, **kwargs), 50)
def _make_text_safeish(text, fallback_encoding, method='decode'):
# The unicode decode here is because sublime converts to unicode inside
# insert in such a way that unknown characters will cause errors, which is
# distinctly non-ideal... and there's no way to tell what's coming out of
# git in output. So...
try:
unitext = getattr(text, method)('utf-8')
except (UnicodeEncodeError, UnicodeDecodeError):
unitext = getattr(text, method)(fallback_encoding)
except AttributeError:
# strongly implies we're already unicode, but just in case let's cast
# to string
unitext = str(text)
return unitext
def _test_paths_for_executable(paths, test_file):
for directory in paths:
file_path = os.path.join(directory, test_file)
if os.path.exists(file_path) and os.access(file_path, os.X_OK):
return file_path
def find_git():
# It turns out to be difficult to reliably run git, with varying paths
# and subprocess environments across different platforms. So. Let's hack
# this a bit.
# (Yes, I could fall back on a hardline "set your system path properly"
# attitude. But that involves a lot more arguing with people.)
path = os.environ.get('PATH', '').split(os.pathsep)
if os.name == 'nt':
git_cmd = 'git.exe'
else:
git_cmd = 'git'
git_path = _test_paths_for_executable(path, git_cmd)
if not git_path:
# /usr/local/bin:/usr/local/git/bin
if os.name == 'nt':
extra_paths = (
os.path.join(os.environ["ProgramFiles"], "Git", "bin"),
os.path.join(os.environ["ProgramFiles(x86)"], "Git", "bin"),
)
else:
extra_paths = (
'/usr/local/bin',
'/usr/local/git/bin',
)
git_path = _test_paths_for_executable(extra_paths, git_cmd)
return git_path
GIT = find_git()
class CommandThread(threading.Thread):
def __init__(self, command, on_done, working_dir="", fallback_encoding="", **kwargs):
threading.Thread.__init__(self)
self.command = command
self.on_done = on_done
self.working_dir = working_dir
if "stdin" in kwargs:
self.stdin = kwargs["stdin"].encode()
else:
self.stdin = None
if "stdout" in kwargs:
self.stdout = kwargs["stdout"]
else:
self.stdout = subprocess.PIPE
self.fallback_encoding = fallback_encoding
self.kwargs = kwargs
def run(self):
try:
# Ignore directories that no longer exist
if not os.path.isdir(self.working_dir):
return
if self.working_dir != "":
os.chdir(self.working_dir)
# Windows needs startupinfo in order to start process in background
startupinfo = None
if os.name == 'nt':
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
# universal_newlines seems to break `log` in python3
proc = subprocess.Popen(self.command,
stdout=self.stdout, stderr=subprocess.STDOUT,
stdin=subprocess.PIPE, startupinfo=startupinfo,
shell=False, universal_newlines=False)
output = proc.communicate(self.stdin)[0]
if not output:
output = ''
main_thread(self.on_done,
_make_text_safeish(output, self.fallback_encoding), **self.kwargs)
except subprocess.CalledProcessError as e:
main_thread(self.on_done, e.returncode)
except OSError as e:
if e.errno == 2:
main_thread(sublime.error_message, "Git binary could not be found in PATH\n\nConsider using the git_command setting for the Git plugin\n\nPATH is: %s" % os.environ['PATH'])
else:
raise e
class GitScratchOutputCommand(sublime_plugin.TextCommand):
def run(self, edit, output = '', output_file = None, clear = False):
if clear:
region = sublime.Region(0, self.view.size())
self.view.erase(edit, region)
self.view.insert(edit, 0, output)
# A base for all commands
class GitCommand(object):
may_change_files = False
def run_command(self, command, callback=None, show_status=True,
filter_empty_args=True, no_save=False, **kwargs):
if filter_empty_args:
command = [arg for arg in command if arg]
if 'working_dir' not in kwargs:
kwargs['working_dir'] = self.get_working_dir()
if 'fallback_encoding' not in kwargs and self.active_view() and self.active_view().settings().get('fallback_encoding'):
kwargs['fallback_encoding'] = self.active_view().settings().get('fallback_encoding').rpartition('(')[2].rpartition(')')[0]
s = sublime.load_settings("Git.sublime-settings")
if s.get('save_first') and self.active_view() and self.active_view().is_dirty() and not no_save:
self.active_view().run_command('save')
if command[0] == 'git':
if s.get('git_command'):
command[0] = s.get('git_command')
elif GIT:
command[0] = GIT
if command[0] == 'git-flow' and s.get('git_flow_command'):
command[0] = s.get('git_flow_command')
if not callback:
callback = self.generic_done
thread = CommandThread(command, callback, **kwargs)
thread.start()
if show_status:
message = kwargs.get('status_message', False) or ' '.join(command)
sublime.status_message(message)
def generic_done(self, result):
if self.may_change_files and self.active_view() and self.active_view().file_name():
if self.active_view().is_dirty():
result = "WARNING: Current view is dirty.\n\n"
else:
# just asking the current file to be re-opened doesn't do anything
print("reverting")
position = self.active_view().viewport_position()
self.active_view().run_command('revert')
do_when(lambda: not self.active_view().is_loading(), lambda: self.active_view().set_viewport_position(position, False))
# self.active_view().show(position)
view = self.active_view()
if view and view.settings().get('live_git_annotations'):
self.view.run_command('git_annotate')
if not result.strip():
return
self.panel(result)
def _output_to_view(self, output_file, output, clear=False,
syntax="Packages/Diff/Diff.tmLanguage", **kwargs):
output_file.set_syntax_file(syntax)
args = {
'output': output,
'clear': clear
}
output_file.run_command('git_scratch_output', args)
def scratch(self, output, title=False, position=None, **kwargs):
scratch_file = self.get_window().new_file()
if title:
scratch_file.set_name(title)
scratch_file.set_scratch(True)
self._output_to_view(scratch_file, output, **kwargs)
scratch_file.set_read_only(True)
if position:
sublime.set_timeout(lambda: scratch_file.set_viewport_position(position), 0)
return scratch_file
def panel(self, output, **kwargs):
if not hasattr(self, 'output_view'):
self.output_view = self.get_window().get_output_panel("git")
self.output_view.set_read_only(False)
self._output_to_view(self.output_view, output, clear=True, **kwargs)
self.output_view.set_read_only(True)
self.get_window().run_command("show_panel", {"panel": "output.git"})
def quick_panel(self, *args, **kwargs):
self.get_window().show_quick_panel(*args, **kwargs)
# A base for all git commands that work with the entire repository
class GitWindowCommand(GitCommand, sublime_plugin.WindowCommand):
def active_view(self):
return self.window.active_view()
def _active_file_name(self):
view = self.active_view()
if view and view.file_name() and len(view.file_name()) > 0:
return view.file_name()
@property
def fallback_encoding(self):
if self.active_view() and self.active_view().settings().get('fallback_encoding'):
return self.active_view().settings().get('fallback_encoding').rpartition('(')[2].rpartition(')')[0]
# If there's no active view or the active view is not a file on the
# filesystem (e.g. a search results view), we can infer the folder
# that the user intends Git commands to run against when there's only
# only one.
def is_enabled(self):
if self._active_file_name() or len(self.window.folders()) == 1:
return bool(git_root(self.get_working_dir()))
return False
def get_file_name(self):
return ''
def get_relative_file_name(self):
return ''
# If there is a file in the active view use that file's directory to
# search for the Git root. Otherwise, use the only folder that is
# open.
def get_working_dir(self):
file_name = self._active_file_name()
if file_name:
return os.path.realpath(os.path.dirname(file_name))
else:
try: # handle case with no open folder
return self.window.folders()[0]
except IndexError:
return ''
def get_window(self):
return self.window
# A base for all git commands that work with the file in the active view
class GitTextCommand(GitCommand, sublime_plugin.TextCommand):
def active_view(self):
return self.view
def is_enabled(self):
# First, is this actually a file on the file system?
if self.view.file_name() and len(self.view.file_name()) > 0:
return bool(git_root(self.get_working_dir()))
return False
def get_file_name(self):
return os.path.basename(self.view.file_name())
def get_relative_file_name(self):
working_dir = self.get_working_dir()
file_path = working_dir.replace(git_root(working_dir), '')[1:]
file_name = os.path.join(file_path, self.get_file_name())
return file_name.replace('\\', '/') # windows issues
def get_working_dir(self):
return os.path.realpath(os.path.dirname(self.view.file_name()))
def get_window(self):
# Fun discovery: if you switch tabs while a command is working,
# self.view.window() is None. (Admittedly this is a consequence
# of my deciding to do async command processing... but, hey,
# got to live with that now.)
# I did try tracking the window used at the start of the command
# and using it instead of view.window() later, but that results
# panels on a non-visible window, which is especially useless in
# the case of the quick panel.
# So, this is not necessarily ideal, but it does work.
return self.view.window() or sublime.active_window()
# A few miscellaneous commands
class GitCustomCommand(GitWindowCommand):
may_change_files = True
def run(self):
self.get_window().show_input_panel("Git command", "",
self.on_input, None, None)
def on_input(self, command):
command = str(command) # avoiding unicode
if command.strip() == "":
self.panel("No git command provided")
return
import shlex
command_splitted = ['git'] + shlex.split(command)
print(command_splitted)
self.run_command(command_splitted)
class GitGuiCommand(GitTextCommand):
def run(self, edit):
command = ['git', 'gui']
self.run_command(command)
class GitGitkCommand(GitTextCommand):
def run(self, edit):
command = ['gitk']
self.run_command(command)
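The `_make_text_safeish` helper defined earlier in this file is just decode-with-fallback; a standalone illustration (cp1251 is an arbitrary choice of fallback encoding):

    def make_text_safeish(text, fallback_encoding, method='decode'):
        try:
            return getattr(text, method)('utf-8')
        except (UnicodeEncodeError, UnicodeDecodeError):
            return getattr(text, method)(fallback_encoding)
        except AttributeError:
            return str(text)

    print(make_text_safeish(b'\xd0\xbf\xd1\x80\xd0\xb8\xd0\xb2\xd0\xb5\xd1\x82', 'cp1251'))  # valid UTF-8 decodes first
    print(make_text_safeish(b'\xef\xf0\xe8\xe2\xe5\xf2', 'cp1251'))                          # falls back to cp1251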

View File

@@ -6,25 +6,57 @@
 # /
 """
-requests
-~~~~~~~~
-:copyright: (c) 2012 by Kenneth Reitz.
-:license: ISC, see LICENSE for more details.
+requests HTTP library
+~~~~~~~~~~~~~~~~~~~~~
+Requests is an HTTP library, written in Python, for human beings. Basic GET
+usage:
+   >>> import requests
+   >>> r = requests.get('http://python.org')
+   >>> r.status_code
+   200
+   >>> 'Python is a programming language' in r.content
+   True
+... or POST:
+   >>> payload = dict(key1='value1', key2='value2')
+   >>> r = requests.post("http://httpbin.org/post", data=payload)
+   >>> print r.text
+   {
+     ...
+     "form": {
+       "key2": "value2",
+       "key1": "value1"
+     },
+     ...
+   }
+The other HTTP methods are supported - see `requests.api`. Full documentation
+is at <http://python-requests.org>.
+:copyright: (c) 2013 by Kenneth Reitz.
+:license: Apache 2.0, see LICENSE for more details.
 """
 __title__ = 'requests'
-__version__ = '0.10.2'
+__version__ = '1.2.2'
-__build__ = 0x001002
+__build__ = 0x010202
 __author__ = 'Kenneth Reitz'
-__license__ = 'ISC'
+__license__ = 'Apache 2.0'
-__copyright__ = 'Copyright 2012 Kenneth Reitz'
+__copyright__ = 'Copyright 2013 Kenneth Reitz'
+# Attempt to enable urllib3's SNI support, if possible
+try:
+    from requests.packages.urllib3.contrib import pyopenssl
+    pyopenssl.inject_into_urllib3()
+except ImportError:
+    pass
 from . import utils
-from .models import Request, Response
+from .models import Request, Response, PreparedRequest
 from .api import request, get, head, post, patch, put, delete, options
 from .sessions import session, Session
 from .status_codes import codes
@@ -32,3 +64,14 @@ from .exceptions import (
     RequestException, Timeout, URLRequired,
     TooManyRedirects, HTTPError, ConnectionError
 )
+# Set default logging handler to avoid "No handler found" warnings.
+import logging
+try:  # Python 2.7+
+    from logging import NullHandler
+except ImportError:
+    class NullHandler(logging.Handler):
+        def emit(self, record):
+            pass
+logging.getLogger(__name__).addHandler(NullHandler())
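The NullHandler block at the end of the hunk is the standard way for a library to stay silent unless the host application opts into logging; a minimal sketch:

    import logging

    # Library side: attach a do-nothing handler so importing the library never
    # warns about missing handlers.
    lib_logger = logging.getLogger("mylib")
    lib_logger.addHandler(logging.NullHandler())

    # Application side: only once logging is configured does output appear.
    logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(message)s')
    lib_logger.debug("visible because the application configured logging")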

View File

@@ -0,0 +1,342 @@
# -*- coding: utf-8 -*-
"""
requests.adapters
~~~~~~~~~~~~~~~~~
This module contains the transport adapters that Requests uses to define
and maintain connections.
"""
import socket
from .models import Response
from .packages.urllib3.poolmanager import PoolManager, ProxyManager
from .packages.urllib3.response import HTTPResponse
from .compat import urlparse, basestring, urldefrag, unquote
from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers,
prepend_scheme_if_needed, get_auth_from_url)
from .structures import CaseInsensitiveDict
from .packages.urllib3.exceptions import MaxRetryError
from .packages.urllib3.exceptions import TimeoutError
from .packages.urllib3.exceptions import SSLError as _SSLError
from .packages.urllib3.exceptions import HTTPError as _HTTPError
from .cookies import extract_cookies_to_jar
from .exceptions import ConnectionError, Timeout, SSLError
from .auth import _basic_auth_str
DEFAULT_POOLBLOCK = False
DEFAULT_POOLSIZE = 10
DEFAULT_RETRIES = 0
class BaseAdapter(object):
"""The Base Transport Adapter"""
def __init__(self):
super(BaseAdapter, self).__init__()
def send(self):
raise NotImplementedError
def close(self):
raise NotImplementedError
class HTTPAdapter(BaseAdapter):
"""The built-in HTTP Adapter for urllib3.
Provides a general-case interface for Requests sessions to contact HTTP and
HTTPS urls by implementing the Transport Adapter interface. This class will
usually be created by the :class:`Session <Session>` class under the
covers.
:param pool_connections: The number of urllib3 connection pools to cache.
:param pool_maxsize: The maximum number of connections to save in the pool.
:param max_retries: The maximum number of retries each connection should attempt.
:param pool_block: Whether the connection pool should block for connections.
Usage::
>>> import requests
>>> s = requests.Session()
>>> a = requests.adapters.HTTPAdapter()
>>> s.mount('http://', a)
"""
__attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
'_pool_block']
def __init__(self, pool_connections=DEFAULT_POOLSIZE,
pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
pool_block=DEFAULT_POOLBLOCK):
self.max_retries = max_retries
self.config = {}
super(HTTPAdapter, self).__init__()
self._pool_connections = pool_connections
self._pool_maxsize = pool_maxsize
self._pool_block = pool_block
self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)
def __getstate__(self):
return dict((attr, getattr(self, attr, None)) for attr in
self.__attrs__)
def __setstate__(self, state):
for attr, value in state.items():
setattr(self, attr, value)
self.init_poolmanager(self._pool_connections, self._pool_maxsize,
block=self._pool_block)
def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK):
"""Initializes a urllib3 PoolManager. This method should not be called
from user code, and is only exposed for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param connections: The number of urllib3 connection pools to cache.
:param maxsize: The maximum number of connections to save in the pool.
:param block: Block when no free connections are available.
"""
# save these values for pickling
self._pool_connections = connections
self._pool_maxsize = maxsize
self._pool_block = block
self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
block=block)
def cert_verify(self, conn, url, verify, cert):
"""Verify a SSL certificate. This method should not be called from user
code, and is only exposed for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param conn: The urllib3 connection object associated with the cert.
:param url: The requested URL.
:param verify: Whether we should actually verify the certificate.
:param cert: The SSL certificate to verify.
"""
if url.startswith('https') and verify:
cert_loc = None
# Allow self-specified cert location.
if verify is not True:
cert_loc = verify
if not cert_loc:
cert_loc = DEFAULT_CA_BUNDLE_PATH
if not cert_loc:
raise Exception("Could not find a suitable SSL CA certificate bundle.")
conn.cert_reqs = 'CERT_REQUIRED'
conn.ca_certs = cert_loc
else:
conn.cert_reqs = 'CERT_NONE'
conn.ca_certs = None
if cert:
if not isinstance(cert, basestring):
conn.cert_file = cert[0]
conn.key_file = cert[1]
else:
conn.cert_file = cert
def build_response(self, req, resp):
"""Builds a :class:`Response <requests.Response>` object from a urllib3
response. This should not be called from user code, and is only exposed
for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`
:param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
:param resp: The urllib3 response object.
"""
response = Response()
# Fallback to None if there's no status_code, for whatever reason.
response.status_code = getattr(resp, 'status', None)
# Make headers case-insensitive.
response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
# Set encoding.
response.encoding = get_encoding_from_headers(response.headers)
response.raw = resp
response.reason = response.raw.reason
if isinstance(req.url, bytes):
response.url = req.url.decode('utf-8')
else:
response.url = req.url
# Add new cookies from the server.
extract_cookies_to_jar(response.cookies, req, resp)
# Give the Response some context.
response.request = req
response.connection = self
return response
def get_connection(self, url, proxies=None):
"""Returns a urllib3 connection for the given URL. This should not be
called from user code, and is only exposed for use when subclassing the
:class:`HTTPAdapter <reqeusts.adapters.HTTPAdapter>`.
:param url: The URL to connect to.
:param proxies: (optional) A Requests-style dictionary of proxies used on this request.
"""
proxies = proxies or {}
proxy = proxies.get(urlparse(url).scheme)
if proxy:
proxy = prepend_scheme_if_needed(proxy, urlparse(url).scheme)
conn = ProxyManager(self.poolmanager.connection_from_url(proxy))
else:
conn = self.poolmanager.connection_from_url(url)
return conn
def close(self):
"""Disposes of any internal state.
Currently, this just closes the PoolManager, which closes pooled
connections.
"""
self.poolmanager.clear()
def request_url(self, request, proxies):
"""Obtain the url to use when making the final request.
If the message is being sent through a proxy, the full URL has to be
used. Otherwise, we should only use the path portion of the URL.
This shoudl not be called from user code, and is only exposed for use
when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param proxies: A dictionary of schemes to proxy URLs.
"""
proxies = proxies or {}
proxy = proxies.get(urlparse(request.url).scheme)
if proxy:
url, _ = urldefrag(request.url)
else:
url = request.path_url
return url
def add_headers(self, request, **kwargs):
"""Add any headers needed by the connection. Currently this adds a
Proxy-Authorization header.
This should not be called from user code, and is only exposed for use
when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
:param kwargs: The keyword arguments from the call to send().
"""
proxies = kwargs.get('proxies', {})
if proxies is None:
proxies = {}
proxy = proxies.get(urlparse(request.url).scheme)
username, password = get_auth_from_url(proxy)
if username and password:
# Proxy auth usernames and passwords will be urlencoded, we need
# to decode them.
username = unquote(username)
password = unquote(password)
request.headers['Proxy-Authorization'] = _basic_auth_str(username,
password)
def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
"""Sends PreparedRequest object. Returns Response object.
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param stream: (optional) Whether to stream the request content.
:param timeout: (optional) The timeout on the request.
:param verify: (optional) Whether to verify SSL certificates.
:param vert: (optional) Any user-provided SSL certificate to be trusted.
:param proxies: (optional) The proxies dictionary to apply to the request.
"""
conn = self.get_connection(request.url, proxies)
self.cert_verify(conn, request.url, verify, cert)
url = self.request_url(request, proxies)
self.add_headers(request, proxies=proxies)
chunked = not (request.body is None or 'Content-Length' in request.headers)
try:
if not chunked:
resp = conn.urlopen(
method=request.method,
url=url,
body=request.body,
headers=request.headers,
redirect=False,
assert_same_host=False,
preload_content=False,
decode_content=False,
retries=self.max_retries,
timeout=timeout
)
# Send the request.
else:
if hasattr(conn, 'proxy_pool'):
conn = conn.proxy_pool
low_conn = conn._get_conn(timeout=timeout)
low_conn.putrequest(request.method, url, skip_accept_encoding=True)
for header, value in request.headers.items():
low_conn.putheader(header, value)
low_conn.endheaders()
for i in request.body:
low_conn.send(hex(len(i))[2:].encode('utf-8'))
low_conn.send(b'\r\n')
low_conn.send(i)
low_conn.send(b'\r\n')
low_conn.send(b'0\r\n\r\n')
r = low_conn.getresponse()
resp = HTTPResponse.from_httplib(r,
pool=conn,
connection=low_conn,
preload_content=False,
decode_content=False
)
except socket.error as sockerr:
raise ConnectionError(sockerr)
except MaxRetryError as e:
raise ConnectionError(e)
except (_SSLError, _HTTPError) as e:
if isinstance(e, _SSLError):
raise SSLError(e)
elif isinstance(e, TimeoutError):
raise Timeout(e)
else:
raise
r = self.build_response(request, resp)
if not stream:
r.content
return r
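A minimal usage sketch for the adapter logic above, assuming the public requests 1.x surface (the HTTPAdapter constructor and its max_retries argument are not shown in this hunk); send() is normally driven by a Session rather than called directly:
import requests
from requests.adapters import HTTPAdapter
session = requests.Session()
# max_retries feeds the self.max_retries consulted by urlopen() in send() above.
session.mount('http://', HTTPAdapter(max_retries=3))
response = session.get('http://httpbin.org/get', timeout=5)  # placeholder URL
print(response.status_code)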

View File

@@ -7,12 +7,13 @@ requests.api
This module implements the Requests API. This module implements the Requests API.
:copyright: (c) 2012 by Kenneth Reitz. :copyright: (c) 2012 by Kenneth Reitz.
:license: ISC, see LICENSE for more details. :license: Apache2, see LICENSE for more details.
""" """
from . import sessions from . import sessions
def request(method, url, **kwargs): def request(method, url, **kwargs):
"""Constructs and sends a :class:`Request <Request>`. """Constructs and sends a :class:`Request <Request>`.
Returns :class:`Response <Response>` object. Returns :class:`Response <Response>` object.
@@ -20,7 +21,7 @@ def request(method, url, **kwargs):
:param method: method for the new :class:`Request` object. :param method: method for the new :class:`Request` object.
:param url: URL for the new :class:`Request` object. :param url: URL for the new :class:`Request` object.
:param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
:param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
:param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
:param files: (optional) Dictionary of 'name': file-like-objects (or {'name': ('filename', fileobj)}) for multipart encoding upload. :param files: (optional) Dictionary of 'name': file-like-objects (or {'name': ('filename', fileobj)}) for multipart encoding upload.
@@ -28,23 +29,26 @@ def request(method, url, **kwargs):
:param timeout: (optional) Float describing the timeout of the request. :param timeout: (optional) Float describing the timeout of the request.
:param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. :param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
:param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
:param return_response: (optional) If False, an un-sent Request object will be returned.
:param session: (optional) A :class:`Session` object to be used for the request.
:param config: (optional) A configuration dictionary.
:param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided. :param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided.
:param prefetch: (optional) if ``True``, the response content will be immediately downloaded. :param stream: (optional) if ``False``, the response content will be immediately downloaded.
:param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
Usage::
>>> import requests
>>> req = requests.request('GET', 'http://httpbin.org/get')
<Response [200]>
""" """
s = kwargs.pop('session') if 'session' in kwargs else sessions.session() session = sessions.Session()
return s.request(method=method, url=url, **kwargs) return session.request(method=method, url=url, **kwargs)
def get(url, **kwargs): def get(url, **kwargs):
"""Sends a GET request. Returns :class:`Response` object. """Sends a GET request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object. :param url: URL for the new :class:`Request` object.
:param **kwargs: Optional arguments that ``request`` takes. :param \*\*kwargs: Optional arguments that ``request`` takes.
""" """
kwargs.setdefault('allow_redirects', True) kwargs.setdefault('allow_redirects', True)
@@ -55,7 +59,7 @@ def options(url, **kwargs):
"""Sends a OPTIONS request. Returns :class:`Response` object. """Sends a OPTIONS request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object. :param url: URL for the new :class:`Request` object.
:param **kwargs: Optional arguments that ``request`` takes. :param \*\*kwargs: Optional arguments that ``request`` takes.
""" """
kwargs.setdefault('allow_redirects', True) kwargs.setdefault('allow_redirects', True)
@@ -66,10 +70,10 @@ def head(url, **kwargs):
"""Sends a HEAD request. Returns :class:`Response` object. """Sends a HEAD request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object. :param url: URL for the new :class:`Request` object.
:param **kwargs: Optional arguments that ``request`` takes. :param \*\*kwargs: Optional arguments that ``request`` takes.
""" """
kwargs.setdefault('allow_redirects', True) kwargs.setdefault('allow_redirects', False)
return request('head', url, **kwargs) return request('head', url, **kwargs)
@@ -77,8 +81,8 @@ def post(url, data=None, **kwargs):
"""Sends a POST request. Returns :class:`Response` object. """Sends a POST request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object. :param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param **kwargs: Optional arguments that ``request`` takes. :param \*\*kwargs: Optional arguments that ``request`` takes.
""" """
return request('post', url, data=data, **kwargs) return request('post', url, data=data, **kwargs)
@@ -88,8 +92,8 @@ def put(url, data=None, **kwargs):
"""Sends a PUT request. Returns :class:`Response` object. """Sends a PUT request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object. :param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param **kwargs: Optional arguments that ``request`` takes. :param \*\*kwargs: Optional arguments that ``request`` takes.
""" """
return request('put', url, data=data, **kwargs) return request('put', url, data=data, **kwargs)
@@ -99,8 +103,8 @@ def patch(url, data=None, **kwargs):
"""Sends a PATCH request. Returns :class:`Response` object. """Sends a PATCH request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object. :param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param **kwargs: Optional arguments that ``request`` takes. :param \*\*kwargs: Optional arguments that ``request`` takes.
""" """
return request('patch', url, data=data, **kwargs) return request('patch', url, data=data, **kwargs)
@@ -110,7 +114,7 @@ def delete(url, **kwargs):
"""Sends a DELETE request. Returns :class:`Response` object. """Sends a DELETE request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object. :param url: URL for the new :class:`Request` object.
:param **kwargs: Optional arguments that ``request`` takes. :param \*\*kwargs: Optional arguments that ``request`` takes.
""" """
return request('delete', url, **kwargs) return request('delete', url, **kwargs)
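For illustration, the module-level helpers above now spin up a fresh Session per call, so nothing persists between invocations (httpbin.org is a placeholder endpoint):
import requests
# The two calls below are equivalent; both build a throwaway Session internally.
r1 = requests.request('GET', 'http://httpbin.org/get', params={'q': 'test'})
r2 = requests.get('http://httpbin.org/get', params={'q': 'test'})
print(r1.status_code, r2.status_code)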

View File

@@ -1,85 +0,0 @@
# -*- coding: utf-8 -*-
"""
requests.async
~~~~~~~~~~~~~~
This module contains an asynchronous replica of ``requests.api``, powered
by gevent. All API methods return a ``Request`` instance (as opposed to
``Response``). A list of requests can be sent with ``map()``.
"""
try:
import gevent
from gevent import monkey as curious_george
from gevent.pool import Pool
except ImportError:
raise RuntimeError('Gevent is required for requests.async.')
# Monkey-patch.
curious_george.patch_all(thread=False)
from . import api
__all__ = (
'map',
'get', 'options', 'head', 'post', 'put', 'patch', 'delete', 'request'
)
def patched(f):
"""Patches a given API function to not send."""
def wrapped(*args, **kwargs):
kwargs['return_response'] = False
kwargs['prefetch'] = True
config = kwargs.get('config', {})
config.update(safe_mode=True)
kwargs['config'] = config
return f(*args, **kwargs)
return wrapped
def send(r, pool=None, prefetch=False):
"""Sends the request object using the specified pool. If a pool isn't
specified this method blocks. Pools are useful because you can specify size
and can hence limit concurrency."""
if pool != None:
return pool.spawn(r.send, prefetch=prefetch)
return gevent.spawn(r.send, prefetch=prefetch)
# Patched requests.api functions.
get = patched(api.get)
options = patched(api.options)
head = patched(api.head)
post = patched(api.post)
put = patched(api.put)
patch = patched(api.patch)
delete = patched(api.delete)
request = patched(api.request)
def map(requests, prefetch=True, size=None):
"""Concurrently converts a list of Requests to Responses.
:param requests: a collection of Request objects.
:param prefetch: If False, the content will not be downloaded immediately.
:param size: Specifies the number of requests to make at a time. If None, no throttling occurs.
"""
requests = list(requests)
pool = Pool(size) if size else None
jobs = [send(r, pool, prefetch=prefetch) for r in requests]
gevent.joinall(jobs)
return [r.response for r in requests]
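For reference, the usage pattern the removed module describes looked roughly like the sketch below; this import path no longer exists after this commit, and it required gevent to be installed:
# Pre-removal API only; does not work against the updated package.
from requests import async
urls = ['http://httpbin.org/get', 'http://httpbin.org/headers']  # placeholders
unsent = [async.get(u) for u in urls]   # patched helpers build but do not send
responses = async.map(unsent, size=2)   # at most 2 concurrent requests
print([r.status_code for r in responses])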

View File

@@ -7,21 +7,28 @@ requests.auth
This module contains the authentication handlers for Requests. This module contains the authentication handlers for Requests.
""" """
from __future__ import unicode_literals import os
import re
import time import time
import hashlib import hashlib
import logging
from base64 import b64encode from base64 import b64encode
from .compat import urlparse, str, bytes
from .utils import randombytes, parse_dict_header
from .compat import urlparse, str
from .utils import parse_dict_header
log = logging.getLogger(__name__)
CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded'
CONTENT_TYPE_MULTI_PART = 'multipart/form-data'
def _basic_auth_str(username, password): def _basic_auth_str(username, password):
"""Returns a Basic Auth string.""" """Returns a Basic Auth string."""
return 'Basic ' + b64encode(("%s:%s" % (username, password)).encode('utf-8')).strip().decode('utf-8') return 'Basic ' + b64encode(('%s:%s' % (username, password)).encode('latin1')).strip().decode('latin1')
class AuthBase(object): class AuthBase(object):
@@ -43,7 +50,7 @@ class HTTPBasicAuth(AuthBase):
class HTTPProxyAuth(HTTPBasicAuth): class HTTPProxyAuth(HTTPBasicAuth):
"""Attaches HTTP Proxy Authenetication to a given Request object.""" """Attaches HTTP Proxy Authentication to a given Request object."""
def __call__(self, r): def __call__(self, r):
r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password) r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password)
return r return r
@@ -54,97 +61,117 @@ class HTTPDigestAuth(AuthBase):
def __init__(self, username, password): def __init__(self, username, password):
self.username = username self.username = username
self.password = password self.password = password
self.last_nonce = ''
self.nonce_count = 0
self.chal = {}
def handle_401(self, r): def build_digest_header(self, method, url):
"""Takes the given response and tries digest-auth, if needed."""
s_auth = r.headers.get('www-authenticate', '') realm = self.chal['realm']
nonce = self.chal['nonce']
qop = self.chal.get('qop')
algorithm = self.chal.get('algorithm')
opaque = self.chal.get('opaque')
if 'digest' in s_auth.lower(): if algorithm is None:
_algorithm = 'MD5'
last_nonce = '' else:
nonce_count = 0 _algorithm = algorithm.upper()
chal = parse_dict_header(s_auth.replace('Digest ', ''))
realm = chal['realm']
nonce = chal['nonce']
qop = chal.get('qop')
algorithm = chal.get('algorithm', 'MD5')
opaque = chal.get('opaque', None)
algorithm = algorithm.upper()
# lambdas assume digest modules are imported at the top level # lambdas assume digest modules are imported at the top level
if algorithm == 'MD5': if _algorithm == 'MD5':
def h(x): def md5_utf8(x):
if isinstance(x, str): if isinstance(x, str):
x = x.encode('utf-8') x = x.encode('utf-8')
return hashlib.md5(x).hexdigest() return hashlib.md5(x).hexdigest()
H = h hash_utf8 = md5_utf8
elif algorithm == 'SHA': elif _algorithm == 'SHA':
def h(x): def sha_utf8(x):
if isinstance(x, str): if isinstance(x, str):
x = x.encode('utf-8') x = x.encode('utf-8')
return hashlib.sha1(x).hexdigest() return hashlib.sha1(x).hexdigest()
H = h hash_utf8 = sha_utf8
# XXX MD5-sess # XXX MD5-sess
KD = lambda s, d: H("%s:%s" % (s, d)) KD = lambda s, d: hash_utf8("%s:%s" % (s, d))
if H is None: if hash_utf8 is None:
return None return None
# XXX not implemented yet # XXX not implemented yet
entdig = None entdig = None
p_parsed = urlparse(r.request.url) p_parsed = urlparse(url)
path = p_parsed.path path = p_parsed.path
if p_parsed.query: if p_parsed.query:
path += '?' + p_parsed.query path += '?' + p_parsed.query
A1 = '%s:%s:%s' % (self.username, realm, self.password) A1 = '%s:%s:%s' % (self.username, realm, self.password)
A2 = '%s:%s' % (r.request.method, path) A2 = '%s:%s' % (method, path)
if qop == 'auth': if qop == 'auth':
if nonce == last_nonce: if nonce == self.last_nonce:
nonce_count += 1 self.nonce_count += 1
else: else:
nonce_count = 1 self.nonce_count = 1
last_nonce = nonce
ncvalue = '%08x' % nonce_count ncvalue = '%08x' % self.nonce_count
s = str(nonce_count).encode('utf-8') s = str(self.nonce_count).encode('utf-8')
s += nonce.encode('utf-8') s += nonce.encode('utf-8')
s += time.ctime().encode('utf-8') s += time.ctime().encode('utf-8')
s += randombytes(8) s += os.urandom(8)
cnonce = (hashlib.sha1(s).hexdigest()[:16]) cnonce = (hashlib.sha1(s).hexdigest()[:16])
noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, H(A2)) noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, hash_utf8(A2))
respdig = KD(H(A1), noncebit) respdig = KD(hash_utf8(A1), noncebit)
elif qop is None: elif qop is None:
respdig = KD(H(A1), "%s:%s" % (nonce, H(A2))) respdig = KD(hash_utf8(A1), "%s:%s" % (nonce, hash_utf8(A2)))
else: else:
# XXX handle auth-int. # XXX handle auth-int.
return None return None
self.last_nonce = nonce
# XXX should the partial digests be encoded too? # XXX should the partial digests be encoded too?
base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \ base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
'response="%s"' % (self.username, realm, nonce, path, respdig) 'response="%s"' % (self.username, realm, nonce, path, respdig)
if opaque: if opaque:
base += ', opaque="%s"' % opaque base += ', opaque="%s"' % opaque
if algorithm:
base += ', algorithm="%s"' % algorithm
if entdig: if entdig:
base += ', digest="%s"' % entdig base += ', digest="%s"' % entdig
base += ', algorithm="%s"' % algorithm
if qop: if qop:
base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce) base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce)
r.request.headers['Authorization'] = 'Digest %s' % (base) return 'Digest %s' % (base)
r.request.send(anyway=True)
_r = r.request.response def handle_401(self, r, **kwargs):
"""Takes the given response and tries digest-auth, if needed."""
num_401_calls = getattr(self, 'num_401_calls', 1)
s_auth = r.headers.get('www-authenticate', '')
if 'digest' in s_auth.lower() and num_401_calls < 2:
setattr(self, 'num_401_calls', num_401_calls + 1)
pat = re.compile(r'digest ', flags=re.IGNORECASE)
self.chal = parse_dict_header(pat.sub('', s_auth, count=1))
# Consume content and release the original connection
# to allow our new request to reuse the same one.
r.content
r.raw.release_conn()
r.request.headers['Authorization'] = self.build_digest_header(r.request.method, r.request.url)
_r = r.connection.send(r.request, **kwargs)
_r.history.append(r) _r.history.append(r)
return _r return _r
setattr(self, 'num_401_calls', 1)
return r return r
def __call__(self, r): def __call__(self, r):
# If we have a saved nonce, skip the 401
if self.last_nonce:
r.headers['Authorization'] = self.build_digest_header(r.method, r.url)
r.register_hook('response', self.handle_401) r.register_hook('response', self.handle_401)
return r return r
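A short sketch of the digest handler above in use; the first 401 is answered through handle_401(), and the cached nonce lets subsequent requests attach the Authorization header up front (credentials and URL are placeholders):
import requests
from requests.auth import HTTPDigestAuth
auth = HTTPDigestAuth('user', 'passwd')
r = requests.get('http://httpbin.org/digest-auth/auth/user/passwd', auth=auth)
print(r.status_code)  # 200 once the challenge/response round trip succeeds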

View File

@@ -0,0 +1,24 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
certs.py
~~~~~~~~
This module returns the preferred default CA certificate bundle.
If you are packaging Requests, e.g., for a Linux distribution or a managed
environment, you can change the definition of where() to return a separately
packaged CA bundle.
"""
import os.path
def where():
"""Return the preferred certificate bundle."""
# vendored bundle inside Requests
return os.path.join(os.path.dirname(__file__), 'cacert.pem')
if __name__ == '__main__':
print(where())
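A small sketch of how where() is typically consumed; verify= accepts a CA bundle path, which is how repackagers who override where() change what gets trusted (URL is a placeholder):
import requests
from requests import certs
print(certs.where())  # absolute path to the vendored cacert.pem
r = requests.get('https://httpbin.org/get', verify=certs.where())
print(r.ok)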

View File

@@ -4,6 +4,7 @@
pythoncompat pythoncompat
""" """
from .packages import charade as chardet
import sys import sys
@@ -72,34 +73,41 @@ is_osx = ('darwin' in str(sys.platform).lower())
is_hpux = ('hpux' in str(sys.platform).lower()) # Complete guess. is_hpux = ('hpux' in str(sys.platform).lower()) # Complete guess.
is_solaris = ('solar==' in str(sys.platform).lower()) # Complete guess. is_solaris = ('solar==' in str(sys.platform).lower()) # Complete guess.
try:
import simplejson as json
except ImportError:
import json
# --------- # ---------
# Specifics # Specifics
# --------- # ---------
if is_py2: if is_py2:
from urllib import quote, unquote, urlencode from urllib import quote, unquote, quote_plus, unquote_plus, urlencode
from urlparse import urlparse, urlunparse, urljoin, urlsplit from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag
from urllib2 import parse_http_list from urllib2 import parse_http_list
import cookielib import cookielib
from .packages.oreos.monkeys import SimpleCookie from Cookie import Morsel
from StringIO import StringIO from StringIO import StringIO
from .packages.urllib3.packages.ordered_dict import OrderedDict
builtin_str = str
bytes = str bytes = str
str = unicode str = unicode
basestring = basestring basestring = basestring
numeric_types = (int, long, float)
elif is_py3: elif is_py3:
from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
from urllib.request import parse_http_list from urllib.request import parse_http_list
from http import cookiejar as cookielib from http import cookiejar as cookielib
from http.cookies import SimpleCookie from http.cookies import Morsel
from io import StringIO from io import StringIO
from collections import OrderedDict
builtin_str = str
str = str str = str
bytes = bytes bytes = bytes
basestring = (str, bytes) basestring = (str, bytes)
numeric_types = (int, float)
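Illustrative only: other modules in the package (and plugin authors) import these shims rather than branching on the interpreter version themselves, e.g.:
from requests.compat import urlparse, quote
parts = urlparse('http://example.com/a path?x=1')  # works on Python 2 and 3
print(parts.netloc)       # 'example.com'
print(quote('/a path'))   # '/a%20path'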

View File

@@ -0,0 +1,389 @@
# -*- coding: utf-8 -*-
"""
Compatibility code to be able to use `cookielib.CookieJar` with requests.
requests.utils imports from here, so be careful with imports.
"""
import collections
from .compat import cookielib, urlparse, Morsel
try:
import threading
# grr, pyflakes: this fixes "redefinition of unused 'threading'"
threading
except ImportError:
import dummy_threading as threading
class MockRequest(object):
"""Wraps a `requests.Request` to mimic a `urllib2.Request`.
The code in `cookielib.CookieJar` expects this interface in order to correctly
manage cookie policies, i.e., determine whether a cookie can be set, given the
domains of the request and the cookie.
The original request object is read-only. The client is responsible for collecting
the new headers via `get_new_headers()` and interpreting them appropriately. You
probably want `get_cookie_header`, defined below.
"""
def __init__(self, request):
self._r = request
self._new_headers = {}
self.type = urlparse(self._r.url).scheme
def get_type(self):
return self.type
def get_host(self):
return urlparse(self._r.url).netloc
def get_origin_req_host(self):
return self.get_host()
def get_full_url(self):
return self._r.url
def is_unverifiable(self):
return True
def has_header(self, name):
return name in self._r.headers or name in self._new_headers
def get_header(self, name, default=None):
return self._r.headers.get(name, self._new_headers.get(name, default))
def add_header(self, key, val):
"""cookielib has no legitimate use for this method; add it back if you find one."""
raise NotImplementedError("Cookie headers should be added with add_unredirected_header()")
def add_unredirected_header(self, name, value):
self._new_headers[name] = value
def get_new_headers(self):
return self._new_headers
@property
def unverifiable(self):
return self.is_unverifiable()
@property
def origin_req_host(self):
return self.get_origin_req_host()
class MockResponse(object):
"""Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.
...what? Basically, expose the parsed HTTP headers from the server response
the way `cookielib` expects to see them.
"""
def __init__(self, headers):
"""Make a MockResponse for `cookielib` to read.
:param headers: a httplib.HTTPMessage or analogous carrying the headers
"""
self._headers = headers
def info(self):
return self._headers
def getheaders(self, name):
self._headers.getheaders(name)
def extract_cookies_to_jar(jar, request, response):
"""Extract the cookies from the response into a CookieJar.
:param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar)
:param request: our own requests.Request object
:param response: urllib3.HTTPResponse object
"""
# the _original_response field is the wrapped httplib.HTTPResponse object,
req = MockRequest(request)
# pull out the HTTPMessage with the headers and put it in the mock:
res = MockResponse(response._original_response.msg)
jar.extract_cookies(res, req)
def get_cookie_header(jar, request):
"""Produce an appropriate Cookie header string to be sent with `request`, or None."""
r = MockRequest(request)
jar.add_cookie_header(r)
return r.get_new_headers().get('Cookie')
def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
"""Unsets a cookie by name, by default over all domains and paths.
Wraps CookieJar.clear(), is O(n).
"""
clearables = []
for cookie in cookiejar:
if cookie.name == name:
if domain is None or domain == cookie.domain:
if path is None or path == cookie.path:
clearables.append((cookie.domain, cookie.path, cookie.name))
for domain, path, name in clearables:
cookiejar.clear(domain, path, name)
class CookieConflictError(RuntimeError):
"""There are two cookies that meet the criteria specified in the cookie jar.
Use .get and .set and include domain and path args in order to be more specific."""
class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
"""Compatibility class; is a cookielib.CookieJar, but exposes a dict interface.
This is the CookieJar we create by default for requests and sessions that
don't specify one, since some clients may expect response.cookies and
session.cookies to support dict operations.
Don't use the dict interface internally; it's just for compatibility with
external client code. All `requests` code should work out of the box
with externally provided instances of CookieJar, e.g., LWPCookieJar and
FileCookieJar.
Caution: dictionary operations that are normally O(1) may be O(n).
Unlike a regular CookieJar, this class is pickleable.
"""
def get(self, name, default=None, domain=None, path=None):
"""Dict-like get() that also supports optional domain and path args in
order to resolve naming collisions from using one cookie jar over
multiple domains. Caution: operation is O(n), not O(1)."""
try:
return self._find_no_duplicates(name, domain, path)
except KeyError:
return default
def set(self, name, value, **kwargs):
"""Dict-like set() that also supports optional domain and path args in
order to resolve naming collisions from using one cookie jar over
multiple domains."""
# support client code that unsets cookies by assignment of a None value:
if value is None:
remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path'))
return
if isinstance(value, Morsel):
c = morsel_to_cookie(value)
else:
c = create_cookie(name, value, **kwargs)
self.set_cookie(c)
return c
def keys(self):
"""Dict-like keys() that returns a list of names of cookies from the jar.
See values() and items()."""
keys = []
for cookie in iter(self):
keys.append(cookie.name)
return keys
def values(self):
"""Dict-like values() that returns a list of values of cookies from the jar.
See keys() and items()."""
values = []
for cookie in iter(self):
values.append(cookie.value)
return values
def items(self):
"""Dict-like items() that returns a list of name-value tuples from the jar.
See keys() and values(). Allows client-code to call "dict(RequestsCookieJar)
and get a vanilla python dict of key value pairs."""
items = []
for cookie in iter(self):
items.append((cookie.name, cookie.value))
return items
def list_domains(self):
"""Utility method to list all the domains in the jar."""
domains = []
for cookie in iter(self):
if cookie.domain not in domains:
domains.append(cookie.domain)
return domains
def list_paths(self):
"""Utility method to list all the paths in the jar."""
paths = []
for cookie in iter(self):
if cookie.path not in paths:
paths.append(cookie.path)
return paths
def multiple_domains(self):
"""Returns True if there are multiple domains in the jar.
Returns False otherwise."""
domains = []
for cookie in iter(self):
if cookie.domain is not None and cookie.domain in domains:
return True
domains.append(cookie.domain)
return False # there is only one domain in jar
def get_dict(self, domain=None, path=None):
"""Takes as an argument an optional domain and path and returns a plain old
Python dict of name-value pairs of cookies that meet the requirements."""
dictionary = {}
for cookie in iter(self):
if (domain is None or cookie.domain == domain) and (path is None
or cookie.path == path):
dictionary[cookie.name] = cookie.value
return dictionary
def __getitem__(self, name):
"""Dict-like __getitem__() for compatibility with client code. Throws exception
if there are more than one cookie with name. In that case, use the more
explicit get() method instead. Caution: operation is O(n), not O(1)."""
return self._find_no_duplicates(name)
def __setitem__(self, name, value):
"""Dict-like __setitem__ for compatibility with client code. Throws exception
if there is already a cookie of that name in the jar. In that case, use the more
explicit set() method instead."""
self.set(name, value)
def __delitem__(self, name):
"""Deletes a cookie given a name. Wraps cookielib.CookieJar's remove_cookie_by_name()."""
remove_cookie_by_name(self, name)
def update(self, other):
"""Updates this jar with cookies from another CookieJar or dict-like"""
if isinstance(other, cookielib.CookieJar):
for cookie in other:
self.set_cookie(cookie)
else:
super(RequestsCookieJar, self).update(other)
def _find(self, name, domain=None, path=None):
"""Requests uses this method internally to get cookie values. Takes as args name
and optional domain and path. Returns a cookie.value. If there are conflicting cookies,
_find arbitrarily chooses one. See _find_no_duplicates if you want an exception thrown
if there are conflicting cookies."""
for cookie in iter(self):
if cookie.name == name:
if domain is None or cookie.domain == domain:
if path is None or cookie.path == path:
return cookie.value
raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
def _find_no_duplicates(self, name, domain=None, path=None):
"""__get_item__ and get call _find_no_duplicates -- never used in Requests internally.
Takes as args name and optional domain and path. Returns a cookie.value.
Throws KeyError if cookie is not found and CookieConflictError if there are
multiple cookies that match name and optionally domain and path."""
toReturn = None
for cookie in iter(self):
if cookie.name == name:
if domain is None or cookie.domain == domain:
if path is None or cookie.path == path:
if toReturn is not None: # if there are multiple cookies that meet passed in criteria
raise CookieConflictError('There are multiple cookies with name, %r' % (name))
toReturn = cookie.value # we will eventually return this as long as no cookie conflict
if toReturn:
return toReturn
raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
def __getstate__(self):
"""Unlike a normal CookieJar, this class is pickleable."""
state = self.__dict__.copy()
# remove the unpickleable RLock object
state.pop('_cookies_lock')
return state
def __setstate__(self, state):
"""Unlike a normal CookieJar, this class is pickleable."""
self.__dict__.update(state)
if '_cookies_lock' not in self.__dict__:
self._cookies_lock = threading.RLock()
def copy(self):
"""Return a copy of this RequestsCookieJar."""
new_cj = RequestsCookieJar()
new_cj.update(self)
return new_cj
def create_cookie(name, value, **kwargs):
"""Make a cookie from underspecified parameters.
By default, the pair of `name` and `value` will be set for the domain ''
and sent on every request (this is sometimes called a "supercookie").
"""
result = dict(
version=0,
name=name,
value=value,
port=None,
domain='',
path='/',
secure=False,
expires=None,
discard=True,
comment=None,
comment_url=None,
rest={'HttpOnly': None},
rfc2109=False,)
badargs = set(kwargs) - set(result)
if badargs:
err = 'create_cookie() got unexpected keyword arguments: %s'
raise TypeError(err % list(badargs))
result.update(kwargs)
result['port_specified'] = bool(result['port'])
result['domain_specified'] = bool(result['domain'])
result['domain_initial_dot'] = result['domain'].startswith('.')
result['path_specified'] = bool(result['path'])
return cookielib.Cookie(**result)
def morsel_to_cookie(morsel):
"""Convert a Morsel object into a Cookie containing the one k/v pair."""
c = create_cookie(
name=morsel.key,
value=morsel.value,
version=morsel['version'] or 0,
port=None,
port_specified=False,
domain=morsel['domain'],
domain_specified=bool(morsel['domain']),
domain_initial_dot=morsel['domain'].startswith('.'),
path=morsel['path'],
path_specified=bool(morsel['path']),
secure=bool(morsel['secure']),
expires=morsel['max-age'] or morsel['expires'],
discard=False,
comment=morsel['comment'],
comment_url=bool(morsel['comment']),
rest={'HttpOnly': morsel['httponly']},
rfc2109=False,)
return c
def cookiejar_from_dict(cookie_dict, cookiejar=None):
"""Returns a CookieJar from a key/value dictionary.
:param cookie_dict: Dict of key/values to insert into CookieJar.
"""
if cookiejar is None:
cookiejar = RequestsCookieJar()
if cookie_dict is not None:
for name in cookie_dict:
cookiejar.set_cookie(create_cookie(name, cookie_dict[name]))
return cookiejar
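A usage sketch for the jar above; the dict interface exists for client convenience, while set()/get() take domain and path to disambiguate colliding names (all values are placeholders):
from requests.cookies import RequestsCookieJar, cookiejar_from_dict
jar = RequestsCookieJar()
jar.set('session_id', 'abc123', domain='example.com', path='/')
print(jar['session_id'])                    # O(n) lookup; raises CookieConflictError on duplicates
print(jar.get_dict(domain='example.com'))   # {'session_id': 'abc123'}
# Plain dicts are converted on the way in.
jar2 = cookiejar_from_dict({'token': 'xyz'})
print(jar2['token'])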

View File

@@ -1,40 +0,0 @@
# -*- coding: utf-8 -*-
"""
requests.defaults
~~~~~~~~~~~~~~~~~
This module provides the Requests configuration defaults.
Configurations:
:base_headers: Default HTTP headers.
:verbose: Stream to write request logging to.
:max_redirects: Maximum number of redirects allowed within a request.
:keep_alive: Reuse HTTP Connections?
:max_retries: The number of times a request should be retried in the event of a connection failure.
:danger_mode: If true, Requests will raise errors immediately.
:safe_mode: If true, Requests will catch all errors.
:pool_maxsize: The maximum size of an HTTP connection pool.
:pool_connections: The number of active HTTP connection pools to use.
"""
from . import __version__
defaults = dict()
defaults['base_headers'] = {
'User-Agent': 'python-requests/%s' % __version__,
'Accept-Encoding': ', '.join(('identity', 'deflate', 'compress', 'gzip')),
'Accept': '*/*'
}
defaults['verbose'] = None
defaults['max_redirects'] = 30
defaults['pool_connections'] = 10
defaults['pool_maxsize'] = 10
defaults['max_retries'] = 0
defaults['danger_mode'] = False
defaults['safe_mode'] = False
defaults['keep_alive'] = True

View File

@@ -8,24 +8,48 @@ This module contains the set of Requests' exceptions.
""" """
class RequestException(Exception):
class RequestException(RuntimeError):
"""There was an ambiguous exception that occurred while handling your """There was an ambiguous exception that occurred while handling your
request.""" request."""
class HTTPError(RequestException): class HTTPError(RequestException):
"""An HTTP error occurred.""" """An HTTP error occurred."""
def __init__(self, *args, **kwargs):
""" Initializes HTTPError with optional `response` object. """
self.response = kwargs.pop('response', None)
super(HTTPError, self).__init__(*args, **kwargs)
class ConnectionError(RequestException): class ConnectionError(RequestException):
"""A Connection error occurred.""" """A Connection error occurred."""
class SSLError(ConnectionError): class SSLError(ConnectionError):
"""An SSL error occurred.""" """An SSL error occurred."""
class Timeout(RequestException): class Timeout(RequestException):
"""The request timed out.""" """The request timed out."""
class URLRequired(RequestException): class URLRequired(RequestException):
"""A valid URL is required to make a request.""" """A valid URL is required to make a request."""
class TooManyRedirects(RequestException): class TooManyRedirects(RequestException):
"""Too many redirects.""" """Too many redirects."""
class MissingSchema(RequestException, ValueError):
"""The URL schema (e.g. http or https) is missing."""
class InvalidSchema(RequestException, ValueError):
"""See defaults.py for valid schemas."""
class InvalidURL(RequestException, ValueError):
""" The URL provided was somehow invalid. """

View File

@@ -8,27 +8,25 @@ This module provides the capabilities for the Requests hooks system.
Available hooks: Available hooks:
``args``:
A dictionary of the arguments being sent to Request().
``pre_request``:
The Request object, directly before being sent.
``post_request``:
The Request object, directly after being sent.
``response``: ``response``:
The response generated from a Request. The response generated from a Request.
""" """
import traceback
HOOKS = ['response']
HOOKS = ('args', 'pre_request', 'post_request', 'response') def default_hooks():
hooks = {}
for event in HOOKS:
hooks[event] = []
return hooks
# TODO: response is the only one
def dispatch_hook(key, hooks, hook_data): def dispatch_hook(key, hooks, hook_data, **kwargs):
"""Dispatches a hook dictionary on a given piece of data.""" """Dispatches a hook dictionary on a given piece of data."""
hooks = hooks or dict() hooks = hooks or dict()
@@ -40,9 +38,8 @@ def dispatch_hook(key, hooks, hook_data):
hooks = [hooks] hooks = [hooks]
for hook in hooks: for hook in hooks:
try: _hook_data = hook(hook_data, **kwargs)
hook_data = hook(hook_data) or hook_data if _hook_data is not None:
except Exception: hook_data = _hook_data
traceback.print_exc()
return hook_data return hook_data
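A minimal sketch of registering a response hook, which is what dispatch_hook() above invokes; returning None keeps the original response, returning a value replaces it:
import requests
def log_url(response, **kwargs):
    # Hooks receive the Response plus the keyword arguments used to send it.
    print('fetched', response.url)
    return response
r = requests.get('http://httpbin.org/get', hooks={'response': log_url})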

View File

@@ -8,97 +8,219 @@ This module provides a Session object to manage and persist settings across
requests (cookies, auth, proxies). requests (cookies, auth, proxies).
""" """
import os
from collections import Mapping
from datetime import datetime
from .defaults import defaults from .compat import cookielib, OrderedDict, urljoin, urlparse
from .models import Request from .cookies import cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar
from .hooks import dispatch_hook from .models import Request, PreparedRequest
from .utils import header_expand from .hooks import default_hooks, dispatch_hook
from .packages.urllib3.poolmanager import PoolManager from .utils import to_key_val_list, default_headers
from .exceptions import TooManyRedirects, InvalidSchema
from .structures import CaseInsensitiveDict
from .adapters import HTTPAdapter
from .utils import requote_uri, get_environ_proxies, get_netrc_auth
from .status_codes import codes
REDIRECT_STATI = (
codes.moved, # 301
codes.found, # 302
codes.other, # 303
codes.temporary_moved, # 307
)
DEFAULT_REDIRECT_LIMIT = 30
def merge_kwargs(local_kwarg, default_kwarg): def merge_setting(request_setting, session_setting, dict_class=OrderedDict):
"""Merges kwarg dictionaries. """
Determines appropriate setting for a given request, taking into account the
If a local key in the dictionary is set to None, it will be removed. explicit setting on that request, and the setting in the session. If a
setting is a dictionary, they will be merged together using `dict_class`
""" """
if default_kwarg is None: if session_setting is None:
return local_kwarg return request_setting
if isinstance(local_kwarg, str): if request_setting is None:
return local_kwarg return session_setting
if local_kwarg is None: # Bypass if not a dictionary (e.g. verify)
return default_kwarg if not (
isinstance(session_setting, Mapping) and
isinstance(request_setting, Mapping)
):
return request_setting
# Bypass if not a dictionary (e.g. timeout) merged_setting = dict_class(to_key_val_list(session_setting))
if not hasattr(default_kwarg, 'items'): merged_setting.update(to_key_val_list(request_setting))
return local_kwarg
# Update new values.
kwargs = default_kwarg.copy()
kwargs.update(local_kwarg)
# Remove keys that are set to None. # Remove keys that are set to None.
for (k,v) in list(local_kwarg.items()): for (k, v) in request_setting.items():
if v is None: if v is None:
del kwargs[k] del merged_setting[k]
return kwargs return merged_setting
class Session(object): class SessionRedirectMixin(object):
"""A Requests session.""" def resolve_redirects(self, resp, req, stream=False, timeout=None,
verify=True, cert=None, proxies=None):
"""Receives a Response. Returns a generator of Responses."""
i = 0
prepared_request = PreparedRequest()
prepared_request.body = req.body
prepared_request.headers = req.headers.copy()
prepared_request.hooks = req.hooks
prepared_request.method = req.method
prepared_request.url = req.url
# ((resp.status_code is codes.see_other))
while (('location' in resp.headers and resp.status_code in REDIRECT_STATI)):
resp.content # Consume socket so it can be released
if i >= self.max_redirects:
raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects)
# Release the connection back into the pool.
resp.close()
url = resp.headers['location']
method = prepared_request.method
# Handle redirection without scheme (see: RFC 1808 Section 4)
if url.startswith('//'):
parsed_rurl = urlparse(resp.url)
url = '%s:%s' % (parsed_rurl.scheme, url)
# Facilitate non-RFC2616-compliant 'location' headers
# (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
# Compliant with RFC3986, we percent encode the url.
if not urlparse(url).netloc:
url = urljoin(resp.url, requote_uri(url))
else:
url = requote_uri(url)
prepared_request.url = url
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.4
if (resp.status_code == codes.see_other and
prepared_request.method != 'HEAD'):
method = 'GET'
# Do what the browsers do, despite standards...
if (resp.status_code in (codes.moved, codes.found) and
prepared_request.method not in ('GET', 'HEAD')):
method = 'GET'
prepared_request.method = method
# https://github.com/kennethreitz/requests/issues/1084
if resp.status_code not in (codes.temporary, codes.resume):
if 'Content-Length' in prepared_request.headers:
del prepared_request.headers['Content-Length']
prepared_request.body = None
headers = prepared_request.headers
try:
del headers['Cookie']
except KeyError:
pass
prepared_request.prepare_cookies(self.cookies)
resp = self.send(
prepared_request,
stream=stream,
timeout=timeout,
verify=verify,
cert=cert,
proxies=proxies,
allow_redirects=False,
)
extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)
i += 1
yield resp
class Session(SessionRedirectMixin):
"""A Requests session.
Provides cookie persistence, connection-pooling, and configuration.
Basic Usage::
>>> import requests
>>> s = requests.Session()
>>> s.get('http://httpbin.org/get')
200
"""
__attrs__ = [ __attrs__ = [
'headers', 'cookies', 'auth', 'timeout', 'proxies', 'hooks', 'headers', 'cookies', 'auth', 'timeout', 'proxies', 'hooks',
'params', 'config'] 'params', 'verify', 'cert', 'prefetch', 'adapters', 'stream',
'trust_env', 'max_redirects']
def __init__(self):
def __init__(self, #: A case-insensitive dictionary of headers to be sent on each
headers=None, #: :class:`Request <Request>` sent from this
cookies=None, #: :class:`Session <Session>`.
auth=None, self.headers = default_headers()
timeout=None,
proxies=None,
hooks=None,
params=None,
config=None,
verify=True):
self.headers = headers or {} #: Default Authentication tuple or object to attach to
self.cookies = cookies or {} #: :class:`Request <Request>`.
self.auth = auth self.auth = None
self.timeout = timeout
self.proxies = proxies or {}
self.hooks = hooks or {}
self.params = params or {}
self.config = config or {}
self.verify = verify
for (k, v) in list(defaults.items()): #: Dictionary mapping protocol to the URL of the proxy (e.g.
self.config.setdefault(k, v) #: {'http': 'foo.bar:3128'}) to be used on each
#: :class:`Request <Request>`.
self.proxies = {}
self.poolmanager = PoolManager( #: Event-handling hooks.
num_pools=self.config.get('pool_connections'), self.hooks = default_hooks()
maxsize=self.config.get('pool_maxsize')
) #: Dictionary of querystring data to attach to each
#: :class:`Request <Request>`. The dictionary values may be lists for
#: representing multivalued query parameters.
self.params = {}
#: Stream response content default.
self.stream = False
#: SSL Verification default.
self.verify = True
#: SSL certificate default.
self.cert = None
#: Maximum number of redirects allowed. If the request exceeds this
#: limit, a :class:`TooManyRedirects` exception is raised.
self.max_redirects = DEFAULT_REDIRECT_LIMIT
#: Should we trust the environment?
self.trust_env = True
# Set up a CookieJar to be used by default # Set up a CookieJar to be used by default
self.cookies = {} self.cookies = cookiejar_from_dict({})
# Add passed cookies in. # Default connection adapters.
if cookies is not None: self.adapters = OrderedDict()
self.cookies.update(cookies) self.mount('https://', HTTPAdapter())
self.mount('http://', HTTPAdapter())
def __repr__(self):
return '<requests-client at 0x%x>' % (id(self))
def __enter__(self): def __enter__(self):
return self return self
def __exit__(self, *args): def __exit__(self, *args):
pass self.close()
def request(self, method, url, def request(self, method, url,
params=None, params=None,
@@ -108,181 +230,272 @@ class Session(object):
files=None, files=None,
auth=None, auth=None,
timeout=None, timeout=None,
allow_redirects=False, allow_redirects=True,
proxies=None, proxies=None,
hooks=None, hooks=None,
return_response=True, stream=None,
config=None, verify=None,
prefetch=False, cert=None):
verify=None): """Constructs a :class:`Request <Request>`, prepares it and sends it.
"""Constructs and sends a :class:`Request <Request>`.
Returns :class:`Response <Response>` object. Returns :class:`Response <Response>` object.
:param method: method for the new :class:`Request` object. :param method: method for the new :class:`Request` object.
:param url: URL for the new :class:`Request` object. :param url: URL for the new :class:`Request` object.
:param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. :param params: (optional) Dictionary or bytes to be sent in the query
:param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`. string for the :class:`Request`.
:param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. :param data: (optional) Dictionary or bytes to send in the body of the
:param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. :class:`Request`.
:param files: (optional) Dictionary of 'filename': file-like-objects for multipart encoding upload. :param headers: (optional) Dictionary of HTTP Headers to send with the
:param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. :class:`Request`.
:param timeout: (optional) Float describing the timeout of the request. :param cookies: (optional) Dict or CookieJar object to send with the
:param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. :class:`Request`.
:param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. :param files: (optional) Dictionary of 'filename': file-like-objects
:param return_response: (optional) If False, an un-sent Request object will be returned. for multipart encoding upload.
:param config: (optional) A configuration dictionary. :param auth: (optional) Auth tuple or callable to enable
:param prefetch: (optional) if ``True``, the response content will be immediately downloaded. Basic/Digest/Custom HTTP Auth.
:param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided. :param timeout: (optional) Float describing the timeout of the
request.
:param allow_redirects: (optional) Boolean. Set to True by default.
:param proxies: (optional) Dictionary mapping protocol to the URL of
the proxy.
:param stream: (optional) whether to immediately download the response
content. Defaults to ``False``.
:param verify: (optional) if ``True``, the SSL cert will be verified.
A CA_BUNDLE path can also be provided.
:param cert: (optional) if String, path to ssl client cert file (.pem).
If Tuple, ('cert', 'key') pair.
""" """
method = str(method).upper() cookies = cookies or {}
proxies = proxies or {}
# Default empty dicts for dict params. # Bootstrap CookieJar.
cookies = {} if cookies is None else cookies if not isinstance(cookies, cookielib.CookieJar):
data = {} if data is None else data cookies = cookiejar_from_dict(cookies)
files = {} if files is None else files
headers = {} if headers is None else headers
params = {} if params is None else params
hooks = {} if hooks is None else hooks
if verify is None: # Merge with session cookies
verify = self.verify merged_cookies = RequestsCookieJar()
merged_cookies.update(self.cookies)
merged_cookies.update(cookies)
cookies = merged_cookies
# use session's hooks as defaults # Gather clues from the surrounding environment.
for key, cb in list(self.hooks.items()): if self.trust_env:
hooks.setdefault(key, cb) # Set environment's proxies.
env_proxies = get_environ_proxies(url) or {}
for (k, v) in env_proxies.items():
proxies.setdefault(k, v)
# Expand header values. # Set environment's basic authentication.
if headers: if not auth:
for k, v in list(headers.items()) or {}: auth = get_netrc_auth(url)
headers[k] = header_expand(v)
args = dict( # Look for configuration.
method=method, if not verify and verify is not False:
url=url, verify = os.environ.get('REQUESTS_CA_BUNDLE')
data=data,
params=params,
headers=headers,
cookies=cookies,
files=files,
auth=auth,
hooks=hooks,
timeout=timeout,
allow_redirects=allow_redirects,
proxies=proxies,
config=config,
verify=verify,
_poolmanager=self.poolmanager
)
# Merge local kwargs with session kwargs. # Curl compatibility.
for attr in self.__attrs__: if not verify and verify is not False:
session_val = getattr(self, attr, None) verify = os.environ.get('CURL_CA_BUNDLE')
local_val = args.get(attr)
args[attr] = merge_kwargs(local_val, session_val) # Merge all the kwargs.
params = merge_setting(params, self.params)
headers = merge_setting(headers, self.headers, dict_class=CaseInsensitiveDict)
auth = merge_setting(auth, self.auth)
proxies = merge_setting(proxies, self.proxies)
hooks = merge_setting(hooks, self.hooks)
stream = merge_setting(stream, self.stream)
verify = merge_setting(verify, self.verify)
cert = merge_setting(cert, self.cert)
# Arguments manipulation hook. # Create the Request.
args = dispatch_hook('args', args['hooks'], args) req = Request()
req.method = method.upper()
req.url = url
req.headers = headers
req.files = files
req.data = data
req.params = params
req.auth = auth
req.cookies = cookies
req.hooks = hooks
# Create the (empty) response. # Prepare the Request.
r = Request(**args) prep = req.prepare()
# Give the response some context. # Send the request.
r.session = self send_kwargs = {
'stream': stream,
# Don't send if asked nicely. 'timeout': timeout,
if not return_response: 'verify': verify,
return r 'cert': cert,
'proxies': proxies,
# Send the HTTP Request. 'allow_redirects': allow_redirects,
r.send(prefetch=prefetch) }
resp = self.send(prep, **send_kwargs)
# Send any cookies back up the to the session.
self.cookies.update(r.response.cookies)
# Return the response.
return r.response
return resp
def get(self, url, **kwargs): def get(self, url, **kwargs):
"""Sends a GET request. Returns :class:`Response` object. """Sends a GET request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object. :param url: URL for the new :class:`Request` object.
:param **kwargs: Optional arguments that ``request`` takes. :param \*\*kwargs: Optional arguments that ``request`` takes.
""" """
kwargs.setdefault('allow_redirects', True) kwargs.setdefault('allow_redirects', True)
return self.request('get', url, **kwargs) return self.request('GET', url, **kwargs)
def options(self, url, **kwargs): def options(self, url, **kwargs):
"""Sends a OPTIONS request. Returns :class:`Response` object. """Sends a OPTIONS request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object. :param url: URL for the new :class:`Request` object.
:param **kwargs: Optional arguments that ``request`` takes. :param \*\*kwargs: Optional arguments that ``request`` takes.
""" """
kwargs.setdefault('allow_redirects', True) kwargs.setdefault('allow_redirects', True)
return self.request('options', url, **kwargs) return self.request('OPTIONS', url, **kwargs)
def head(self, url, **kwargs): def head(self, url, **kwargs):
"""Sends a HEAD request. Returns :class:`Response` object. """Sends a HEAD request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object. :param url: URL for the new :class:`Request` object.
:param **kwargs: Optional arguments that ``request`` takes. :param \*\*kwargs: Optional arguments that ``request`` takes.
""" """
kwargs.setdefault('allow_redirects', True) kwargs.setdefault('allow_redirects', False)
return self.request('head', url, **kwargs) return self.request('HEAD', url, **kwargs)
def post(self, url, data=None, **kwargs): def post(self, url, data=None, **kwargs):
"""Sends a POST request. Returns :class:`Response` object. """Sends a POST request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object. :param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param **kwargs: Optional arguments that ``request`` takes. :param \*\*kwargs: Optional arguments that ``request`` takes.
""" """
return self.request('post', url, data=data, **kwargs) return self.request('POST', url, data=data, **kwargs)
def put(self, url, data=None, **kwargs): def put(self, url, data=None, **kwargs):
"""Sends a PUT request. Returns :class:`Response` object. """Sends a PUT request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object. :param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param **kwargs: Optional arguments that ``request`` takes. :param \*\*kwargs: Optional arguments that ``request`` takes.
""" """
return self.request('put', url, data=data, **kwargs) return self.request('PUT', url, data=data, **kwargs)
def patch(self, url, data=None, **kwargs): def patch(self, url, data=None, **kwargs):
"""Sends a PATCH request. Returns :class:`Response` object. """Sends a PATCH request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object. :param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param **kwargs: Optional arguments that ``request`` takes. :param \*\*kwargs: Optional arguments that ``request`` takes.
""" """
return self.request('patch', url, data=data, **kwargs) return self.request('PATCH', url, data=data, **kwargs)
def delete(self, url, **kwargs): def delete(self, url, **kwargs):
"""Sends a DELETE request. Returns :class:`Response` object. """Sends a DELETE request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object. :param url: URL for the new :class:`Request` object.
:param **kwargs: Optional arguments that ``request`` takes. :param \*\*kwargs: Optional arguments that ``request`` takes.
""" """
return self.request('delete', url, **kwargs) return self.request('DELETE', url, **kwargs)
def send(self, request, **kwargs):
"""Send a given PreparedRequest."""
# Set defaults that the hooks can utilize to ensure they always have
# the correct parameters to reproduce the previous request.
kwargs.setdefault('stream', self.stream)
kwargs.setdefault('verify', self.verify)
kwargs.setdefault('cert', self.cert)
kwargs.setdefault('proxies', self.proxies)
# It's possible that users might accidentally send a Request object.
# Guard against that specific failure case.
if getattr(request, 'prepare', None):
raise ValueError('You can only send PreparedRequests.')
# Set up variables needed for resolve_redirects and dispatching of
# hooks
allow_redirects = kwargs.pop('allow_redirects', True)
stream = kwargs.get('stream')
timeout = kwargs.get('timeout')
verify = kwargs.get('verify')
cert = kwargs.get('cert')
proxies = kwargs.get('proxies')
hooks = request.hooks
# Get the appropriate adapter to use
adapter = self.get_adapter(url=request.url)
# Start time (approximately) of the request
start = datetime.utcnow()
# Send the request
r = adapter.send(request, **kwargs)
# Total elapsed time of the request (approximately)
r.elapsed = datetime.utcnow() - start
# Response manipulation hooks
r = dispatch_hook('response', hooks, r, **kwargs)
# Persist cookies
extract_cookies_to_jar(self.cookies, request, r.raw)
# Redirect resolving generator.
gen = self.resolve_redirects(r, request, stream=stream,
timeout=timeout, verify=verify, cert=cert,
proxies=proxies)
# Resolve redirects if allowed.
history = [resp for resp in gen] if allow_redirects else []
# Shuffle things around if there's history.
if history:
# Insert the first (original) request at the start
history.insert(0, r)
# Get the last request made
r = history.pop()
r.history = tuple(history)
return r
def get_adapter(self, url):
"""Returns the appropriate connnection adapter for the given URL."""
for (prefix, adapter) in self.adapters.items():
if url.startswith(prefix):
return adapter
# Nothing matches :-/
raise InvalidSchema("No connection adapters were found for '%s'" % url)
def close(self):
"""Closes all adapters and as such the session"""
for _, v in self.adapters.items():
v.close()
def mount(self, prefix, adapter):
"""Registers a connection adapter to a prefix.
Adapters are sorted in descending order by key length."""
self.adapters[prefix] = adapter
keys_to_move = [k for k in self.adapters if len(k) < len(prefix)]
for key in keys_to_move:
self.adapters[key] = self.adapters.pop(key)
def __getstate__(self):
return dict((attr, getattr(self, attr, None)) for attr in self.__attrs__)
def __setstate__(self, state):
for attr, value in state.items():
setattr(self, attr, value)
def session(**kwargs): def session():
"""Returns a :class:`Session` for context-management.""" """Returns a :class:`Session` for context-management."""
return Session(**kwargs) return Session()
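A usage sketch tying the pieces above together: per-request settings merge over session defaults via merge_setting(), and adapters registered with mount() serve matching URL prefixes (header names, URLs, and the max_retries argument are illustrative):
import requests
from requests.adapters import HTTPAdapter
with requests.Session() as s:
    s.headers.update({'X-Client': 'demo'})   # session-wide default header
    s.params = {'team': 'core'}              # merged into every request's params
    s.mount('https://', HTTPAdapter(max_retries=2))
    # 'page' merges with the session params; X-Client rides along automatically.
    r = s.get('https://httpbin.org/get', params={'page': 1})
    print(r.status_code, r.elapsed)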

View File

@@ -10,7 +10,7 @@ _codes = {
102: ('processing',), 102: ('processing',),
103: ('checkpoint',), 103: ('checkpoint',),
122: ('uri_too_long', 'request_uri_too_long'), 122: ('uri_too_long', 'request_uri_too_long'),
200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/'), 200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', ''),
201: ('created',), 201: ('created',),
202: ('accepted',), 202: ('accepted',),
203: ('non_authoritative_info', 'non_authoritative_information'), 203: ('non_authoritative_info', 'non_authoritative_information'),
@@ -62,10 +62,11 @@ _codes = {
444: ('no_response', 'none'), 444: ('no_response', 'none'),
449: ('retry_with', 'retry'), 449: ('retry_with', 'retry'),
450: ('blocked_by_windows_parental_controls', 'parental_controls'), 450: ('blocked_by_windows_parental_controls', 'parental_controls'),
451: ('unavailable_for_legal_reasons', 'legal_reasons'),
499: ('client_closed_request',), 499: ('client_closed_request',),
# Server Error. # Server Error.
500: ('internal_server_error', 'server_error', '/o\\'), 500: ('internal_server_error', 'server_error', '/o\\', ''),
501: ('not_implemented',), 501: ('not_implemented',),
502: ('bad_gateway',), 502: ('bad_gateway',),
503: ('service_unavailable', 'unavailable'), 503: ('service_unavailable', 'unavailable'),
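
Every alias listed above becomes an attribute on the `codes` LookupDict, so the 451 entry added in this bump is reachable by name just like the existing ones. A small sketch, assuming the bundled requests is on sys.path:

    from requests.status_codes import codes

    assert codes.ok == codes.all_good == 200
    assert codes.unavailable_for_legal_reasons == codes.legal_reasons == 451
    assert codes.not_implemented == 501
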

View File

@@ -8,44 +8,106 @@ Data structures that power Requests.
""" """
import os
import collections
from itertools import islice
class CaseInsensitiveDict(dict):
"""Case-insensitive Dictionary class IteratorProxy(object):
"""docstring for IteratorProxy"""
def __init__(self, i):
self.i = i
# self.i = chain.from_iterable(i)
def __iter__(self):
return self.i
def __len__(self):
if hasattr(self.i, '__len__'):
return len(self.i)
if hasattr(self.i, 'len'):
return self.i.len
if hasattr(self.i, 'fileno'):
return os.fstat(self.i.fileno()).st_size
def read(self, n):
return "".join(islice(self.i, None, n))
class CaseInsensitiveDict(collections.MutableMapping):
"""
A case-insensitive ``dict``-like object.
Implements all methods and operations of
``collections.MutableMapping`` as well as dict's ``copy``. Also
provides ``lower_items``.
All keys are expected to be strings. The structure remembers the
case of the last key to be set, and ``iter(instance)``,
``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
will contain case-sensitive keys. However, querying and contains
testing is case insensitive:
cid = CaseInsensitiveDict()
cid['Accept'] = 'application/json'
cid['aCCEPT'] == 'application/json' # True
list(cid) == ['Accept'] # True
For example, ``headers['content-encoding']`` will return the For example, ``headers['content-encoding']`` will return the
value of a ``'Content-Encoding'`` response header.""" value of a ``'Content-Encoding'`` response header, regardless
of how the header name was originally stored.
@property If the constructor, ``.update``, or equality comparison
def lower_keys(self): operations are given keys that have equal ``.lower()``s, the
if not hasattr(self, '_lower_keys') or not self._lower_keys: behavior is undefined.
self._lower_keys = dict((k.lower(), k) for k in list(self.keys()))
return self._lower_keys
def _clear_lower_keys(self): """
if hasattr(self, '_lower_keys'): def __init__(self, data=None, **kwargs):
self._lower_keys.clear() self._store = dict()
if data is None:
data = {}
self.update(data, **kwargs)
def __setitem__(self, key, value): def __setitem__(self, key, value):
dict.__setitem__(self, key, value) # Use the lowercased key for lookups, but store the actual
self._clear_lower_keys() # key alongside the value.
self._store[key.lower()] = (key, value)
def __delitem__(self, key):
dict.__delitem__(self, key)
self._lower_keys.clear()
def __contains__(self, key):
return key.lower() in self.lower_keys
def __getitem__(self, key): def __getitem__(self, key):
# We allow fall-through here, so values default to None return self._store[key.lower()][1]
if key in self:
return dict.__getitem__(self, self.lower_keys[key.lower()])
def get(self, key, default=None): def __delitem__(self, key):
if key in self: del self._store[key.lower()]
return self[key]
def __iter__(self):
return (casedkey for casedkey, mappedvalue in self._store.values())
def __len__(self):
return len(self._store)
def lower_items(self):
"""Like iteritems(), but with all lowercase keys."""
return (
(lowerkey, keyval[1])
for (lowerkey, keyval)
in self._store.items()
)
def __eq__(self, other):
if isinstance(other, collections.Mapping):
other = CaseInsensitiveDict(other)
else: else:
return default return NotImplemented
# Compare insensitively
return dict(self.lower_items()) == dict(other.lower_items())
# Copy is required
def copy(self):
return CaseInsensitiveDict(self._store.values())
def __repr__(self):
return '%s(%r)' % (self.__class__.__name__, dict(self.items()))
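
The rewritten CaseInsensitiveDict stores one (original_key, value) pair per lowercased key, which gives case-insensitive lookup while iteration reports the casing of the last write. A short sketch of the behaviour the docstring describes:

    from requests.structures import CaseInsensitiveDict

    headers = CaseInsensitiveDict({'Accept': 'application/json'})
    assert headers['aCCEPT'] == 'application/json'     # lookup ignores case
    assert list(headers) == ['Accept']                 # iteration keeps stored casing
    headers['ACCEPT'] = 'text/html'                    # replaces both key and value
    assert list(headers) == ['ACCEPT']
    assert dict(headers.lower_items()) == {'accept': 'text/html'}
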
class LookupDict(dict): class LookupDict(dict):
"""Dictionary lookup object.""" """Dictionary lookup object."""

View File

@@ -11,34 +11,135 @@ that are also useful for external consumption.
import cgi import cgi
import codecs import codecs
import collections
import os import os
import random import platform
import re import re
import zlib import sys
from netrc import netrc, NetrcParseError
from . import __version__
from . import certs
from .compat import parse_http_list as _parse_list_header from .compat import parse_http_list as _parse_list_header
from .compat import quote, unquote, cookielib, SimpleCookie, is_py2 from .compat import quote, urlparse, bytes, str, OrderedDict, urlunparse
from .compat import basestring from .cookies import RequestsCookieJar, cookiejar_from_dict
from .structures import CaseInsensitiveDict
_hush_pyflakes = (RequestsCookieJar,)
NETRC_FILES = ('.netrc', '_netrc')
DEFAULT_CA_BUNDLE_PATH = certs.where()
def dict_from_string(s): def dict_to_sequence(d):
"""Returns a MultiDict with Cookies.""" """Returns an internal sequence dictionary update."""
cookies = dict() if hasattr(d, 'items'):
d = d.items()
c = SimpleCookie() return d
c.load(s)
for k,v in list(c.items()):
cookies.update({k: v.value})
return cookies def super_len(o):
if hasattr(o, '__len__'):
return len(o)
if hasattr(o, 'len'):
return o.len
if hasattr(o, 'fileno'):
return os.fstat(o.fileno()).st_size
def get_netrc_auth(url):
"""Returns the Requests tuple auth for a given url from netrc."""
try:
locations = (os.path.expanduser('~/{0}'.format(f)) for f in NETRC_FILES)
netrc_path = None
for loc in locations:
if os.path.exists(loc) and not netrc_path:
netrc_path = loc
# Abort early if there isn't one.
if netrc_path is None:
return netrc_path
ri = urlparse(url)
# Strip port numbers from netloc
host = ri.netloc.split(':')[0]
try:
_netrc = netrc(netrc_path).authenticators(host)
if _netrc:
# Return with login / password
login_i = (0 if _netrc[0] else 1)
return (_netrc[login_i], _netrc[2])
except (NetrcParseError, IOError):
# If there was a parsing error or a permissions issue reading the file,
# we'll just skip netrc auth
pass
# AppEngine hackiness.
except (ImportError, AttributeError):
pass
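
get_netrc_auth() finds the first of ~/.netrc or ~/_netrc, matches the URL's host (port stripped) against its machine entries, and returns a (login, password) tuple, silently giving up on parse or permission errors. A sketch with a hypothetical entry and host:

    # Given a ~/.netrc containing (values hypothetical):
    #   machine api.github.com
    #     login octocat
    #     password s3cret
    from requests.utils import get_netrc_auth

    print(get_netrc_auth('https://api.github.com/user'))   # ('octocat', 's3cret'), or None if absent
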
def guess_filename(obj): def guess_filename(obj):
"""Tries to guess the filename of the given object.""" """Tries to guess the filename of the given object."""
name = getattr(obj, 'name', None) name = getattr(obj, 'name', None)
if name and name[0] != '<' and name[-1] != '>': if name and name[0] != '<' and name[-1] != '>':
return name return os.path.basename(name)
def from_key_val_list(value):
"""Take an object and test to see if it can be represented as a
dictionary. If it can be, return an
OrderedDict, e.g.,
::
>>> from_key_val_list([('key', 'val')])
OrderedDict([('key', 'val')])
>>> from_key_val_list('string')
ValueError: need more than 1 value to unpack
>>> from_key_val_list({'key': 'val'})
OrderedDict([('key', 'val')])
"""
if value is None:
return None
if isinstance(value, (str, bytes, bool, int)):
raise ValueError('cannot encode objects that are not 2-tuples')
return OrderedDict(value)
def to_key_val_list(value):
"""Take an object and test to see if it can be represented as a
dictionary. If it can be, return a list of tuples, e.g.,
::
>>> to_key_val_list([('key', 'val')])
[('key', 'val')]
>>> to_key_val_list({'key': 'val'})
[('key', 'val')]
>>> to_key_val_list('string')
ValueError: cannot encode objects that are not 2-tuples.
"""
if value is None:
return None
if isinstance(value, (str, bytes, bool, int)):
raise ValueError('cannot encode objects that are not 2-tuples')
if isinstance(value, collections.Mapping):
value = value.items()
return list(value)
# From mitsuhiko/werkzeug (used with permission). # From mitsuhiko/werkzeug (used with permission).
def parse_list_header(value): def parse_list_header(value):
@@ -130,62 +231,6 @@ def unquote_header_value(value, is_filename=False):
return value return value
def header_expand(headers):
"""Returns an HTTP Header value string from a dictionary.
Example expansion::
{'text/x-dvi': {'q': '.8', 'mxb': '100000', 'mxt': '5.0'}, 'text/x-c': {}}
# Accept: text/x-dvi; q=.8; mxb=100000; mxt=5.0, text/x-c
(('text/x-dvi', {'q': '.8', 'mxb': '100000', 'mxt': '5.0'}), ('text/x-c', {}))
# Accept: text/x-dvi; q=.8; mxb=100000; mxt=5.0, text/x-c
"""
collector = []
if isinstance(headers, dict):
headers = list(headers.items())
elif isinstance(headers, basestring):
return headers
for i, (value, params) in enumerate(headers):
_params = []
for (p_k, p_v) in list(params.items()):
_params.append('%s=%s' % (p_k, p_v))
collector.append(value)
collector.append('; ')
if len(params):
collector.append('; '.join(_params))
if not len(headers) == i+1:
collector.append(', ')
# Remove trailing separators.
if collector[-1] in (', ', '; '):
del collector[-1]
return ''.join(collector)
def randombytes(n):
"""Return n random bytes."""
if is_py2:
L = [chr(random.randrange(0, 256)) for i in range(n)]
else:
L = [chr(random.randrange(0, 256)).encode('utf-8') for i in range(n)]
return b"".join(L)
def dict_from_cookiejar(cj): def dict_from_cookiejar(cj):
"""Returns a key/value dictionary from a CookieJar. """Returns a key/value dictionary from a CookieJar.
@@ -194,33 +239,12 @@ def dict_from_cookiejar(cj):
cookie_dict = {} cookie_dict = {}
for _, cookies in list(cj._cookies.items()): for cookie in cj:
for _, cookies in list(cookies.items()):
for cookie in list(cookies.values()):
# print cookie
cookie_dict[cookie.name] = cookie.value cookie_dict[cookie.name] = cookie.value
return cookie_dict return cookie_dict
def cookiejar_from_dict(cookie_dict):
"""Returns a CookieJar from a key/value dictionary.
:param cookie_dict: Dict of key/values to insert into CookieJar.
"""
# return cookiejar if one was passed in
if isinstance(cookie_dict, cookielib.CookieJar):
return cookie_dict
# create cookiejar
cj = cookielib.CookieJar()
cj = add_dict_to_cookiejar(cj, cookie_dict)
return cj
def add_dict_to_cookiejar(cj, cookie_dict): def add_dict_to_cookiejar(cj, cookie_dict):
"""Returns a CookieJar from a key/value dictionary. """Returns a CookieJar from a key/value dictionary.
@@ -228,31 +252,8 @@ def add_dict_to_cookiejar(cj, cookie_dict):
:param cookie_dict: Dict of key/values to insert into CookieJar. :param cookie_dict: Dict of key/values to insert into CookieJar.
""" """
for k, v in list(cookie_dict.items()): cj2 = cookiejar_from_dict(cookie_dict)
cj.update(cj2)
cookie = cookielib.Cookie(
version=0,
name=k,
value=v,
port=None,
port_specified=False,
domain='',
domain_specified=False,
domain_initial_dot=False,
path='/',
path_specified=True,
secure=False,
expires=None,
discard=True,
comment=None,
comment_url=None,
rest={'HttpOnly': None},
rfc2109=False
)
# add cookie to cookiejar
cj.set_cookie(cookie)
return cj return cj
@@ -287,23 +288,6 @@ def get_encoding_from_headers(headers):
return 'ISO-8859-1' return 'ISO-8859-1'
def unicode_from_html(content):
"""Attempts to decode an HTML string into unicode.
If unsuccessful, the original content is returned.
"""
encodings = get_encodings_from_content(content)
for encoding in encodings:
try:
return str(content, encoding)
except (UnicodeError, TypeError):
pass
return content
def stream_decode_response_unicode(iterator, r): def stream_decode_response_unicode(iterator, r):
"""Stream decodes a iterator.""" """Stream decodes a iterator."""
@@ -322,6 +306,14 @@ def stream_decode_response_unicode(iterator, r):
yield rv yield rv
def iter_slices(string, slice_length):
"""Iterate over slices of a string."""
pos = 0
while pos < len(string):
yield string[pos:pos + slice_length]
pos += slice_length
def get_unicode_from_response(r): def get_unicode_from_response(r):
"""Returns the requested content back in unicode. """Returns the requested content back in unicode.
@@ -355,54 +347,204 @@ def get_unicode_from_response(r):
return r.content return r.content
def decode_gzip(content): # The unreserved URI characters (RFC 3986)
"""Return gzip-decoded string. UNRESERVED_SET = frozenset(
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
+ "0123456789-._~")
:param content: bytestring to gzip-decode.
def unquote_unreserved(uri):
"""Un-escape any percent-escape sequences in a URI that are unreserved
characters. This leaves all reserved, illegal and non-ASCII bytes encoded.
""" """
parts = uri.split('%')
return zlib.decompress(content, 16 + zlib.MAX_WBITS) for i in range(1, len(parts)):
h = parts[i][0:2]
if len(h) == 2 and h.isalnum():
def stream_decompress(iterator, mode='gzip'): c = chr(int(h, 16))
""" if c in UNRESERVED_SET:
Stream decodes an iterator over compressed data parts[i] = c + parts[i][2:]
:param iterator: An iterator over compressed data
:param mode: 'gzip' or 'deflate'
:return: An iterator over decompressed data
"""
if mode not in ['gzip', 'deflate']:
raise ValueError('stream_decompress mode must be gzip or deflate')
zlib_mode = 16 + zlib.MAX_WBITS if mode == 'gzip' else -zlib.MAX_WBITS
dec = zlib.decompressobj(zlib_mode)
try:
for chunk in iterator:
rv = dec.decompress(chunk)
if rv:
yield rv
except zlib.error:
# If there was an error decompressing, just return the raw chunk
yield chunk
# Continue to return the rest of the raw data
for chunk in iterator:
yield chunk
else: else:
# Make sure everything has been returned from the decompression object parts[i] = '%' + parts[i]
buf = dec.decompress('') else:
rv = buf + dec.flush() parts[i] = '%' + parts[i]
if rv: return ''.join(parts)
yield rv
def requote_path(path): def requote_uri(uri):
"""Re-quote the given URL path component. """Re-quote the given URI.
This function passes the given path through an unquote/quote cycle to This function passes the given URI through an unquote/quote cycle to
ensure that it is fully and consistently quoted. ensure that it is fully and consistently quoted.
""" """
parts = path.split(b"/") # Unquote only the unreserved characters
parts = (quote(unquote(part), safe=b"") for part in parts) # Then quote only illegal characters (do not quote reserved, unreserved,
return b"/".join(parts) # or '%')
return quote(unquote_unreserved(uri), safe="!#$%&'()*+,/:;=?@[]~")
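
unquote_unreserved() decodes only the percent-escapes that map to unreserved characters, and requote_uri() then quotes whatever is still illegal, so an already-encoded URI survives another pass unchanged. A sketch:

    from requests.utils import requote_uri, unquote_unreserved

    assert unquote_unreserved('http://example.com/%7Euser/%2Fetc') == 'http://example.com/~user/%2Fetc'
    assert requote_uri('http://example.com/a b?q=1') == 'http://example.com/a%20b?q=1'
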
def get_environ_proxies(url):
"""Return a dict of environment proxies."""
proxy_keys = [
'all',
'http',
'https',
'ftp',
'socks'
]
get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper())
# First check whether no_proxy is defined. If it is, check that the URL
# we're getting isn't in the no_proxy list.
no_proxy = get_proxy('no_proxy')
if no_proxy:
# We need to check whether we match here. We need to see if we match
# the end of the netloc, both with and without the port.
no_proxy = no_proxy.split(',')
netloc = urlparse(url).netloc
for host in no_proxy:
if netloc.endswith(host) or netloc.split(':')[0].endswith(host):
# The URL does match something in no_proxy, so we don't want
# to apply the proxies on this URL.
return {}
# If we get here, we either didn't have no_proxy set or we're not going
# anywhere that no_proxy applies to.
proxies = [(key, get_proxy(key + '_proxy')) for key in proxy_keys]
return dict([(key, val) for (key, val) in proxies if val])
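
get_environ_proxies() first suffix-matches the URL's netloc against no_proxy/NO_PROXY, then collects any *_proxy variables (either case). A sketch with illustrative values; real results depend on the caller's environment:

    import os
    from requests.utils import get_environ_proxies

    os.environ['HTTP_PROXY'] = 'http://proxy.local:3128'
    os.environ['NO_PROXY'] = 'internal.example.com'

    print(get_environ_proxies('http://internal.example.com/status'))  # {}  (bypassed)
    print(get_environ_proxies('http://example.org/'))                 # {'http': 'http://proxy.local:3128'}
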
def default_user_agent():
"""Return a string representing the default user agent."""
_implementation = platform.python_implementation()
if _implementation == 'CPython':
_implementation_version = platform.python_version()
elif _implementation == 'PyPy':
_implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major,
sys.pypy_version_info.minor,
sys.pypy_version_info.micro)
if sys.pypy_version_info.releaselevel != 'final':
_implementation_version = ''.join([_implementation_version, sys.pypy_version_info.releaselevel])
elif _implementation == 'Jython':
_implementation_version = platform.python_version() # Complete Guess
elif _implementation == 'IronPython':
_implementation_version = platform.python_version() # Complete Guess
else:
_implementation_version = 'Unknown'
try:
p_system = platform.system()
p_release = platform.release()
except IOError:
p_system = 'Unknown'
p_release = 'Unknown'
return " ".join(['python-requests/%s' % __version__,
'%s/%s' % (_implementation, _implementation_version),
'%s/%s' % (p_system, p_release)])
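
default_user_agent() concatenates the requests version with interpreter and platform details; the version and platform numbers shown below are made up:

    from requests.utils import default_user_agent

    print(default_user_agent())
    # e.g. 'python-requests/1.2.3 CPython/2.7.5 Darwin/12.5.0'
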
def default_headers():
return CaseInsensitiveDict({
'User-Agent': default_user_agent(),
'Accept-Encoding': ', '.join(('gzip', 'deflate', 'compress')),
'Accept': '*/*'
})
def parse_header_links(value):
"""Return a dict of parsed link headers proxies.
i.e. Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg"
"""
links = []
replace_chars = " '\""
for val in value.split(","):
try:
url, params = val.split(";", 1)
except ValueError:
url, params = val, ''
link = {}
link["url"] = url.strip("<> '\"")
for param in params.split(";"):
try:
key, value = param.split("=")
except ValueError:
break
link[key.strip(replace_chars)] = value.strip(replace_chars)
links.append(link)
return links
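
parse_header_links() splits an RFC 5988 style Link header into one dict per link, keyed by url plus whatever parameters follow it, which is how paginated APIs expose next/last pages. A sketch with illustrative URLs:

    from requests.utils import parse_header_links

    link = ('<https://api.example.com/gists?page=2>; rel="next", '
            '<https://api.example.com/gists?page=5>; rel="last"')
    for entry in parse_header_links(link):
        print(entry['rel'], entry['url'])
    # next https://api.example.com/gists?page=2
    # last https://api.example.com/gists?page=5
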
# Null bytes; no need to recreate these on each call to guess_json_utf
_null = '\x00'.encode('ascii') # encoding to ASCII for Python 3
_null2 = _null * 2
_null3 = _null * 3
def guess_json_utf(data):
# JSON always starts with two ASCII characters, so detection is as
# easy as counting the nulls and from their location and count
# determine the encoding. Also detect a BOM, if present.
sample = data[:4]
if sample in (codecs.BOM_UTF32_LE, codecs.BOM32_BE):
return 'utf-32' # BOM included
if sample[:3] == codecs.BOM_UTF8:
return 'utf-8-sig' # BOM included, MS style (discouraged)
if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
return 'utf-16' # BOM included
nullcount = sample.count(_null)
if nullcount == 0:
return 'utf-8'
if nullcount == 2:
if sample[::2] == _null2: # 1st and 3rd are null
return 'utf-16-be'
if sample[1::2] == _null2: # 2nd and 4th are null
return 'utf-16-le'
# Did not detect 2 valid UTF-16 ascii-range characters
if nullcount == 3:
if sample[:3] == _null3:
return 'utf-32-be'
if sample[1:] == _null3:
return 'utf-32-le'
# Did not detect a valid UTF-32 ascii-range character
return None
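
Because JSON text must begin with two ASCII characters, guess_json_utf() can identify the encoding from a BOM or from where the NUL bytes fall in the first four bytes, before any decode is attempted. A sketch:

    from requests.utils import guess_json_utf

    doc = u'{"ok": true}'
    assert guess_json_utf(doc.encode('utf-8')) == 'utf-8'
    assert guess_json_utf(doc.encode('utf-16-le')) == 'utf-16-le'
    assert guess_json_utf(doc.encode('utf-32-be')) == 'utf-32-be'
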
def prepend_scheme_if_needed(url, new_scheme):
'''Given a URL that may or may not have a scheme, prepend the given scheme.
Does not replace a present scheme with the one provided as an argument.'''
scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme)
# urlparse is a finicky beast, and sometimes decides that there isn't a
# netloc present. Assume that it's being over-cautious, and switch netloc
# and path if urlparse decided there was no netloc.
if not netloc:
netloc, path = path, netloc
return urlunparse((scheme, netloc, path, params, query, fragment))
def get_auth_from_url(url):
"""Given a url with authentication components, extract them into a tuple of
username,password."""
if url:
parsed = urlparse(url)
return (parsed.username, parsed.password)
else:
return ('', '')
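
These two helpers normalise proxy URLs: prepend_scheme_if_needed() adds a scheme only when one is missing, and get_auth_from_url() extracts embedded credentials. A sketch with hypothetical hosts and credentials:

    from requests.utils import prepend_scheme_if_needed, get_auth_from_url

    assert prepend_scheme_if_needed('proxy.local', 'http') == 'http://proxy.local'
    assert prepend_scheme_if_needed('https://proxy.local:3128', 'http') == 'https://proxy.local:3128'
    assert get_auth_from_url('http://user:secret@proxy.local:3128') == ('user', 'secret')
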

View File

@@ -1 +1 @@
{"url": "https://github.com/bgreenlee/sublime-github", "version": "2012.12.19.02.19.29", "description": "Github Gist plugin for Sublime Text 2"} {"url": "https://github.com/bgreenlee/sublime-github", "version": "2013.09.15.04.25.57", "description": "Sublime Text 2/3 plugin that provides a number of useful commands for GitHub."}

View File

@@ -1,6 +1,7 @@
import os import os
import sys import sys
import os.path import os.path
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
import re import re
import sublime import sublime
import sublime_plugin import sublime_plugin
@@ -15,7 +16,7 @@ except ImportError:
try: try:
sys.path.append(os.path.join(sublime.packages_path(), 'Git')) sys.path.append(os.path.join(sublime.packages_path(), 'Git'))
git = __import__("git") import git
sys.path.remove(os.path.join(sublime.packages_path(), 'Git')) sys.path.remove(os.path.join(sublime.packages_path(), 'Git'))
except ImportError: except ImportError:
git = None git = None
@@ -30,6 +31,7 @@ class BaseGitHubCommand(sublime_plugin.TextCommand):
""" """
MSG_USERNAME = "GitHub username:" MSG_USERNAME = "GitHub username:"
MSG_PASSWORD = "GitHub password:" MSG_PASSWORD = "GitHub password:"
MSG_ONE_TIME_PASSWORD = "One-time passowrd (for 2FA):"
MSG_TOKEN_SUCCESS = "Your access token has been saved. We'll now resume your command." MSG_TOKEN_SUCCESS = "Your access token has been saved. We'll now resume your command."
ERR_NO_USER_TOKEN = "Your GitHub Gist access token needs to be configured.\n\n"\ ERR_NO_USER_TOKEN = "Your GitHub Gist access token needs to be configured.\n\n"\
"Click OK and then enter your GitHub username and password below (neither will "\ "Click OK and then enter your GitHub username and password below (neither will "\
@@ -42,10 +44,12 @@ class BaseGitHubCommand(sublime_plugin.TextCommand):
def run(self, edit): def run(self, edit):
self.settings = sublime.load_settings("GitHub.sublime-settings") self.settings = sublime.load_settings("GitHub.sublime-settings")
self.github_user = None self.github_user = None
self.github_password = None
self.github_one_time_password = None
self.accounts = self.settings.get("accounts") self.accounts = self.settings.get("accounts")
self.active_account = self.settings.get("active_account") self.active_account = self.settings.get("active_account")
if not self.active_account: if not self.active_account:
self.active_account = self.accounts.keys()[0] self.active_account = list(self.accounts.keys())[0]
self.github_token = self.accounts[self.active_account]["github_token"] self.github_token = self.accounts[self.active_account]["github_token"]
if not self.github_token: if not self.github_token:
self.github_token = self.settings.get("github_token") self.github_token = self.settings.get("github_token")
@@ -58,7 +62,11 @@ class BaseGitHubCommand(sublime_plugin.TextCommand):
sublime.save_settings("GitHub.sublime-settings") sublime.save_settings("GitHub.sublime-settings")
self.base_uri = self.accounts[self.active_account]["base_uri"] self.base_uri = self.accounts[self.active_account]["base_uri"]
self.debug = self.settings.get('debug') self.debug = self.settings.get('debug')
self.gistapi = GitHubApi(self.base_uri, self.github_token, debug=self.debug)
self.proxies = {'https': self.accounts[self.active_account].get("https_proxy", None)}
self.force_curl = self.accounts[self.active_account].get("force_curl", False)
self.gistapi = GitHubApi(self.base_uri, self.github_token, debug=self.debug,
proxies=self.proxies, force_curl=self.force_curl)
def get_token(self): def get_token(self):
sublime.error_message(self.ERR_NO_USER_TOKEN) sublime.error_message(self.ERR_NO_USER_TOKEN)
@@ -70,16 +78,29 @@ class BaseGitHubCommand(sublime_plugin.TextCommand):
def get_password(self): def get_password(self):
self.view.window().show_input_panel(self.MSG_PASSWORD, "", self.on_done_password, None, None) self.view.window().show_input_panel(self.MSG_PASSWORD, "", self.on_done_password, None, None)
def get_one_time_password(self):
self.view.window().show_input_panel(self.MSG_ONE_TIME_PASSWORD, "", self.on_done_one_time_password, None, None)
def on_done_username(self, value): def on_done_username(self, value):
"Callback for the username show_input_panel." "Callback for the username show_input_panel."
self.github_user = value self.github_user = value
# need to do this or the input panel doesn't show # need to do this or the input panel doesn't show
sublime.set_timeout(self.get_password, 50) sublime.set_timeout(self.get_password, 50)
def on_done_one_time_password(self, value):
"Callback for the one-time password show_input_panel"
self.github_one_time_password = value
self.on_done_password(self.github_password)
def on_done_password(self, value): def on_done_password(self, value):
"Callback for the password show_input_panel" "Callback for the password show_input_panel"
self.github_password = value
try: try:
self.github_token = GitHubApi(self.base_uri, debug=self.debug).get_token(self.github_user, value) api = GitHubApi(self.base_uri, debug=self.debug)
self.github_token = api.get_token(self.github_user,
self.github_password,
self.github_one_time_password)
self.github_password = self.github_one_time_password = None # don't keep these around
self.accounts[self.active_account]["github_token"] = self.github_token self.accounts[self.active_account]["github_token"] = self.github_token
self.settings.set("accounts", self.accounts) self.settings.set("accounts", self.accounts)
sublime.save_settings("GitHub.sublime-settings") sublime.save_settings("GitHub.sublime-settings")
@@ -92,13 +113,23 @@ class BaseGitHubCommand(sublime_plugin.TextCommand):
sublime.set_timeout(callback, 50) sublime.set_timeout(callback, 50)
except AttributeError: except AttributeError:
pass pass
except GitHubApi.OTPNeededException:
sublime.set_timeout(self.get_one_time_password, 50)
except GitHubApi.UnauthorizedException: except GitHubApi.UnauthorizedException:
sublime.error_message(self.ERR_UNAUTHORIZED) sublime.error_message(self.ERR_UNAUTHORIZED)
sublime.set_timeout(self.get_username, 50) sublime.set_timeout(self.get_username, 50)
except GitHubApi.UnknownException, e: except GitHubApi.UnknownException as e:
sublime.error_message(e.message) sublime.error_message(e.message)
class InsertTextCommand(sublime_plugin.TextCommand):
"""
Internal command to insert text into a view.
"""
def run(self, edit, **args):
self.view.insert(edit, 0, args['text'])
class OpenGistCommand(BaseGitHubCommand): class OpenGistCommand(BaseGitHubCommand):
""" """
Open a gist. Open a gist.
@@ -125,12 +156,12 @@ class OpenGistCommand(BaseGitHubCommand):
packed_gists = [] packed_gists = []
for idx, gist in enumerate(self.gists): for idx, gist in enumerate(self.gists):
attribs = {"index": idx + 1, attribs = {"index": idx + 1,
"filename": gist["files"].keys()[0], "filename": list(gist["files"].keys())[0],
"description": gist["description"] or ''} "description": gist["description"] or ''}
if isinstance(format, basestring): if isinstance(format, list):
item = format % attribs
else:
item = [(format_str % attribs) for format_str in format] item = [(format_str % attribs) for format_str in format]
else:
item = format % attribs
packed_gists.append(item) packed_gists.append(item)
args = [packed_gists, self.on_done] args = [packed_gists, self.on_done]
@@ -140,14 +171,14 @@ class OpenGistCommand(BaseGitHubCommand):
except GitHubApi.UnauthorizedException: except GitHubApi.UnauthorizedException:
sublime.error_message(self.ERR_UNAUTHORIZED_TOKEN) sublime.error_message(self.ERR_UNAUTHORIZED_TOKEN)
sublime.set_timeout(self.get_username, 50) sublime.set_timeout(self.get_username, 50)
except GitHubApi.UnknownException, e: except GitHubApi.UnknownException as e:
sublime.error_message(e.message) sublime.error_message(e.message)
def on_done(self, idx): def on_done(self, idx):
if idx == -1: if idx == -1:
return return
gist = self.gists[idx] gist = self.gists[idx]
filename = gist["files"].keys()[0] filename = list(gist["files"].keys())[0]
filedata = gist["files"][filename] filedata = gist["files"][filename]
content = self.gistapi.get(filedata["raw_url"]) content = self.gistapi.get(filedata["raw_url"])
if self.open_in_editor: if self.open_in_editor:
@@ -164,9 +195,7 @@ class OpenGistCommand(BaseGitHubCommand):
logger.warn("no mapping for '%s'" % extension) logger.warn("no mapping for '%s'" % extension)
pass pass
# insert the gist # insert the gist
edit = new_view.begin_edit('gist') new_view.run_command("insert_text", {'text': content})
new_view.insert(edit, 0, content)
new_view.end_edit(edit)
new_view.set_name(filename) new_view.set_name(filename)
new_view.settings().set('gist', gist) new_view.settings().set('gist', gist)
elif self.copy_gist_id: elif self.copy_gist_id:
@@ -302,9 +331,10 @@ class GistFromSelectionCommand(BaseGitHubCommand):
sublime.save_settings("GitHub.sublime-settings") sublime.save_settings("GitHub.sublime-settings")
sublime.error_message(self.ERR_UNAUTHORIZED_TOKEN) sublime.error_message(self.ERR_UNAUTHORIZED_TOKEN)
sublime.set_timeout(self.get_username, 50) sublime.set_timeout(self.get_username, 50)
except GitHubApi.UnknownException, e: except GitHubApi.UnknownException as e:
sublime.error_message(e.message)
except GitHubApi.ConnectionException as e:
sublime.error_message(e.message) sublime.error_message(e.message)
class PrivateGistFromSelectionCommand(GistFromSelectionCommand): class PrivateGistFromSelectionCommand(GistFromSelectionCommand):
""" """
@@ -347,21 +377,21 @@ class UpdateGistCommand(BaseGitHubCommand):
sublime.save_settings("GitHub.sublime-settings") sublime.save_settings("GitHub.sublime-settings")
sublime.error_message(self.ERR_UNAUTHORIZED_TOKEN) sublime.error_message(self.ERR_UNAUTHORIZED_TOKEN)
sublime.set_timeout(self.get_username, 50) sublime.set_timeout(self.get_username, 50)
except GitHubApi.UnknownException, e: except GitHubApi.UnknownException as e:
sublime.error_message(e.message) sublime.error_message(e.message)
class SwitchAccountsCommand(BaseGitHubCommand): class SwitchAccountsCommand(BaseGitHubCommand):
def run(self, edit): def run(self, edit):
super(SwitchAccountsCommand, self).run(edit) super(SwitchAccountsCommand, self).run(edit)
accounts = self.accounts.keys() accounts = list(self.accounts.keys())
self.view.window().show_quick_panel(accounts, self.account_selected) self.view.window().show_quick_panel(accounts, self.account_selected)
def account_selected(self, index): def account_selected(self, index):
if index == -1: if index == -1:
return # canceled return # canceled
else: else:
self.active_account = self.accounts.keys()[index] self.active_account = list(self.accounts.keys())[index]
self.settings.set("active_account", self.active_account) self.settings.set("active_account", self.active_account)
sublime.save_settings("GitHub.sublime-settings") sublime.save_settings("GitHub.sublime-settings")
self.base_uri = self.accounts[self.active_account]["base_uri"] self.base_uri = self.accounts[self.active_account]["base_uri"]
@@ -369,14 +399,17 @@ class SwitchAccountsCommand(BaseGitHubCommand):
if git: if git:
class RemoteUrlCommand(git.GitTextCommand): class RemoteUrlCommand(git.GitTextCommand):
url_type = 'blob'
def run(self, edit): def run(self, edit):
self.run_command("git remote -v".split(), self.done_remote) self.run_command("git ls-remote --get-url".split(), self.done_remote)
def done_remote(self, result): def done_remote(self, result):
remote_origin = [r for r in result.split("\n") if "origin" in r][0] remote_loc = result.split()[0]
remote_loc = re.split('\s+', remote_origin)[1] repo_url = re.sub('^git(@|://)', 'http://', remote_loc)
repo_url = re.sub('^git@', 'https://', remote_loc) # Replace the "tld:" with "tld/"
repo_url = re.sub('\.com:', '.com/', repo_url) # https://github.com/bgreenlee/sublime-github/pull/49#commitcomment-3688312
repo_url = re.sub(r'^(https?://[^/:]+):', r'\1/', repo_url)
repo_url = re.sub('\.git$', '', repo_url) repo_url = re.sub('\.git$', '', repo_url)
self.repo_url = repo_url self.repo_url = repo_url
self.run_command("git rev-parse --abbrev-ref HEAD".split(), self.done_rev_parse) self.run_command("git rev-parse --abbrev-ref HEAD".split(), self.done_rev_parse)
@@ -386,8 +419,20 @@ if git:
current_branch = result.strip() current_branch = result.strip()
# get file path within repo # get file path within repo
repo_name = self.repo_url.split("/").pop() repo_name = self.repo_url.split("/").pop()
relative_path = self.view.file_name().split(repo_name).pop() relative_path = self.view.file_name().split(repo_name, 1).pop()
self.url = "%s/blob/%s%s" % (self.repo_url, current_branch, relative_path) line_nums = ""
if self.allows_line_highlights:
# if any lines are selected, the first of those
non_empty_regions = [region for region in self.view.sel() if not region.empty()]
if non_empty_regions:
selection = non_empty_regions[0]
(start_row, _) = self.view.rowcol(selection.begin())
(end_row, _) = self.view.rowcol(selection.end())
line_nums = "#L%s" % (start_row + 1)
if end_row > start_row:
line_nums += "-L%s" % (end_row + 1)
self.url = "%s/%s/%s%s%s" % (self.repo_url, self.url_type, current_branch, relative_path, line_nums)
self.on_done() self.on_done()
else: else:
class RemoteUrlCommand(sublime_plugin.TextCommand): class RemoteUrlCommand(sublime_plugin.TextCommand):
@@ -396,6 +441,8 @@ else:
class OpenRemoteUrlCommand(RemoteUrlCommand): class OpenRemoteUrlCommand(RemoteUrlCommand):
allows_line_highlights = True
def run(self, edit): def run(self, edit):
super(OpenRemoteUrlCommand, self).run(edit) super(OpenRemoteUrlCommand, self).run(edit)
@@ -410,3 +457,17 @@ class CopyRemoteUrlCommand(RemoteUrlCommand):
def on_done(self): def on_done(self):
sublime.set_clipboard(self.url) sublime.set_clipboard(self.url)
sublime.status_message("Remote URL copied to clipboard") sublime.status_message("Remote URL copied to clipboard")
class BlameCommand(OpenRemoteUrlCommand):
url_type = 'blame'
class HistoryCommand(OpenRemoteUrlCommand):
url_type = 'commits'
allows_line_highlights = False
class EditCommand(OpenRemoteUrlCommand):
url_type = 'edit'
allows_line_highlights = False
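
done_remote() and done_rev_parse() above turn the output of `git ls-remote --get-url` into a browsable web URL, appending the branch, the path inside the repository, and optional #L anchors for the selected lines. A standalone sketch of that rewriting outside Sublime Text (web_url is a hypothetical helper; repository, branch, and line numbers are illustrative):

    import re

    def web_url(remote_loc, url_type, branch, relative_path, start_row=None, end_row=None):
        repo_url = re.sub(r'^git(@|://)', 'http://', remote_loc)
        repo_url = re.sub(r'^(https?://[^/:]+):', r'\1/', repo_url)   # "host:" -> "host/"
        repo_url = re.sub(r'\.git$', '', repo_url)
        line_nums = ''
        if start_row is not None:
            line_nums = '#L%d' % start_row
            if end_row and end_row > start_row:
                line_nums += '-L%d' % end_row
        return '%s/%s/%s%s%s' % (repo_url, url_type, branch, relative_path, line_nums)

    print(web_url('git@github.com:bgreenlee/sublime-github.git', 'blob',
                  'master', '/plugins/github.py', 12, 20))
    # http://github.com/bgreenlee/sublime-github/blob/master/plugins/github.py#L12-L20
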

View File

@@ -1,14 +1,20 @@
import sys import sys
import os.path import os.path
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), "lib")) sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), "lib"))
import re import re
import requests import requests
from requests.status_codes import codes from requests.status_codes import codes
try:
import http.client as httplib
except ImportError:
import httplib import httplib
import commandline import commandline
import sublime import sublime
from StringIO import StringIO try:
from httplib import HTTPResponse from io import StringIO
except ImportError:
from StringIO import StringIO # Linux version blows up when importing io.StringIO
import logging import logging
logging.basicConfig(format='%(asctime)s %(message)s') logging.basicConfig(format='%(asctime)s %(message)s')
@@ -16,6 +22,14 @@ logger = logging.getLogger()
class CurlSession(object): class CurlSession(object):
ERR_UNKNOWN_CODE = "Curl failed with an unrecognized code"
CURL_ERRORS = {
2: "Curl failed initialization.",
5: "Curl could not resolve the proxy specified.",
6: "Curl could not resolve the remote host.\n\nPlease verify that your Internet"
" connection works properly."
}
class FakeSocket(StringIO): class FakeSocket(StringIO):
def makefile(self, *args, **kw): def makefile(self, *args, **kw):
return self return self
@@ -31,6 +45,27 @@ class CurlSession(object):
raise Exception("Unrecognized response: %s" % text) raise Exception("Unrecognized response: %s" % text)
else: else:
text = m.group(1) text = m.group(1)
# if the response text starts with a "200 Connection established" but continues with a 201,
# skip the 200 header. This happens when using a proxy.
#
# e.g. HTTP/1.1 200 Connection established
# Via: 1.1 proxy
# Connection: Keep-Alive
# Proxy-Connection: Keep-Alive
#
# HTTP/1.1 201 Created
# Server: GitHub.com
# ...
# Status: 201 Created
# ...
if re.match(r'^HTTP/.*?\s200 Connection established', text):
m = re.search(r'(HTTP/\d+\.\d+\s(?!200 Connection established).*$)', text, re.S)
if not m:
raise Exception("Unrecognized response: %s" % text)
else:
text = m.group(1)
# remove Transfer-Encoding: chunked header, as it causes reading the response to fail # remove Transfer-Encoding: chunked header, as it causes reading the response to fail
# first do a quick check for it, so we can avoid doing the expensive negative-lookbehind # first do a quick check for it, so we can avoid doing the expensive negative-lookbehind
# regex if we don't need it # regex if we don't need it
@@ -38,12 +73,15 @@ class CurlSession(object):
# we do the negative-lookbehind to make sure we only strip the Transfer-Encoding # we do the negative-lookbehind to make sure we only strip the Transfer-Encoding
# string in the header # string in the header
text = re.sub(r'(?<!\r\n\r\n).*?Transfer-Encoding: chunked\r\n', '', text, count=1) text = re.sub(r'(?<!\r\n\r\n).*?Transfer-Encoding: chunked\r\n', '', text, count=1)
logger.debug("CurlSession - getting socket from %s" % text)
socket = self.FakeSocket(text) socket = self.FakeSocket(text)
response = HTTPResponse(socket) response = httplib.HTTPResponse(socket)
response.begin() response.begin()
return response return response
def _build_response(self, text): def _build_response(self, text):
logger.debug("CurlSession: building response from %s" % text)
raw_response = self._parse_http(text) raw_response = self._parse_http(text)
response = requests.models.Response() response = requests.models.Response()
response.encoding = 'utf-8' response.encoding = 'utf-8'
@@ -52,7 +90,7 @@ class CurlSession(object):
response._content = raw_response.read() response._content = raw_response.read()
return response return response
def request(self, method, url, headers=None, params=None, data=None, auth=None, allow_redirects=False, config=None): def request(self, method, url, headers=None, params=None, data=None, auth=None, allow_redirects=False, config=None, proxies=None):
try: try:
curl = commandline.find_binary('curl') curl = commandline.find_binary('curl')
except commandline.BinaryNotFoundError: except commandline.BinaryNotFoundError:
@@ -65,27 +103,43 @@ class CurlSession(object):
if self.verify: if self.verify:
curl_options.extend(['--cacert', self.verify]) curl_options.extend(['--cacert', self.verify])
if headers: if headers:
for k, v in headers.iteritems(): for k, v in headers.items():
curl_options.extend(['-H', "%s: %s" % (k, v)]) curl_options.extend(['-H', "%s: %s" % (k, v)])
if method in ('post', 'patch'): if method in ('post', 'patch'):
curl_options.extend(['-d', data]) curl_options.extend(['-d', data])
if method == 'patch': if method == 'patch':
curl_options.extend(['-X', 'PATCH']) curl_options.extend(['-X', 'PATCH'])
if params: if params:
url += '?' + '&'.join(['='.join([k, str(v)]) for k, v in params.iteritems()]) url += '?' + '&'.join(['='.join([k, str(v)]) for k, v in params.items()])
if proxies and proxies.get('https', None):
curl_options.extend(['-x', proxies['https']])
command = [curl] + curl_options + [url] command = [curl] + curl_options + [url]
response = self._build_response(commandline.execute(command)) logger.debug("CurlSession: invoking curl with %s" % command)
try:
command_response = commandline.execute(command)
except commandline.CommandExecutionError as e:
logger.error("Curl execution: %s" % repr(e))
self._handle_curl_error(e.errorcode)
return
response = self._build_response(command_response)
response.url = url response.url = url
return response return response
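
For interpreters without httplib.HTTPSConnection (or when force_curl is set), CurlSession.request() shells out to curl and rebuilds a Response from the captured text. A sketch of how the options grow for a proxied POST, mirroring only the branches visible in this hunk; all values are hypothetical and the real base options come from earlier in the file:

    curl = '/usr/bin/curl'
    curl_options = []
    headers = {'Authorization': 'token XXXX'}
    data = '{"description": "demo", "public": false}'
    proxies = {'https': 'http://proxy.local:3128'}

    for k, v in headers.items():
        curl_options.extend(['-H', '%s: %s' % (k, v)])
    curl_options.extend(['-d', data])                  # method == 'post'
    if proxies.get('https'):
        curl_options.extend(['-x', proxies['https']])
    command = [curl] + curl_options + ['https://api.github.com/gists']
    print(command)
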
def post(self, *args, **kwargs): def post(self, *args, **kwargs):
return self.request("post", *args, **kwargs) return self.request("post", *args, **kwargs)
def _handle_curl_error(self, error):
sublime.error_message(
self.CURL_ERRORS.get(error, "%s: %s" % (self.ERR_UNKNOWN_CODE, error)))
def session(verify=None, config=None):
if hasattr(httplib, "HTTPSConnection"): def session(verify=None, force_curl=False):
return requests.session(verify=verify, config=config) if not force_curl and hasattr(httplib, "HTTPSConnection"):
session = requests.Session()
session.verify = verify
return session
else: # try curl else: # try curl
return CurlSession(verify=verify) return CurlSession(verify=verify)