feat(SublimeText2.WebPackages): cache packages
EthanBrown.SublimeText2.WebPackages/tools/PackageCache/Http Requester/.gitignore (vendored, new file)
@@ -0,0 +1 @@
*.pyc
@@ -0,0 +1,3 @@
[
    { "command": "http_requester" }
]
@@ -0,0 +1,10 @@
[
    {
        "keys": ["ctrl+alt+r"],
        "command": "http_requester"
    },
    {
        "keys": ["f5"],
        "command": "http_requester_refresh"
    }
]
@@ -0,0 +1,10 @@
[
    {
        "keys": ["super+alt+r"],
        "command": "http_requester"
    },
    {
        "keys": ["f5"],
        "command": "http_requester_refresh"
    }
]
@@ -0,0 +1,10 @@
[
    {
        "keys": ["ctrl+alt+r"],
        "command": "http_requester"
    },
    {
        "keys": ["f5"],
        "command": "http_requester_refresh"
    }
]
@@ -0,0 +1,5 @@
[{
    "caption": "HTTP Requester",
    "command": "http_requester"
}
]
@@ -0,0 +1,77 @@
***
# SublimeHttpRequester - HTTP client plugin for Sublime Text 2
***
====================

Contact: [braindamageinc@gmail.com](mailto:braindamageinc@gmail.com)

## Summary
Makes HTTP requests using the selected text as the URL plus headers. Useful for testing REST APIs from the Sublime Text 2 editor.

## Update: Added client SSL certificates

## Usage
Select the text that represents a URL. Examples of requests:

    http://www.google.com/search?q=test
    GET http://www.google.com/search?q=test
    www.google.com/search?q=test

If you need to add extra headers, add them below the URL line, one per line:

    www.google.com/search?q=test
    Accept: text/plain
    Cookie: SOME_COOKIE

Use the right-click context menu command *Http Requester* or the keyboard shortcut *CTRL + ALT + R* (*COMMAND + ALT + R* on Mac OS X).
Update: *F5* refreshes the last request.
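
For orientation, here is a rough sketch of what the plugin does with such a selection (the full plugin source ships later in this commit). It is a simplified illustration only, assuming Python 2's httplib and a plain HTTP GET; the helper name send_selection is made up for the example:

    import httplib

    def send_selection(selection):
        # First non-empty line -> optional method + URL, remaining lines -> extra headers.
        lines = [l.strip() for l in selection.split("\n") if l.strip()]
        first = lines[0].split()
        method = first[0] if first[0] in ("GET", "POST", "PUT", "DELETE") else "GET"
        url = first[-1].replace("http://", "")
        host, _, page = url.partition("/")
        headers = {}
        for hdr in lines[1:]:
            name, _, value = hdr.partition(":")
            headers[name.strip()] = value.strip()
        conn = httplib.HTTPConnection(host, 80, timeout=10)
        conn.request(method, "/" + page, "", headers)
        return conn.getresponse().read()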

### POST/PUT usage
Just add **POST_BODY:** after any extra headers, followed by the body on the following lines:

    POST http://posttestserver.com/post.php
    POST_BODY:
    this is the body that will be sent via HTTP POST
    a second line for body message

If you want to POST form variables:

    POST http://posttestserver.com/post.php
    Content-type: application/x-www-form-urlencoded
    POST_BODY:
    variable1=avalue&variable2=1234&variable3=anothervalue

For PUT:

    PUT http://yoururl.com/puthere
    POST_BODY:
    this body will be sent via HTTP PUT
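
Under the hood, everything after the **POST_BODY:** marker is joined back together (newlines preserved) and handed to httplib as the request body. A minimal sketch, assuming Python 2's httplib, with the example values hard-coded:

    import httplib

    # Sketch only: the POST_BODY: lines become the body argument of the request.
    body = "variable1=avalue&variable2=1234&variable3=anothervalue"
    headers = {"Content-type": "application/x-www-form-urlencoded",
               "Content-Length": str(len(body))}
    conn = httplib.HTTPConnection("posttestserver.com", 80, timeout=10)
    conn.request("POST", "/post.php", body, headers)
    print conn.getresponse().status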

### DELETE usage
Same as HTTP GET:

    DELETE http://yoururl.com/deletethis

### Requesting through a proxy
If you need to send the request through a proxy server you can use:

    GET www.yourtest.com
    USE_PROXY: 127.0.0.1:1234

where *127.0.0.1* is the proxy server address (IP or URL) followed by the port number. **Warning**: always append a port number, even if it is *80*.
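
A proxied request is handled by connecting to the proxy itself and asking it for the absolute URL (this mirrors the plugin code included later in this commit). A minimal sketch, assuming Python 2's httplib and the example address above:

    import httplib

    proxy_host, proxy_port = "127.0.0.1", 1234   # taken from the USE_PROXY line
    conn = httplib.HTTPConnection(proxy_host, proxy_port, timeout=10)
    # Request the full absolute URL through the proxy connection.
    conn.request("GET", "http://www.yourtest.com/", "", {"Accept": "*/*"})
    print conn.getresponse().status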

### Using client SSL certificates
If you need client SSL certification you can use:

    GET https://yoursecureserver.com
    CLIENT_SSL_CERT: certif_file.pem
    CLIENT_SSL_KEY: key_file.key
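
These two directives map onto the client-certificate support in Python 2's httplib. A minimal sketch of the equivalent call, using the file names from the example above:

    import httplib

    # Sketch only: HTTPSConnection accepts a client certificate and key file.
    conn = httplib.HTTPSConnection("yoursecureserver.com", 443, timeout=10,
                                   cert_file="certif_file.pem", key_file="key_file.key")
    conn.request("GET", "/", "", {"Accept": "*/*"})
    print conn.getresponse().status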

## Installation
Using the Sublime Text 2 Package Control plugin (http://wbond.net/sublime_packages/package_control),
press *CTRL + SHIFT + P*, find **Package Control: Install Package** and press *Enter*.
Find this plugin in the list by the name **Http Requester**.

Or git clone the repository directly into your Sublime Text 2 packages folder (usually located at /Sublime Text 2/Packages/).
@@ -0,0 +1,399 @@
import httplib
import sublime
import sublime_plugin
import socket
import types
import threading

gPrevHttpRequest = ""

CHECK_DOWNLOAD_THREAD_TIME_MS = 1000


def monitorDownloadThread(downloadThread):
    if downloadThread.is_alive():
        msg = downloadThread.getCurrentMessage()
        sublime.status_message(msg)
        sublime.set_timeout(lambda: monitorDownloadThread(downloadThread), CHECK_DOWNLOAD_THREAD_TIME_MS)
    else:
        downloadThread.showResultToPresenter()


class HttpRequester(threading.Thread):

    REQUEST_TYPE_GET = "GET"
    REQUEST_TYPE_POST = "POST"
    REQUEST_TYPE_DELETE = "DELETE"
    REQUEST_TYPE_PUT = "PUT"

    httpRequestTypes = [REQUEST_TYPE_GET, REQUEST_TYPE_POST, REQUEST_TYPE_PUT, REQUEST_TYPE_DELETE]

    HTTP_URL = "http://"
    HTTPS_URL = "https://"

    httpProtocolTypes = [HTTP_URL, HTTPS_URL]

    HTTP_POST_BODY_START = "POST_BODY:"

    HTTP_PROXY_HEADER = "USE_PROXY"

    HTTPS_SSL_CLIENT_CERT = "CLIENT_SSL_CERT"
    HTTPS_SSL_CLIENT_KEY = "CLIENT_SSL_KEY"

    CONTENT_LENGTH_HEADER = "Content-Length"

    MAX_BYTES_BUFFER_SIZE = 8192

    FILE_TYPE_HTML = "html"
    FILE_TYPE_JSON = "json"
    FILE_TYPE_XML = "xml"
    httpContentTypes = [FILE_TYPE_HTML, FILE_TYPE_JSON, FILE_TYPE_XML]

    def __init__(self, resultsPresenter):
        self.totalBytesDownloaded = 0
        self.contentLenght = 0
        self.resultsPresenter = resultsPresenter
        threading.Thread.__init__(self)

    def request(self, selection):
        self.selection = selection
        self.start()
        sublime.set_timeout(lambda: monitorDownloadThread(self), CHECK_DOWNLOAD_THREAD_TIME_MS)

    def run(self):
        DEFAULT_TIMEOUT = 10
        FAKE_CURL_UA = "curl/7.21.0 (i486-pc-linux-gnu) libcurl/7.21.0 OpenSSL/0.9.8o zlib/1.2.3.4 libidn/1.15 libssh2/1.2.6"
        MY_UA = "python httpRequester 1.0.0"

        selection = self.selection

        lines = selection.split("\n")

        # trim whitespace on all lines and remove lines starting with a pound character
        for idx in range(len(lines) - 1, -1, -1):
            lines[idx] = lines[idx].lstrip()
            lines[idx] = lines[idx].rstrip()
            if (len(lines[idx]) > 0):
                if lines[idx][0] == "#":
                    del lines[idx]

        # get request web address and req. type from the first line
        (url, port, request_page, requestType, httpProtocol) = self.extractRequestParams(lines[0])

        print "Requesting...."
        print requestType, " ", httpProtocol, " HOST ", url, " PORT ", port, " PAGE: ", request_page

        # get request headers from the lines below the http address
        (extra_headers, requestPOSTBody, proxyURL, proxyPort, clientSSLCertificateFile,
            clientSSLKeyFile) = self.extractExtraHeaders(lines)

        headers = {"User-Agent": FAKE_CURL_UA, "Accept": "*/*"}

        for key in extra_headers:
            headers[key] = extra_headers[key]

        # if there is a valid POST body, add a Content-Length header
        if len(requestPOSTBody) > 0:
            headers[self.CONTENT_LENGTH_HEADER] = len(requestPOSTBody)

        for key in headers:
            print "REQ HEADERS ", key, " : ", headers[key]

        respText = ""
        fileType = ""

        useProxy = False
        if len(proxyURL) > 0:
            useProxy = True

        # make http request
        try:
            if not(useProxy):
                if httpProtocol == self.HTTP_URL:
                    conn = httplib.HTTPConnection(url, port, timeout=DEFAULT_TIMEOUT)
                else:
                    if len(clientSSLCertificateFile) > 0 or len(clientSSLKeyFile) > 0:
                        print "Using client SSL certificate: ", clientSSLCertificateFile
                        print "Using client SSL key file: ", clientSSLKeyFile
                        conn = httplib.HTTPSConnection(
                            url, port, timeout=DEFAULT_TIMEOUT, cert_file=clientSSLCertificateFile, key_file=clientSSLKeyFile)
                    else:
                        conn = httplib.HTTPSConnection(url, port, timeout=DEFAULT_TIMEOUT)

                conn.request(requestType, request_page, requestPOSTBody, headers)
            else:
                print "Using proxy: ", proxyURL + ":" + str(proxyPort)
                conn = httplib.HTTPConnection(proxyURL, proxyPort, timeout=DEFAULT_TIMEOUT)
                conn.request(requestType, httpProtocol + url + request_page, requestPOSTBody, headers)

            resp = conn.getresponse()
            (respText, fileType) = self.getParsedResponse(resp)
            conn.close()
        except (socket.error, httplib.HTTPException, socket.timeout) as e:
            if not(isinstance(e, types.NoneType)):
                respText = "Error connecting: " + str(e)
            else:
                respText = "Error connecting"
        except AttributeError as e:
            print e
            respText = "HTTPS not supported by your Python version"

        self.respText = respText
        self.fileType = fileType

    def extractHttpRequestType(self, line):
        for type in self.httpRequestTypes:
            if line.find(type) == 0:
                return type

        return ""

    def extractWebAdressPart(self, line):
        webAddress = ""
        for protocol in self.httpProtocolTypes:
            requestPartions = line.partition(protocol)
            if requestPartions[1] == "":
                webAddress = requestPartions[0]
            else:
                webAddress = requestPartions[2]
                return (webAddress, protocol)

        return (webAddress, self.HTTP_URL)

    def extractRequestParams(self, requestLine):
        requestType = self.extractHttpRequestType(requestLine)
        if requestType == "":
            requestType = self.REQUEST_TYPE_GET
        else:
            partition = requestLine.partition(requestType)
            requestLine = partition[2].lstrip()

        # remove http:// or https:// from URL
        (webAddress, protocol) = self.extractWebAdressPart(requestLine)

        request_parts = webAddress.split("/")
        request_page = ""
        if len(request_parts) > 1:
            for idx in range(1, len(request_parts)):
                request_page = request_page + "/" + request_parts[idx]
        else:
            request_page = "/"

        url_parts = request_parts[0].split(":")

        url_idx = 0
        url = url_parts[url_idx]

        if protocol == self.HTTP_URL:
            port = httplib.HTTP_PORT
        else:
            port = httplib.HTTPS_PORT

        if len(url_parts) > url_idx + 1:
            port = int(url_parts[url_idx + 1])

        # convert requested page to utf-8 and replace spaces with +
        request_page = request_page.encode('utf-8')
        request_page = request_page.replace(' ', '+')

        return (url, port, request_page, requestType, protocol)

    def getHeaderNameAndValueFromLine(self, line):
        readingPOSTBody = False

        line = line.lstrip()
        line = line.rstrip()

        if line == self.HTTP_POST_BODY_START:
            readingPOSTBody = True
        else:
            header_parts = line.split(":")
            if len(header_parts) == 2:
                header_name = header_parts[0].rstrip()
                header_value = header_parts[1].lstrip()
                return (header_name, header_value, readingPOSTBody)
            else:
                # may be proxy address URL:port
                if len(header_parts) > 2:
                    header_name = header_parts[0].rstrip()
                    header_value = header_parts[1]
                    header_value = header_value.lstrip()
                    header_value = header_value.rstrip()
                    for idx in range(2, len(header_parts)):
                        currentValue = header_parts[idx]
                        currentValue = currentValue.lstrip()
                        currentValue = currentValue.rstrip()
                        header_value = header_value + ":" + currentValue

                    return (header_name, header_value, readingPOSTBody)

        return (None, None, readingPOSTBody)

    def extractExtraHeaders(self, headerLines):
        requestPOSTBody = ""
        readingPOSTBody = False
        lastLine = False
        numLines = len(headerLines)

        proxyURL = ""
        proxyPort = 0

        clientSSLCertificateFile = ""
        clientSSLKeyFile = ""

        extra_headers = {}
        if len(headerLines) > 1:
            for i in range(1, numLines):
                lastLine = (i == numLines - 1)
                if not(readingPOSTBody):
                    (header_name, header_value, readingPOSTBody) = self.getHeaderNameAndValueFromLine(headerLines[i])
                    if header_name is not None:
                        if header_name == self.HTTP_PROXY_HEADER:
                            (proxyURL, proxyPort) = self.getProxyURLandPort(header_value)
                        elif header_name == self.HTTPS_SSL_CLIENT_CERT:
                            clientSSLCertificateFile = header_value
                        elif header_name == self.HTTPS_SSL_CLIENT_KEY:
                            clientSSLKeyFile = header_value
                        else:
                            extra_headers[header_name] = header_value
                else:  # read all following lines as HTTP POST body
                    lineBreak = ""
                    if not(lastLine):
                        lineBreak = "\n"

                    requestPOSTBody = requestPOSTBody + headerLines[i] + lineBreak

        return (extra_headers, requestPOSTBody, proxyURL, proxyPort, clientSSLCertificateFile, clientSSLKeyFile)

    def getProxyURLandPort(self, proxyAddress):
        proxyURL = ""
        proxyPort = 0

        proxyParts = proxyAddress.split(":")

        proxyURL = proxyParts[0]

        if len(proxyParts) > 1:
            proxyURL = proxyParts[0]
            for idx in range(1, len(proxyParts) - 1):
                proxyURL = proxyURL + ":" + proxyParts[idx]

            lastIdx = len(proxyParts) - 1
            proxyPort = int(proxyParts[lastIdx])
        else:
            proxyPort = 80

        return (proxyURL, proxyPort)

    def getParsedResponse(self, resp):
        fileType = self.FILE_TYPE_HTML
        resp_status = "%d " % resp.status + resp.reason + "\n"
        respText = resp_status

        for header in resp.getheaders():
            respText += header[0] + ":" + header[1] + "\n"

            # get resp. file type (html, json and xml supported). fallback to html
            if header[0] == "content-type":
                fileType = self.getFileTypeFromContentType(header[1])

        respText += "\n\n\n"

        self.contentLenght = int(resp.getheader("content-length", 0))

        # download an 8KB buffer at a time
        respBody = resp.read(self.MAX_BYTES_BUFFER_SIZE)
        numDownloaded = len(respBody)
        self.totalBytesDownloaded = numDownloaded
        while numDownloaded == self.MAX_BYTES_BUFFER_SIZE:
            data = resp.read(self.MAX_BYTES_BUFFER_SIZE)
            respBody += data
            numDownloaded = len(data)
            self.totalBytesDownloaded += numDownloaded

        respText += respBody.decode("utf-8", "replace")

        return (respText, fileType)

    def getFileTypeFromContentType(self, contentType):
        fileType = self.FILE_TYPE_HTML
        contentType = contentType.lower()

        print "File type: ", contentType

        for cType in self.httpContentTypes:
            if cType in contentType:
                fileType = cType

        return fileType

    def getCurrentMessage(self):
        return "HttpRequester downloading " + str(self.totalBytesDownloaded) + " / " + str(self.contentLenght)

    def showResultToPresenter(self):
        self.resultsPresenter.createWindowWithText(self.respText, self.fileType)


class HttpRequesterRefreshCommand(sublime_plugin.TextCommand):

    def run(self, edit):
        global gPrevHttpRequest
        selection = gPrevHttpRequest

        resultsPresenter = ResultsPresenter()
        httpRequester = HttpRequester(resultsPresenter)
        httpRequester.request(selection)


class ResultsPresenter():

    def __init__(self):
        pass

    def createWindowWithText(self, textToDisplay, fileType):
        newView = sublime.active_window().new_file()
        edit = newView.begin_edit()
        newView.insert(edit, 0, textToDisplay)
        newView.end_edit(edit)
        newView.set_scratch(True)
        newView.set_read_only(False)
        newView.set_name("http response")

        if fileType == HttpRequester.FILE_TYPE_HTML:
            newView.set_syntax_file("Packages/HTML/HTML.tmLanguage")
        if fileType == HttpRequester.FILE_TYPE_JSON:
            newView.set_syntax_file("Packages/JavaScript/JSON.tmLanguage")
        if fileType == HttpRequester.FILE_TYPE_XML:
            newView.set_syntax_file("Packages/XML/XML.tmLanguage")

        return newView.id()


class HttpRequesterCommand(sublime_plugin.TextCommand):

    def run(self, edit):
        global gPrevHttpRequest
        selection = ""
        if self.has_selection():
            for region in self.view.sel():
                # Concatenate selected regions together.
                selection += self.view.substr(region)
        else:
            # Use entire document as selection
            entireDocument = sublime.Region(0, self.view.size())
            selection = self.view.substr(entireDocument)

        gPrevHttpRequest = selection
        resultsPresenter = ResultsPresenter()
        httpRequester = HttpRequester(resultsPresenter)
        httpRequester.request(selection)

    def has_selection(self):
        has_selection = False

        # Only enable menu option if at least one region contains selected text.
        for region in self.view.sel():
            if not region.empty():
                has_selection = True

        return has_selection
@@ -0,0 +1 @@
{"url": "https://github.com/braindamageinc/SublimeHttpRequester", "version": "2013.04.02.04.20.01", "description": "HTTP client plugin for Sublime Text 2"}